This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 8b8be642a92 [regression](kerberos)add hive kerberos docker regression env (#36430)
8b8be642a92 is described below

commit 8b8be642a92950027bdf172ef04ae94141924244
Author: slothever <18522955+w...@users.noreply.github.com>
AuthorDate: Thu Jun 27 00:23:42 2024 +0800

    [regression](kerberos)add hive kerberos docker regression env (#36430)
    
    add kerberos docker regression environment
    add cases for two different kerberized Hive clusters
---
 .../create_kerberos_credential_cache_files.sh      |  33 +++++++
 .../kerberos/common/conf/doris-krb5.conf           |  48 ++++++++++
 .../common/hadoop/apply-config-overrides.sh        |  31 +++++++
 .../kerberos/common/hadoop/hadoop-run.sh           |  42 +++++++++
 .../kerberos/entrypoint-hive-master-2.sh           |  36 ++++++++
 .../kerberos/entrypoint-hive-master.sh             |  34 +++++++
 .../kerberos/health-checks/hadoop-health-check.sh  |  39 ++++++++
 .../kerberos/health-checks/health.sh               |  34 +++++++
 .../docker-compose/kerberos/kerberos.yaml.tpl      |  73 +++++++++++++++
 .../kerberos/sql/create_kerberos_hive_table.sql    |  17 ++++
 .../kerberos/two-kerberos-hives/auth-to-local.xml  |  29 ++++++
 .../two-kerberos-hives/hive2-default-fs-site.xml   |  25 +++++
 .../kerberos/two-kerberos-hives/update-location.sh |  25 +++++
 docker/thirdparties/run-thirdparties-docker.sh     |  30 +++++-
 .../common/security/authentication/HadoopUGI.java  |   4 +-
 .../apache/doris/datasource/ExternalCatalog.java   |   9 +-
 .../doris/datasource/hive/HMSExternalCatalog.java  |   6 +-
 .../doris/datasource/hive/HiveMetaStoreCache.java  |   4 +-
 .../datasource/hive/HiveMetaStoreClientHelper.java |   4 +-
 .../datasource/iceberg/IcebergMetadataCache.java   |   5 +-
 .../datasource/paimon/PaimonExternalCatalog.java   |   3 +-
 .../apache/doris/fs/remote/RemoteFileSystem.java   |   5 +
 .../org/apache/doris/fs/remote/S3FileSystem.java   |   3 +-
 .../apache/doris/fs/remote/dfs/DFSFileSystem.java  |  13 ++-
 regression-test/conf/regression-conf.groovy        |   4 +
 .../kerberos/test_single_hive_kerberos.out         |   6 ++
 .../kerberos/test_two_hive_kerberos.out            |  12 +++
 regression-test/pipeline/external/conf/be.conf     |   3 +
 regression-test/pipeline/external/conf/fe.conf     |   2 +
 .../pipeline/external/conf/regression-conf.groovy  |   5 +
 .../kerberos/test_single_hive_kerberos.groovy      | 101 +++++++++++++++++++++
 .../kerberos/test_two_hive_kerberos.groovy         |  72 +++++++++++++++
 32 files changed, 738 insertions(+), 19 deletions(-)

diff --git a/docker/thirdparties/docker-compose/kerberos/ccache/create_kerberos_credential_cache_files.sh b/docker/thirdparties/docker-compose/kerberos/ccache/create_kerberos_credential_cache_files.sh
new file mode 100644
index 00000000000..2bba3f928b1
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/ccache/create_kerberos_credential_cache_files.sh
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -exuo pipefail
+
+TICKET_LIFETIME='30m'
+
+kinit -l "$TICKET_LIFETIME" -f -c /etc/trino/conf/presto-server-krbcc \
+      -kt /etc/trino/conf/presto-server.keytab presto-server/$(hostname -f)@LABS.TERADATA.COM
+
+kinit -l "$TICKET_LIFETIME" -f -c /etc/trino/conf/hive-presto-master-krbcc \
+      -kt /etc/trino/conf/hive-presto-master.keytab hive/$(hostname -f)@LABS.TERADATA.COM
+
+kinit -l "$TICKET_LIFETIME" -f -c /etc/trino/conf/hdfs-krbcc \
+      -kt /etc/hadoop/conf/hdfs.keytab hdfs/hadoop-master@LABS.TERADATA.COM
+
+kinit -l "$TICKET_LIFETIME" -f -c /etc/trino/conf/hive-krbcc \
+      -kt /etc/hive/conf/hive.keytab hive/hadoop-master@LABS.TERADATA.COM
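
A quick way to verify the caches this script creates (a hedged sketch; the cache paths are the ones passed to kinit above):

    # show principal and ticket expiry for one of the generated caches
    klist -c /etc/trino/conf/hive-krbcc
    # tickets last 30 minutes, so long-lived sessions must re-run this script
    # (the FE side additionally relogins from keytab, see HadoopUGI below)
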
diff --git a/docker/thirdparties/docker-compose/kerberos/common/conf/doris-krb5.conf b/docker/thirdparties/docker-compose/kerberos/common/conf/doris-krb5.conf
new file mode 100644
index 00000000000..7624b94e6ad
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/common/conf/doris-krb5.conf
@@ -0,0 +1,48 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+[logging]
+ default = FILE:/var/log/krb5libs.log
+ kdc = FILE:/var/log/krb5kdc.log
+ admin_server = FILE:/var/log/kadmind.log
+
+[libdefaults]
+ default_realm = LABS.TERADATA.COM
+ dns_lookup_realm = false
+ dns_lookup_kdc = false
+ ticket_lifetime = 24h
+ # this setting is causing a Message stream modified (41) error when talking to KDC running on CentOS 7: https://stackoverflow.com/a/60978520
+ # renew_lifetime = 7d
+ forwardable = true
+ udp_preference_limit = 1
+
+[realms]
+ LABS.TERADATA.COM = {
+  kdc = hadoop-master:88
+  admin_server = hadoop-master
+ }
+ OTHERLABS.TERADATA.COM = {
+  kdc = hadoop-master:89
+  admin_server = hadoop-master
+ }
+ OTHERREALM.COM = {
+  kdc = hadoop-master-2:88
+  admin_server = hadoop-master
+ }
+
+[domain_realm]
+  hadoop-master-2 = OTHERREALM.COM
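
A minimal smoke test of this config from the host, assuming run-thirdparties-docker.sh (below) has installed it as /etc/krb5.conf and symlinked the keytabs to /keytabs (keytab and principal names here follow the entrypoint scripts and are assumptions):

    export KRB5_CONFIG=/keytabs/krb5.conf
    # the [realms] section above routes this request to the KDC at hadoop-master:88
    kinit -kt /keytabs/presto-server.keytab presto-server/hadoop-master@LABS.TERADATA.COM
    klist
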
diff --git a/docker/thirdparties/docker-compose/kerberos/common/hadoop/apply-config-overrides.sh b/docker/thirdparties/docker-compose/kerberos/common/hadoop/apply-config-overrides.sh
new file mode 100755
index 00000000000..ec2dc074e70
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/common/hadoop/apply-config-overrides.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# test whether OVERRIDES_DIR is set
+if [[ -n "${OVERRIDES_DIR+x}" ]]; then
+    echo "The OVERRIDES_DIR (${OVERRIDES_DIR}) support is disabled as it was 
deemed unused." >&2
+    echo "It is being removed." >&2
+    exit 16
+fi
+
+if test -e /overrides; then
+    find /overrides >&2
+    echo "The /overrides handling is disabled as it was deemed unused." >&2
+    echo "It is being removed." >&2
+    exit 17
+fi
diff --git a/docker/thirdparties/docker-compose/kerberos/common/hadoop/hadoop-run.sh b/docker/thirdparties/docker-compose/kerberos/common/hadoop/hadoop-run.sh
new file mode 100755
index 00000000000..b8bfd8715e9
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/common/hadoop/hadoop-run.sh
@@ -0,0 +1,42 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -euo pipefail
+
+if test $# -gt 0; then
+    echo "$0 does not accept arguments" >&2
+    exit 32
+fi
+
+set -x
+
+HADOOP_INIT_D=${HADOOP_INIT_D:-/etc/hadoop-init.d/}
+
+echo "Applying hadoop init.d scripts from ${HADOOP_INIT_D}"
+if test -d "${HADOOP_INIT_D}"; then
+    for init_script in "${HADOOP_INIT_D}"*; do
+        chmod a+x "${init_script}"
+        "${init_script}"
+    done
+fi
+
+trap exit INT
+
+echo "Running services with supervisord"
+
+supervisord -c /etc/supervisord.conf
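
hadoop-run.sh executes every file mounted under /etc/hadoop-init.d/ before starting supervisord; that is the hook update-location.sh (below) relies on. A hypothetical drop-in would look like:

    # /etc/hadoop-init.d/99-example.sh (hypothetical name and content)
    #!/usr/bin/env bash
    # one-off setup that must run before the hadoop services come up
    echo "extra init step" >> /var/log/hadoop-init.log
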
diff --git a/docker/thirdparties/docker-compose/kerberos/entrypoint-hive-master-2.sh b/docker/thirdparties/docker-compose/kerberos/entrypoint-hive-master-2.sh
new file mode 100755
index 00000000000..c21460c3a57
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/entrypoint-hive-master-2.sh
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -euo pipefail
+
+echo "Copying kerberos keytabs to /keytabs/"
+mkdir -p /etc/hadoop-init.d/
+cp /etc/trino/conf/hive-presto-master.keytab /keytabs/other-hive-presto-master.keytab
+cp /etc/trino/conf/presto-server.keytab /keytabs/other-presto-server.keytab
+cp /keytabs/update-location.sh /etc/hadoop-init.d/update-location.sh
+/usr/local/hadoop-run.sh &
+
+sleep 30
+
+echo "Init kerberos test data"
+kinit -kt /etc/hive/conf/hive.keytab hive/hadoop-master-2@OTHERREALM.COM
+hive  -f /usr/local/sql/create_kerberos_hive_table.sql
+
+sleep 20
+
+tail -f /dev/null
diff --git a/docker/thirdparties/docker-compose/kerberos/entrypoint-hive-master.sh b/docker/thirdparties/docker-compose/kerberos/entrypoint-hive-master.sh
new file mode 100755
index 00000000000..62924992219
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/entrypoint-hive-master.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -euo pipefail
+
+echo "Copying kerberos keytabs to keytabs/"
+mkdir -p /etc/hadoop-init.d/
+cp /etc/trino/conf/* /keytabs/
+/usr/local/hadoop-run.sh &
+
+sleep 30
+
+echo "Init kerberos test data"
+kinit -kt /etc/hive/conf/hive.keytab hive/hadoop-master@LABS.TERADATA.COM
+hive  -f /usr/local/sql/create_kerberos_hive_table.sql
+
+sleep 20
+
+tail -f /dev/null
diff --git a/docker/thirdparties/docker-compose/kerberos/health-checks/hadoop-health-check.sh b/docker/thirdparties/docker-compose/kerberos/health-checks/hadoop-health-check.sh
new file mode 100755
index 00000000000..190fa838d6f
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/health-checks/hadoop-health-check.sh
@@ -0,0 +1,39 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -euo pipefail
+
+if test $# -gt 0; then
+    echo "$0 does not accept arguments" >&2
+    exit 32
+fi
+
+# If supervisord is not running yet, report healthy and let it finish starting
+if ! test -f /tmp/supervisor.sock; then
+    exit 0
+fi
+
+# Check if all Hadoop services are running
+FAILED=$(supervisorctl status | grep -v RUNNING || true)
+
+if [ "$FAILED" == "" ]; then
+  exit 0
+else
+  echo "Some of the services are failing: ${FAILED}"
+  exit 1
+fi
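
The same check can be reproduced by hand inside either container when a healthcheck fails (a sketch; assumes supervisord is already up):

    supervisorctl status                            # one line per hadoop service
    supervisorctl status | grep -v RUNNING || true  # any output here is a failing service
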
diff --git a/docker/thirdparties/docker-compose/kerberos/health-checks/health.sh b/docker/thirdparties/docker-compose/kerberos/health-checks/health.sh
new file mode 100644
index 00000000000..515f37e36ac
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/health-checks/health.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -euo pipefail
+
+if test $# -gt 0; then
+    echo "$0 does not accept arguments" >&2
+    exit 32
+fi
+
+set -x
+
+HEALTH_D=${HEALTH_D:-/etc/health.d/}
+
+if test -d "${HEALTH_D}"; then
+    for health_script in "${HEALTH_D}"/*; do
+        "${health_script}" &>> /var/log/container-health.log || exit 1
+    done
+fi
diff --git a/docker/thirdparties/docker-compose/kerberos/kerberos.yaml.tpl b/docker/thirdparties/docker-compose/kerberos/kerberos.yaml.tpl
new file mode 100644
index 00000000000..6f175ab9c6e
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/kerberos.yaml.tpl
@@ -0,0 +1,73 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+version: "3"
+services:
+  hive-krb:
+    image: ghcr.io/trinodb/testing/hdp3.1-hive-kerberized
+    container_name: doris--kerberos1
+    volumes:
+      - ./two-kerberos-hives:/keytabs
+      - ./sql:/usr/local/sql
+      - ./common/hadoop/apply-config-overrides.sh:/etc/hadoop-init.d/00-apply-config-overrides.sh
+      - ./common/hadoop/hadoop-run.sh:/usr/local/hadoop-run.sh
+      - ./health-checks/hadoop-health-check.sh:/etc/health.d/hadoop-health-check.sh
+      - ./entrypoint-hive-master.sh:/usr/local/entrypoint-hive-master.sh
+    hostname: hadoop-master
+    entrypoint: /usr/local/entrypoint-hive-master.sh
+    healthcheck:
+      test: ./health-checks/health.sh
+    ports:
+      - "5806:5006"
+      - "8820:8020"
+      - "8842:8042"
+      - "9800:9000"
+      - "9883:9083"
+      - "18000:10000"
+    networks:
+      doris--krb_net:
+        ipv4_address: 172.31.71.25
+
+  hive-krb2:
+    image: ghcr.io/trinodb/testing/hdp3.1-hive-kerberized-2:96
+    container_name: doris--kerberos2
+    hostname: hadoop-master-2
+    volumes:
+      - ./two-kerberos-hives:/keytabs
+      - ./sql:/usr/local/sql
+      - ./common/hadoop/apply-config-overrides.sh:/etc/hadoop-init.d/00-apply-config-overrides.sh
+      - ./common/hadoop/hadoop-run.sh:/usr/local/hadoop-run.sh
+      - ./health-checks/hadoop-health-check.sh:/etc/health.d/hadoop-health-check.sh
+      - ./entrypoint-hive-master-2.sh:/usr/local/entrypoint-hive-master-2.sh
+    entrypoint: /usr/local/entrypoint-hive-master-2.sh
+    healthcheck:
+      test: ./health-checks/health.sh
+    ports:
+      - "15806:5006"
+      - "18820:8020"
+      - "18842:8042"
+      - "19800:9000"
+      - "19883:9083"
+      - "18800:10000"
+    networks:
+      doris--krb_net:
+        ipv4_address: 172.31.71.26
+
+networks:
+  doris--krb_net:
+    ipam:
+      config:
+        - subnet: 172.31.71.0/24
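
run-thirdparties-docker.sh (below) renders this template by substituting the doris-- prefix; a manual equivalent, assuming CONTAINER_UID is doris-ci- (a hypothetical value):

    cd docker/thirdparties/docker-compose/kerberos
    sed "s/doris--/doris-ci-/g" kerberos.yaml.tpl > kerberos.yaml
    sudo docker compose -f kerberos.yaml up -d
    docker ps --filter "name=doris-ci-kerberos"   # expect both hive containers healthy
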
diff --git a/docker/thirdparties/docker-compose/kerberos/sql/create_kerberos_hive_table.sql b/docker/thirdparties/docker-compose/kerberos/sql/create_kerberos_hive_table.sql
new file mode 100644
index 00000000000..ecf58e88158
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/sql/create_kerberos_hive_table.sql
@@ -0,0 +1,17 @@
+CREATE DATABASE IF NOT EXISTS `test_krb_hive_db`;
+CREATE TABLE IF NOT EXISTS `test_krb_hive_db`.`test_krb_hive_tbl`(
+  `id_key` int,
+  `string_key` string,
+  `rate_val` double,
+  `comment` string)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat';
+
+INSERT INTO test_krb_hive_db.test_krb_hive_tbl values(1, 'a', 3.16, 'cc0');
+INSERT INTO test_krb_hive_db.test_krb_hive_tbl values(2, 'b', 41.2, 'cc1');
+INSERT INTO test_krb_hive_db.test_krb_hive_tbl values(3, 'c', 6.2, 'cc2');
+INSERT INTO test_krb_hive_db.test_krb_hive_tbl values(4, 'd', 1.4, 'cc3');
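
Once an entrypoint has seeded this table, it can be spot-checked from inside the matching container (a sketch; the principal follows entrypoint-hive-master.sh above):

    kinit -kt /etc/hive/conf/hive.keytab hive/hadoop-master@LABS.TERADATA.COM
    hive -e 'SELECT count(*) FROM test_krb_hive_db.test_krb_hive_tbl;'   # expect 4
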
diff --git a/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/auth-to-local.xml b/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/auth-to-local.xml
new file mode 100755
index 00000000000..c0ce38e3cdc
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/auth-to-local.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+<configuration>
+    <property>
+        <name>hadoop.security.auth_to_local</name>
+        <value>
+            RULE:[2:$1@$0](.*@OTHERREALM.COM)s/@.*//
+            RULE:[2:$1@$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
+            DEFAULT
+        </value>
+    </property>
+</configuration>
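
On the Hadoop 3.x images used here, these rules can be sanity-checked with the kerbname helper (a sketch; the output format may differ by Hadoop version):

    hadoop kerbname hive/hadoop-master-2@OTHERREALM.COM
    # the first RULE strips the realm, so the expected short name is: hive
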
diff --git a/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/hive2-default-fs-site.xml b/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/hive2-default-fs-site.xml
new file mode 100755
index 00000000000..4541c1328ae
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/hive2-default-fs-site.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+<configuration>
+    <property>
+        <name>fs.default.name</name>
+        <value>hdfs://hadoop-master-2:9000</value>
+    </property>
+</configuration>
diff --git a/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/update-location.sh b/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/update-location.sh
new file mode 100755
index 00000000000..8d727b2308d
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kerberos/two-kerberos-hives/update-location.sh
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+/usr/bin/mysqld_safe &
+while ! mysqladmin ping -proot --silent; do sleep 1; done
+
+hive --service metatool -updateLocation hdfs://hadoop-master-2:9000/user/hive/warehouse hdfs://hadoop-master:9000/user/hive/warehouse
+
+killall mysqld
+while pgrep mysqld; do sleep 1; done
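
After the metatool run above, the rewritten warehouse root can be confirmed with the same tool (a sketch):

    hive --service metatool -listFSRoot   # should now print hdfs://hadoop-master-2:9000/user/hive/warehouse
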
diff --git a/docker/thirdparties/run-thirdparties-docker.sh b/docker/thirdparties/run-thirdparties-docker.sh
index b9c720c457e..805c137958d 100755
--- a/docker/thirdparties/run-thirdparties-docker.sh
+++ b/docker/thirdparties/run-thirdparties-docker.sh
@@ -37,7 +37,7 @@ Usage: $0 <options>
      --stop             stop the specified components
 
   All valid components:
-    mysql,pg,oracle,sqlserver,clickhouse,es,hive2,hive3,iceberg,hudi,trino,kafka,mariadb,db2,lakesoul
+    mysql,pg,oracle,sqlserver,clickhouse,es,hive2,hive3,iceberg,hudi,trino,kafka,mariadb,db2,lakesoul,kerberos
   "
     exit 1
 }
@@ -59,7 +59,7 @@ eval set -- "${OPTS}"
 
 if [[ "$#" == 1 ]]; then
     # default
-    COMPONENTS="mysql,es,hive2,hive3,pg,oracle,sqlserver,clickhouse,mariadb,iceberg,db2"
+    COMPONENTS="mysql,es,hive2,hive3,pg,oracle,sqlserver,clickhouse,mariadb,iceberg,db2,kerberos"
 else
     while true; do
         case "$1" in
@@ -91,7 +91,7 @@ else
     done
     if [[ "${COMPONENTS}"x == ""x ]]; then
         if [[ "${STOP}" -eq 1 ]]; then
-            COMPONENTS="mysql,es,pg,oracle,sqlserver,clickhouse,hive2,hive3,iceberg,hudi,trino,kafka,mariadb,db2,lakesoul"
+            COMPONENTS="mysql,es,pg,oracle,sqlserver,clickhouse,hive2,hive3,iceberg,hudi,trino,kafka,mariadb,db2,kerberos,lakesoul"
         fi
     fi
 fi
@@ -136,6 +136,7 @@ RUN_SPARK=0
 RUN_MARIADB=0
 RUN_DB2=0
 RUN_LAKESOUL=0
+RUN_KERBEROS=0
 
 for element in "${COMPONENTS_ARR[@]}"; do
     if [[ "${element}"x == "mysql"x ]]; then
@@ -170,6 +171,8 @@ for element in "${COMPONENTS_ARR[@]}"; do
         RUN_DB2=1
     elif [[ "${element}"x == "lakesoul"x ]]; then
         RUN_LAKESOUL=1
+    elif [[ "${element}"x == "kerberos"x ]]; then
+        RUN_KERBEROS=1
     else
         echo "Invalid component: ${element}"
         usage
@@ -545,5 +548,26 @@ if [[ "${RUN_LAKESOUL}" -eq 1 ]]; then
 #    git checkout doris_dev
     cd LakeSoul/rust
     cargo test load_tpch_data --package lakesoul-datafusion --features=ci -- --nocapture
+fi
 
+if [[ "${RUN_KERBEROS}" -eq 1 ]]; then
+    echo "RUN_KERBEROS"
+    cp "${ROOT}"/docker-compose/kerberos/kerberos.yaml.tpl 
"${ROOT}"/docker-compose/kerberos/kerberos.yaml
+    sed -i "s/doris--/${CONTAINER_UID}/g" 
"${ROOT}"/docker-compose/kerberos/kerberos.yaml
+    sudo docker compose -f "${ROOT}"/docker-compose/kerberos/kerberos.yaml down
+    sudo rm -rf "${ROOT}"/docker-compose/kerberos/data
+    if [[ "${STOP}" -ne 1 ]]; then
+        echo "PREPARE KERBEROS DATA"
+        rm -rf "${ROOT}"/docker-compose/kerberos/two-kerberos-hives/*.keytab
+        rm -rf "${ROOT}"/docker-compose/kerberos/two-kerberos-hives/*.jks
+        rm -rf "${ROOT}"/docker-compose/kerberos/two-kerberos-hives/*.conf
+        sudo docker compose -f "${ROOT}"/docker-compose/kerberos/kerberos.yaml up -d
+        sudo rm -f /keytabs
+        sudo ln -s "${ROOT}"/docker-compose/kerberos/two-kerberos-hives /keytabs
+        sudo cp "${ROOT}"/docker-compose/kerberos/common/conf/doris-krb5.conf /keytabs/krb5.conf
+        sudo cp "${ROOT}"/docker-compose/kerberos/common/conf/doris-krb5.conf /etc/krb5.conf
+
+        echo '172.31.71.25 hadoop-master' >> /etc/hosts
+        echo '172.31.71.26 hadoop-master-2' >> /etc/hosts
+    fi
 fi
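
With this block in place, the kerberos environment starts and stops like any other third-party component; a usage sketch (assuming the script's existing -c component flag):

    # bring up both kerberized hive containers
    sudo ./run-thirdparties-docker.sh -c kerberos
    # tear them down again
    sudo ./run-thirdparties-docker.sh -c kerberos --stop
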
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java
index 342f86b7125..5e61664c8fa 100644
--- a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java
@@ -112,7 +112,9 @@ public class HadoopUGI {
         UserGroupInformation ugi = HadoopUGI.loginWithUGI(authConf);
         try {
             if (ugi != null) {
-                ugi.checkTGTAndReloginFromKeytab();
+                if (authConf instanceof KerberosAuthenticationConfig) {
+                    ugi.checkTGTAndReloginFromKeytab();
+                }
                 return ugi.doAs(action);
             } else {
                 return action.run();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java
index 5bdbe594059..929378ae80f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java
@@ -51,6 +51,7 @@ import org.apache.doris.datasource.paimon.PaimonExternalDatabase;
 import org.apache.doris.datasource.property.PropertyConverter;
 import org.apache.doris.datasource.test.TestExternalDatabase;
 import org.apache.doris.datasource.trinoconnector.TrinoConnectorExternalDatabase;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
 import org.apache.doris.persist.gson.GsonPostProcessable;
 import org.apache.doris.persist.gson.GsonUtils;
 import org.apache.doris.qe.ConnectContext;
@@ -67,7 +68,6 @@ import lombok.Data;
 import org.apache.commons.lang3.NotImplementedException;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.NotNull;
@@ -148,7 +148,7 @@ public abstract class ExternalCatalog
     }
 
     public Configuration getConfiguration() {
-        Configuration conf = new HdfsConfiguration();
+        Configuration conf = DFSFileSystem.getHdfsConf(ifNotSetFallbackToSimpleAuth());
         Map<String, String> catalogProperties = catalogProperty.getHadoopProperties();
         for (Map.Entry<String, String> entry : catalogProperties.entrySet()) {
             conf.set(entry.getKey(), entry.getValue());
@@ -181,6 +181,11 @@ public abstract class ExternalCatalog
                 Boolean.valueOf(catalogProperty.getOrDefault(USE_META_CACHE, String.valueOf(DEFAULT_USE_META_CACHE))));
     }
 
+    // returns true if the fallback-to-simple-auth property is unset, so callers can decide between kerberos and simple auth
+    public boolean ifNotSetFallbackToSimpleAuth() {
+        return catalogProperty.getOrDefault(DFSFileSystem.PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "").isEmpty();
+    }
+
     // Will be called when creating catalog(not replaying).
     // Subclass can override this method to do some check when creating catalog.
     public void checkWhenCreating() throws DdlException {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
index 243dfb3c24f..91192b63c24 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
@@ -37,6 +37,7 @@ import org.apache.doris.datasource.property.PropertyConverter;
 import org.apache.doris.datasource.property.constants.HMSProperties;
 import org.apache.doris.fs.FileSystemProvider;
 import org.apache.doris.fs.FileSystemProviderImpl;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
 import org.apache.doris.transaction.TransactionManagerFactory;
 
 import com.google.common.base.Strings;
@@ -59,7 +60,6 @@ public class HMSExternalCatalog extends ExternalCatalog {
     public static final String FILE_META_CACHE_TTL_SECOND = "file.meta.cache.ttl-second";
     // broker name for file split and query scan.
     public static final String BIND_BROKER_NAME = "broker.name";
-    private static final String PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH = "ipc.client.fallback-to-simple-auth-allowed";
 
     // -1 means file cache no ttl set
     public static final int FILE_META_CACHE_NO_TTL = -1;
@@ -244,9 +244,9 @@ public class HMSExternalCatalog extends ExternalCatalog {
     @Override
     public void setDefaultPropsIfMissing(boolean isReplay) {
         super.setDefaultPropsIfMissing(isReplay);
-        if (catalogProperty.getOrDefault(PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "").isEmpty()) {
+        if (ifNotSetFallbackToSimpleAuth()) {
             // always allow fallback to simple auth, so to support both kerberos and simple auth
-            catalogProperty.addProperty(PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "true");
+            catalogProperty.addProperty(DFSFileSystem.PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "true");
         }
     }
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java
index f402d27cf6d..5e3366e37c4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java
@@ -41,6 +41,7 @@ import org.apache.doris.datasource.property.PropertyConverter;
 import org.apache.doris.fs.FileSystemCache;
 import org.apache.doris.fs.remote.RemoteFile;
 import org.apache.doris.fs.remote.RemoteFileSystem;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
 import org.apache.doris.metric.GaugeMetric;
 import org.apache.doris.metric.Metric;
 import org.apache.doris.metric.MetricLabel;
@@ -66,7 +67,6 @@ import org.apache.commons.lang3.math.NumberUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hive.common.ValidWriteIdList;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@@ -433,7 +433,7 @@ public class HiveMetaStoreCache {
     }
 
     private synchronized void setJobConf() {
-        Configuration configuration = new HdfsConfiguration();
+        Configuration configuration = DFSFileSystem.getHdfsConf(catalog.ifNotSetFallbackToSimpleAuth());
         for (Map.Entry<String, String> entry : catalog.getCatalogProperty().getHadoopProperties().entrySet()) {
             configuration.set(entry.getKey(), entry.getValue());
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java
index 2e7693619b8..ea821cefec6 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java
@@ -42,13 +42,13 @@ import org.apache.doris.common.DdlException;
 import org.apache.doris.common.security.authentication.AuthenticationConfig;
 import org.apache.doris.common.security.authentication.HadoopUGI;
 import org.apache.doris.datasource.ExternalCatalog;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
 import org.apache.doris.thrift.TExprOpcode;
 
 import com.google.common.base.Strings;
 import com.google.common.collect.Maps;
 import org.apache.avro.Schema;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -841,7 +841,7 @@ public class HiveMetaStoreClientHelper {
     }
 
     public static Configuration getConfiguration(HMSExternalTable table) {
-        Configuration conf = new HdfsConfiguration();
+        Configuration conf = DFSFileSystem.getHdfsConf(table.getCatalog().ifNotSetFallbackToSimpleAuth());
         for (Map.Entry<String, String> entry : table.getHadoopProperties().entrySet()) {
             conf.set(entry.getKey(), entry.getValue());
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java
index 68064c4e439..dc11a6cacc2 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java
@@ -25,13 +25,13 @@ import org.apache.doris.datasource.CatalogIf;
 import org.apache.doris.datasource.hive.HMSExternalCatalog;
 import org.apache.doris.datasource.hive.HiveMetaStoreClientHelper;
 import org.apache.doris.datasource.property.constants.HMSProperties;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
 import org.apache.doris.thrift.TIcebergMetadataParams;
 
 import com.github.benmanes.caffeine.cache.LoadingCache;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.iceberg.ManifestFiles;
 import org.apache.iceberg.SerializableTable;
 import org.apache.iceberg.Snapshot;
@@ -177,7 +177,8 @@ public class IcebergMetadataCache {
 
     private Catalog createIcebergHiveCatalog(String uri, Map<String, String> hdfsConf, Map<String, String> props) {
         // set hdfs configure
-        Configuration conf = new HdfsConfiguration();
+        Configuration conf = DFSFileSystem.getHdfsConf(
+                hdfsConf.getOrDefault(DFSFileSystem.PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "").isEmpty());
         for (Map.Entry<String, String> entry : hdfsConf.entrySet()) {
             conf.set(entry.getKey(), entry.getValue());
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalCatalog.java
index 11691224681..8f187e6d7ca 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalCatalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalCatalog.java
@@ -25,6 +25,7 @@ import org.apache.doris.datasource.InitCatalogLog;
 import org.apache.doris.datasource.SessionContext;
 import org.apache.doris.datasource.property.constants.HMSProperties;
 import org.apache.doris.datasource.property.constants.PaimonProperties;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
 
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Maps;
@@ -60,7 +61,7 @@ public abstract class PaimonExternalCatalog extends ExternalCatalog {
 
     @Override
     protected void initLocalObjectsImpl() {
-        Configuration conf = new Configuration();
+        Configuration conf = DFSFileSystem.getHdfsConf(ifNotSetFallbackToSimpleAuth());
         for (Map.Entry<String, String> propEntry : this.catalogProperty.getHadoopProperties().entrySet()) {
             conf.set(propEntry.getKey(), propEntry.getValue());
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/RemoteFileSystem.java b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/RemoteFileSystem.java
index 311532794f1..68de3a8fdef 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/RemoteFileSystem.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/RemoteFileSystem.java
@@ -21,6 +21,7 @@ import org.apache.doris.analysis.StorageBackend;
 import org.apache.doris.backup.Status;
 import org.apache.doris.common.UserException;
 import org.apache.doris.fs.PersistentFileSystem;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
 
 import com.google.common.collect.ImmutableSet;
 import org.apache.hadoop.fs.FileStatus;
@@ -46,6 +47,10 @@ public abstract class RemoteFileSystem extends PersistentFileSystem {
         throw new UserException("Not support to getFileSystem.");
     }
 
+    public boolean ifNotSetFallbackToSimpleAuth() {
+        return properties.getOrDefault(DFSFileSystem.PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "").isEmpty();
+    }
+
     @Override
     public Status listFiles(String remotePath, boolean recursive, List<RemoteFile> result) {
         try {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/S3FileSystem.java b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/S3FileSystem.java
index 3130a0cea52..525d80d6797 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/S3FileSystem.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/S3FileSystem.java
@@ -22,6 +22,7 @@ import org.apache.doris.backup.Status;
 import org.apache.doris.common.UserException;
 import org.apache.doris.datasource.property.PropertyConverter;
 import org.apache.doris.fs.obj.S3ObjStorage;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
 
 import com.amazonaws.services.s3.model.AmazonS3Exception;
 import com.google.common.annotations.VisibleForTesting;
@@ -60,7 +61,7 @@ public class S3FileSystem extends ObjFileSystem {
         if (dfsFileSystem == null) {
             synchronized (this) {
                 if (dfsFileSystem == null) {
-                    Configuration conf = new Configuration();
+                    Configuration conf = DFSFileSystem.getHdfsConf(ifNotSetFallbackToSimpleAuth());
                     System.setProperty("com.amazonaws.services.s3.enableV4", "true");
                     // the entry value in properties may be null, and
                     PropertyConverter.convertToHadoopFSProperties(properties).entrySet().stream()
diff --git a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java
index d608653024f..5532f1187fe 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java
@@ -56,8 +56,8 @@ import java.util.Map;
 
 public class DFSFileSystem extends RemoteFileSystem {
 
+    public static final String PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH = "ipc.client.fallback-to-simple-auth-allowed";
     private static final Logger LOG = LogManager.getLogger(DFSFileSystem.class);
-
     private HDFSFileOperations operations = null;
 
     public DFSFileSystem(Map<String, String> properties) {
@@ -75,7 +75,7 @@ public class DFSFileSystem extends RemoteFileSystem {
         if (dfsFileSystem == null) {
             synchronized (this) {
                 if (dfsFileSystem == null) {
-                    Configuration conf = new HdfsConfiguration();
+                    Configuration conf = getHdfsConf(ifNotSetFallbackToSimpleAuth());
                     for (Map.Entry<String, String> propEntry : properties.entrySet()) {
                         conf.set(propEntry.getKey(), propEntry.getValue());
                     }
@@ -94,6 +94,15 @@ public class DFSFileSystem extends RemoteFileSystem {
         return dfsFileSystem;
     }
 
+    public static Configuration getHdfsConf(boolean fallbackToSimpleAuth) {
+        Configuration hdfsConf = new HdfsConfiguration();
+        if (fallbackToSimpleAuth) {
+            // need to support fallback to simple auth if the cluster is a mixture of kerberos and simple auth.
+            hdfsConf.set(PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH, "true");
+        }
+        return hdfsConf;
+    }
+
     @Override
     public Status downloadWithFileSize(String remoteFilePath, String localFilePath, long fileSize) {
         if (LOG.isDebugEnabled()) {
diff --git a/regression-test/conf/regression-conf.groovy b/regression-test/conf/regression-conf.groovy
index 6e9a162df00..6d4d9156339 100644
--- a/regression-test/conf/regression-conf.groovy
+++ b/regression-test/conf/regression-conf.groovy
@@ -220,3 +220,7 @@ externalEnvIp="127.0.0.1"
 
 // trino-connector catalog test config
 enableTrinoConnectorTest = false
+
+enableKerberosTest=false
+kerberosHmsPort=9883
+kerberosHdfsPort=8820
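
To run the new suites locally one would flip this flag to true and invoke the regression runner from the repo root (a sketch; the runner flags are assumptions based on the framework's usual entry point):

    # in regression-test/conf/regression-conf.groovy: enableKerberosTest=true
    sh run-regression-test.sh --run -s test_single_hive_kerberos
    sh run-regression-test.sh --run -s test_two_hive_kerberos
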
diff --git a/regression-test/data/external_table_p0/kerberos/test_single_hive_kerberos.out b/regression-test/data/external_table_p0/kerberos/test_single_hive_kerberos.out
new file mode 100644
index 00000000000..95640fecb54
--- /dev/null
+++ b/regression-test/data/external_table_p0/kerberos/test_single_hive_kerberos.out
@@ -0,0 +1,6 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !q01 --
+1      a       3.16    cc0
+2      b       41.2    cc1
+3      c       6.2     cc2
+4      d       1.4     cc3
diff --git a/regression-test/data/external_table_p0/kerberos/test_two_hive_kerberos.out b/regression-test/data/external_table_p0/kerberos/test_two_hive_kerberos.out
new file mode 100644
index 00000000000..9415efd787f
--- /dev/null
+++ b/regression-test/data/external_table_p0/kerberos/test_two_hive_kerberos.out
@@ -0,0 +1,12 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !q01 --
+1      a       3.16    cc0
+2      b       41.2    cc1
+3      c       6.2     cc2
+4      d       1.4     cc3
+
+-- !q02 --
+1      a       3.16    cc0
+2      b       41.2    cc1
+3      c       6.2     cc2
+4      d       1.4     cc3
diff --git a/regression-test/pipeline/external/conf/be.conf b/regression-test/pipeline/external/conf/be.conf
index e26929b0dca..a7c0713d8eb 100644
--- a/regression-test/pipeline/external/conf/be.conf
+++ b/regression-test/pipeline/external/conf/be.conf
@@ -62,3 +62,6 @@ enable_debug_log_timeout_secs=0
 trino_connector_plugin_dir=/tmp/trino_connector/connectors
 
 enable_jvm_monitor = true
+
+KRB5_CONFIG=/keytabs/krb5.conf
+kerberos_krb5_conf_path=/keytabs/krb5.conf
diff --git a/regression-test/pipeline/external/conf/fe.conf b/regression-test/pipeline/external/conf/fe.conf
index 3727b6a9406..8eed72816e8 100644
--- a/regression-test/pipeline/external/conf/fe.conf
+++ b/regression-test/pipeline/external/conf/fe.conf
@@ -96,3 +96,5 @@ auth_token = 5ff161c3-2c08-4079-b108-26c8850b6598
 infodb_support_ext_catalog=true
 
 trino_connector_plugin_dir=/tmp/trino_connector/connectors
+
+KRB5_CONFIG=/keytabs/krb5.conf
diff --git a/regression-test/pipeline/external/conf/regression-conf.groovy b/regression-test/pipeline/external/conf/regression-conf.groovy
index 6b7c451d7cb..97de0530bc9 100644
--- a/regression-test/pipeline/external/conf/regression-conf.groovy
+++ b/regression-test/pipeline/external/conf/regression-conf.groovy
@@ -157,3 +157,8 @@ db2_11_port=50000
 
 // trino-connector catalog test config
 enableTrinoConnectorTest = true
+
+// kerberos docker config
+enableKerberosTest = true
+kerberosHmsPort=9883
+kerberosHdfsPort=8820
diff --git a/regression-test/suites/external_table_p0/kerberos/test_single_hive_kerberos.groovy b/regression-test/suites/external_table_p0/kerberos/test_single_hive_kerberos.groovy
new file mode 100644
index 00000000000..7a0864923f5
--- /dev/null
+++ b/regression-test/suites/external_table_p0/kerberos/test_single_hive_kerberos.groovy
@@ -0,0 +1,101 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_single_hive_kerberos", 
"p0,external,kerberos,external_docker,external_docker_kerberos") {
+    String enabled = context.config.otherConfigs.get("enableKerberosTest")
+    if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        String hms_catalog_name = "test_single_hive_kerberos"
+        sql """drop catalog if exists hms_kerberos;"""
+        sql """
+            CREATE CATALOG IF NOT EXISTS hms_kerberos
+            PROPERTIES (
+                "type" = "hms",
+                "hive.metastore.uris" = "thrift://172.31.71.25:9083",
+                "fs.defaultFS" = "hdfs://172.31.71.25:8020",
+                "hadoop.security.authentication" = "kerberos",
+                "hadoop.kerberos.principal" = "presto-server/presto-master.docker.cluster@LABS.TERADATA.COM",
+                "hadoop.kerberos.keytab" = "/keytabs/presto-server.keytab",
+                "hive.metastore.sasl.enabled" = "true",
+                "hive.metastore.kerberos.principal" = "hive/_HOST@LABS.TERADATA.COM"
+            );
+        """
+        sql """ switch hms_kerberos """
+        sql """ show databases """
+        order_qt_q01 """ select * from hms_kerberos.test_krb_hive_db.test_krb_hive_tbl """
+        sql """drop catalog hms_kerberos;"""
+
+        try {
+            sql """drop catalog if exists hms_kerberos_hadoop_err1;"""
+            sql """
+                CREATE CATALOG IF NOT EXISTS hms_kerberos_hadoop_err1
+                PROPERTIES (
+                    "type" = "hms",
+                    "hive.metastore.uris" = "thrift://172.31.71.25:9083",
+                    "fs.defaultFS" = "hdfs://172.31.71.25:8020",
+                    "hadoop.security.authentication" = "kerberos",
+                    "hadoop.kerberos.principal" = "presto-server/presto-master.docker.cluster@LABS.TERADATA.COM",
+                    "hadoop.kerberos.keytab" = "/keytabs/presto-server.keytab"
+                );
+            """
+            sql """ switch hms_kerberos_hadoop_err1 """
+            sql """ show databases """
+        } catch (Exception e) {
+            logger.info(e.toString())
+            // caused by a warning msg when sasl is enabled on hive but "hive.metastore.sasl.enabled" is not set to true:
+            // "set_ugi() not successful, Likely cause: new client talking to old server. Continuing without it."
+            assertTrue(e.toString().contains("org.apache.thrift.transport.TTransportException: null"))
+        }
+
+        try {
+            sql """drop catalog if exists hms_kerberos_hadoop_err2;"""
+            sql """
+                CREATE CATALOG IF NOT EXISTS hms_kerberos_hadoop_err2
+                PROPERTIES (
+                    "type" = "hms",
+                    "hive.metastore.sasl.enabled " = "true",
+                    "hive.metastore.uris" = "thrift://172.31.71.25:9083",
+                    "fs.defaultFS" = "hdfs://172.31.71.25:8020"
+                );
+            """
+            sql """ switch hms_kerberos_hadoop_err2 """
+            sql """ show databases """
+        } catch (Exception e) {
+            // org.apache.thrift.transport.TTransportException: GSS initiate 
failed
+            assertTrue(e.toString().contains("Could not connect to meta store 
using any of the URIs provided. Most recent failure: 
shade.doris.hive.org.apache.thrift.transport.TTransportException: GSS initiate 
failed"))
+        }
+
+        //        try {
+        //            sql """
+        //                CREATE CATALOG IF NOT EXISTS hms_kerberos_ccache
+        //                PROPERTIES (
+        //                    "type" = "hms",
+        //                    "hive.metastore.uris" = "thrift://172.31.71.25:9083",
+        //                    "fs.defaultFS" = "hdfs://172.31.71.25:8020",
+        //                    "hadoop.security.authentication" = "kerberos",
+        //                    "hadoop.kerberos.principal" = "presto-server/presto-master.docker.cluster@LABS.TERADATA.COM",
+        //                    "hadoop.kerberos.keytab" = "/keytabs/presto-server.keytab",
+        //                    "hive.metastore.thrift.impersonation.enabled" = "true",
+        //                    "hive.metastore.client.credential-cache.location" = "hive-presto-master-krbcc"
+        //                );
+        //            """
+        //            sql """ switch hms_kerberos_ccache """
+        //            sql """ show databases """
+        //        } catch (Exception e) {
+        //            logger.error(e.message)
+        //        }
+    }
+}
diff --git a/regression-test/suites/external_table_p0/kerberos/test_two_hive_kerberos.groovy b/regression-test/suites/external_table_p0/kerberos/test_two_hive_kerberos.groovy
new file mode 100644
index 00000000000..a3b39d1221a
--- /dev/null
+++ b/regression-test/suites/external_table_p0/kerberos/test_two_hive_kerberos.groovy
@@ -0,0 +1,72 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_two_hive_kerberos", 
"p0,external,kerberos,external_docker,external_docker_kerberos") {
+    String enabled = context.config.otherConfigs.get("enableKerberosTest")
+    if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        String hms_catalog_name = "test_two_hive_kerberos"
+        sql """drop catalog if exists ${hms_catalog_name};"""
+        sql """
+            CREATE CATALOG IF NOT EXISTS ${hms_catalog_name}
+            PROPERTIES ( 
+                "type" = "hms",
+                "hive.metastore.uris" = "thrift://172.31.71.25:9083",
+                "fs.defaultFS" = "hdfs://172.31.71.25:8020",
+                "hadoop.security.authentication" = "kerberos",
+                "hadoop.kerberos.principal" = "presto-server/presto-master.docker.cluster@LABS.TERADATA.COM",
+                "hadoop.kerberos.keytab" = "/keytabs/presto-server.keytab",
+                "hive.metastore.sasl.enabled" = "true",
+                "hive.metastore.kerberos.principal" = "hive/_HOST@LABS.TERADATA.COM"
+            );
+        """
+
+        sql """drop catalog if exists other_${hms_catalog_name};"""
+        sql """
+            CREATE CATALOG IF NOT EXISTS other_${hms_catalog_name}
+            PROPERTIES (
+                "type" = "hms",
+                "hive.metastore.uris" = "thrift://172.31.71.26:9083",
+                "fs.defaultFS" = "hdfs://172.31.71.26:8020",
+                "hadoop.security.authentication" = "kerberos",
+                "hadoop.kerberos.principal" = "presto-server/presto-master.docker.cluster@OTHERREALM.COM",
+                "hadoop.kerberos.keytab" = "/keytabs/other-presto-server.keytab",
+                "hive.metastore.sasl.enabled" = "true",
+                "hive.metastore.kerberos.principal" = "hive/_HOST@OTHERREALM.COM",
+                "hadoop.security.auth_to_local" = "RULE:[2:\$1@\$0](.*@OTHERREALM.COM)s/@.*//
+                                                  RULE:[2:\$1@\$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
+                                                  DEFAULT"
+            );
+        """
+
+        // 1. catalogA
+        sql """switch ${hms_catalog_name};"""
+        logger.info("switched to catalog " + hms_catalog_name)
+        sql """ show databases """
+        sql """ use test_krb_hive_db """
+        order_qt_q01 """ select * from test_krb_hive_db.test_krb_hive_tbl """
+
+        // 2. catalogB
+        sql """switch other_${hms_catalog_name};"""
+        logger.info("switched to other catalog " + hms_catalog_name)
+        sql """ show databases """
+        sql """ use test_krb_hive_db """
+        order_qt_q02 """ select * from test_krb_hive_db.test_krb_hive_tbl """
+
+        sql """drop catalog ${hms_catalog_name};"""
+        sql """drop catalog other_${hms_catalog_name};"""
+    }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@doris.apache.org
For additional commands, e-mail: commits-help@doris.apache.org
