This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 50c64bc3cdf [Migrate-Test](multi-catalog) Migrate p2 tests from p2 to p0. (#36989)
50c64bc3cdf is described below

commit 50c64bc3cdf654194bbaa51a6a8c06722253790f
Author: Qi Chen <kaka11.c...@gmail.com>
AuthorDate: Mon Jul 1 14:45:50 2024 +0800

    [Migrate-Test](multi-catalog) Migrate p2 tests from p2 to p0. (#36989)
    
    ## Proposed changes
    
    [Migrate-Test] (multi-catalog) Migrate p2 tests from p2 to p0.
    - Migrate the Hive tests from the p2 suite (remote cluster) to the p0 suite (local docker environment).
    - Set the health check of the HMS docker container to a 10s interval with 120 retries (i.e., wait up to 20 minutes).
    - Remove duplicated tables from `create_preinstalled_table.hql` by adding new per-table scripts (see the sketch below).
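    
    Each migrated table now lives in its own directory containing three files: a `create_table.hql` that creates the database and table, a `data.tar.gz` with the table data, and a `run.sh` that unpacks the archive, uploads it to HDFS, and runs the HQL. As a minimal, hypothetical sketch of how such per-table scripts can be picked up (not part of this commit; the data root path and the loop are assumptions for illustration):
    
        #!/bin/bash
        # Assumed loader sketch: execute every per-table run.sh under the data/ tree.
        # Layout per table (as added in this commit): <db>/<table>/{create_table.hql,data.tar.gz,run.sh}
        set -e
        DATA_ROOT="/mnt/scripts/data"   # assumption: where the scripts/data tree is mounted in the HMS container
        find "${DATA_ROOT}" -mindepth 3 -maxdepth 3 -name run.sh -print0 |
            while IFS= read -r -d '' script; do
                bash "${script}"
            done
    
    Each `run.sh` is self-contained, so a single table can also be set up individually while debugging.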
---
 .../docker-compose/hive/hive-2x.yaml.tpl           |   2 +-
 .../docker-compose/hive/hive-3x.yaml.tpl           |   2 +-
 .../hive/scripts/create_preinstalled_table.hql     | 108 ----------------
 .../data/default/account_fund/create_table.hql     |  28 +++++
 .../scripts/data/default/account_fund/data.tar.gz  | Bin 0 -> 234 bytes
 .../hive/scripts/data/default/account_fund/run.sh  |  12 ++
 .../scripts/data/default/hive01/create_table.hql   |  22 ++++
 .../hive/scripts/data/default/hive01/data.tar.gz   | Bin 0 -> 186 bytes
 .../hive/scripts/data/default/hive01/run.sh        |  12 ++
 .../data/default/sale_table/create_table.hql       |  24 ++++
 .../scripts/data/default/sale_table/data.tar.gz    | Bin 0 -> 221 bytes
 .../hive/scripts/data/default/sale_table/run.sh    |  12 ++
 .../data/default/string_table/create_table.hql     |  27 ++++
 .../scripts/data/default/string_table/data.tar.gz  | Bin 0 -> 260 bytes
 .../hive/scripts/data/default/string_table/run.sh  |  12 ++
 .../scripts/data/default/student/create_table.hql  |  24 ++++
 .../hive/scripts/data/default/student/data.tar.gz  | Bin 0 -> 210 bytes
 .../hive/scripts/data/default/student/run.sh       |  12 ++
 .../scripts/data/default/test1/create_table.hql    |  23 ++++
 .../hive/scripts/data/default/test1/data.tar.gz    | Bin 0 -> 211 bytes
 .../hive/scripts/data/default/test1/run.sh         |  12 ++
 .../scripts/data/default/test2/create_table.hql    |  23 ++++
 .../hive/scripts/data/default/test2/data.tar.gz    | Bin 0 -> 197 bytes
 .../hive/scripts/data/default/test2/run.sh         |  12 ++
 .../data/default/test_hive_doris/create_table.hql  |  20 +++
 .../data/default/test_hive_doris/data.tar.gz       | Bin 0 -> 181 bytes
 .../scripts/data/default/test_hive_doris/run.sh    |  12 ++
 .../par_fields_in_file_orc/create_table.hql        |  21 ++++
 .../par_fields_in_file_orc/data.tar.gz             | Bin 0 -> 751 bytes
 .../multi_catalog/par_fields_in_file_orc/run.sh    |  12 ++
 .../par_fields_in_file_parquet/create_table.hql    |  21 ++++
 .../par_fields_in_file_parquet/data.tar.gz         | Bin 0 -> 548 bytes
 .../par_fields_in_file_parquet/run.sh              |  12 ++
 .../partition_location_1/create_table.hql          |  22 ++++
 .../multi_catalog/partition_location_1/data.tar.gz | Bin 0 -> 583 bytes
 .../data/multi_catalog/partition_location_1/run.sh |  12 ++
 .../partition_location_2/create_table.hql          |  23 ++++
 .../multi_catalog/partition_location_2/data.tar.gz | Bin 0 -> 600 bytes
 .../data/multi_catalog/partition_location_2/run.sh |  12 ++
 .../timestamp_with_time_zone/create_table.hql      |  17 +++
 .../timestamp_with_time_zone/data.tar.gz           | Bin 0 -> 1499 bytes
 .../multi_catalog/timestamp_with_time_zone/run.sh  |  12 ++
 .../scripts/data/test/hive_test/create_table.hql   |  20 +++
 .../hive/scripts/data/test/hive_test/data.tar.gz   | Bin 0 -> 161 bytes
 .../hive/scripts/data/test/hive_test/run.sh        |  12 ++
 .../hive/test_external_catalog_hive.out            | 139 +++++++++++++++------
 .../hive/test_hive_partition_location.out          |  40 ++++++
 .../hive/test_external_catalog_hive.groovy         |  99 ++++++++-------
 .../hive/test_hive_partition_location.groovy       |  18 +--
 49 files changed, 692 insertions(+), 199 deletions(-)

diff --git a/docker/thirdparties/docker-compose/hive/hive-2x.yaml.tpl b/docker/thirdparties/docker-compose/hive/hive-2x.yaml.tpl
index ca0fe2e9ddb..0aec9ec2365 100644
--- a/docker/thirdparties/docker-compose/hive/hive-2x.yaml.tpl
+++ b/docker/thirdparties/docker-compose/hive/hive-2x.yaml.tpl
@@ -89,7 +89,7 @@ services:
       - hive-metastore-postgresql
     healthcheck:
       test: ["CMD", "sh", "-c", "/mnt/scripts/healthy_check.sh"]
-      interval: 5s
+      interval: 10s
       timeout: 60s
       retries: 120
     network_mode: "host"
diff --git a/docker/thirdparties/docker-compose/hive/hive-3x.yaml.tpl b/docker/thirdparties/docker-compose/hive/hive-3x.yaml.tpl
index 09d150c17b2..901e5b3f71a 100644
--- a/docker/thirdparties/docker-compose/hive/hive-3x.yaml.tpl
+++ b/docker/thirdparties/docker-compose/hive/hive-3x.yaml.tpl
@@ -89,7 +89,7 @@ services:
       - hive-metastore-postgresql
     healthcheck:
       test: ["CMD", "sh", "-c", "/mnt/scripts/healthy_check.sh"]
-      interval: 5s
+      interval: 10s
       timeout: 60s
       retries: 120
     network_mode: "host"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/create_preinstalled_table.hql b/docker/thirdparties/docker-compose/hive/scripts/create_preinstalled_table.hql
index cdcc2698e9c..1ea2c2c3405 100644
--- a/docker/thirdparties/docker-compose/hive/scripts/create_preinstalled_table.hql
+++ b/docker/thirdparties/docker-compose/hive/scripts/create_preinstalled_table.hql
@@ -353,18 +353,6 @@ LOCATION
 
 msck repair table orc_all_types;
 
-CREATE TABLE `student` (
-  id varchar(50),
-  name varchar(50),
-  age int,
-  gender varchar(50),
-  addr varchar(50),
-  phone varchar(50)
-)
-ROW FORMAT DELIMITED FIELDS TERMINATED by ','
-LOCATION '/user/doris/preinstalled_data/data_case/student'
-TBLPROPERTIES ('transient_lastDdlTime'='1658816839');
-
 CREATE TABLE `lineorder` (
   `lo_orderkey` int,
   `lo_linenumber` int,
@@ -388,70 +376,6 @@ ROW FORMAT DELIMITED FIELDS TERMINATED by ','
 LOCATION '/user/doris/preinstalled_data/data_case/lineorder'
 TBLPROPERTIES ('transient_lastDdlTime'='1658816839');
 
-CREATE TABLE `test1` (
-  col_1 int,
-  col_2 varchar(20),
-  col_3 int,
-  col_4 int,
-  col_5 varchar(20)
-)
-ROW FORMAT DELIMITED FIELDS TERMINATED by ','
-LOCATION '/user/doris/preinstalled_data/data_case/test1'
-TBLPROPERTIES ('transient_lastDdlTime'='1658816839');
-
-CREATE TABLE `string_table` (
-  p_partkey string,
-  p_name string,
-  p_mfgr string,
-  p_brand string,
-  p_type string,
-  p_size string,
-  p_con string,
-  p_r_price string,
-  p_comment string
-)
-ROW FORMAT DELIMITED FIELDS TERMINATED by ','
-LOCATION '/user/doris/preinstalled_data/data_case/string_table'
-TBLPROPERTIES ('transient_lastDdlTime'='1658816839');
-
-CREATE TABLE `account_fund` (
-  `batchno` string,
-  `appsheet_no` string,
-  `filedate` string,
-  `t_no` string,
-  `tano` string,
-  `t_name` string,
-  `chged_no` string,
-  `mob_no2` string,
-  `home_no` string,
-  `off_no` string
-)
-ROW FORMAT DELIMITED FIELDS TERMINATED by ','
-STORED AS INPUTFORMAT
-  'org.apache.hadoop.mapred.TextInputFormat'
-OUTPUTFORMAT
-  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
-LOCATION
-  '/user/doris/preinstalled_data/data_case/account_fund'
-TBLPROPERTIES ('transient_lastDdlTime'='1658816839');
-
-create table sale_table (
-  `bill_code` varchar(500),
-  `dates` varchar(500),
-  `ord_year` varchar(500),
-  `ord_month` varchar(500),
-  `ord_quarter` varchar(500),
-  `on_time` varchar(500)
-)
-ROW FORMAT DELIMITED FIELDS TERMINATED by ','
-STORED AS INPUTFORMAT
-  'org.apache.hadoop.mapred.TextInputFormat'
-OUTPUTFORMAT
-    'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
-LOCATION
-  '/user/doris/preinstalled_data/data_case/sale_table'
-TBLPROPERTIES ('transient_lastDdlTime'='1658816839');
-
 create table t_hive (
   `k1` int,
   `k2` char(10),
@@ -468,38 +392,6 @@ LOCATION
   '/user/doris/preinstalled_data/data_case/t_hive'
 TBLPROPERTIES ('transient_lastDdlTime'='1658816839');
 
-create table hive01 (
-  first_year int,
-  d_disease varchar(200),
-  i_day int,
-  card_cnt bigint
-)
-ROW FORMAT DELIMITED FIELDS TERMINATED by ','
-LOCATION
-  '/user/doris/preinstalled_data/data_case/hive01'
-TBLPROPERTIES ('transient_lastDdlTime'='1658816839');
-
-CREATE TABLE test2 (
-id int,
-name string ,
-age string ,
-avg_patient_time double,
-dt date
-)
-row format delimited fields terminated by ','
-stored as textfile
-LOCATION '/user/doris/preinstalled_data/data_case/test2'
-TBLPROPERTIES ('transient_lastDdlTime'='1658816839');
-
-create table test_hive_doris(
-id varchar(100),
-age varchar(100)
-)
-row format delimited fields terminated by ','
-stored as textfile
-LOCATION '/user/doris/preinstalled_data/data_case/test_hive_doris'
-TBLPROPERTIES ('transient_lastDdlTime'='1658816839');
-
 CREATE external TABLE `table_with_vertical_line`(
   `k1` string COMMENT 'k1',
   `k2` string COMMENT 'k2',
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/account_fund/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/default/account_fund/create_table.hql
new file mode 100644
index 00000000000..dacb7d225f4
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/default/account_fund/create_table.hql
@@ -0,0 +1,28 @@
+CREATE DATABASE IF NOT EXISTS default;
+USE default;
+
+CREATE TABLE `default.account_fund`(
+  `batchno` string, 
+  `appsheet_no` string, 
+  `filedate` string, 
+  `t_no` string, 
+  `tano` string, 
+  `t_name` string, 
+  `chged_no` string, 
+  `mob_no2` string, 
+  `home_no` string, 
+  `off_no` string)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'=',', 
+  'serialization.format'=',') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/default/account_fund'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1669712244');
+
+msck repair table account_fund;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/account_fund/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/default/account_fund/data.tar.gz
new file mode 100644
index 00000000000..d4c8aa7a306
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/default/account_fund/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/account_fund/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/default/account_fund/run.sh
new file mode 100755
index 00000000000..15a0b79c41f
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/default/account_fund/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/default/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/default/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/hive01/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/default/hive01/create_table.hql
new file mode 100644
index 00000000000..d3c6a586705
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/default/hive01/create_table.hql
@@ -0,0 +1,22 @@
+CREATE DATABASE IF NOT EXISTS default;
+USE default;
+
+CREATE TABLE `default.hive01`(
+  `first_year` int, 
+  `d_disease` varchar(200), 
+  `i_day` int, 
+  `card_cnt` bigint)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'=',', 
+  'serialization.format'=',') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/default/hive01'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1669712244');
+
+msck repair table hive01;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/hive01/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/default/hive01/data.tar.gz
new file mode 100644
index 00000000000..a93ce9fee8a
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/default/hive01/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/hive01/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/default/hive01/run.sh
new file mode 100755
index 00000000000..15a0b79c41f
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/default/hive01/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/default/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/default/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/sale_table/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/default/sale_table/create_table.hql
new file mode 100644
index 00000000000..57bfe09e1da
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/default/sale_table/create_table.hql
@@ -0,0 +1,24 @@
+CREATE DATABASE IF NOT EXISTS default;
+USE default;
+
+CREATE TABLE `default.sale_table`(
+  `bill_code` varchar(500), 
+  `dates` varchar(500), 
+  `ord_year` varchar(500), 
+  `ord_month` varchar(500), 
+  `ord_quarter` varchar(500), 
+  `on_time` varchar(500))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'=',', 
+  'serialization.format'=',') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+LOCATION '/user/doris/suites/default/sale_table'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1669712244');
+
+msck repair table sale_table;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/sale_table/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/default/sale_table/data.tar.gz
new file mode 100644
index 00000000000..6f2d7d9cd2d
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/default/sale_table/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/sale_table/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/default/sale_table/run.sh
new file mode 100755
index 00000000000..15a0b79c41f
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/default/sale_table/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/default/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/default/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/string_table/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/default/string_table/create_table.hql
new file mode 100644
index 00000000000..32997552c65
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/default/string_table/create_table.hql
@@ -0,0 +1,27 @@
+CREATE DATABASE IF NOT EXISTS default;
+USE default;
+
+CREATE TABLE `default.string_table`(
+  `p_partkey` string, 
+  `p_name` string, 
+  `p_mfgr` string, 
+  `p_brand` string, 
+  `p_type` string, 
+  `p_size` string, 
+  `p_con` string, 
+  `p_r_price` string, 
+  `p_comment` string)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'=',', 
+  'serialization.format'=',') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/default/string_table'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1669712243');
+
+msck repair table string_table;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/string_table/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/default/string_table/data.tar.gz
new file mode 100644
index 00000000000..0205e4502ca
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/default/string_table/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/string_table/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/default/string_table/run.sh
new file mode 100755
index 00000000000..15a0b79c41f
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/default/string_table/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/default/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/default/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/student/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/default/student/create_table.hql
new file mode 100644
index 00000000000..2ce28d17b37
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/default/student/create_table.hql
@@ -0,0 +1,24 @@
+CREATE DATABASE IF NOT EXISTS default;
+USE default;
+
+CREATE TABLE `default.student`(
+  `id` varchar(50), 
+  `name` varchar(50), 
+  `age` int, 
+  `gender` varchar(50), 
+  `addr` varchar(50), 
+  `phone` varchar(50))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'=',', 
+  'serialization.format'=',') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/default/student'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1669364024');
+
+msck repair table student;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/student/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/default/student/data.tar.gz
new file mode 100644
index 00000000000..d4e3aa4c707
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/default/student/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/student/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/default/student/run.sh
new file mode 100755
index 00000000000..15a0b79c41f
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/default/student/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/default/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/default/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/test1/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/default/test1/create_table.hql
new file mode 100644
index 00000000000..2211a9edac0
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/default/test1/create_table.hql
@@ -0,0 +1,23 @@
+CREATE DATABASE IF NOT EXISTS default;
+USE default;
+
+CREATE TABLE `default.test1`(
+  `col_1` int, 
+  `col_2` varchar(20), 
+  `col_3` int, 
+  `col_4` int, 
+  `col_5` varchar(20))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'=',', 
+  'serialization.format'=',') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/default/test1'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1669712243');
+
+msck repair table test1;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/test1/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/default/test1/data.tar.gz
new file mode 100644
index 00000000000..90cbdfd6c0d
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/default/test1/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/test1/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/default/test1/run.sh
new file mode 100755
index 00000000000..15a0b79c41f
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/default/test1/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/default/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/default/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/test2/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/default/test2/create_table.hql
new file mode 100644
index 00000000000..8c2b4eadeee
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/default/test2/create_table.hql
@@ -0,0 +1,23 @@
+CREATE DATABASE IF NOT EXISTS default;
+USE default;
+
+CREATE TABLE `default.test2`(
+  `id` int, 
+  `name` string, 
+  `age` string, 
+  `avg_patient_time` double, 
+  `dt` date)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'=',', 
+  'serialization.format'=',') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/default/test2'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1669712244');
+
+msck repair table test2;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/test2/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/default/test2/data.tar.gz
new file mode 100644
index 00000000000..3d065b7bc03
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/default/test2/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/test2/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/default/test2/run.sh
new file mode 100755
index 00000000000..15a0b79c41f
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/default/test2/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/default/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/default/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/test_hive_doris/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/default/test_hive_doris/create_table.hql
new file mode 100644
index 00000000000..03367472cdf
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/default/test_hive_doris/create_table.hql
@@ -0,0 +1,20 @@
+CREATE DATABASE IF NOT EXISTS default;
+USE default;
+
+CREATE TABLE `default.test_hive_doris`(
+  `id` varchar(100), 
+  `age` varchar(100))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'=',', 
+  'serialization.format'=',') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/default/test_hive_doris'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1669712244');
+
+msck repair table test_hive_doris;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/test_hive_doris/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/default/test_hive_doris/data.tar.gz
new file mode 100644
index 00000000000..928bf965db3
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/default/test_hive_doris/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/default/test_hive_doris/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/default/test_hive_doris/run.sh
new file mode 100755
index 00000000000..15a0b79c41f
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/default/test_hive_doris/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/default/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/default/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_orc/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_orc/create_table.hql
new file mode 100644
index 00000000000..694d2ed3852
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_orc/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.par_fields_in_file_orc`(
+  `id` int, 
+  `name` string, 
+  `value` double)
+PARTITIONED BY ( 
+  `year` int, 
+  `month` int)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.orc.OrcSerde' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/par_fields_in_file_orc'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1692774424');
+
+msck repair table par_fields_in_file_orc;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_orc/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_orc/data.tar.gz
new file mode 100644
index 00000000000..be1ebf6c5ac
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_orc/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_orc/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_orc/run.sh
new file mode 100755
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_orc/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_parquet/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_parquet/create_table.hql
new file mode 100644
index 00000000000..e6df88cecc3
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_parquet/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.par_fields_in_file_parquet`(
+  `id` int, 
+  `name` string, 
+  `value` double)
+PARTITIONED BY ( 
+  `year` int, 
+  `month` int)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/par_fields_in_file_parquet'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1692774410');
+
+msck repair table par_fields_in_file_parquet;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_parquet/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_parquet/data.tar.gz
new file mode 100644
index 00000000000..35c6a2ac88d
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_parquet/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_parquet/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_parquet/run.sh
new file mode 100755
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/par_fields_in_file_parquet/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_1/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_1/create_table.hql
new file mode 100644
index 00000000000..a477daf78ea
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_1/create_table.hql
@@ -0,0 +1,22 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.partition_location_1`(
+  `id` int, 
+  `name` string)
+PARTITIONED BY ( 
+  `part` string)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/partition_location_1'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1682405696');
+
+ALTER TABLE partition_location_1 ADD PARTITION (part='part1') LOCATION '/user/doris/suites/multi_catalog/partition_location_1/part=part1';
+ALTER TABLE partition_location_1 ADD PARTITION (part='part2') LOCATION '/user/doris/suites/multi_catalog/partition_location_1/20230425';
+
+msck repair table partition_location_1;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_1/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_1/data.tar.gz
new file mode 100644
index 00000000000..6f11952b66c
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_1/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_1/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_1/run.sh
new file mode 100755
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_1/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_2/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_2/create_table.hql
new file mode 100644
index 00000000000..bc40eb85f1c
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_2/create_table.hql
@@ -0,0 +1,23 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.partition_location_2`(
+  `id` int, 
+  `name` string)
+PARTITIONED BY ( 
+  `part1` string, 
+  `part2` string)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/partition_location_2'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1682406065');
+
+ALTER TABLE partition_location_2 ADD PARTITION (part1='part1_1', part2='part2_1') LOCATION '/user/doris/suites/multi_catalog/partition_location_2/part1=part1_1/part2=part2_1';
+ALTER TABLE partition_location_2 ADD PARTITION (part1='part1_2', part2='part2_2') LOCATION '/user/doris/suites/multi_catalog/partition_location_2/20230425';
+
+msck repair table partition_location_2;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_2/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_2/data.tar.gz
new file mode 100644
index 00000000000..ddbdea71497
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_2/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_2/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_2/run.sh
new file mode 100755
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_location_2/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/timestamp_with_time_zone/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/timestamp_with_time_zone/create_table.hql
new file mode 100644
index 00000000000..aee31e12800
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/timestamp_with_time_zone/create_table.hql
@@ -0,0 +1,17 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.timestamp_with_time_zone`(
+  `date_col` date, 
+  `timestamp_col` timestamp)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/timestamp_with_time_zone'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1712113278');
+
+msck repair table timestamp_with_time_zone;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/timestamp_with_time_zone/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/timestamp_with_time_zone/data.tar.gz
new file mode 100644
index 00000000000..7232ba34afa
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/timestamp_with_time_zone/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/timestamp_with_time_zone/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/timestamp_with_time_zone/run.sh
new file mode 100755
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/timestamp_with_time_zone/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/test/hive_test/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/test/hive_test/create_table.hql
new file mode 100644
index 00000000000..c829422ece3
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/test/hive_test/create_table.hql
@@ -0,0 +1,20 @@
+CREATE DATABASE IF NOT EXISTS test;
+USE test;
+
+CREATE TABLE `test.hive_test`(
+  `a` int, 
+  `b` string)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'=',', 
+  'serialization.format'=',') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/test/hive_test'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1670291786');
+
+msck repair table hive_test;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/test/hive_test/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/test/hive_test/data.tar.gz
new file mode 100644
index 00000000000..6eb3fd8d1bb
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/test/hive_test/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/test/hive_test/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/test/hive_test/run.sh
new file mode 100755
index 00000000000..c39ea633ecb
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/test/hive_test/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/test/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/test/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/regression-test/data/external_table_p2/hive/test_external_catalog_hive.out b/regression-test/data/external_table_p0/hive/test_external_catalog_hive.out
similarity index 50%
rename from regression-test/data/external_table_p2/hive/test_external_catalog_hive.out
rename to regression-test/data/external_table_p0/hive/test_external_catalog_hive.out
index 51442aedd82..a55a5bfee1a 100644
--- a/regression-test/data/external_table_p2/hive/test_external_catalog_hive.out
+++ b/regression-test/data/external_table_p0/hive/test_external_catalog_hive.out
@@ -41,60 +41,123 @@ a124       13
 a125   14
 a126   15
 
--- !q10 --
-IVhzIApeRb ot,c,E
+-- !not_single_slot_filter_conjuncts_orc --
+\N     289572  4       1980215 480218.00       24.00   31082.88        0.05    0       R       F       1994-12-14      1995-01-01      COLLECT COD     AIR      final accounts. instructions boost above
+\N     388932  2       6038830 538843.00       46.00   81352.38        0.02    0.06    A       F       1994-12-15      1995-01-01      NONE    MAIL    ven ideas are furiously according 
+\N     452964  3       14917531        167546.00       20.00   30955.80        0.02    0.03    R       F       1994-12-03      1995-01-01      COLLECT COD     AIR     deposits. blithely even deposits a
+\N     570084  4       14861731        361760.00       26.00   43991.74        0.05    0.08    A       F       1994-11-03      1995-01-01      COLLECT COD     MAIL    ending hockey players wake f
+\N     637092  4       15648780        148811.00       26.00   44928.00        0.06    0.04    R       F       1994-11-14      1995-01-01      COLLECT COD     SHIP    lar deposits. as
+\N     1084260 2       6109231 609244.00       10.00   12399.30        0.01    0.03    R       F       1994-11-05      1995-01-01      DELIVER IN PERSON       RAIL    efully pending sentiments. epita
+\N     1150884 1       13245123        245124.00       49.00   52305.54        0.05    0.02    R       F       1994-12-22      1995-01-01      DELIVER IN PERSON       REG AIR rious deposits about the quickly bold
+\N     1578180 1       19168165        918223.00       10.00   12322.10        0.07    0.07    R       F       1994-10-31      1995-01-01      COLLECT COD     TRUCK   ges. accounts sublate carefully
+\N     2073732 2       13846443        596483.00       21.00   29163.75        0.10    0.08    R       F       1994-12-06      1995-01-01      DELIVER IN PERSON       FOB     dolphins nag furiously q
+\N     2479044 4       9763795 13805.00        40.00   74332.40        0.05    0.05    R       F       1994-11-16      1995-01-01      COLLECT COD     RAIL    equests hinder qu
 
--- !q11 --
-16.00
+-- !not_single_slot_filter_conjuncts_parquet --
+\N     289572  4       1980215 480218.00       24.00   31082.88        0.05    0       R       F       1994-12-14      1995-01-01      COLLECT COD     AIR      final accounts. instructions boost above
+\N     388932  2       6038830 538843.00       46.00   81352.38        0.02    0.06    A       F       1994-12-15      1995-01-01      NONE    MAIL    ven ideas are furiously according 
+\N     452964  3       14917531        167546.00       20.00   30955.80        0.02    0.03    R       F       1994-12-03      1995-01-01      COLLECT COD     AIR     deposits. blithely even deposits a
+\N     570084  4       14861731        361760.00       26.00   43991.74        0.05    0.08    A       F       1994-11-03      1995-01-01      COLLECT COD     MAIL    ending hockey players wake f
+\N     637092  4       15648780        148811.00       26.00   44928.00        0.06    0.04    R       F       1994-11-14      1995-01-01      COLLECT COD     SHIP    lar deposits. as
+\N     1084260 2       6109231 609244.00       10.00   12399.30        0.01    0.03    R       F       1994-11-05      1995-01-01      DELIVER IN PERSON       RAIL    efully pending sentiments. epita
+\N     1150884 1       13245123        245124.00       49.00   52305.54        0.05    0.02    R       F       1994-12-22      1995-01-01      DELIVER IN PERSON       REG AIR rious deposits about the quickly bold
+\N     1578180 1       19168165        918223.00       10.00   12322.10        0.07    0.07    R       F       1994-10-31      1995-01-01      COLLECT COD     TRUCK   ges. accounts sublate carefully
+\N     2073732 2       13846443        596483.00       21.00   29163.75        0.10    0.08    R       F       1994-12-06      1995-01-01      DELIVER IN PERSON       FOB     dolphins nag furiously q
+\N     2479044 4       9763795 13805.00        40.00   74332.40        0.05    0.05    R       F       1994-11-16      1995-01-01      COLLECT COD     RAIL    equests hinder qu
 
--- !q12 --
-25
+-- !null_expr_dict_filter_orc --
+4844   4363
 
--- !q13 --
-1500000000
+-- !null_expr_dict_filter_parquet --
+4844   4363
 
--- !q14 --
-moccasin steel bisque cornsilk lace
+-- !par_fields_in_file_orc1 --
+1      Alice   100.0   2023    8
+2      Bob     150.0   2023    8
 
--- !q15 --
-903.73
+-- !par_fields_in_file_parquet1 --
+1      Alice   100.0   2023    8
+2      Bob     150.0   2023    8
 
--- !q16 --
-0      AFRICA  lar deposits. blithely final packages cajole. regular waters are final requests. regular accounts are according to 
-1      AMERICA hs use ironic, even requests. s
-2      ASIA    ges. thinly even pinto beans ca
+-- !par_fields_in_file_orc2 --
+1      Alice   100.0   2023    8
+2      Bob     150.0   2023    8
 
--- !q17 --
-Z6n2t4XA2n7CXTECJ,PE,iBbsCh0RE1Dd2A,z48
+-- !par_fields_in_file_parquet2 --
+1      Alice   100.0   2023    8
+2      Bob     150.0   2023    8
 
--- !q18 --
-IVhzIApeRb ot,c,E
+-- !par_fields_in_file_orc3 --
+1      Alice   100.0   2023    8
+2      Bob     150.0   2023    8
 
--- !q19 --
-16.00
+-- !par_fields_in_file_parquet3 --
+1      Alice   100.0   2023    8
+2      Bob     150.0   2023    8
 
--- !q20 --
-25
+-- !par_fields_in_file_orc4 --
+1      Alice   100.0   2023    8
+2      Bob     150.0   2023    8
 
--- !q21 --
-1500000000
+-- !par_fields_in_file_parquet4 --
+1      Alice   100.0   2023    8
+2      Bob     150.0   2023    8
 
--- !q22 --
-moccasin steel bisque cornsilk lace
+-- !par_fields_in_file_orc5 --
 
--- !q23 --
-903.73
+-- !par_fields_in_file_parquet5 --
+
+-- !parquet_adjusted_utc --
+1997-09-21     1999-01-12T15:12:31.235784
+1998-01-12     1993-06-11T11:33:12.356500
+2002-09-29     2001-01-17T21:23:42.120
+2008-08-07     2023-09-23T11:12:17.458
+2009-11-13     2011-11-12T01:23:06.986
+2012-07-08     2023-11-09T20:21:16.321
+2017-09-13     2009-09-21T04:23:14.309124
+2024-03-23     2024-02-01T21:11:09.170
+
+-- !q01 --
+zhangsan       1
+lisi   1
+
+-- !q02 --
+1      1
+2      1
+3      1
+4      1
+
+-- !q03 --
+123    china   4       56      sc
+234    america 5       67      ls
+345    cana    4       56      fy
+567    fre     7       89      pa
 
--- !q24 --
-0      AFRICA  lar deposits. blithely final packages cajole. regular waters are final requests. regular accounts are according to 
-1      AMERICA hs use ironic, even requests. s
-2      ASIA    ges. thinly even pinto beans ca
+-- !q04 --
+p_partkey2     p_name2 p_mfgr2 p_brand2        p_type2 p_size2 p_con2  p_r_price2      p_comment2
+p_partkey1     p_name1 p_mfgr1 p_brand1        p_type1 p_size1 p_con1  p_r_price1      p_comment1
+p_partkey0     p_name0 p_mfgr0 p_brand0        p_type0 p_size0 p_con0  p_r_price0      p_comment0
 
--- !q25 --
-Z6n2t4XA2n7CXTECJ,PE,iBbsCh0RE1Dd2A,z48
+-- !q05 --
+batchno        appsheet_no     filedate        t_no    tano    t_name  chged_no        mob_no2 home_no off_no
+off_no home_no mob_no2 chged_no        t_name  tano    t_no    filedate        appsheet_no     batchno
+
+-- !q06 --
+bill_code      dates   ord_year        ord_month       ord_quarter     on_time
 
--- !pr21598 --
-5
+-- !q07 --
+2
+
+-- !q08 --
+123    zhangsan        12      123.45  2022-01-01
+124    lisi    12      123.45  2022-01-01
+125    lisan   12      123.45  2022-01-02
+
+-- !q09 --
+a123   12
+a124   13
+a125   14
+a126   15
 
 -- !not_single_slot_filter_conjuncts_orc --
 \N     289572  4       1980215 480218.00       24.00   31082.88        0.05    0       R       F       1994-12-14      1995-01-01      COLLECT COD     AIR      final accounts. instructions boost above
diff --git a/regression-test/data/external_table_p2/hive/test_hive_partition_location.out b/regression-test/data/external_table_p0/hive/test_hive_partition_location.out
similarity index 54%
rename from regression-test/data/external_table_p2/hive/test_hive_partition_location.out
rename to regression-test/data/external_table_p0/hive/test_hive_partition_location.out
index 15d4e8f2325..36d9b2a80b4 100644
--- a/regression-test/data/external_table_p2/hive/test_hive_partition_location.out
+++ b/regression-test/data/external_table_p0/hive/test_hive_partition_location.out
@@ -39,3 +39,43 @@ part2
 part1_1        part2_1
 part1_2        part2_2
 
+-- !one_partition1 --
+1      Zhangsan        part1
+2      Lisi    part2
+
+-- !one_partition2 --
+1      Zhangsan        part1
+
+-- !one_partition3 --
+2      Lisi    part2
+
+-- !one_partition4 --
+part1
+
+-- !one_partition5 --
+part2
+
+-- !one_partition6 --
+part1
+part2
+
+-- !two_partition1 --
+1      Zhangsan        part1_1 part2_1
+2      Lisi    part1_2 part2_2
+
+-- !two_partition2 --
+1      Zhangsan        part1_1 part2_1
+
+-- !two_partition3 --
+1      Zhangsan        part1_1 part2_1
+
+-- !two_partition4 --
+2      Lisi    part1_2 part2_2
+
+-- !two_partition5 --
+2      Lisi    part1_2 part2_2
+
+-- !two_partition6 --
+part1_1        part2_1
+part1_2        part2_2
+
diff --git a/regression-test/suites/external_table_p2/hive/test_external_catalog_hive.groovy b/regression-test/suites/external_table_p0/hive/test_external_catalog_hive.groovy
similarity index 54%
rename from regression-test/suites/external_table_p2/hive/test_external_catalog_hive.groovy
rename to regression-test/suites/external_table_p0/hive/test_external_catalog_hive.groovy
index 813f316df29..33c8feb2a5a 100644
--- a/regression-test/suites/external_table_p2/hive/test_external_catalog_hive.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_external_catalog_hive.groovy
@@ -15,13 +15,16 @@
 // specific language governing permissions and limitations
 // under the License.
 
-suite("test_external_catalog_hive", 
"p2,external,hive,external_remote,external_remote_hive") {
-
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = 
context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = 
context.config.otherConfigs.get("extHiveHmsPort")
-        String catalog_name = "test_external_catalog_hive"
+suite("test_external_catalog_hive", 
"p0,external,hive,external_docker,external_docker_hive") {
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("diable Hive test.")
+        return;
+    }
+    for (String hivePrefix : ["hive2", "hive3"]) {
+        String hms_port = context.config.otherConfigs.get(hivePrefix + 
"HmsPort")
+        String catalog_name = "${hivePrefix}_test_external_catalog_hive"
+        String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
 
         sql """set enable_fallback_to_original_planner=false"""
         sql """drop catalog if exists ${catalog_name};"""
@@ -29,7 +32,7 @@ suite("test_external_catalog_hive", "p2,external,hive,external_r
         sql """
             create catalog if not exists ${catalog_name} properties (
                 'type'='hms',
-                'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
+                'hive.metastore.uris' = 'thrift://${externalEnvIp}:${hms_port}'
             );
         """
 
@@ -60,43 +63,50 @@ suite("test_external_catalog_hive", "p2,external,hive,external_r
         }
         sql """ use `default`; """
         q01()
+        
+        // Too big for p0 test, comment it.
         //test for big table(parquet format)
-        def q02 = {
-            qt_q10 """ select c_address from customer where c_custkey = 1 and c_name = 'Customer#000000001'; """
-            qt_q11 """ select l_quantity from lineitem where l_orderkey = 599614241 and l_partkey = 59018738 and l_suppkey = 1518744 limit 2 """
-            qt_q12 """ select count(1) from nation """
-            qt_q13 """ select count(1) from orders """
-            qt_q14 """ select p_name from part where p_partkey = 4438130 order by p_name limit 1; """
-            qt_q15 """ select ps_supplycost from partsupp where ps_partkey = 199588198 and ps_suppkey = 9588199 and ps_availqty = 2949 """
-            qt_q16 """ select * from region order by r_regionkey limit 3 """
-            qt_q17 """ select s_address from supplier where s_suppkey = 2823947 limit 3"""
-        }
-        sql """ use tpch_1000_parquet; """
-        q02()
+        // def q02 = {
+        //     qt_q10 """ select c_address from customer where c_custkey = 1 and c_name = 'Customer#000000001'; """
+        //     qt_q11 """ select l_quantity from lineitem where l_orderkey = 599614241 and l_partkey = 59018738 and l_suppkey = 1518744 limit 2 """
+        //     qt_q12 """ select count(1) from nation """
+        //     qt_q13 """ select count(1) from orders """
+        //     qt_q14 """ select p_name from part where p_partkey = 4438130 order by p_name limit 1; """
+        //     qt_q15 """ select ps_supplycost from partsupp where ps_partkey = 199588198 and ps_suppkey = 9588199 and ps_availqty = 2949 """
+        //     qt_q16 """ select * from region order by r_regionkey limit 3 """
+        //     qt_q17 """ select s_address from supplier where s_suppkey = 2823947 limit 3"""
+        // }
+        // sql """ use tpch_1000_parquet; """
+        // q02()
+
+        // Too big for p0 test, comment it.
         //test for big table(orc format)
-        def q03 = {
-            qt_q18 """ select c_address from customer where c_custkey = 1 and c_name = 'Customer#000000001'; """
-            qt_q19 """ select l_quantity from lineitem where l_orderkey = 599614241 and l_partkey = 59018738 and l_suppkey = 1518744 limit 2 """
-            qt_q20 """ select count(1) from nation """
-            qt_q21 """ select count(1) from orders """
-            qt_q22 """ select p_name from part where p_partkey = 4438130 order by p_name limit 1; """
-            qt_q23 """ select ps_supplycost from partsupp where ps_partkey = 199588198 and ps_suppkey = 9588199 and ps_availqty = 2949 """
-            qt_q24 """ select * from region order by r_regionkey limit 3 """
-            qt_q25 """ select s_address from supplier where s_suppkey = 2823947 limit 3"""
-        }
-        sql """ use tpch_1000_orc; """
-        q03()
-
+        // def q03 = {
+        //     qt_q18 """ select c_address from customer where c_custkey = 1 and c_name = 'Customer#000000001'; """
+        //     qt_q19 """ select l_quantity from lineitem where l_orderkey = 599614241 and l_partkey = 59018738 and l_suppkey = 1518744 limit 2 """
+        //     qt_q20 """ select count(1) from nation """
+        //     qt_q21 """ select count(1) from orders """
+        //     qt_q22 """ select p_name from part where p_partkey = 4438130 order by p_name limit 1; """
+        //     qt_q23 """ select ps_supplycost from partsupp where ps_partkey = 199588198 and ps_suppkey = 9588199 and ps_availqty = 2949 """
+        //     qt_q24 """ select * from region order by r_regionkey limit 3 """
+        //     qt_q25 """ select s_address from supplier where s_suppkey = 2823947 limit 3"""
+        // }
+        // sql """ use tpch_1000_orc; """
+        // q03()
+
+        // Too big for p0 test, comment it.
         // test #21598
-        qt_pr21598 """select count(*) from( (SELECT r_regionkey AS key1, r_name AS name, pday AS pday FROM (SELECT r_regionkey, r_name, replace(r_comment, ' ', 'aaaa') AS pday FROM ${catalog_name}.tpch_1000_parquet.region) t2))x;"""
+        //qt_pr21598 """select count(*) from( (SELECT r_regionkey AS key1, r_name AS name, pday AS pday FROM (SELECT r_regionkey, r_name, replace(r_comment, ' ', 'aaaa') AS pday FROM ${catalog_name}.tpch_1000_parquet.region) t2))x;"""
 
+        // TODO(kaka11chen): Need to upload table to oss, comment it temporarily.
         // test not_single_slot_filter_conjuncts with dict filter issue
-        qt_not_single_slot_filter_conjuncts_orc """ select * from multi_catalog.lineitem_string_date_orc where l_commitdate < l_receiptdate and l_receiptdate = '1995-01-01'  order by l_orderkey, l_partkey, l_suppkey, l_linenumber limit 10; """
-        qt_not_single_slot_filter_conjuncts_parquet """ select * from multi_catalog.lineitem_string_date_orc where l_commitdate < l_receiptdate and l_receiptdate = '1995-01-01'  order by l_orderkey, l_partkey, l_suppkey, l_linenumber limit 10; """
+        // qt_not_single_slot_filter_conjuncts_orc """ select * from multi_catalog.lineitem_string_date_orc where l_commitdate < l_receiptdate and l_receiptdate = '1995-01-01'  order by l_orderkey, l_partkey, l_suppkey, l_linenumber limit 10; """
+        // qt_not_single_slot_filter_conjuncts_parquet """ select * from multi_catalog.lineitem_string_date_orc where l_commitdate < l_receiptdate and l_receiptdate = '1995-01-01'  order by l_orderkey, l_partkey, l_suppkey, l_linenumber limit 10; """
 
+        // TODO(kaka11chen): Need to upload table to oss, comment it temporarily.
         // test null expr with dict filter issue
-        qt_null_expr_dict_filter_orc """ select count(*), count(distinct user_no) from multi_catalog.dict_fitler_test_orc WHERE `partitions` in ('2023-08-21') and actual_intf_type  =  'type1' and (REUSE_FLAG<> 'y' or REUSE_FLAG is null); """
-        qt_null_expr_dict_filter_parquet """ select count(*), count(distinct user_no) from multi_catalog.dict_fitler_test_parquet WHERE `partitions` in ('2023-08-21') and actual_intf_type  =  'type1' and (REUSE_FLAG<> 'y' or REUSE_FLAG is null); """
+        //qt_null_expr_dict_filter_orc """ select count(*), count(distinct user_no) from multi_catalog.dict_fitler_test_orc WHERE `partitions` in ('2023-08-21') and actual_intf_type  =  'type1' and (REUSE_FLAG<> 'y' or REUSE_FLAG is null); """
+        //qt_null_expr_dict_filter_parquet """ select count(*), count(distinct user_no) from multi_catalog.dict_fitler_test_parquet WHERE `partitions` in ('2023-08-21') and actual_intf_type  =  'type1' and (REUSE_FLAG<> 'y' or REUSE_FLAG is null); """
 
         // test par fields in file
        qt_par_fields_in_file_orc1 """ select * from multi_catalog.par_fields_in_file_orc where year = 2023 and month = 8 order by id; """
@@ -113,12 +123,13 @@ suite("test_external_catalog_hive", "p2,external,hive,external_remote,external_r
         // timestamp with isAdjustedToUTC=true
        qt_parquet_adjusted_utc """select * from multi_catalog.timestamp_with_time_zone order by date_col;"""
 
+        // TODO(kaka11chen): hive docker env throws "Cannot find class 'com.hadoop.mapred.DeprecatedLzoTextInputFormat'", comment it temporarily.
         // test unsupported input format query
-        try {
-            sql """ select * from multi_catalog.unsupported_input_format_empty; """
-        } catch (Exception e) {
-            assertTrue(e.getMessage().contains("Unsupported hive input format: com.hadoop.mapred.DeprecatedLzoTextInputFormat"))
-        }
+        //try {
+        //    sql """ select * from multi_catalog.unsupported_input_format_empty; """
+        //} catch (Exception e) {
+        //    assertTrue(e.getMessage().contains("Unsupported hive input format: com.hadoop.mapred.DeprecatedLzoTextInputFormat"))
+        //}
 
         // test remember last used database after switch / rename catalog
         sql """switch ${catalog_name};"""
@@ -147,7 +158,7 @@ suite("test_external_catalog_hive", "p2,external,hive,external_r
             sql """
                 create catalog if not exists ${tmp_name} properties (
                     'type'='hms',
-                    'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}',
+                    'hive.metastore.uris' = 'thrift://${externalEnvIp}:${hms_port}',
                    'access_controller.properties.ranger.service.name' = 'hive_wrong',
                    'access_controller.class' = 'org.apache.doris.catalog.authorizer.ranger.hive.RangerHiveAccessControllerFactory'
                 );
diff --git a/regression-test/suites/external_table_p2/hive/test_hive_partition_location.groovy b/regression-test/suites/external_table_p0/hive/test_hive_partition_location.groovy
similarity index 80%
rename from regression-test/suites/external_table_p2/hive/test_hive_partition_location.groovy
rename to regression-test/suites/external_table_p0/hive/test_hive_partition_location.groovy
index 81bc8d8bcfa..e3a93ad577e 100644
--- a/regression-test/suites/external_table_p2/hive/test_hive_partition_location.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_hive_partition_location.groovy
@@ -15,7 +15,7 @@
 // specific language governing permissions and limitations
 // under the License.
 
-suite("test_hive_partition_location", 
"p2,external,hive,external_remote,external_remote_hive") {
+suite("test_hive_partition_location", 
"p0,external,hive,external_docker,external_docker_hive") {
     def one_partition1 = """select * from partition_location_1 order by id;"""
    def one_partition2 = """select * from partition_location_1 where part='part1';"""
    def one_partition3 = """select * from partition_location_1 where part='part2';"""
@@ -30,16 +30,20 @@ suite("test_hive_partition_location", "p2,external,hive,external
    def two_partition5 = """select * from partition_location_2 where part2='part2_2';"""
    def two_partition6 = """select part1, part2 from partition_location_2 order by part1;"""
 
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
-        String catalog_name = "hive_partition_location"
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable Hive test.")
+        return;
+    }
+    for (String hivePrefix : ["hive2", "hive3"]) {
+        String hms_port = context.config.otherConfigs.get(hivePrefix + "HmsPort")
+        String catalog_name = "${hivePrefix}_test_hive_partition_location"
+        String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
         sql """drop catalog if exists ${catalog_name};"""
         sql """
             create catalog if not exists ${catalog_name} properties (
                 'type'='hms',
-                'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
+                'hive.metastore.uris' = 'thrift://${externalEnvIp}:${hms_port}'
             );
         """
         logger.info("catalog " + catalog_name + " created")

