This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-2.1 by this push:
     new 56a207c3f0e [case](paimon/iceberg)move cases from p2 to p0 (#37276) (#37738)
56a207c3f0e is described below

commit 56a207c3f0e6b6d6a66fd519d5a2e99c005f39b0
Author: Mingyu Chen <morning...@163.com>
AuthorDate: Sat Jul 13 10:01:05 2024 +0800

    [case](paimon/iceberg)move cases from p2 to p0 (#37276) (#37738)
    
    bp #37276
    
    Co-authored-by: wuwenchi <wuwenchi...@hotmail.com>
---
 .../docker-compose/iceberg/iceberg.yaml.tpl        |   8 +-
 .../docker-compose/iceberg/spark-defaults.conf     |  11 +-
 .../{spark-init.sql => spark-init-iceberg.sql}     |   0
 .../docker-compose/iceberg/spark-init-paimon.sql   |   1 +
 docker/thirdparties/run-thirdparties-docker.sh     |  19 +-
 .../datasource/paimon/PaimonExternalTable.java     |   5 +
 .../iceberg/iceberg_complex_type.out               |   0
 .../iceberg_partition_upper_case_nereids.out       |   0
 .../iceberg/iceberg_schema_change.out              |   0
 .../test_external_catalog_iceberg_common.out       |   0
 .../test_external_catalog_iceberg_partition.out    |   0
 .../iceberg/test_iceberg_predicate_conversion.out  |  29 +
 .../paimon/paimon_base_filesystem.out              |   0
 .../iceberg/iceberg_partition_upper_case.out       | 145 -----
 .../iceberg/iceberg_schema_evolution.out           |  79 ---
 .../iceberg_schema_evolution_iceberg_catalog.out   |  79 ---
 .../iceberg/test_external_catalog_icebergv2.out    |  74 ---
 .../iceberg/test_iceberg_predicate_conversion.out  | 611 ---------------------
 .../external_table_p2/paimon/paimon_base_types.out |  56 --
 .../paimon/paimon_timestamp_types.out              |  13 -
 .../pipeline/external/conf/regression-conf.groovy  |   5 +
 .../iceberg/iceberg_complex_type.groovy            |  94 ++++
 .../iceberg_partition_upper_case_nereids.groovy    |  39 +-
 .../iceberg/iceberg_schema_change.groovy           |  49 +-
 .../test_external_catalog_iceberg_common.groovy    |  42 +-
 .../test_external_catalog_iceberg_partition.groovy |  92 ++++
 .../iceberg/test_iceberg_filter.groovy             |  91 +--
 .../test_iceberg_predicate_conversion.groovy       | 109 ++++
 .../paimon/paimon_base_filesystem.groovy           |  15 +-
 .../paimon/paimon_timestamp_types.groovy           | 158 ++++++
 .../iceberg/iceberg_complex_type.groovy            |  92 ----
 .../iceberg/iceberg_partition_upper_case.groovy    | 103 ----
 .../iceberg/iceberg_schema_evolution.groovy        |  67 ---
 ...iceberg_schema_evolution_iceberg_catalog.groovy |  69 ---
 .../test_external_catalog_iceberg_partition.groovy |  84 ---
 .../iceberg/test_external_catalog_icebergv2.groovy |  82 ---
 .../test_iceberg_predicate_conversion.groovy       |  79 ---
 .../paimon/paimon_base_types.groovy                |  81 ---
 .../paimon/paimon_timestamp_types.groovy           |  58 --
 39 files changed, 660 insertions(+), 1879 deletions(-)

diff --git a/docker/thirdparties/docker-compose/iceberg/iceberg.yaml.tpl b/docker/thirdparties/docker-compose/iceberg/iceberg.yaml.tpl
index 2f8013f8452..8f71d6c5087 100644
--- a/docker/thirdparties/docker-compose/iceberg/iceberg.yaml.tpl
+++ b/docker/thirdparties/docker-compose/iceberg/iceberg.yaml.tpl
@@ -30,15 +30,19 @@ services:
       - ./data/output/spark-warehouse:/home/iceberg/warehouse
       - ./data/output/spark-notebooks:/home/iceberg/notebooks/notebooks
       - ./data:/mnt/data
-      - ./spark-init.sql:/mnt/spark-init.sql
+      - ./spark-init-iceberg.sql:/mnt/spark-init-iceberg.sql
+      - ./spark-init-paimon.sql:/mnt/spark-init-paimon.sql
       - ./spark-defaults.conf:/opt/spark/conf/spark-defaults.conf
+      - ./data/input/jars/paimon-spark-3.5-0.8.0.jar:/opt/spark/jars/paimon-spark-3.5-0.8.0.jar
+      - ./data/input/jars/paimon-s3-0.8.0.jar:/opt/spark/jars/paimon-s3-0.8.0.jar
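+      # Paimon runtime jars, mounted so the Spark container can serve the Paimon catalog configured in spark-defaults.conf.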
     environment:
       - AWS_ACCESS_KEY_ID=admin
       - AWS_SECRET_ACCESS_KEY=password
       - AWS_REGION=us-east-1
     entrypoint:  >
       /bin/sh -c "
-          spark-sql -f /mnt/spark-init.sql 2>&1;
+          spark-sql --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions -f /mnt/spark-init-iceberg.sql 2>&1;
+          spark-sql --conf spark.sql.extensions=org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions -f /mnt/spark-init-paimon.sql 2>&1;
           tail -f /dev/null
       "
     networks:
diff --git a/docker/thirdparties/docker-compose/iceberg/spark-defaults.conf b/docker/thirdparties/docker-compose/iceberg/spark-defaults.conf
index 7b6be0eecb8..a49dc2173b7 100644
--- a/docker/thirdparties/docker-compose/iceberg/spark-defaults.conf
+++ b/docker/thirdparties/docker-compose/iceberg/spark-defaults.conf
@@ -20,7 +20,6 @@
 
 # Example:
 spark.sql.session.timeZone             Asia/Shanghai
-spark.sql.extensions                   org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions
 spark.sql.catalog.demo                 org.apache.iceberg.spark.SparkCatalog
 spark.sql.catalog.demo.type            rest
 spark.sql.catalog.demo.uri             http://rest:8181
@@ -31,4 +30,12 @@ spark.sql.defaultCatalog               demo
 spark.eventLog.enabled                 true
 spark.eventLog.dir                     /home/iceberg/spark-events
 spark.history.fs.logDirectory          /home/iceberg/spark-events
-spark.sql.catalogImplementation        in-memory
\ No newline at end of file
+spark.sql.catalogImplementation        in-memory
+
+# paimon
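+# The Paimon catalog below reuses the MinIO endpoint and the admin/password credentials defined for Iceberg in iceberg.yaml.tpl.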
+spark.sql.catalog.paimon               org.apache.paimon.spark.SparkCatalog
+spark.sql.catalog.paimon.warehouse     s3://warehouse/wh
+spark.sql.catalog.paimon.s3.endpoint   http://minio:9000
+spark.sql.catalog.paimon.s3.access-key admin
+spark.sql.catalog.paimon.s3.secret-key password
+spark.sql.catalog.paimon.s3.region     us-east-1
\ No newline at end of file
diff --git a/docker/thirdparties/docker-compose/iceberg/spark-init.sql b/docker/thirdparties/docker-compose/iceberg/spark-init-iceberg.sql
similarity index 100%
rename from docker/thirdparties/docker-compose/iceberg/spark-init.sql
rename to docker/thirdparties/docker-compose/iceberg/spark-init-iceberg.sql
diff --git a/docker/thirdparties/docker-compose/iceberg/spark-init-paimon.sql b/docker/thirdparties/docker-compose/iceberg/spark-init-paimon.sql
new file mode 100644
index 00000000000..c868c4f7b19
--- /dev/null
+++ b/docker/thirdparties/docker-compose/iceberg/spark-init-paimon.sql
@@ -0,0 +1 @@
+-- create database if not exists paimon.test_paimon_db;
diff --git a/docker/thirdparties/run-thirdparties-docker.sh b/docker/thirdparties/run-thirdparties-docker.sh
index 1fb7c69536b..67ab8e2317d 100755
--- a/docker/thirdparties/run-thirdparties-docker.sh
+++ b/docker/thirdparties/run-thirdparties-docker.sh
@@ -385,17 +385,26 @@ fi
 
 if [[ "${RUN_ICEBERG}" -eq 1 ]]; then
     # iceberg
+    ICEBERG_DIR=${ROOT}/docker-compose/iceberg
     cp "${ROOT}"/docker-compose/iceberg/iceberg.yaml.tpl "${ROOT}"/docker-compose/iceberg/iceberg.yaml
     cp "${ROOT}"/docker-compose/iceberg/entrypoint.sh.tpl "${ROOT}"/docker-compose/iceberg/entrypoint.sh
     sed -i "s/doris--/${CONTAINER_UID}/g" "${ROOT}"/docker-compose/iceberg/iceberg.yaml
     sed -i "s/doris--/${CONTAINER_UID}/g" "${ROOT}"/docker-compose/iceberg/entrypoint.sh
     sudo docker compose -f "${ROOT}"/docker-compose/iceberg/iceberg.yaml --env-file "${ROOT}"/docker-compose/iceberg/iceberg.env down
-    sudo rm -rf "${ROOT}"/docker-compose/iceberg/data
     if [[ "${STOP}" -ne 1 ]]; then
-        wget -P "${ROOT}"/docker-compose/iceberg https://"${s3BucketName}.${s3Endpoint}"/regression/datalake/pipeline_data/iceberg_data.zip
-        sudo unzip -d "${ROOT}"/docker-compose/iceberg -q ${ROOT}/docker-compose/iceberg/iceberg_data.zip
-        sudo mv "${ROOT}"/docker-compose/iceberg/iceberg_data "${ROOT}"/docker-compose/iceberg/data
-        sudo rm -rf ${ROOT}/docker-compose/iceberg/iceberg_data.zip
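+        # Download and unpack iceberg_data.zip only when the data directory is missing, so repeated runs reuse the cached copy.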
+        if [[ ! -d "${ICEBERG_DIR}/data" ]]; then
+            echo "${ICEBERG_DIR}/data does not exist"
+            cd "${ICEBERG_DIR}" \
+            && rm -f iceberg_data.zip \
+            && wget -P "${ROOT}"/docker-compose/iceberg https://"${s3BucketName}.${s3Endpoint}"/regression/datalake/pipeline_data/iceberg_data.zip \
+            && sudo unzip iceberg_data.zip \
+            && sudo mv iceberg_data data \
+            && sudo rm -rf iceberg_data.zip
+            cd -
+        else
+            echo "${ICEBERG_DIR}/data exists, continue!"
+        fi
+
         sudo docker compose -f "${ROOT}"/docker-compose/iceberg/iceberg.yaml --env-file "${ROOT}"/docker-compose/iceberg/iceberg.env up -d
     fi
 fi
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalTable.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalTable.java
index ab4bb8eac9b..2bcd095e037 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalTable.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalTable.java
@@ -121,6 +121,11 @@ public class PaimonExternalTable extends ExternalTable {
                     if (scale > 6) {
                         scale = 6;
                     }
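+                // Doris DATETIMEV2 caps fractional seconds at microsecond precision (scale 6), hence the clamping here and in the LocalZonedTimestampType branch below.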
+                } else if (dataType instanceof org.apache.paimon.types.LocalZonedTimestampType) {
+                    scale = ((org.apache.paimon.types.LocalZonedTimestampType) dataType).getPrecision();
+                    if (scale > 6) {
+                        scale = 6;
+                    }
                 }
                 return ScalarType.createDatetimeV2Type(scale);
             case ARRAY:
diff --git a/regression-test/data/external_table_p2/iceberg/iceberg_complex_type.out b/regression-test/data/external_table_p0/iceberg/iceberg_complex_type.out
similarity index 100%
rename from regression-test/data/external_table_p2/iceberg/iceberg_complex_type.out
rename to regression-test/data/external_table_p0/iceberg/iceberg_complex_type.out
diff --git a/regression-test/data/external_table_p2/iceberg/iceberg_partition_upper_case_nereids.out b/regression-test/data/external_table_p0/iceberg/iceberg_partition_upper_case_nereids.out
similarity index 100%
rename from regression-test/data/external_table_p2/iceberg/iceberg_partition_upper_case_nereids.out
rename to regression-test/data/external_table_p0/iceberg/iceberg_partition_upper_case_nereids.out
diff --git a/regression-test/data/external_table_p2/iceberg/iceberg_schema_change.out b/regression-test/data/external_table_p0/iceberg/iceberg_schema_change.out
similarity index 100%
rename from regression-test/data/external_table_p2/iceberg/iceberg_schema_change.out
rename to regression-test/data/external_table_p0/iceberg/iceberg_schema_change.out
diff --git a/regression-test/data/external_table_p2/iceberg/test_external_catalog_iceberg_common.out b/regression-test/data/external_table_p0/iceberg/test_external_catalog_iceberg_common.out
similarity index 100%
rename from regression-test/data/external_table_p2/iceberg/test_external_catalog_iceberg_common.out
rename to regression-test/data/external_table_p0/iceberg/test_external_catalog_iceberg_common.out
diff --git a/regression-test/data/external_table_p2/iceberg/test_external_catalog_iceberg_partition.out b/regression-test/data/external_table_p0/iceberg/test_external_catalog_iceberg_partition.out
similarity index 100%
rename from regression-test/data/external_table_p2/iceberg/test_external_catalog_iceberg_partition.out
rename to regression-test/data/external_table_p0/iceberg/test_external_catalog_iceberg_partition.out
diff --git a/regression-test/data/external_table_p0/iceberg/test_iceberg_predicate_conversion.out b/regression-test/data/external_table_p0/iceberg/test_iceberg_predicate_conversion.out
new file mode 100644
index 00000000000..a5e2065a9c3
--- /dev/null
+++ b/regression-test/data/external_table_p0/iceberg/test_iceberg_predicate_conversion.out
@@ -0,0 +1,29 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !q01 --
+2023-03-08
+2023-03-09
+
+-- !q02 --
+1996-05-06
+1996-05-06
+1997-05-18
+1997-05-18
+
+-- !q03 --
+1996-05-06     MAIL
+1997-05-18     MAIL
+
+-- !q04 --
+1996-05-01     MAI
+1996-05-06     MAI
+1996-05-06     MAIL
+1997-05-18     MAI
+1997-05-18     MAIL
+1997-05-19     MAI
+
+-- !q05 --
+2023-03-07T20:35:59.123456
+2023-03-07T20:35:59.123456
+2023-03-07T20:36
+2023-03-07T20:37:59
+
diff --git a/regression-test/data/external_table_p2/paimon/paimon_base_filesystem.out b/regression-test/data/external_table_p0/paimon/paimon_base_filesystem.out
similarity index 100%
rename from regression-test/data/external_table_p2/paimon/paimon_base_filesystem.out
rename to regression-test/data/external_table_p0/paimon/paimon_base_filesystem.out
diff --git a/regression-test/data/external_table_p2/iceberg/iceberg_partition_upper_case.out b/regression-test/data/external_table_p2/iceberg/iceberg_partition_upper_case.out
deleted file mode 100644
index 376a9495b00..00000000000
--- a/regression-test/data/external_table_p2/iceberg/iceberg_partition_upper_case.out
+++ /dev/null
@@ -1,145 +0,0 @@
--- This file is automatically generated. You should know what you did if you want to edit this
--- !orcupper1 --
-1      k2_1    k3_1    Beijing
-2      k2_2    k3_2    Beijing
-3      k2_3    k3_3    Shanghai
-4      k2_4    k3_4    Shanghai
-
--- !orcupper2 --
-1      Beijing
-2      Beijing
-3      Shanghai
-4      Shanghai
-
--- !orcupper3 --
-1      k2_1
-2      k2_2
-3      k2_3
-4      k2_4
-
--- !orcupper4 --
-Beijing
-Beijing
-Shanghai
-Shanghai
-
--- !orcupper5 --
-2      k2_2    k3_2    Beijing
-
--- !orcupper6 --
-1      k2_1    k3_1    Beijing
-
--- !orcupper7 --
-1      k2_1    k3_1    Beijing
-2      k2_2    k3_2    Beijing
-
--- !orclower1 --
-1      k2_1    k3_1    Beijing
-2      k2_2    k3_2    Beijing
-3      k2_3    k3_3    Shanghai
-4      k2_4    k3_4    Shanghai
-
--- !orclower2 --
-1      Beijing
-2      Beijing
-3      Shanghai
-4      Shanghai
-
--- !orclower3 --
-1      k2_1
-2      k2_2
-3      k2_3
-4      k2_4
-
--- !orclower4 --
-Beijing
-Beijing
-Shanghai
-Shanghai
-
--- !orclower5 --
-2      k2_2    k3_2    Beijing
-
--- !orclower6 --
-1      k2_1    k3_1    Beijing
-
--- !orclower7 --
-1      k2_1    k3_1    Beijing
-2      k2_2    k3_2    Beijing
-
--- !parquetupper1 --
-1      k2_1    k3_1    Beijing
-2      k2_2    k3_2    Beijing
-3      k2_3    k3_3    Shanghai
-4      k2_4    k3_4    Shanghai
-
--- !parquetupper2 --
-1      Beijing
-2      Beijing
-3      Shanghai
-4      Shanghai
-
--- !parquetupper3 --
-1      k2_1
-2      k2_2
-3      k2_3
-4      k2_4
-
--- !parquetupper4 --
-Beijing
-Beijing
-Shanghai
-Shanghai
-
--- !parquetupper5 --
-2      k2_2    k3_2    Beijing
-
--- !parquetupper6 --
-3      k2_3    k3_3    Shanghai
-4      k2_4    k3_4    Shanghai
-
--- !parquetupper7 --
-1      k2_1    k3_1    Beijing
-
--- !parquetupper8 --
-1      k2_1    k3_1    Beijing
-2      k2_2    k3_2    Beijing
-
--- !parquetlower1 --
-1      k2_1    k3_1    Beijing
-2      k2_2    k3_2    Beijing
-3      k2_3    k3_3    Shanghai
-4      k2_4    k3_4    Shanghai
-
--- !parquetlower2 --
-1      Beijing
-2      Beijing
-3      Shanghai
-4      Shanghai
-
--- !parquetlower3 --
-1      k2_1
-2      k2_2
-3      k2_3
-4      k2_4
-
--- !parquetlower4 --
-Beijing
-Beijing
-Shanghai
-Shanghai
-
--- !parquetlower5 --
-2      k2_2    k3_2    Beijing
-
--- !parquetlower6 --
-3      k2_3    k3_3    Shanghai
-4      k2_4    k3_4    Shanghai
-
--- !parquetupper7 --
-1      k2_1    k3_1    Beijing
-
--- !parquetupper8 --
-1      k2_1    k3_1    Beijing
-2      k2_2    k3_2    Beijing
-
diff --git a/regression-test/data/external_table_p2/iceberg/iceberg_schema_evolution.out b/regression-test/data/external_table_p2/iceberg/iceberg_schema_evolution.out
deleted file mode 100644
index dba805ca6d1..00000000000
--- a/regression-test/data/external_table_p2/iceberg/iceberg_schema_evolution.out
+++ /dev/null
@@ -1,79 +0,0 @@
--- This file is automatically generated. You should know what you did if you want to edit this
--- !rename1 --
-1      orig2_1 orig3_1
-2      orig2_2 orig3_2
-3      orig2_3 orig3_3
-4      orig2_4 rename3_1
-5      orig2_5 rename3_2
-6      orig2_6 rename3_3
-
--- !rename2 --
-3      orig2_3 orig3_3
-4      orig2_4 rename3_1
-
--- !drop1 --
-1      orig3_1
-2      orig3_2
-3      orig3_3
-4      orig3_4
-5      orig3_5
-6      orig3_6
-
--- !drop2 --
-1      orig3_1
-2      orig3_2
-3      orig3_3
-
--- !drop3 --
-4      orig3_4
-5      orig3_5
-6      orig3_6
-
--- !add1 --
-1      orig2_1 orig3_1 \N
-2      orig2_2 orig3_2 \N
-3      orig2_3 orig3_3 \N
-4      orig2_4 orig3_4 add1_1
-5      orig2_5 orig3_5 add1_2
-6      orig2_6 orig3_6 add1_3
-
--- !add2 --
-2      orig2_2 orig3_2 \N
-
--- !add3 --
-5      orig2_5 orig3_5 add1_2
-
--- !reorder1 --
-1      orig3_1 orig2_1
-2      orig3_2 orig2_2
-3      orig3_3 orig2_3
-4      orig3_4 orig2_4
-5      orig3_5 orig2_5
-6      orig3_6 orig2_6
-
--- !reorder2 --
-2      orig3_2 orig2_2
-
--- !reorder3 --
-5      orig3_5 orig2_5
-
--- !readd1 --
-1      orig2_1 \N
-2      orig2_2 \N
-3      orig2_3 \N
-4      orig2_4 orig3_4
-5      orig2_5 orig3_5
-6      orig2_6 orig3_6
-
--- !readd2 --
-1      orig2_1 \N
-2      orig2_2 \N
-3      orig2_3 \N
-4      orig2_4 orig3_4
-
--- !readd3 --
-3      orig2_3 \N
-4      orig2_4 orig3_4
-5      orig2_5 orig3_5
-6      orig2_6 orig3_6
-
diff --git a/regression-test/data/external_table_p2/iceberg/iceberg_schema_evolution_iceberg_catalog.out b/regression-test/data/external_table_p2/iceberg/iceberg_schema_evolution_iceberg_catalog.out
deleted file mode 100644
index dba805ca6d1..00000000000
--- a/regression-test/data/external_table_p2/iceberg/iceberg_schema_evolution_iceberg_catalog.out
+++ /dev/null
@@ -1,79 +0,0 @@
--- This file is automatically generated. You should know what you did if you want to edit this
--- !rename1 --
-1      orig2_1 orig3_1
-2      orig2_2 orig3_2
-3      orig2_3 orig3_3
-4      orig2_4 rename3_1
-5      orig2_5 rename3_2
-6      orig2_6 rename3_3
-
--- !rename2 --
-3      orig2_3 orig3_3
-4      orig2_4 rename3_1
-
--- !drop1 --
-1      orig3_1
-2      orig3_2
-3      orig3_3
-4      orig3_4
-5      orig3_5
-6      orig3_6
-
--- !drop2 --
-1      orig3_1
-2      orig3_2
-3      orig3_3
-
--- !drop3 --
-4      orig3_4
-5      orig3_5
-6      orig3_6
-
--- !add1 --
-1      orig2_1 orig3_1 \N
-2      orig2_2 orig3_2 \N
-3      orig2_3 orig3_3 \N
-4      orig2_4 orig3_4 add1_1
-5      orig2_5 orig3_5 add1_2
-6      orig2_6 orig3_6 add1_3
-
--- !add2 --
-2      orig2_2 orig3_2 \N
-
--- !add3 --
-5      orig2_5 orig3_5 add1_2
-
--- !reorder1 --
-1      orig3_1 orig2_1
-2      orig3_2 orig2_2
-3      orig3_3 orig2_3
-4      orig3_4 orig2_4
-5      orig3_5 orig2_5
-6      orig3_6 orig2_6
-
--- !reorder2 --
-2      orig3_2 orig2_2
-
--- !reorder3 --
-5      orig3_5 orig2_5
-
--- !readd1 --
-1      orig2_1 \N
-2      orig2_2 \N
-3      orig2_3 \N
-4      orig2_4 orig3_4
-5      orig2_5 orig3_5
-6      orig2_6 orig3_6
-
--- !readd2 --
-1      orig2_1 \N
-2      orig2_2 \N
-3      orig2_3 \N
-4      orig2_4 orig3_4
-
--- !readd3 --
-3      orig2_3 \N
-4      orig2_4 orig3_4
-5      orig2_5 orig3_5
-6      orig2_6 orig3_6
-
diff --git a/regression-test/data/external_table_p2/iceberg/test_external_catalog_icebergv2.out b/regression-test/data/external_table_p2/iceberg/test_external_catalog_icebergv2.out
deleted file mode 100644
index a6e68d2f621..00000000000
--- a/regression-test/data/external_table_p2/iceberg/test_external_catalog_icebergv2.out
+++ /dev/null
@@ -1,74 +0,0 @@
--- This file is automatically generated. You should know what you did if you want to edit this
--- !q01 --
-149988
-
--- !q02 --
-1
-3
-4
-7
-
--- !q03 --
-8242263
-
--- !q04 --
-0
-
--- !q05 --
-1      Customer#000000001      IVhzIApeRb ot,c,E       15      25-989-741-2988 711.56  BUILDING        to the even, regular platelets. regular, ironic epitaphs nag e
-3      Customer#000000003      MG9kdTD2WBHm    1       11-719-748-3364 7498.12 AUTOMOBILE       deposits eat slyly ironic, even instructions. express foxes detect slyly. blithely even accounts abov
-4      Customer#000000004      XxVSJsLAGtn     4       14-128-190-5944 2866.83 MACHINERY        requests. final, regular ideas sleep final accou
-
--- !q06 --
-604519555
-604519557
-604519558
-
--- !q07 --
-12979.65
-219204.52
-5908.20
-
--- !q08 --
-120001848
-
--- !q09 --
-1
-2
-3
-
--- !q10 --
-150000000
-149999999
-149999996
-
--- !q11 --
-1
-2
-3
-
--- !q12 --
-150000000
-149999999
-149999996
-
--- !q13 --
-1
-4
-7
-
--- !q14 --
-Customer#000000004
-Customer#000000007
-
--- !q15 --
-150000
-
--- !q16 --
-150000
-
--- !q17 --
-150000
-
--- !q18 --
-150000
diff --git a/regression-test/data/external_table_p2/iceberg/test_iceberg_predicate_conversion.out b/regression-test/data/external_table_p2/iceberg/test_iceberg_predicate_conversion.out
deleted file mode 100644
index 0569e0d01d8..00000000000
--- a/regression-test/data/external_table_p2/iceberg/test_iceberg_predicate_conversion.out
+++ /dev/null
@@ -1,611 +0,0 @@
--- This file is automatically generated. You should know what you did if you want to edit this
--- !q01 --
-11801003       35210325
-
--- !q02 --
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1996-05-06
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-1997-05-18
-
--- !q03 --
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1996-05-06     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-1997-05-18     MAIL
-
--- !q04 --
-1992-01-02     REG AIR
-1992-01-02     SHIP
-1992-01-03     REG AIR
-1992-01-03     TRUCK
-1992-01-04     AIR
-1992-01-04     FOB
-1992-01-04     RAIL
-1992-01-04     REG AIR
-1992-01-04     TRUCK
-1992-01-05     AIR
-
--- !q04 --
-2023-03-07T20:35:59.064
-2023-03-07T20:35:59.087
-2023-03-07T20:35:59.110
-2023-03-07T20:35:59.129
-2023-03-07T20:35:59.224
-
diff --git a/regression-test/data/external_table_p2/paimon/paimon_base_types.out b/regression-test/data/external_table_p2/paimon/paimon_base_types.out
deleted file mode 100644
index 59d953b7218..00000000000
--- a/regression-test/data/external_table_p2/paimon/paimon_base_types.out
+++ /dev/null
@@ -1,56 +0,0 @@
--- This file is automatically generated. You should know what you did if you want to edit this
--- !all --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-10     20      30      40      50      60      70      80      90.1    100.1   110.10  2020-03-02      130str  140varchar      b       false   bbbb    2023-08-14T08:32:52.821
-
--- !c1 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
--- !c2 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
--- !c3 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
--- !c4 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
--- !c5 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
--- !c6 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
--- !c7 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
--- !c8 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
--- !c9 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
--- !c10 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
--- !c11 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
--- !c12 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
--- !c13 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
--- !c14 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
--- !c15 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
--- !c16 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
--- !c18 --
-1      2       3       4       5       6       7       8       9.1     10.1    11.10   2020-02-02      13str   14varchar       a       true    aaaa    2023-08-13T09:32:38.530
-
diff --git a/regression-test/data/external_table_p2/paimon/paimon_timestamp_types.out b/regression-test/data/external_table_p2/paimon/paimon_timestamp_types.out
deleted file mode 100644
index 641424b160e..00000000000
--- a/regression-test/data/external_table_p2/paimon/paimon_timestamp_types.out
+++ /dev/null
@@ -1,13 +0,0 @@
--- This file is automatically generated. You should know what you did if you want to edit this
--- !c1 --
-1      5432-08-30T05:43:21.100 5432-08-30T05:43:21.120 5432-08-30T05:43:21.123 5432-08-30T05:43:21.123400      5432-08-30T05:43:21.123450      5432-08-30T05:43:21.123456      5432-08-30T05:43:21.123456      5432-08-30T05:43:21.123456      5432-08-30T05:43:21.123456
-
--- !c2 --
-1      5432-08-30T05:43:21.100 5432-08-30T05:43:21.120 5432-08-30T05:43:21.123 5432-08-30T05:43:21.123400      5432-08-30T05:43:21.123450      5432-08-30T05:43:21.123456      5432-08-30T05:43:21.123456      5432-08-30T05:43:21.123456      5432-08-30T05:43:21.123456
-
--- !c3 --
-1      5432-08-30T05:43:21.100 5432-08-30T05:43:21.120 5432-08-30T05:43:21.123 5432-08-30T05:43:21.123400      5432-08-30T05:43:21.123450      5432-08-30T05:43:21.123456      5432-08-30T05:43:21.123456      5432-08-30T05:43:21.123456      5432-08-30T05:43:21.123456
-
--- !c4 --
-1      5432-08-30T05:43:21.100 5432-08-30T05:43:21.120 5432-08-30T05:43:21.123 5432-08-30T05:43:21.123400      5432-08-30T05:43:21.123450      5432-08-30T05:43:21.123456      5432-08-30T05:43:21.123456      5432-08-30T05:43:21.123456      5432-08-30T05:43:21.123456
-
diff --git a/regression-test/pipeline/external/conf/regression-conf.groovy b/regression-test/pipeline/external/conf/regression-conf.groovy
index f7743ce858e..6c57fc9f89c 100644
--- a/regression-test/pipeline/external/conf/regression-conf.groovy
+++ b/regression-test/pipeline/external/conf/regression-conf.groovy
@@ -109,6 +109,11 @@ hive3HdfsPort=8320
 hive3ServerPort=13000
 hive3PgPort=5732
 
+// iceberg test config
+iceberg_rest_uri_port=18181
+iceberg_minio_port=19001
+enableIcebergTest=true
+
 enableEsTest=true
 es_5_port=59200
es_6_port=19200
diff --git a/regression-test/suites/external_table_p0/iceberg/iceberg_complex_type.groovy b/regression-test/suites/external_table_p0/iceberg/iceberg_complex_type.groovy
new file mode 100644
index 00000000000..7fe03e2bc66
--- /dev/null
+++ b/regression-test/suites/external_table_p0/iceberg/iceberg_complex_type.groovy
@@ -0,0 +1,94 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("iceberg_complex_type", "p0,external,doris,external_docker,external_docker_doris") {
+
+    String enabled = context.config.otherConfigs.get("enableIcebergTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable iceberg test.")
+        return
+    }
+
+    String rest_port = context.config.otherConfigs.get("iceberg_rest_uri_port")
+    String minio_port = context.config.otherConfigs.get("iceberg_minio_port")
+    String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+    String catalog_name = "iceberg_complex_type"
+
+    sql """drop catalog if exists ${catalog_name}"""
+    sql """
+    CREATE CATALOG ${catalog_name} PROPERTIES (
+        'type'='iceberg',
+        'iceberg.catalog.type'='rest',
+        'uri' = 'http://${externalEnvIp}:${rest_port}',
+        "s3.access_key" = "admin",
+        "s3.secret_key" = "password",
+        "s3.endpoint" = "http://${externalEnvIp}:${minio_port}",
+        "s3.region" = "us-east-1"
+    );"""
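+    // Assumes the Iceberg REST catalog and MinIO containers started by docker/thirdparties/run-thirdparties-docker.sh are reachable at these ports.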
+
+    logger.info("catalog " + catalog_name + " created")
+    sql """switch ${catalog_name};"""
+    logger.info("switched to catalog " + catalog_name)
+    sql """ use multi_catalog;""" 
+
+    qt_parquet_v1_1  """ desc complex_parquet_v1 ;""" 
+    qt_parquet_v1_2  """ select * from  complex_parquet_v1 order by id; """ 
+    qt_parquet_v1_3  """ select count(*) from  complex_parquet_v1 ;"""
+    qt_parquet_v1_4  """ select array_size(col2) from  complex_parquet_v1 where col2 is not null   order by id ; """
+    qt_parquet_v1_5  """ select map_keys(col3) from  complex_parquet_v1  order by id; """
+    qt_parquet_v1_6  """ select struct_element(col4, 1) from  complex_parquet_v1  where id >=7 order by id; """
+    qt_parquet_v1_7  """ select id,count(col2) from  complex_parquet_v1  group by id order by id desc limit 2; """
+
+
+    qt_parquet_v2_1  """ desc complex_parquet_v2 ;""" 
+    qt_parquet_v2_2  """ select * from  complex_parquet_v2 order by id; """ 
+    qt_parquet_v2_3  """ select count(*) from  complex_parquet_v2 ;"""
+    qt_parquet_v2_4  """ select array_size(col2) from  complex_parquet_v2 where col2 is not null   order by id ; """
+    qt_parquet_v2_5  """ select map_keys(col3) from  complex_parquet_v2  order by id; """
+    qt_parquet_v2_6  """ select struct_element(col4, 1) from  complex_parquet_v2  where id >=7 order by id; """
+    qt_parquet_v2_7  """ select id,count(col2) from  complex_parquet_v2  group by id order by id desc limit 2; """
+
+
+    qt_orc_v1_1  """ desc complex_orc_v1 ;""" 
+    qt_orc_v1_2  """ select * from  complex_orc_v1 order by id; """ 
+    qt_orc_v1_3  """ select count(*) from  complex_orc_v1 ;"""
+    qt_orc_v1_4  """ select array_size(col2) from  complex_orc_v1 where col2 is not null   order by id ; """
+    qt_orc_v1_5  """ select map_keys(col3) from  complex_orc_v1  order by id; """
+    qt_orc_v1_6  """ select struct_element(col4, 1) from  complex_orc_v1  where id >=7 order by id; """
+    qt_orc_v1_7  """ select id,count(col2) from  complex_orc_v1  group by id order by id desc limit 2; """
+
+
+    qt_orc_v2_1  """ desc complex_orc_v2 ;""" 
+    qt_orc_v2_2  """ select * from  complex_orc_v2 order by id; """ 
+    qt_orc_v2_3  """ select count(*) from  complex_orc_v2 ;"""
+    qt_orc_v2_4  """ select array_size(col2) from  complex_orc_v2 where col2 is not null   order by id ; """
+    qt_orc_v2_5  """ select map_keys(col3) from  complex_orc_v2  order by id; """
+    qt_orc_v2_6  """ select struct_element(col4, 1) from  complex_orc_v2  where id >=7 order by id; """
+    qt_orc_v2_7  """ select id,count(col2) from  complex_orc_v2  group by id order by id desc limit 2; """
+
+}
+
+/*
+schema :
+    id                      int
+    col2                    array<array<array<array<array<int>>>>>
+    col3                    map<array<float>,map<int,map<int,float>>>
+    col4                    struct<x:array<int>,y:array<double>,z:map<boolean,string>>
+    col5                    map<int,map<int,map<int,map<int,map<float,map<double,struct<x:int,y:array<double>>>>>>>>
+    col6                    struct<xx:array<int>,yy:array<map<double,float>>,zz:struct<xxx:struct<xxxx:struct<xxxxx:decimal(13,2)>>>>
+
+*/
\ No newline at end of file
diff --git a/regression-test/suites/external_table_p2/iceberg/iceberg_partition_upper_case_nereids.groovy b/regression-test/suites/external_table_p0/iceberg/iceberg_partition_upper_case_nereids.groovy
similarity index 79%
rename from regression-test/suites/external_table_p2/iceberg/iceberg_partition_upper_case_nereids.groovy
rename to regression-test/suites/external_table_p0/iceberg/iceberg_partition_upper_case_nereids.groovy
index 5293b2800e9..b95483f17d0 100644
--- a/regression-test/suites/external_table_p2/iceberg/iceberg_partition_upper_case_nereids.groovy
+++ b/regression-test/suites/external_table_p0/iceberg/iceberg_partition_upper_case_nereids.groovy
@@ -15,7 +15,7 @@
 // specific language governing permissions and limitations
 // under the License.
 
-suite("iceberg_partition_upper_case_nereids", "p2,external,iceberg,external_remote,external_remote_iceberg") {
+suite("iceberg_partition_upper_case_nereids", "p0,external,doris,external_docker,external_docker_doris") {
     def orc_upper1 = """select * from iceberg_partition_upper_case_orc order by k1;"""
     def orc_upper2 = """select k1, city from iceberg_partition_upper_case_orc order by k1;"""
     def orc_upper3 = """select k1, k2 from iceberg_partition_upper_case_orc order by k1;"""
@@ -40,18 +40,30 @@ suite("iceberg_partition_upper_case_nereids", "p2,external,iceberg,external_remo
     def parquet_lower4 = """select city from iceberg_partition_lower_case_parquet order by city;"""
     def parquet_lower5 = """select * from iceberg_partition_lower_case_parquet where k1>1 and city='Beijing' order by k1;"""
 
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
-        String catalog_name = "iceberg_partition_nereids"
-        sql """drop catalog if exists ${catalog_name};"""
-        sql """
-            create catalog if not exists ${catalog_name} properties (
-                'type'='hms',
-                'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
-            );
-        """
+    String enabled = context.config.otherConfigs.get("enableIcebergTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable iceberg test.")
+        return
+    }
+
+    String rest_port = context.config.otherConfigs.get("iceberg_rest_uri_port")
+    String minio_port = context.config.otherConfigs.get("iceberg_minio_port")
+    String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+    String catalog_name = "iceberg_partition_upper_case_nereids"
+
+    sql """drop catalog if exists ${catalog_name}"""
+    sql """
+    CREATE CATALOG ${catalog_name} PROPERTIES (
+        'type'='iceberg',
+        'iceberg.catalog.type'='rest',
+        'uri' = 'http://${externalEnvIp}:${rest_port}',
+        "s3.access_key" = "admin",
+        "s3.secret_key" = "password",
+        "s3.endpoint" = "http://${externalEnvIp}:${minio_port}",
+        "s3.region" = "us-east-1"
+    );"""
+
+
         logger.info("catalog " + catalog_name + " created")
         sql """switch ${catalog_name};"""
         logger.info("switched to catalog " + catalog_name)
@@ -79,6 +91,5 @@ suite("iceberg_partition_upper_case_nereids", "p2,external,iceberg,external_remo
         qt_parquetlower4 parquet_lower4
         qt_parquetlower5 parquet_lower5
 
-    }
 }
 
diff --git a/regression-test/suites/external_table_p2/iceberg/iceberg_schema_change.groovy b/regression-test/suites/external_table_p0/iceberg/iceberg_schema_change.groovy
similarity index 89%
rename from regression-test/suites/external_table_p2/iceberg/iceberg_schema_change.groovy
rename to regression-test/suites/external_table_p0/iceberg/iceberg_schema_change.groovy
index 5e036683595..12e15736779 100644
--- a/regression-test/suites/external_table_p2/iceberg/iceberg_schema_change.groovy
+++ b/regression-test/suites/external_table_p0/iceberg/iceberg_schema_change.groovy
@@ -15,22 +15,33 @@
 // specific language governing permissions and limitations
 // under the License.
 
-suite("iceberg_schema_change", "p2,external,iceberg,external_remote,external_remote_iceberg") {
-
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-
-        String catalog_name = "test_external_iceberg_schema_change"
-        String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String extHdfsPort = context.config.otherConfigs.get("extHdfsPort")
-        sql """drop catalog if exists ${catalog_name};"""
-        sql """
-            create catalog if not exists ${catalog_name} properties (
-                'type'='iceberg',
-                'iceberg.catalog.type'='hadoop',
-                'warehouse' = 'hdfs://${extHiveHmsHost}:${extHdfsPort}/usr/hive/warehouse/hadoop_catalog'
-            );
-        """
+suite("iceberg_schema_change", "p0,external,doris,external_docker,external_docker_doris") {
+
+    String enabled = context.config.otherConfigs.get("enableIcebergTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable iceberg test.")
+        return
+    }
+
+    // TODO: ask whoever originally created this table how it was constructed
+    return
+
+    String rest_port = context.config.otherConfigs.get("iceberg_rest_uri_port")
+    String minio_port = context.config.otherConfigs.get("iceberg_minio_port")
+    String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+    String catalog_name = "iceberg_schema_change"
+
+    sql """drop catalog if exists ${catalog_name}"""
+    sql """
+    CREATE CATALOG ${catalog_name} PROPERTIES (
+        'type'='iceberg',
+        'iceberg.catalog.type'='rest',
+        'uri' = 'http://${externalEnvIp}:${rest_port}',
+        "s3.access_key" = "admin",
+        "s3.secret_key" = "password",
+        "s3.endpoint" = "http://${externalEnvIp}:${minio_port}",
+        "s3.region" = "us-east-1"
+    );"""
 
         logger.info("catalog " + catalog_name + " created")
         sql """switch ${catalog_name};"""
@@ -38,9 +49,6 @@ suite("iceberg_schema_change", "p2,external,iceberg,external_remote,external_rem
         sql """ use multi_catalog;""" 
 
 
-
-
-
         qt_parquet_v1_1  """ desc complex_parquet_v1_schema_change ;""" 
         qt_parquet_v1_2  """ select * from  complex_parquet_v1_schema_change order by id; """
         qt_parquet_v1_3  """ select count(*) from  complex_parquet_v1_schema_change ;"""
@@ -92,9 +100,6 @@ suite("iceberg_schema_change", "p2,external,iceberg,external_remote,external_rem
         qt_orc_v2_9  """ select id,count(col_add) from  complex_orc_v2_schema_change  group by id order by id desc ; """
         qt_orc_v2_10  """ select col_add from  complex_orc_v2_schema_change where col_add -1 = col_add2 order by id; """
 
-
-
-    }
 }
 /*
 before schema: 
diff --git a/regression-test/suites/external_table_p2/iceberg/test_external_catalog_iceberg_common.groovy b/regression-test/suites/external_table_p0/iceberg/test_external_catalog_iceberg_common.groovy
similarity index 58%
rename from regression-test/suites/external_table_p2/iceberg/test_external_catalog_iceberg_common.groovy
rename to regression-test/suites/external_table_p0/iceberg/test_external_catalog_iceberg_common.groovy
index 577a4e6702a..48cbeb222f8 100644
--- a/regression-test/suites/external_table_p2/iceberg/test_external_catalog_iceberg_common.groovy
+++ b/regression-test/suites/external_table_p0/iceberg/test_external_catalog_iceberg_common.groovy
@@ -15,20 +15,30 @@
 // specific language governing permissions and limitations
 // under the License.
 
-suite("test_external_catalog_iceberg_common", "p2,external,iceberg,external_remote,external_remote_iceberg") {
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
-        String catalog_name = "test_external_catalog_iceberg_partition"
+suite("test_external_catalog_iceberg_common", "p0,external,doris,external_docker,external_docker_doris") {
+    String enabled = context.config.otherConfigs.get("enableIcebergTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable iceberg test.")
+        return
+    }
+
+    String rest_port = context.config.otherConfigs.get("iceberg_rest_uri_port")
+    String minio_port = context.config.otherConfigs.get("iceberg_minio_port")
+    String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+    String catalog_name = "test_external_catalog_iceberg_common"
+
+    sql """drop catalog if exists ${catalog_name}"""
+    sql """
+    CREATE CATALOG ${catalog_name} PROPERTIES (
+        'type'='iceberg',
+        'iceberg.catalog.type'='rest',
+        'uri' = 'http://${externalEnvIp}:${rest_port}',
+        "s3.access_key" = "admin",
+        "s3.secret_key" = "password",
+        "s3.endpoint" = "http://${externalEnvIp}:${minio_port}",
+        "s3.region" = "us-east-1"
+    );"""
 
-        sql """drop catalog if exists ${catalog_name};"""
-        sql """
-            create catalog if not exists ${catalog_name} properties (
-                'type'='hms',
-                'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
-            );
-        """
 
         sql """switch ${catalog_name};"""
         // test parquet format
@@ -44,10 +54,10 @@ suite("test_external_catalog_iceberg_common", "p2,external,iceberg,external_remo
                         ) as dc_1;
                     """
         }
-        sql """ use `iceberg_catalog`; """
-        q01_parquet()
+        sql """ use `multi_catalog`; """
+        // TODO support table:lineitem later
+        // q01_parquet()  // 599715
 
         // test the special characters in table fields
        qt_sanitize_mara """select MaTnR, NtgEW, `/dsd/Sv_cnt_grP` from sanitize_mara order by mAtNr"""
-    }
 }
diff --git a/regression-test/suites/external_table_p0/iceberg/test_external_catalog_iceberg_partition.groovy b/regression-test/suites/external_table_p0/iceberg/test_external_catalog_iceberg_partition.groovy
new file mode 100644
index 00000000000..8aa305893ff
--- /dev/null
+++ b/regression-test/suites/external_table_p0/iceberg/test_external_catalog_iceberg_partition.groovy
@@ -0,0 +1,92 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_external_catalog_iceberg_partition", "p0,external,doris,external_docker,external_docker_doris") {
+    String enabled = context.config.otherConfigs.get("enableIcebergTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable iceberg test.")
+        return
+    }
+
+    String rest_port = context.config.otherConfigs.get("iceberg_rest_uri_port")
+    String minio_port = context.config.otherConfigs.get("iceberg_minio_port")
+    String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+    String catalog_name = "test_external_catalog_iceberg_partition"
+
+    sql """drop catalog if exists ${catalog_name}"""
+    sql """
+    CREATE CATALOG ${catalog_name} PROPERTIES (
+        'type'='iceberg',
+        'iceberg.catalog.type'='rest',
+        'uri' = 'http://${externalEnvIp}:${rest_port}',
+        "s3.access_key" = "admin",
+        "s3.secret_key" = "password",
+        "s3.endpoint" = "http://${externalEnvIp}:${minio_port}",
+        "s3.region" = "us-east-1"
+    );"""
+
+        sql """switch ${catalog_name};"""
+        sql """ use multi_catalog; """
+        // test parquet format
+        def q01_parquet = {
+            qt_q01 """ select * from parquet_partitioned_one_column order by t_float """
+            qt_q02 """ select * from parquet_partitioned_one_column where t_int is null order by t_float """
+            qt_q03 """ select * from parquet_partitioned_one_column where t_int is not null order by t_float """
+            qt_q04 """ select * from parquet_partitioned_columns order by t_float """
+            qt_q05 """ select * from parquet_partitioned_columns where t_int is null order by t_float """
+            qt_q06 """ select * from parquet_partitioned_columns where t_int is not null order by t_float """
+            qt_q07 """ select * from parquet_partitioned_truncate_and_fields order by t_float """
+            qt_q08 """ select * from parquet_partitioned_truncate_and_fields where t_int is null order by t_float """
+            qt_q09 """ select * from parquet_partitioned_truncate_and_fields where t_int is not null order by t_float """
+        }
+        // test orc format
+        def q01_orc = {
+            qt_q01 """ select * from orc_partitioned_one_column order by t_float """
+            qt_q02 """ select * from orc_partitioned_one_column where t_int is null order by t_float """
+            qt_q03 """ select * from orc_partitioned_one_column where t_int is not null order by t_float """
+            qt_q04 """ select * from orc_partitioned_columns order by t_float """
+            qt_q05 """ select * from orc_partitioned_columns where t_int is null order by t_float """
+            qt_q06 """ select * from orc_partitioned_columns where t_int is not null order by t_float """
+            qt_q07 """ select * from orc_partitioned_truncate_and_fields order by t_float """
+            qt_q08 """ select * from orc_partitioned_truncate_and_fields where t_int is null order by t_float """
+            qt_q09 """ select * from orc_partitioned_truncate_and_fields where t_int is not null order by t_float """
+        }
+
+        // test date for partition and predicate
+        def q01_date = {
+
+            qt_q01 """ select * from user_case_date_without_partition where d = '2020-01-02' """
+            qt_q02 """ select * from user_case_date_without_partition where d > '2020-01-01' """
+            qt_q03 """ select * from user_case_date_without_partition where d < '2020-01-03' """
+            qt_q04 """ select * from user_case_date_without_partition where ts < '2020-01-03' """
+            qt_q05 """ select * from user_case_date_without_partition where ts > '2020-01-01' """
+
+            qt_q06 """ select * from user_case_date_with_date_partition where d = '2020-01-02' """
+            qt_q07 """ select * from user_case_date_with_date_partition where d < '2020-01-03' """
+            qt_q08 """ select * from user_case_date_with_date_partition where d > '2020-01-01' """
+
+            qt_q09 """ select * from user_case_date_with_days_date_partition where d = '2020-01-02' """
+            qt_q10 """ select * from user_case_date_with_days_date_partition where d < '2020-01-03' """
+            qt_q11 """ select * from user_case_date_with_days_date_partition where d > '2020-01-01' """
+
+        }
+
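+        // run all three groups; the partitioned tables queried here are
+        // expected to be pre-created by the iceberg docker environment
+        // (see docker/thirdparties/docker-compose/iceberg), not by this suite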
+        q01_parquet()
+        q01_orc()
+        q01_date()
+}
+
diff --git a/regression-test/suites/external_table_p0/iceberg/test_iceberg_filter.groovy b/regression-test/suites/external_table_p0/iceberg/test_iceberg_filter.groovy
index b87fb8a34d2..7e654175f9c 100644
--- a/regression-test/suites/external_table_p0/iceberg/test_iceberg_filter.groovy
+++ b/regression-test/suites/external_table_p0/iceberg/test_iceberg_filter.groovy
@@ -36,19 +36,8 @@ suite("test_iceberg_filter", "p0,external,doris,external_docker,external_docker_
                 );"""
 
             sql """ switch ${catalog_name} """
-            sql """ create database if not exists ${catalog_name} """
-            sql """ use ${catalog_name} """
-
+            sql """ use multi_catalog """
             String tb_ts_filter = "tb_ts_filter";
-            sql """ drop table if exists ${tb_ts_filter} """
-            sql """ create table ${tb_ts_filter} (id int, ts datetime)"""
-            sql """ insert into ${tb_ts_filter} values (1, '2024-05-30 
20:34:56') """
-            sql """ insert into ${tb_ts_filter} values (2, '2024-05-30 
20:34:56.1') """
-            sql """ insert into ${tb_ts_filter} values (3, '2024-05-30 
20:34:56.12') """
-            sql """ insert into ${tb_ts_filter} values (4, '2024-05-30 
20:34:56.123') """
-            sql """ insert into ${tb_ts_filter} values (5, '2024-05-30 
20:34:56.1234') """
-            sql """ insert into ${tb_ts_filter} values (6, '2024-05-30 
20:34:56.12345') """
-            sql """ insert into ${tb_ts_filter} values (7, '2024-05-30 
20:34:56.123456') """
 
             qt_qt01 """ select * from ${tb_ts_filter} order by id """
             qt_qt02 """ select * from ${tb_ts_filter} where ts = '2024-05-30 20:34:56' order by id """
@@ -72,38 +61,58 @@ suite("test_iceberg_filter", "p0,external,doris,external_docker,external_docker_
             qt_qt17 """ select * from ${tb_ts_ntz_filter} where ts > '2024-06-11 12:34:56.12345' """
             qt_qt18 """ select * from ${tb_ts_ntz_filter} where ts < '2024-06-11 12:34:56.123466' """
 
-            // TODO support filter
-            // explain {
-            //     sql("select * from ${tb_ts_filter} where ts < '2024-05-30 20:34:56'")
-            //     contains "inputSplitNum=0"
-            // }
-            // explain {
-            //     sql("select * from ${tb_ts_filter} where ts < '2024-05-30 20:34:56.12'")
-            //     contains "inputSplitNum=1"
-            // }
-            // explain {
-            //     sql("select * from ${tb_ts_filter} where ts > '2024-05-30 20:34:56.1234'")
-            //     contains "inputSplitNum=2"
-            // }
-            // explain {
-            //     sql("select * from ${tb_ts_filter} where ts > '2024-05-30 20:34:56.0'")
-            //     contains "inputSplitNum=1"
-            // }
-            // explain {
-            //     sql("select * from ${tb_ts_filter} where ts = '2024-05-30 20:34:56.123456'")
-            //     contains "inputSplitNum=1"
-            // }
-            // explain {
-            //     sql("select * from ${tb_ts_filter} where ts < '2024-05-30 20:34:56.123456'")
-            //     contains "inputSplitNum=5"
-            // }
-            // explain {
-            //     sql("select * from ${tb_ts_filter} where ts > '2024-05-30 20:34:56.123456'")
-            //     contains "inputSplitNum=0"
-            // }
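+            // The table holds 7 rows written by 7 separate single-row inserts
+            // (see the Spark SQL kept at the bottom of this file), so each row
+            // should sit in its own data file; inputSplitNum is then the number
+            // of files left after min/max pruning on ts.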
+            explain {
+                sql("select * from ${tb_ts_filter} where ts < '2024-05-30 20:34:56'")
+                contains "inputSplitNum=0"
+            }
+            explain {
+                sql("select * from ${tb_ts_filter} where ts < '2024-05-30 20:34:56.12'")
+                contains "inputSplitNum=2"
+            }
+            explain {
+                sql("select * from ${tb_ts_filter} where ts > '2024-05-30 20:34:56.1234'")
+                contains "inputSplitNum=2"
+            }
+            explain {
+                sql("select * from ${tb_ts_filter} where ts > '2024-05-30 20:34:56.0'")
+                contains "inputSplitNum=6"
+            }
+            explain {
+                sql("select * from ${tb_ts_filter} where ts = '2024-05-30 20:34:56.123456'")
+                contains "inputSplitNum=1"
+            }
+            explain {
+                sql("select * from ${tb_ts_filter} where ts < '2024-05-30 20:34:56.123456'")
+                contains "inputSplitNum=6"
+            }
+            explain {
+                sql("select * from ${tb_ts_filter} where ts > '2024-05-30 20:34:56.123456'")
+                contains "inputSplitNum=0"
+            }
 
         } finally {
         }
     }
 }
 
+/*
+
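+-- Spark SQL presumably used to seed tb_ts_filter in the iceberg docker
+-- environment; each single-row insert below writes a separate data file.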
+CREATE TABLE tb_ts_filter (
+  id INT COMMENT '',
+  ts TIMESTAMP_NTZ COMMENT '')
+USING iceberg
+TBLPROPERTIES (
+  'format' = 'iceberg/parquet',
+  'format-version' = '2',
+  'write.parquet.compression-codec' = 'zstd');
+
+insert into tb_ts_filter values (1, timestamp '2024-05-30 20:34:56');
+insert into tb_ts_filter values (2, timestamp '2024-05-30 20:34:56.1');
+insert into tb_ts_filter values (3, timestamp '2024-05-30 20:34:56.12');
+insert into tb_ts_filter values (4, timestamp '2024-05-30 20:34:56.123');
+insert into tb_ts_filter values (5, timestamp '2024-05-30 20:34:56.1234');
+insert into tb_ts_filter values (6, timestamp '2024-05-30 20:34:56.12345');
+insert into tb_ts_filter values (7, timestamp '2024-05-30 20:34:56.123456');
+
+*/
+
diff --git a/regression-test/suites/external_table_p0/iceberg/test_iceberg_predicate_conversion.groovy b/regression-test/suites/external_table_p0/iceberg/test_iceberg_predicate_conversion.groovy
new file mode 100644
index 00000000000..bbca6d8f023
--- /dev/null
+++ b/regression-test/suites/external_table_p0/iceberg/test_iceberg_predicate_conversion.groovy
@@ -0,0 +1,109 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_iceberg_predicate_conversion", 
"p0,external,doris,external_docker,external_docker_doris") {
+    String enabled = context.config.otherConfigs.get("enableIcebergTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("diable iceberg test.")
+        return
+    }
+
+    String catalog_name = "test_iceberg_predicate_conversion"
+    String rest_port = context.config.otherConfigs.get("iceberg_rest_uri_port")
+    String minio_port = context.config.otherConfigs.get("iceberg_minio_port")
+    String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+
+    sql """drop catalog if exists ${catalog_name}"""
+    sql """
+    CREATE CATALOG ${catalog_name} PROPERTIES (
+        'type'='iceberg',
+        'iceberg.catalog.type'='rest',
+        'uri' = 'http://${externalEnvIp}:${rest_port}',
+        "s3.access_key" = "admin",
+        "s3.secret_key" = "password",
+        "s3.endpoint" = "http://${externalEnvIp}:${minio_port}";,
+        "s3.region" = "us-east-1"
+    );"""
+
+        sql """switch ${catalog_name};"""
+        sql """ use `multi_catalog`; """
+
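+        // each check below asserts on the Iceberg predicate expression the
+        // planner produces (the ref(name=...) forms), not on the query result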
+        def sqlstr = """select glue_varchar from tb_predict where glue_varchar > date '2023-03-07' """
+        order_qt_q01 """${sqlstr}"""
+        explain {
+            sql("""${sqlstr}""")
+            contains """ref(name="glue_varchar") > "2023-03-07 00:00:00"""
+        }
+
+        sqlstr = """select l_shipdate from tb_predict where l_shipdate in 
("1997-05-18", "1996-05-06"); """
+        order_qt_q02 """${sqlstr}""" 
+        explain {
+            sql("""${sqlstr}""")
+            contains """ref(name="l_shipdate") in"""
+            contains """1997-05-18"""
+            contains """1996-05-06"""
+        }
+
+        sqlstr = """select l_shipdate, l_shipmode from tb_predict where 
l_shipdate in ("1997-05-18", "1996-05-06") and l_shipmode = "MAIL";"""
+        order_qt_q03 """${sqlstr}""" 
+        explain {
+            sql("""${sqlstr}""")
+            contains """ref(name="l_shipdate") in"""
+            contains """1997-05-18"""
+            contains """1996-05-06"""
+            contains """ref(name="l_shipmode") == "MAIL"""
+        }
+
+        sqlstr = """select l_shipdate, l_shipmode from tb_predict where 
l_shipdate in ("1997-05-18", "1996-05-06") or NOT(l_shipmode = "MAIL") order by 
l_shipdate, l_shipmode limit 10"""
+        plan = """(ref(name="l_shipdate") in ("1997-05-18", "1996-05-06") or 
not(ref(name="l_shipmode") == "MAIL"))"""
+        order_qt_q04 """${sqlstr}""" 
+        explain {
+            sql("""${sqlstr}""")
+            contains """or not(ref(name="l_shipmode") == "MAIL"))"""
+            contains """ref(name="l_shipdate")"""
+            contains """1997-05-18"""
+            contains """1996-05-06"""
+        }
+
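+        // timestamp literals are expected to be pushed down as microseconds
+        // since the epoch; 1678192559000000 presumably corresponds to
+        // '2023-03-07 20:35:59' in a UTC+8 session time zone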
+        sqlstr = """select glue_timstamp from tb_predict where glue_timstamp > 
'2023-03-07 20:35:59' order by glue_timstamp limit 5"""
+        order_qt_q05 """${sqlstr}""" 
+        explain {
+            sql("""${sqlstr}""")
+            contains """ref(name="glue_timstamp") > 1678192559000000"""
+        }
+}
+
+/*
+
+create table tb_predict (
+    glue_varchar string,
+    glue_timstamp timestamp,
+    l_shipdate date,
+    l_shipmode string
+) using iceberg;
+
+insert into tb_predict values ('2023-03-08', timestamp '2023-03-07 20:35:59.123456', date "1997-05-19", "MAIL");
+insert into tb_predict values ('2023-03-06', timestamp '2023-03-07 20:35:58', date "1997-05-19", "MAI");
+insert into tb_predict values ('2023-03-07', timestamp '2023-03-07 20:35:59.123456', date "1997-05-18", "MAIL");
+insert into tb_predict values ('2023-03-07', timestamp '2023-03-07 20:35:59', date "1997-05-18", "MAI");
+insert into tb_predict values ('2023-03-07', timestamp '2023-03-07 20:35:58', date "1996-05-06", "MAIL");
+insert into tb_predict values ('2023-03-04', timestamp '2023-03-07 20:36:00', date "1996-05-06", "MAI");
+insert into tb_predict values ('2023-03-07', timestamp '2023-03-07 20:34:59', date "1996-05-01", "MAIL");
+insert into tb_predict values ('2023-03-09', timestamp '2023-03-07 20:37:59', date "1996-05-01", "MAI");
+
+*/
diff --git a/regression-test/suites/external_table_p2/paimon/paimon_base_filesystem.groovy b/regression-test/suites/external_table_p0/paimon/paimon_base_filesystem.groovy
similarity index 85%
rename from regression-test/suites/external_table_p2/paimon/paimon_base_filesystem.groovy
rename to regression-test/suites/external_table_p0/paimon/paimon_base_filesystem.groovy
index a091e3615fa..7be15f94243 100644
--- a/regression-test/suites/external_table_p2/paimon/paimon_base_filesystem.groovy
+++ b/regression-test/suites/external_table_p0/paimon/paimon_base_filesystem.groovy
@@ -15,11 +15,15 @@
 // specific language governing permissions and limitations
 // under the License.
 
-suite("paimon_base_filesystem", 
"p2,external,paimon,external_remote,external_remote_paimon") {
-    String enabled = 
context.config.otherConfigs.get("enableExternalPaimonTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String catalog_cos = "paimon_cos"
-        String catalog_oss = "paimon_oss"
+suite("paimon_base_filesystem", 
"p0,external,doris,external_docker,external_docker_doris") {
+    String enabled = context.config.otherConfigs.get("enablePaimonTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        return
+    }
+
+    try {
+        String catalog_cos = "paimon_base_filesystem_paimon_cos"
+        String catalog_oss = "paimon_base_filesystem_paimon_oss"
         String ak = context.config.otherConfigs.get("aliYunAk")
         String sk = context.config.otherConfigs.get("aliYunSk")
 
@@ -60,6 +64,7 @@ suite("paimon_base_filesystem", "p2,external,paimon,external_remote,external_rem
         qt_c3 cos
         qt_c4 oss
 
+    } finally {
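+        // make sure the scanner switch is reset even if a query above failed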
         sql """set force_jni_scanner=false"""
     }
 }
diff --git a/regression-test/suites/external_table_p0/paimon/paimon_timestamp_types.groovy b/regression-test/suites/external_table_p0/paimon/paimon_timestamp_types.groovy
new file mode 100644
index 00000000000..81b0e48e990
--- /dev/null
+++ b/regression-test/suites/external_table_p0/paimon/paimon_timestamp_types.groovy
@@ -0,0 +1,158 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("paimon_timestamp_types", 
"p0,external,doris,external_docker,external_docker_doris") {
+
+    def ts_orc = """select * from ts_orc"""
+    def ts_parquet = """select * from ts_parquet"""
+
+    String enabled = context.config.otherConfigs.get("enablePaimonTest")
+    // The timestamp type of paimon has no logical or converted type,
+    // and is conflict with column type change from bigint to timestamp.
+    // Deprecated currently.
+    if (enabled == null || 
!enabled.equalsIgnoreCase("enable_deprecated_case")) {
+        return
+    }
+
+    try {
+        String catalog_name = "paimon_timestamp_types"
+        String minio_port = context.config.otherConfigs.get("iceberg_minio_port")
+        String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+        sql """drop catalog if exists ${catalog_name}"""
+        sql """CREATE CATALOG ${catalog_name} PROPERTIES (
+                'type'='paimon',
+                'warehouse' = 's3://warehouse/wh/',
+                "s3.access_key" = "admin",
+                "s3.secret_key" = "password",
+                "s3.endpoint" = "http://${externalEnvIp}:${minio_port}";,
+                "s3.region" = "us-east-1"
+            );"""
+
+        logger.info("catalog " + catalog_name + " created")
+        sql """switch ${catalog_name};"""
+        logger.info("switched to catalog " + catalog_name)
+        sql """use test_paimon_db;"""
+        logger.info("use test_paimon_db")
+
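+        // run the same queries through both scanner paths: force_jni_scanner
+        // picks the JNI-based scanner, otherwise the native reader is used;
+        // both should decode Paimon timestamps identically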
+        sql """set force_jni_scanner=true"""
+        qt_c1 ts_orc
+        qt_c2 ts_parquet
+
+        sql """set force_jni_scanner=false"""
+        qt_c3 ts_orc
+        qt_c4 ts_parquet
+
+    } finally {
+        sql """set force_jni_scanner=false"""
+    }
+}
+
+
+/*
+
+--- flink-sql:
+
+SET 'table.local-time-zone' = 'Asia/Shanghai';
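+-- the session time zone above determines how the timestamp_ltz values
+-- below are interpreted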
+
+create table ts_orc (
+id int,
+ts1 timestamp(1), 
+ts2 timestamp(2), 
+ts3 timestamp(3), 
+ts4 timestamp(4),
+ts5 timestamp(5), 
+ts6 timestamp(6), 
+ts7 timestamp(7), 
+ts8 timestamp(8), 
+ts9 timestamp(9),
+ts11 timestamp_ltz(1), 
+ts12 timestamp_ltz(2), 
+ts13 timestamp_ltz(3), 
+ts14 timestamp_ltz(4),
+ts15 timestamp_ltz(5), 
+ts16 timestamp_ltz(6), 
+ts17 timestamp_ltz(7), 
+ts18 timestamp_ltz(8), 
+ts19 timestamp_ltz(9))
+WITH ('file.format' = 'orc','write-only'='true');
+
+create table ts_parquet (
+id int,
+ts1 timestamp(1), 
+ts2 timestamp(2), 
+ts3 timestamp(3), 
+ts4 timestamp(4),
+ts5 timestamp(5), 
+ts6 timestamp(6), 
+ts7 timestamp(7), 
+ts8 timestamp(8), 
+ts9 timestamp(9),
+ts11 timestamp_ltz(1), 
+ts12 timestamp_ltz(2), 
+ts13 timestamp_ltz(3), 
+ts14 timestamp_ltz(4),
+ts15 timestamp_ltz(5), 
+ts16 timestamp_ltz(6), 
+ts17 timestamp_ltz(7), 
+ts18 timestamp_ltz(8), 
+ts19 timestamp_ltz(9))
+WITH ('file.format' = 'parquet','write-only'='true');
+
+insert into ts_orc values (
+    1,
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789');
+
+insert into ts_parquet values (
+    1,
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789',
+    timestamp '2024-01-02 10:04:05.123456789');
+
+*/
diff --git a/regression-test/suites/external_table_p2/iceberg/iceberg_complex_type.groovy b/regression-test/suites/external_table_p2/iceberg/iceberg_complex_type.groovy
deleted file mode 100644
index f465a9afe37..00000000000
--- a/regression-test/suites/external_table_p2/iceberg/iceberg_complex_type.groovy
+++ /dev/null
@@ -1,92 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-suite("iceberg_complex_type", 
"p2,external,iceberg,external_remote,external_remote_iceberg") {
-
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-
-        String catalog_name = "test_external_iceberg_complex_type"
-        String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String extHdfsPort = context.config.otherConfigs.get("extHdfsPort")
-        sql """drop catalog if exists ${catalog_name};"""
-        sql """
-            create catalog if not exists ${catalog_name} properties (
-                'type'='iceberg',
-                'iceberg.catalog.type'='hadoop',
-                'warehouse' = 'hdfs://${extHiveHmsHost}:${extHdfsPort}/usr/hive/warehouse/hadoop_catalog'
-            );
-        """
-
-        logger.info("catalog " + catalog_name + " created")
-        sql """switch ${catalog_name};"""
-        logger.info("switched to catalog " + catalog_name)
-        sql """ use multi_catalog;""" 
-
-
-
-        qt_parquet_v1_1  """ desc complex_parquet_v1 ;"""
-        qt_parquet_v1_2  """ select * from  complex_parquet_v1 order by id; """
-        qt_parquet_v1_3  """ select count(*) from  complex_parquet_v1 ;"""
-        qt_parquet_v1_4  """ select array_size(col2) from  complex_parquet_v1 where col2 is not null   order by id ; """
-        qt_parquet_v1_5  """ select map_keys(col3) from  complex_parquet_v1  order by id; """
-        qt_parquet_v1_6  """ select struct_element(col4, 1) from  complex_parquet_v1  where id >=7 order by id; """
-        qt_parquet_v1_7  """ select id,count(col2) from  complex_parquet_v1  group by id order by id desc limit 2; """
-
-
-        qt_parquet_v2_1  """ desc complex_parquet_v2 ;"""
-        qt_parquet_v2_2  """ select * from  complex_parquet_v2 order by id; """
-        qt_parquet_v2_3  """ select count(*) from  complex_parquet_v2 ;"""
-        qt_parquet_v2_4  """ select array_size(col2) from  complex_parquet_v2 where col2 is not null   order by id ; """
-        qt_parquet_v2_5  """ select map_keys(col3) from  complex_parquet_v2  order by id; """
-        qt_parquet_v2_6  """ select struct_element(col4, 1) from  complex_parquet_v2  where id >=7 order by id; """
-        qt_parquet_v2_7  """ select id,count(col2) from  complex_parquet_v2  group by id order by id desc limit 2; """
-
-
-        qt_orc_v1_1  """ desc complex_orc_v1 ;"""
-        qt_orc_v1_2  """ select * from  complex_orc_v1 order by id; """
-        qt_orc_v1_3  """ select count(*) from  complex_orc_v1 ;"""
-        qt_orc_v1_4  """ select array_size(col2) from  complex_orc_v1 where col2 is not null   order by id ; """
-        qt_orc_v1_5  """ select map_keys(col3) from  complex_orc_v1  order by id; """
-        qt_orc_v1_6  """ select struct_element(col4, 1) from  complex_orc_v1  where id >=7 order by id; """
-        qt_orc_v1_7  """ select id,count(col2) from  complex_orc_v1  group by id order by id desc limit 2; """
-
-
-        qt_orc_v2_1  """ desc complex_orc_v2 ;"""
-        qt_orc_v2_2  """ select * from  complex_orc_v2 order by id; """
-        qt_orc_v2_3  """ select count(*) from  complex_orc_v2 ;"""
-        qt_orc_v2_4  """ select array_size(col2) from  complex_orc_v2 where col2 is not null   order by id ; """
-        qt_orc_v2_5  """ select map_keys(col3) from  complex_orc_v2  order by id; """
-        qt_orc_v2_6  """ select struct_element(col4, 1) from  complex_orc_v2  where id >=7 order by id; """
-        qt_orc_v2_7  """ select id,count(col2) from  complex_orc_v2  group by id order by id desc limit 2; """
-
-
-
-
-    }
-}
-
-/*
-schema :
-    id                      int
-    col2                    array<array<array<array<array<int>>>>>
-    col3                    map<array<float>,map<int,map<int,float>>>
-    col4                    struct<x:array<int>,y:array<double>,z:map<boolean,string>>
-    col5                    map<int,map<int,map<int,map<int,map<float,map<double,struct<x:int,y:array<double>>>>>>>>
-    col6                    struct<xx:array<int>,yy:array<map<double,float>>,zz:struct<xxx:struct<xxxx:struct<xxxxx:decimal(13,2)>>>>
-
-*/
\ No newline at end of file
diff --git a/regression-test/suites/external_table_p2/iceberg/iceberg_partition_upper_case.groovy b/regression-test/suites/external_table_p2/iceberg/iceberg_partition_upper_case.groovy
deleted file mode 100644
index d46d94db76f..00000000000
--- a/regression-test/suites/external_table_p2/iceberg/iceberg_partition_upper_case.groovy
+++ /dev/null
@@ -1,103 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-suite("iceberg_partition_upper_case", 
"p2,external,iceberg,external_remote,external_remote_iceberg") {
-    def orc_upper1 = """select * from iceberg_partition_upper_case_orc order 
by k1;"""
-    def orc_upper2 = """select k1, city from iceberg_partition_upper_case_orc 
order by k1;"""
-    def orc_upper3 = """select k1, k2 from iceberg_partition_upper_case_orc 
order by k1;"""
-    def orc_upper4 = """select city from iceberg_partition_upper_case_orc 
order by city;"""
-    def orc_upper5 = """select * from iceberg_partition_upper_case_orc where 
k1>1 and city='Beijing' order by k1;"""
-    def orc_upper6 = """select * from iceberg_partition_upper_case_orc where 
k1=1 order by k1;"""
-    def orc_upper7 = """select * from iceberg_partition_upper_case_orc where 
k2 like '%k2%' and city like '%Bei%' order by k1;"""
-
-    def orc_lower1 = """select * from iceberg_partition_lower_case_orc order 
by k1;"""
-    def orc_lower2 = """select k1, city from iceberg_partition_lower_case_orc 
order by k1;"""
-    def orc_lower3 = """select k1, k2 from iceberg_partition_lower_case_orc 
order by k1;"""
-    def orc_lower4 = """select city from iceberg_partition_lower_case_orc 
order by city;"""
-    def orc_lower5 = """select * from iceberg_partition_lower_case_orc where 
k1>1 and city='Beijing' order by k1;"""
-    def orc_lower6 = """select * from iceberg_partition_lower_case_orc where 
k1=1 order by k1;"""
-    def orc_lower7 = """select * from iceberg_partition_lower_case_orc where 
k2 like '%k2%' and city like '%Bei%' order by k1;"""
-
-    def parquet_upper1 = """select * from iceberg_partition_upper_case_parquet 
order by k1;"""
-    def parquet_upper2 = """select k1, city from 
iceberg_partition_upper_case_parquet order by k1;"""
-    def parquet_upper3 = """select k1, k2 from 
iceberg_partition_upper_case_parquet order by k1;"""
-    def parquet_upper4 = """select city from 
iceberg_partition_upper_case_parquet order by city;"""
-    def parquet_upper5 = """select * from iceberg_partition_upper_case_parquet 
where k1>1 and city='Beijing' order by k1;"""
-    def parquet_upper6 = """select * from iceberg_partition_upper_case_parquet 
where substring(city, 6)='hai' order by k1;"""
-    def parquet_upper7 = """select * from iceberg_partition_upper_case_parquet 
where k1=1 order by k1;"""
-    def parquet_upper8 = """select * from iceberg_partition_upper_case_parquet 
where k2 like '%k2%' and city like '%Bei%' order by k1;"""
-
-    def parquet_lower1 = """select * from iceberg_partition_lower_case_parquet 
order by k1;"""
-    def parquet_lower2 = """select k1, city from 
iceberg_partition_lower_case_parquet order by k1;"""
-    def parquet_lower3 = """select k1, k2 from 
iceberg_partition_lower_case_parquet order by k1;"""
-    def parquet_lower4 = """select city from 
iceberg_partition_lower_case_parquet order by city;"""
-    def parquet_lower5 = """select * from iceberg_partition_lower_case_parquet 
where k1>1 and city='Beijing' order by k1;"""
-    def parquet_lower6 = """select * from iceberg_partition_lower_case_parquet 
where substring(city, 6)='hai' order by k1;"""
-    def parquet_lower7 = """select * from iceberg_partition_lower_case_parquet 
where k1=1 order by k1;"""
-    def parquet_lower8 = """select * from iceberg_partition_lower_case_parquet 
where k2 like '%k2%' and city like '%Bei%' order by k1;"""
-
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
-        String catalog_name = "iceberg_partition"
-        sql """drop catalog if exists ${catalog_name};"""
-        sql """
-            create catalog if not exists ${catalog_name} properties (
-                'type'='hms',
-                'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
-            );
-        """
-        logger.info("catalog " + catalog_name + " created")
-        sql """switch ${catalog_name};"""
-        logger.info("switched to catalog " + catalog_name)
-        sql """use multi_catalog;"""
-        qt_orcupper1 orc_upper1
-        qt_orcupper2 orc_upper2
-        qt_orcupper3 orc_upper3
-        qt_orcupper4 orc_upper4
-        qt_orcupper5 orc_upper5
-        qt_orcupper6 orc_upper6
-        qt_orcupper7 orc_upper7
-
-        qt_orclower1 orc_lower1
-        qt_orclower2 orc_lower2
-        qt_orclower3 orc_lower3
-        qt_orclower4 orc_lower4
-        qt_orclower5 orc_lower5
-        qt_orclower6 orc_lower6
-        qt_orclower7 orc_lower7
-        qt_parquetupper1 parquet_upper1
-        qt_parquetupper2 parquet_upper2
-        qt_parquetupper3 parquet_upper3
-        qt_parquetupper4 parquet_upper4
-        qt_parquetupper5 parquet_upper5
-        qt_parquetupper6 parquet_upper6
-        qt_parquetupper7 parquet_upper7
-        qt_parquetupper8 parquet_upper8
-        qt_parquetlower1 parquet_lower1
-        qt_parquetlower2 parquet_lower2
-        qt_parquetlower3 parquet_lower3
-        qt_parquetlower4 parquet_lower4
-        qt_parquetlower5 parquet_lower5
-        qt_parquetlower6 parquet_lower6
-        qt_parquetupper7 parquet_upper7
-        qt_parquetupper8 parquet_upper8
-    }
-}
-
-
diff --git a/regression-test/suites/external_table_p2/iceberg/iceberg_schema_evolution.groovy b/regression-test/suites/external_table_p2/iceberg/iceberg_schema_evolution.groovy
deleted file mode 100644
index 182786405a6..00000000000
--- a/regression-test/suites/external_table_p2/iceberg/iceberg_schema_evolution.groovy
+++ /dev/null
@@ -1,67 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-suite("iceberg_schema_evolution", 
"p2,external,iceberg,external_remote,external_remote_iceberg") {
-    def rename1 = """select * from rename_test order by rename_1;"""
-    def rename2 = """select * from rename_test where rename_1 in (3, 4) order 
by rename_1;"""
-    def drop1 = """select * from drop_test order by orig1;"""
-    def drop2 = """select * from drop_test where orig1<=3 order by orig1;"""
-    def drop3 = """select * from drop_test where orig1>3 order by orig1;"""
-    def add1 = """select * from add_test order by orig1;"""
-    def add2 = """select * from add_test where orig1 = 2;"""
-    def add3 = """select * from add_test where orig1 = 5;"""
-    def reorder1 = """select * from reorder_test order by orig1;"""
-    def reorder2 = """select * from reorder_test where orig1 = 2;"""
-    def reorder3 = """select * from reorder_test where orig1 = 5;"""
-    def readd1 = """select * from readd_test order by orig1;"""
-    def readd2 = """select * from readd_test where orig1<5 order by orig1;"""
-    def readd3 = """select * from readd_test where orig1>2 order by orig1;"""
-
-
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
-        String catalog_name = "iceberg_schema_evolution"
-        sql """drop catalog if exists ${catalog_name};"""
-        sql """
-            create catalog if not exists ${catalog_name} properties (
-                'type'='hms',
-                'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
-            );
-        """
-        logger.info("catalog " + catalog_name + " created")
-        sql """switch ${catalog_name};"""
-        logger.info("switched to catalog " + catalog_name)
-        sql """use iceberg_schema_evolution;"""
-        qt_rename1 rename1
-        qt_rename2 rename2
-        qt_drop1 drop1
-        qt_drop2 drop2
-        qt_drop3 drop3
-        qt_add1 add1
-        qt_add2 add2
-        qt_add3 add3
-        qt_reorder1 reorder1
-        qt_reorder2 reorder2
-        qt_reorder3 reorder3
-        qt_readd1 readd1
-        qt_readd2 readd2
-        qt_readd3 readd3
-    }
-}
-
diff --git a/regression-test/suites/external_table_p2/iceberg/iceberg_schema_evolution_iceberg_catalog.groovy b/regression-test/suites/external_table_p2/iceberg/iceberg_schema_evolution_iceberg_catalog.groovy
deleted file mode 100644
index 4ef7b534ea4..00000000000
--- a/regression-test/suites/external_table_p2/iceberg/iceberg_schema_evolution_iceberg_catalog.groovy
+++ /dev/null
@@ -1,69 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-suite("iceberg_schema_evolution_iceberg_catalog", 
"p2,external,iceberg,external_remote,external_remote_iceberg") {
-    def rename1 = """select * from rename_test order by rename_1;"""
-    def rename2 = """select * from rename_test where rename_1 in (3, 4) order 
by rename_1;"""
-    def drop1 = """select * from drop_test order by orig1;"""
-    def drop2 = """select * from drop_test where orig1<=3 order by orig1;"""
-    def drop3 = """select * from drop_test where orig1>3 order by orig1;"""
-    def add1 = """select * from add_test order by orig1;"""
-    def add2 = """select * from add_test where orig1 = 2;"""
-    def add3 = """select * from add_test where orig1 = 5;"""
-    def reorder1 = """select * from reorder_test order by orig1;"""
-    def reorder2 = """select * from reorder_test where orig1 = 2;"""
-    def reorder3 = """select * from reorder_test where orig1 = 5;"""
-    def readd1 = """select * from readd_test order by orig1;"""
-    def readd2 = """select * from readd_test where orig1<5 order by orig1;"""
-    def readd3 = """select * from readd_test where orig1>2 order by orig1;"""
-
-
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
-        String catalog_name = "iceberg_schema_evolution_iceberg_catalog"
-        sql """drop catalog if exists ${catalog_name};"""
-        sql """
-            create catalog if not exists ${catalog_name} properties (
-                'type'='iceberg',
-                'iceberg.catalog.type'='hms',
-                'hadoop.username' = 'hadoop',
-                'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
-            );
-        """
-        logger.info("catalog " + catalog_name + " created")
-        sql """switch ${catalog_name};"""
-        logger.info("switched to catalog " + catalog_name)
-        sql """use iceberg_schema_evolution;"""
-        qt_rename1 rename1
-        qt_rename2 rename2
-        qt_drop1 drop1
-        qt_drop2 drop2
-        qt_drop3 drop3
-        qt_add1 add1
-        qt_add2 add2
-        qt_add3 add3
-        qt_reorder1 reorder1
-        qt_reorder2 reorder2
-        qt_reorder3 reorder3
-        qt_readd1 readd1
-        qt_readd2 readd2
-        qt_readd3 readd3
-    }
-}
-
diff --git a/regression-test/suites/external_table_p2/iceberg/test_external_catalog_iceberg_partition.groovy b/regression-test/suites/external_table_p2/iceberg/test_external_catalog_iceberg_partition.groovy
deleted file mode 100644
index dfdd923bcc4..00000000000
--- a/regression-test/suites/external_table_p2/iceberg/test_external_catalog_iceberg_partition.groovy
+++ /dev/null
@@ -1,84 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-suite("test_external_catalog_iceberg_partition", 
"p2,external,iceberg,external_remote,external_remote_iceberg") {
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = 
context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = 
context.config.otherConfigs.get("extHiveHmsPort")
-        String catalog_name = "test_external_catalog_iceberg_partition"
-
-        sql """drop catalog if exists ${catalog_name};"""
-        sql """
-            create catalog if not exists ${catalog_name} properties (
-                'type'='hms',
-                'hive.metastore.uris' = 
'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
-            );
-        """
-
-        sql """switch ${catalog_name};"""
-        // test parquet format
-        def q01_parquet = {
-            qt_q01 """ select * from 
iceberg_catalog.parquet_partitioned_one_column order by t_float """
-            qt_q02 """ select * from 
iceberg_catalog.parquet_partitioned_one_column where t_int is null order by 
t_float """
-            qt_q03 """ select * from 
iceberg_catalog.parquet_partitioned_one_column where t_int is not null order by 
t_float """
-            qt_q04 """ select * from 
iceberg_catalog.parquet_partitioned_columns order by t_float """
-            qt_q05 """ select * from 
iceberg_catalog.parquet_partitioned_columns where t_int is null order by 
t_float """
-            qt_q06 """ select * from 
iceberg_catalog.parquet_partitioned_columns where t_int is not null order by 
t_float """
-            qt_q07 """ select * from 
iceberg_catalog.parquet_partitioned_truncate_and_fields order by t_float """
-            qt_q08 """ select * from 
iceberg_catalog.parquet_partitioned_truncate_and_fields where t_int is null 
order by t_float """
-            qt_q09 """ select * from 
iceberg_catalog.parquet_partitioned_truncate_and_fields where t_int is not null 
order by t_float """
-        }
-        // test orc format
-        def q01_orc = {
-            qt_q01 """ select * from 
iceberg_catalog.orc_partitioned_one_column order by t_float """
-            qt_q02 """ select * from 
iceberg_catalog.orc_partitioned_one_column where t_int is null order by t_float 
"""
-            qt_q03 """ select * from 
iceberg_catalog.orc_partitioned_one_column where t_int is not null order by 
t_float """
-            qt_q04 """ select * from iceberg_catalog.orc_partitioned_columns 
order by t_float """
-            qt_q05 """ select * from iceberg_catalog.orc_partitioned_columns 
where t_int is null order by t_float """
-            qt_q06 """ select * from iceberg_catalog.orc_partitioned_columns 
where t_int is not null order by t_float """
-            qt_q07 """ select * from 
iceberg_catalog.orc_partitioned_truncate_and_fields order by t_float """
-            qt_q08 """ select * from 
iceberg_catalog.orc_partitioned_truncate_and_fields where t_int is null order 
by t_float """
-            qt_q09 """ select * from 
iceberg_catalog.orc_partitioned_truncate_and_fields where t_int is not null 
order by t_float """
-        }
-
-        // test date for partition and predict
-        def q01_date = {
-
-            qt_q01 """ select * from user_case_date_without_partition where d 
= '2020-01-02' """
-            qt_q02 """ select * from user_case_date_without_partition where d 
> '2020-01-01' """
-            qt_q03 """ select * from user_case_date_without_partition where d 
< '2020-01-03' """
-            qt_q04 """ select * from user_case_date_without_partition where ts 
< '2020-01-03' """
-            qt_q05 """ select * from user_case_date_without_partition where ts 
> '2020-01-01' """
-
-            qt_q06 """ select * from user_case_date_with_date_partition where 
d = '2020-01-02' """
-            qt_q07 """ select * from user_case_date_with_date_partition where 
d < '2020-01-03' """
-            qt_q08 """ select * from user_case_date_with_date_partition where 
d > '2020-01-01' """
-
-            qt_q09 """ select * from user_case_date_with_days_date_partition 
where d = '2020-01-02' """
-            qt_q10 """ select * from user_case_date_with_days_date_partition 
where d < '2020-01-03' """
-            qt_q11 """ select * from user_case_date_with_days_date_partition 
where d > '2020-01-01' """
-
-        }
-
-        sql """ use `iceberg_catalog`; """
-        q01_parquet()
-        q01_orc()
-        q01_date()
-    }
-}
-
diff --git a/regression-test/suites/external_table_p2/iceberg/test_external_catalog_icebergv2.groovy b/regression-test/suites/external_table_p2/iceberg/test_external_catalog_icebergv2.groovy
deleted file mode 100644
index f802f02bcee..00000000000
--- a/regression-test/suites/external_table_p2/iceberg/test_external_catalog_icebergv2.groovy
+++ /dev/null
@@ -1,82 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-suite("test_external_catalog_icebergv2", 
"p2,external,iceberg,external_remote,external_remote_iceberg") {
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = 
context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = 
context.config.otherConfigs.get("extHiveHmsPort")
-        String hms_catalog_name = "test_external_hms_catalog_iceberg"
-        String iceberg_catalog_name = "test_external_iceberg_catalog_iceberg"
-
-        sql """drop catalog if exists ${hms_catalog_name};"""
-        sql """
-            create catalog if not exists ${hms_catalog_name} properties (
-                'type'='hms',
-                'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
-            );
-        """
-
-        sql """drop catalog if exists ${iceberg_catalog_name};"""
-        sql """
-            create catalog if not exists ${iceberg_catalog_name} properties (
-                'type'='iceberg',
-                'iceberg.catalog.type'='hms',
-                'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
-            );
-        """
-
-        sql """switch ${hms_catalog_name};"""
-        // test parquet format format
-        def q01 = {
-            qt_q01 """ select count(1) as c from customer_small """
-            qt_q02 """ select c_custkey from customer_small group by c_custkey 
order by c_custkey limit 4 """
-            qt_q03 """ select count(1) from orders_small """
-            qt_q04 """ select count(1) from customer_small where c_name = 
'Customer#000000005' or c_name = 'Customer#000000006' """
-            qt_q05 """ select * from customer_small order by c_custkey limit 3 
"""
-            qt_q06 """ select o_orderkey from orders_small where o_orderkey > 
652566 order by o_orderkey limit 3 """
-            qt_q07 """ select o_totalprice from orders_small where o_custkey < 
3357 order by o_custkey limit 3 """
-            qt_q08 """ select count(1) as c from customer """
-        }
-        // test time travel stmt
-        def q02 = {
-            qt_q09 """ select c_custkey from customer for time as of 
'2022-12-27 10:21:36' order by c_custkey limit 3 """
-            qt_q10 """ select c_custkey from customer for time as of 
'2022-12-28 10:21:36' order by c_custkey desc limit 3 """
-            qt_q11 """ select c_custkey from customer for version as of 
906874575350293177 order by c_custkey limit 3 """
-            qt_q12 """ select c_custkey from customer for version as of 
6352416983354893547 order by c_custkey desc limit 3 """
-        }
-        // in predicate
-        def q03 = {
-            qt_q13 """ select c_custkey from customer_small where c_custkey in 
(1, 2, 4, 7) order by c_custkey """
-            qt_q14 """ select c_name from customer_small where c_name in 
('Customer#000000004', 'Customer#000000007') order by c_custkey """
-        }
-
-        // test for 'FOR TIME AS OF' and 'FOR VERSION AS OF'
-        def q04 = {
-            qt_q15 """ select count(*) from 
${hms_catalog_name}.tpch_1000_icebergv2.customer_small FOR TIME AS OF 
'2022-12-22 02:29:30' """
-            qt_q16 """ select count(*) from 
${hms_catalog_name}.tpch_1000_icebergv2.customer_small FOR VERSION AS OF 
6113938156088124425 """
-            qt_q17 """ select count(*) from 
${iceberg_catalog_name}.tpch_1000_icebergv2.customer_small FOR TIME AS OF 
'2022-12-22 02:29:30' """
-            qt_q18 """ select count(*) from 
${iceberg_catalog_name}.tpch_1000_icebergv2.customer_small FOR VERSION AS OF 
6113938156088124425 """
-        }
-        
-        sql """ use `tpch_1000_icebergv2`; """
-        q01()
-        q02()
-        q03()
-        q04()
-    }
-}
diff --git a/regression-test/suites/external_table_p2/iceberg/test_iceberg_predicate_conversion.groovy b/regression-test/suites/external_table_p2/iceberg/test_iceberg_predicate_conversion.groovy
deleted file mode 100644
index 58518489ef0..00000000000
--- a/regression-test/suites/external_table_p2/iceberg/test_iceberg_predicate_conversion.groovy
+++ /dev/null
@@ -1,79 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-suite("test_iceberg_predicate_conversion", 
"p2,external,hive,external_remote,external_remote_hive") {
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = 
context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = 
context.config.otherConfigs.get("extHiveHmsPort")
-
-        sql """drop catalog if exists test_iceberg_predicate_conversion;"""
-        sql """
-            create catalog if not exists test_iceberg_predicate_conversion 
properties (
-                'type'='hms',
-                'hive.metastore.uris' = 
'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
-            );
-        """
-
-        sql """switch test_iceberg_predicate_conversion;"""
-        sql """ use `iceberg_catalog`; """
-
-        def sqlstr = """select glue_int, glue_varchar from iceberg_glue_types 
where glue_varchar > date '2023-03-07' """
-        order_qt_q01 """${sqlstr}""" 
-        explain {
-            sql("""${sqlstr}""")
-            contains """ref(name="glue_varchar") > "2023-03-07 00:00:00"""
-        }
-
-        sqlstr = """select l_shipdate from lineitem where l_shipdate in 
("1997-05-18", "1996-05-06"); """
-        order_qt_q02 """${sqlstr}""" 
-        explain {
-            sql("""${sqlstr}""")
-            contains """ref(name="l_shipdate") in"""
-            contains """"1997-05-18""""
-            contains """"1996-05-06""""
-        }
-
-        sqlstr = """select l_shipdate, l_shipmode from lineitem where 
l_shipdate in ("1997-05-18", "1996-05-06") and l_shipmode = "MAIL";"""
-        order_qt_q03 """${sqlstr}""" 
-        explain {
-            sql("""${sqlstr}""")
-            contains """ref(name="l_shipdate") in"""
-            contains """"1997-05-18""""
-            contains """"1996-05-06""""
-            contains """ref(name="l_shipmode") == "MAIL""""
-        }
-
-        sqlstr = """select l_shipdate, l_shipmode from lineitem where 
l_shipdate in ("1997-05-18", "1996-05-06") or NOT(l_shipmode = "MAIL") order by 
l_shipdate, l_shipmode limit 10"""
-        plan = """(ref(name="l_shipdate") in ("1997-05-18", "1996-05-06") or 
not(ref(name="l_shipmode") == "MAIL"))"""
-        order_qt_q04 """${sqlstr}""" 
-        explain {
-            sql("""${sqlstr}""")
-            contains """or not(ref(name="l_shipmode") == "MAIL"))"""
-            contains """ref(name="l_shipdate")"""
-            contains """"1997-05-18""""
-            contains """"1996-05-06""""
-        }
-
-        sqlstr = """select glue_timstamp from iceberg_glue_types where 
glue_timstamp > '2023-03-07 20:35:59' order by glue_timstamp limit 5"""
-        order_qt_q04 """${sqlstr}""" 
-        explain {
-            sql("""${sqlstr}""")
-            contains """ref(name="glue_timstamp") > 1678192559000000"""
-        }
-    }
-}
diff --git a/regression-test/suites/external_table_p2/paimon/paimon_base_types.groovy b/regression-test/suites/external_table_p2/paimon/paimon_base_types.groovy
deleted file mode 100644
index 74994404564..00000000000
--- a/regression-test/suites/external_table_p2/paimon/paimon_base_types.groovy
+++ /dev/null
@@ -1,81 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-suite("paimon_base_types", 
"p2,external,paimon,external_remote,external_remote_paimon") {
-    def all = """select * from all_table;"""
-    def c1 = """select * from all_table where c1=1;"""
-    def c2 = """select * from all_table where c2=2;"""
-    def c3 = """select * from all_table where c3=3;"""
-    def c4 = """select * from all_table where c4=4;"""
-    def c5 = """select * from all_table where c5=5;"""
-    def c6 = """select * from all_table where c6=6;"""
-    def c7 = """select * from all_table where c7=7;"""
-    def c8 = """select * from all_table where c8=8;"""
-    def c9 = """select * from all_table where c9<10;"""
-    def c10 = """select * from all_table where c10=10.1;"""
-    def c11 = """select * from all_table where c11=11.1;"""
-    def c12 = """select * from all_table where c12='2020-02-02';"""
-    def c13 = """select * from all_table where c13='13str';"""
-    def c14 = """select * from all_table where c14='14varchar';"""
-    def c15 = """select * from all_table where c15='a';"""
-    def c16 = """select * from all_table where c16=true;"""
-    def c18 = """select * from all_table where c18='2023-08-13 09:32:38.53';"""
-
-    String enabled = context.config.otherConfigs.get("enableExternalPaimonTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String catalog_name = "paimon"
-        String user_name = context.config.otherConfigs.get("extHiveHmsUser")
-        String hiveHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String hivePort = context.config.otherConfigs.get("extHdfsPort")
-
-        sql """drop catalog if exists ${catalog_name};"""
-        sql """
-            create catalog if not exists ${catalog_name} properties (
-                "type" = "paimon",
-                "paimon.catalog.type" = "filesystem",
-                "warehouse" = "hdfs://${hiveHost}:${hivePort}/paimon/paimon1",
-                "hadoop.username" = "${user_name}"
-            );
-        """
-        logger.info("catalog " + catalog_name + " created")
-        sql """switch ${catalog_name};"""
-        logger.info("switched to catalog " + catalog_name)
-        sql """use db1;"""
-        logger.info("use db1")
-
-        qt_all all
-        qt_c1 c1
-        qt_c2 c2
-        qt_c3 c3
-        qt_c4 c4
-        qt_c5 c5
-        qt_c6 c6
-        qt_c7 c7
-        qt_c8 c8
-        qt_c9 c9
-        qt_c10 c10
-        qt_c11 c11
-        qt_c12 c12
-        qt_c13 c13
-        qt_c14 c14
-        qt_c15 c15
-        qt_c16 c16
-        qt_c18 c18
-
-    }
-}
-
diff --git a/regression-test/suites/external_table_p2/paimon/paimon_timestamp_types.groovy b/regression-test/suites/external_table_p2/paimon/paimon_timestamp_types.groovy
deleted file mode 100644
index 6701130b2ad..00000000000
--- a/regression-test/suites/external_table_p2/paimon/paimon_timestamp_types.groovy
+++ /dev/null
@@ -1,58 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-suite("paimon_timestamp_types", 
"p2,external,paimon,external_remote,external_remote_paimon") {
-
-    def ts_orc = """select * from ts_orc"""
-    def ts_parquet = """select * from ts_parquet"""
-
-    String enabled = context.config.otherConfigs.get("enableExternalPaimonTest")
-    if (enabled != null && enabled.equalsIgnoreCase("enable_deprecated_case")) {
-        // The timestamp type of paimon has no logical or converted type,
-        // and is conflict with column type change from bigint to timestamp.
-        // Deprecated currently.
-        String catalog_name = "paimon_timestamp_catalog"
-        String user_name = context.config.otherConfigs.get("extHiveHmsUser")
-        String hiveHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String hivePort = context.config.otherConfigs.get("extHdfsPort")
-
-        sql """drop catalog if exists ${catalog_name};"""
-        sql """
-            create catalog if not exists ${catalog_name} properties (
-                "type" = "paimon",
-                "paimon.catalog.type" = "filesystem",
-                "warehouse" = "hdfs://${hiveHost}:${hivePort}/paimon/paimon1",
-                "hadoop.username" = "${user_name}"
-            );
-        """
-        logger.info("catalog " + catalog_name + " created")
-        sql """switch ${catalog_name};"""
-        logger.info("switched to catalog " + catalog_name)
-        sql """use db1;"""
-        logger.info("use db1")
-
-        sql """set force_jni_scanner=true"""
-        qt_c1 ts_orc
-        qt_c2 ts_parquet
-
-        sql """set force_jni_scanner=false"""
-        qt_c3 ts_orc
-        qt_c4 ts_parquet
-
-    }
-}
-


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
