This is an automated email from the ASF dual-hosted git repository. morningman pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push: new fef520c6176 [regression](catalog)Add test case of paimon complex type (#25834) fef520c6176 is described below commit fef520c61764bd13a2b9eb44cded11541509ec7b Author: DongLiang-0 <46414265+donglian...@users.noreply.github.com> AuthorDate: Wed Nov 1 09:59:55 2023 +0800 [regression](catalog)Add test case of paimon complex type (#25834) Add Paimon complex nested type regression case. Related pr:#25364 --- docs/en/docs/lakehouse/multi-catalog/paimon.md | 25 +- docs/zh-CN/docs/lakehouse/multi-catalog/paimon.md | 25 +- .../paimon/test_paimon_catalog.out | 370 +++++++++++++++++++++ .../paimon/test_paimon_catalog.groovy | 153 +++++++++ 4 files changed, 565 insertions(+), 8 deletions(-) diff --git a/docs/en/docs/lakehouse/multi-catalog/paimon.md b/docs/en/docs/lakehouse/multi-catalog/paimon.md index 062565fba6f..29c6a364e38 100644 --- a/docs/en/docs/lakehouse/multi-catalog/paimon.md +++ b/docs/en/docs/lakehouse/multi-catalog/paimon.md @@ -33,7 +33,7 @@ under the License. ## Instructions for use 1. When data in hdfs,need to put core-site.xml, hdfs-site.xml and hive-site.xml in the conf directory of FE and BE. First read the hadoop configuration file in the conf directory, and then read the related to the environment variable `HADOOP_CONF_DIR` configuration file. -2. The currently adapted version of the payment is 0.4.0 +2. The currently adapted version of Paimon is 0.5.0 ## Create Catalog @@ -64,7 +64,7 @@ CREATE CATALOG `paimon_hdfs` PROPERTIES ( > Note that. 
> -> user need download [paimon-s3-0.4.0-incubating.jar](https://repo.maven.apache.org/maven2/org/apache/paimon/paimon-s3/0.4.0-incubating/paimon-s3-0.4.0-incubating.jar) +> user need download [paimon-s3-0.5.0-incubating.jar](https://repo.maven.apache.org/maven2/org/apache/paimon/paimon-s3/0.5.0-incubating/paimon-s3-0.5.0-incubating.jar) > > Place it in directory > ${DORIS_HOME}/be/lib/java_extensions/preload-extensions and restart be > @@ -85,7 +85,7 @@ CREATE CATALOG `paimon_s3` PROPERTIES ( >Note that. > -> user need download [paimon-oss-0.4.0-incubating.jar](https://repo.maven.apache.org/maven2/org/apache/paimon/paimon-oss/0.4.0-incubating/paimon-oss-0.4.0-incubating.jar) +> user need download [paimon-oss-0.5.0-incubating.jar](https://repo.maven.apache.org/maven2/org/apache/paimon/paimon-oss/0.5.0-incubating/paimon-oss-0.5.0-incubating.jar) > Place it in directory > ${DORIS_HOME}/be/lib/java_extensions/preload-extensions and restart be @@ -120,4 +120,21 @@ CREATE CATALOG `paimon_hms` PROPERTIES ( ## Column Type Mapping -Same as that in Hive Catalogs. See the relevant section in [Hive](./hive.md). 
+| Paimon Data Type | Doris Data Type | Comment | +|---------------------------------------|---------------------------|-----------| +| BooleanType | Boolean | | +| TinyIntType | TinyInt | | +| SmallIntType | SmallInt | | +| IntType | Int | | +| FloatType | Float | | +| BigIntType | BigInt | | +| DoubleType | Double | | +| VarCharType | VarChar | | +| CharType | Char | | +| DecimalType(precision, scale) | Decimal(precision, scale) | | +| TimestampType,LocalZonedTimestampType | DateTime | | +| DateType | Date | | +| MapType | Map | Support Map nesting | +| ArrayType | Array | Support Array nesting | +| VarBinaryType, BinaryType | Binary | | + diff --git a/docs/zh-CN/docs/lakehouse/multi-catalog/paimon.md b/docs/zh-CN/docs/lakehouse/multi-catalog/paimon.md index 90443fef6f9..ab11cc32c7b 100644 --- a/docs/zh-CN/docs/lakehouse/multi-catalog/paimon.md +++ b/docs/zh-CN/docs/lakehouse/multi-catalog/paimon.md @@ -33,7 +33,7 @@ under the License. ## 使用须知 1. 数据放在hdfs时,需要将 core-site.xml,hdfs-site.xml 和 hive-site.xml 放到 FE 和 BE 的 conf 目录下。优先读取 conf 目录下的 hadoop 配置文件,再读取环境变量 `HADOOP_CONF_DIR` 的相关配置文件。 -2. 当前适配的paimon版本为0.4.0 +2. 
当前适配的paimon版本为0.5.0 ## 创建 Catalog @@ -64,7 +64,7 @@ CREATE CATALOG `paimon_hdfs` PROPERTIES ( > 注意: > -> 用户需要手动下载[paimon-s3-0.4.0-incubating.jar](https://repo.maven.apache.org/maven2/org/apache/paimon/paimon-s3/0.4.0-incubating/paimon-s3-0.4.0-incubating.jar) +> 用户需要手动下载[paimon-s3-0.5.0-incubating.jar](https://repo.maven.apache.org/maven2/org/apache/paimon/paimon-s3/0.5.0-incubating/paimon-s3-0.5.0-incubating.jar) > 放在${DORIS_HOME}/be/lib/java_extensions/preload-extensions目录下并重启be。 > @@ -85,7 +85,7 @@ CREATE CATALOG `paimon_s3` PROPERTIES ( >注意: > -> 用户需要手动下载[paimon-oss-0.4.0-incubating.jar](https://repo.maven.apache.org/maven2/org/apache/paimon/paimon-oss/0.4.0-incubating/paimon-oss-0.4.0-incubating.jar) +> 用户需要手动下载[paimon-oss-0.5.0-incubating.jar](https://repo.maven.apache.org/maven2/org/apache/paimon/paimon-oss/0.5.0-incubating/paimon-oss-0.5.0-incubating.jar) > 放在${DORIS_HOME}/be/lib/java_extensions/preload-extensions目录下并重启be ```sql @@ -119,5 +119,22 @@ CREATE CATALOG `paimon_hms` PROPERTIES ( ## 列类型映射 -和 Hive Catalog 基本一致,可参阅 [Hive Catalog](./hive.md) 中 **列类型映射** 一节。 +| Paimon Data Type | Doris Data Type | Comment | +|---------------------------------------|---------------------------|-----------| +| BooleanType | Boolean | | +| TinyIntType | TinyInt | | +| SmallIntType | SmallInt | | +| IntType | Int | | +| FloatType | Float | | +| BigIntType | BigInt | | +| DoubleType | Double | | +| VarCharType | VarChar | | +| CharType | Char | | +| DecimalType(precision, scale) | Decimal(precision, scale) | | +| TimestampType,LocalZonedTimestampType | DateTime | | +| DateType | Date | | +| MapType | Map | 支持Map嵌套 | +| ArrayType | Array | 支持Array嵌套 | +| VarBinaryType, BinaryType | Binary | | + diff --git a/regression-test/data/external_table_p0/paimon/test_paimon_catalog.out b/regression-test/data/external_table_p0/paimon/test_paimon_catalog.out index bc1a15cb369..2c8a3d5cd26 100644 --- a/regression-test/data/external_table_p0/paimon/test_paimon_catalog.out +++ 
b/regression-test/data/external_table_p0/paimon/test_paimon_catalog.out @@ -84,3 +84,373 @@ true \N \N \N 6666 9999 +-- !c27 -- +\N [1, NULL, NULL] [1, NULL, NULL, NULL] [4, NULL, NULL] [NULL, NULL] [NULL, 12, 13] [NULL, 15] [NULL, NULL] [18, 19, 20] [21.11, 22, 22] [NULL] [NULL] [NULL, NULL, 2023-10-24] ["13str", "arr_string"] ["14varchar", "arr_varcha"] [NULL, NULL] [NULL, NULL] [NULL, NULL, NULL] [NULL, NULL] {1:NULL, 0:NULL} {1:NULL, 0:NULL} {1:NULL, 2:NULL} {1:6, 0:NULL} {1:7} {0:NULL, 1:20} {1:NULL} {1:NULL} {0:21.11, 1:22.22} {1:23.333} {0:24.44, 1:NULL} {1:NULL, 4:2023-02-03} {0:"byte_string"} {0:"aa", 1:" [...] +1 [1, 0] [1, 2, 3] [4, 5, 6] [7, 8, 9] [10, 11, 12, 13] [14, 15] [16, 17] [18, 19, 20] [21.11, 22, 22] [23.333] [24.44] [2020-02-02, 2023-10-24] ["13str", "arr_string"] ["14varchar", "arr_varcha"] ["a", "b"] [1, 0, 1] ["aaaa", "bbb"] [2023-10-24 16:10:40.529, 2023-10-24 16:10:40.529] {1:0, 0:1} {0:2, 1:3} {1:4, 0:5} {1:6, 0:9} {1:7} {0:19, 1:20} {1:21} {1:22} {0:21.11, 1:22.22} {1:23.333} {0:24.44, 1:0.00} {1:2021-02-03, 4:2023-02-03} {0:"byte_string"} {0:"aa", 1:"bb"} {1:"o", 0:"k"} {1: [...] +2 [1, NULL, 0] [1, NULL, 2, 3] [4, 5, 6] [NULL, NULL] [NULL, 12, 13] [14, 15] [NULL, NULL] [18, 19, 20] [21.11, 22, 22] [23.333] [NULL, 0.00] [2020-02-02, NULL, 2023-10-24] ["13str", "arr_string"] ["14varchar", "arr_varcha"] ["a", "b"] [1, 0, 1] ["aaaa", "bbb", NULL, NULL] [2023-10-24 18:19:51.670, NULL, 2023-10-24 18:19:51.670] {1:NULL, 0:NULL} {1:NULL, 0:NULL} {1:4, 2:NULL} {1:6, 0:9} {1:7} {0:19, 1:20} {1:21} {1:22} {0:21.11, 1:22.22} {1:23.333} {0:25.55, 1:0.00} {1:NULL, 4:2023-02-03 [...] 
+ +-- !c28 -- +0 +0 +1 + +-- !c29 -- +1 +1 +1 + +-- !c30 -- +4 +4 +4 + +-- !c31 -- +7 +\N +\N + +-- !c32 -- +10 +12 +12 + +-- !c33 -- +14 +14 +15 + +-- !c34 -- +16 +\N +\N + +-- !c35 -- +18 +18 +18 + +-- !c36 -- +21.11 +21.11 +21.11 + +-- !c37 -- +23.333 +23.333 +\N + +-- !c38 -- +24.44 +0.00 +\N + +-- !c39 -- +2023-10-24 +2023-10-24 +2023-10-24 + +-- !c40 -- +2 +2 +2 + +-- !c41 -- +2 +2 +2 + +-- !c42 -- +2 +2 +2 + +-- !c43 -- +true +true +\N + +-- !c44 -- +2 +4 +3 + +-- !c45 -- +2023-10-24T16:10:40.529 +2023-10-24T18:19:51.670 +\N + +-- !c46 -- +1 +\N +\N + +-- !c47 -- +2 +\N +\N + +-- !c48 -- +5 +\N +\N + +-- !c49 -- +9 +9 +\N + +-- !c50 -- +7 +7 +7 + +-- !c51 -- +19 +19 +\N + +-- !c52 -- +\N +\N +\N + +-- !c53 -- +\N +\N +\N + +-- !c54 -- +21.11 +21.11 +21.11 + +-- !c55 -- +\N +\N +\N + +-- !c56 -- +24.44 +25.55 +24.44 + +-- !c57 -- +\N +\N +\N + +-- !c58 -- +byte_string +byte_string +byte_string + +-- !c59 -- +aa +aa +aa + +-- !c60 -- +k +k +k + +-- !c61 -- +true +true +true + +-- !c62 -- +bbb +bbb +bbb + +-- !c63 -- +2023-10-24T16:10:40.532 +2023-10-24T18:19:51.672 +2023-10-24T18:20:16.600 + +-- !c64 -- +1 +\N +\N + +-- !c65 -- +3 +3 +3 + +-- !c66 -- +4 +4 +4 + +-- !c67 -- +6 +\N +\N + +-- !c68 -- +7 +\N +\N + +-- !c69 -- +19 +19 +19 + +-- !c70 -- +21 +21 +21 + +-- !c71 -- +22 +22 +\N + +-- !c72 -- +21.11 +21.11 +21.11 + +-- !c73 -- +23.333 +23.333 +23.333 + +-- !c74 -- +24.44 +25.55 +24.44 + +-- !c75 -- +2021-02-03 +2021-02-03 +2021-02-03 + +-- !c76 -- +byte_string +byte_string +byte_string + +-- !c77 -- +aa +aa +aa + +-- !c78 -- +o +o +\N + +-- !c79 -- +true +\N +\N + +-- !c80 -- +bbb +bbb +bbb + +-- !c80 -- +2023-10-24T16:10:40.533 +2023-10-24T18:19:51.673 +\N + +-- !c80 -- +1 +1 +1 + +-- !c80 -- +3 +3 +3 + +-- !c80 -- +4 +4 +4 + +-- !c80 -- +6 +6 +6 + +-- !c80 -- +7 +7 +7 + +-- !c80 -- +19 +19 +19 + +-- !c80 -- +21 +\N +\N + +-- !c80 -- +22 +22 +22 + +-- !c90 -- +21.11 +21.11 +21.11 + +-- !c91 -- +23.333 +23.333 +23.333 + +-- !c92 -- +24.44 +25.55 +24.44 + 
+-- !c93 -- +2021-02-03 +2021-02-03 +2021-02-03 + +-- !c94 -- +byte_string +byte_string +byte_string + +-- !c95 -- +aa +aa +aa + +-- !c96 -- +o +o +o + +-- !c97 -- +true +true +true + +-- !c98 -- +bbb +bbb +bbb + +-- !c99 -- +2023-10-24T16:10:40.533 +2023-10-24T18:19:51.673 +\N + +-- !c100 -- +1 [[0, 1, 1], [0, 1, 1], [1, 1, 1]] [["3", "7", "a"], ["0", "e", "2"], ["b", "4", "5"]] [["77", "83", "1c"], ["af", "f0", "0b"], ["be", "d2", "7b"]] [["4a3aee3c345936f7a6eeb5307fc79d5f9ee3ae3a5e58edcfd4d40ec3d27353bf7b15a2eb6b6011e9ddf2c2cd1d6d50d6cc20", "00cb283c1a2bb2f3f244f89eafb79effd8aea55d5e68ce9d707250a3b47b65c0ea2b591b7145a56c801b9e6bda853e2f0581", "ae8fbe21e20f32a486757b5254faea09906bf451e096cc7d33b5a6fb56995c1601e7469a674f5e8475a2b86d2a69e02f9438"], ["6827f8e65869d476a9e2fdfa03 [...] +2 [[0, 0, 0], [1, 0, 0], [0, 1, 1]] [["a", "5", "f"], ["7", "2", "3"], ["1", "b", "f"]] [["11", "a8", "e3"], ["f4", "ee", "c3"], ["0f", "c6", "05"]] [["b723869515b24e9fbb54503f8a7584f083479998766213b784a9c530cbc0376bd5035054c657437251b85fa3dd41a0483776", "ee780d81d3e9faa36aad06522a09cf9b18e00614c991d2c079243dcc7190f3dd6559e75e2c1992270272d9a9c01e950c7bd4", "6e825a52cdae65786801caae53182956c80f88a48fa258a90d2c93302023c78b83f1dce758615a74731c9eef993a8c1dc4ad"], ["2ae97869372970f7ad7d0007a1 [...] +3 [[1, 0, 0], [1, 0, 1], [1, 0, 1]] [["c", "6", "4"], ["1", "e", "7"], ["7", "1", "d"]] [["50", "a9", "b0"], ["02", "bf", "3a"], ["0a", "1d", "9a"]] [["2df05ba6ce8661f0fb9bc88386a1ba67188e3e99a4142a0703e1cd8bdf041fbc20131e50bea2a9891498c638ebac842d3d46", "f6fecf3a4263ed8d4c5b63e4b3d9f4084b83835d9fba2046bf48d0a8068f2044c48271a1e9726741a09badea72c37cf18de3", "d23af3266db4eda12673f5c451d36343ba1cea00fbfeeff2165de40e834778eb96a1199cb523dd394b4f08824f6af2a7d894"], ["dc00e9b27b9540e170caf93805 [...] 
+ diff --git a/regression-test/suites/external_table_p0/paimon/test_paimon_catalog.groovy b/regression-test/suites/external_table_p0/paimon/test_paimon_catalog.groovy index 0b9b35d7aa5..03603fda0de 100644 --- a/regression-test/suites/external_table_p0/paimon/test_paimon_catalog.groovy +++ b/regression-test/suites/external_table_p0/paimon/test_paimon_catalog.groovy @@ -81,6 +81,85 @@ suite("test_paimon_catalog", "p0,external,doris,external_docker,external_docker_ def c26 = """select array_max(c2) from complex_tab""" def c25 = """select c3['a_test'], c3['b_test'], c3['bbb'], c3['ccc'] from complex_tab""" + def c27 = """select * from complex_all order by c1;""" + def c28 = """select array_min(c2) from complex_all""" + def c29 = """select array_min(c3) from complex_all""" + def c30= """select array_min(c4) from complex_all""" + def c31= """select array_min(c5) from complex_all""" + def c32= """select array_min(c6) from complex_all""" + def c33= """select array_min(c7) from complex_all""" + def c34= """select array_min(c8) from complex_all""" + def c35= """select array_min(c9) from complex_all""" + def c36= """select array_min(c10) from complex_all""" + def c37= """select array_max(c11) from complex_all""" + def c38= """select array_max(c12) from complex_all""" + def c39= """select array_max(c13) from complex_all""" + def c40= """select array_size(c14) from complex_all""" + def c41= """select array_size(c15) from complex_all""" + def c42= """select array_size(c16) from complex_all""" + def c43= """select array_max(c17) from complex_all""" + def c44= """select array_size(c18) from complex_all""" + def c45= """select array_max(c19) from complex_all""" + + def c46= """select c20[0] from complex_all""" + def c47= """select c21[0] from complex_all""" + def c48= """select c22[0] from complex_all""" + def c49= """select c23[0] from complex_all""" + def c50= """select c24[1] from complex_all""" + def c51= """select c25[0] from complex_all""" + def c52= """select c26[0] from 
complex_all""" + def c53= """select c27[0] from complex_all""" + def c54= """select c28[0] from complex_all""" + def c55= """select c29[0] from complex_all""" + def c56= """select c30[0] from complex_all""" + def c57= """select c31[0] from complex_all""" + def c58= """select c32[0] from complex_all""" + def c59= """select c33[0] from complex_all""" + def c60= """select c34[0] from complex_all""" + def c61= """select c35[0] from complex_all""" + def c62= """select c36[0] from complex_all""" + def c63= """select c37[0] from complex_all""" + + def c64= """select c38[2] from complex_all""" + def c65= """select c39[4] from complex_all""" + def c66= """select c40[5] from complex_all;""" + def c67= """select c41[7] from complex_all;""" + def c68= """select c42[9] from complex_all""" + def c69= """select c43[10] from complex_all""" + def c70= """select c44[12] from complex_all""" + def c71= """select c45[13] from complex_all""" + def c72= """select c46[14] from complex_all;""" + def c73= """select c47[16] from complex_all;""" + def c74= """select c48[17] from complex_all;""" + def c75= """select c49[19] from complex_all;""" + def c76= """select c50[21] from complex_all;""" + def c77= """select c51[22] from complex_all;""" + def c78= """select c52[25] from complex_all;""" + def c79= """select c53[27] from complex_all;""" + def c80= """select c54[29] from complex_all;""" + def c81= """select c55[30] from complex_all;""" + + def c82= """select c56[2] from complex_all""" + def c83= """select c57[4] from complex_all""" + def c84= """select c58[5] from complex_all;""" + def c85= """select c59[7] from complex_all;""" + def c86= """select c60[9] from complex_all""" + def c87= """select c61[10] from complex_all""" + def c88= """select c62[12] from complex_all""" + def c89= """select c63[13] from complex_all""" + def c90= """select c64[14] from complex_all;""" + def c91= """select c65[16] from complex_all;""" + def c92= """select c66[17] from complex_all;""" + def c93= """select 
c67[19] from complex_all;""" + def c94= """select c68[21] from complex_all;""" + def c95= """select c69[22] from complex_all;""" + def c96= """select c70[25] from complex_all;""" + def c97= """select c71[27] from complex_all;""" + def c98= """select c72[29] from complex_all;""" + def c99= """select c73[30] from complex_all;""" + + def c100= """select * from array_nested order by c1;""" + String hdfs_port = context.config.otherConfigs.get("hdfs_port") String catalog_name = "paimon1" String externalEnvIp = context.config.otherConfigs.get("externalEnvIp") @@ -119,5 +198,79 @@ suite("test_paimon_catalog", "p0,external,doris,external_docker,external_docker_ qt_c24 c24 qt_c25 c25 qt_c26 c26 + qt_c27 c27 + qt_c28 c28 + qt_c29 c29 + qt_c30 c30 + qt_c31 c31 + qt_c32 c32 + qt_c33 c33 + qt_c34 c34 + qt_c35 c35 + qt_c36 c36 + qt_c37 c37 + qt_c38 c38 + qt_c39 c39 + qt_c40 c40 + qt_c41 c41 + qt_c42 c42 + qt_c43 c43 + qt_c44 c44 + qt_c45 c45 + qt_c46 c46 + qt_c47 c47 + qt_c48 c48 + qt_c49 c49 + qt_c50 c50 + qt_c51 c51 + qt_c52 c52 + qt_c53 c53 + qt_c54 c54 + qt_c55 c55 + qt_c56 c56 + qt_c57 c57 + qt_c58 c58 + qt_c59 c59 + qt_c60 c60 + qt_c61 c61 + qt_c62 c62 + qt_c63 c63 + qt_c64 c64 + qt_c65 c65 + qt_c66 c66 + qt_c67 c67 + qt_c68 c68 + qt_c69 c69 + qt_c70 c70 + qt_c71 c71 + qt_c72 c72 + qt_c73 c73 + qt_c74 c74 + qt_c75 c75 + qt_c76 c76 + qt_c77 c77 + qt_c78 c78 + qt_c79 c79 + qt_c80 c80 + qt_c80 c81 + qt_c80 c82 + qt_c80 c83 + qt_c80 c84 + qt_c80 c85 + qt_c80 c86 + qt_c80 c87 + qt_c80 c88 + qt_c80 c89 + qt_c90 c90 + qt_c91 c91 + qt_c92 c92 + qt_c93 c93 + qt_c94 c94 + qt_c95 c95 + qt_c96 c96 + qt_c97 c97 + qt_c98 c98 + qt_c99 c99 + qt_c100 c100 } } --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org For additional commands, e-mail: commits-h...@doris.apache.org