This is an automated email from the ASF dual-hosted git repository.

jasonmfehr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/impala.git

commit 80f5e3bfa398490224fbd06a170b268fe1717a51
Author: Daniel Vanko <[email protected]>
AuthorDate: Mon Aug 18 15:42:45 2025 +0200

    IMPALA-14322: Fix typo in IMPALA-12520
    
    In IMPALA-12520 affected paths were modified to test_warehouse
    (separated with underscore) instead of test-warehouse (with hyphen).
    This commit replaces the underscore with a hyphen.
    
    Change-Id: I3a9737af3e6169cc0cd144df53fd35e9e2b20468
    Reviewed-on: http://gerrit.cloudera.org:8080/23304
    Reviewed-by: Daniel Becker <[email protected]>
    Tested-by: Impala Public Jenkins <[email protected]>
---
 .../queries/QueryTest/iceberg-alter-default.test           |  2 +-
 .../queries/QueryTest/iceberg-alter-v1.test                |  2 +-
 .../queries/QueryTest/iceberg-alter-v2.test                |  2 +-
 .../functional-query/queries/QueryTest/iceberg-create.test | 14 +++++++-------
 .../queries/QueryTest/iceberg-negative.test                |  2 +-
 5 files changed, 11 insertions(+), 11 deletions(-)

diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-alter-default.test
 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-alter-default.test
index 7b51dd6e4..3a16b4a35 100644
--- 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-alter-default.test
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-alter-default.test
@@ -74,7 +74,7 @@ CREATE TABLE iceberg_hadoop_catalog(
 )
 STORED AS ICEBERG
 TBLPROPERTIES('iceberg.catalog'='hadoop.catalog',
-'iceberg.catalog_location'='$WAREHOUSE_LOCATION_PREFIX/test_warehouse/$DATABASE/hadoop_catalog_test');
+'iceberg.catalog_location'='$WAREHOUSE_LOCATION_PREFIX/test-warehouse/$DATABASE/hadoop_catalog_test');
 ALTER TABLE iceberg_hadoop_catalog ADD COLUMNS(event_time TIMESTAMP, 
register_time DATE);
 ALTER TABLE iceberg_hadoop_catalog ADD COLUMNS(message STRING, price 
DECIMAL(8,1));
 ALTER TABLE iceberg_hadoop_catalog ADD COLUMNS(map_test MAP <STRING, array 
<STRING>>, struct_test STRUCT <f1: BIGINT, f2: BIGINT>);
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-alter-v1.test 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-alter-v1.test
index 9aa74288a..1fe2c3525 100644
--- 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-alter-v1.test
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-alter-v1.test
@@ -74,7 +74,7 @@ CREATE TABLE iceberg_hadoop_catalog(
 )
 STORED AS ICEBERG
 TBLPROPERTIES('format-version'='1', 'iceberg.catalog'='hadoop.catalog',
-'iceberg.catalog_location'='$WAREHOUSE_LOCATION_PREFIX/test_warehouse/$DATABASE/hadoop_catalog_test');
+'iceberg.catalog_location'='$WAREHOUSE_LOCATION_PREFIX/test-warehouse/$DATABASE/hadoop_catalog_test');
 ALTER TABLE iceberg_hadoop_catalog ADD COLUMNS(event_time TIMESTAMP, 
register_time DATE);
 ALTER TABLE iceberg_hadoop_catalog ADD COLUMNS(message STRING, price 
DECIMAL(8,1));
 ALTER TABLE iceberg_hadoop_catalog ADD COLUMNS(map_test MAP <STRING, array 
<STRING>>, struct_test STRUCT <f1: BIGINT, f2: BIGINT>);
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-alter-v2.test 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-alter-v2.test
index 5e512f970..024b1a9a7 100644
--- 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-alter-v2.test
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-alter-v2.test
@@ -74,7 +74,7 @@ CREATE TABLE iceberg_hadoop_catalog(
 )
 STORED AS ICEBERG
 TBLPROPERTIES('format-version'='2', 'iceberg.catalog'='hadoop.catalog',
-'iceberg.catalog_location'='$WAREHOUSE_LOCATION_PREFIX/test_warehouse/$DATABASE/hadoop_catalog_test');
+'iceberg.catalog_location'='$WAREHOUSE_LOCATION_PREFIX/test-warehouse/$DATABASE/hadoop_catalog_test');
 ALTER TABLE iceberg_hadoop_catalog ADD COLUMNS(event_time TIMESTAMP, 
register_time DATE);
 ALTER TABLE iceberg_hadoop_catalog ADD COLUMNS(message STRING, price 
DECIMAL(8,1));
 ALTER TABLE iceberg_hadoop_catalog ADD COLUMNS(map_test MAP <STRING, array 
<STRING>>, struct_test STRUCT <f1: BIGINT, f2: BIGINT>);
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-create.test 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-create.test
index 451c81771..49b84ac7b 100644
--- a/testdata/workloads/functional-query/queries/QueryTest/iceberg-create.test
+++ b/testdata/workloads/functional-query/queries/QueryTest/iceberg-create.test
@@ -70,13 +70,13 @@ PARTITIONED BY SPEC
   TRUNCATE(15, level)
 )
 STORED AS ICEBERG
-LOCATION 
'$WAREHOUSE_LOCATION_PREFIX/test_warehouse/$DATABASE.iceberg_test_with_location'
+LOCATION 
'$WAREHOUSE_LOCATION_PREFIX/test-warehouse/$DATABASE.iceberg_test_with_location'
 TBLPROPERTIES('iceberg.catalog'='hadoop.tables');
 CREATE EXTERNAL TABLE iceberg_hadoop_tbls_external(
   level STRING
 )
 STORED AS ICEBERG
-LOCATION 
'$WAREHOUSE_LOCATION_PREFIX/test_warehouse/$DATABASE.iceberg_test_with_location'
+LOCATION 
'$WAREHOUSE_LOCATION_PREFIX/test-warehouse/$DATABASE.iceberg_test_with_location'
 TBLPROPERTIES('iceberg.catalog'='hadoop.tables');
 ---- RESULTS
 'Table has been created.'
@@ -97,7 +97,7 @@ STRING,BIGINT,BIGINT
 ---- QUERY
 CREATE EXTERNAL TABLE iceberg_hadoop_tbls_external_empty_col
 STORED AS ICEBERG
-LOCATION 
'$WAREHOUSE_LOCATION_PREFIX/test_warehouse/$DATABASE.iceberg_test_with_location'
+LOCATION 
'$WAREHOUSE_LOCATION_PREFIX/test-warehouse/$DATABASE.iceberg_test_with_location'
 TBLPROPERTIES('iceberg.catalog'='hadoop.tables');
 ---- RESULTS
 'Table has been created.'
@@ -137,7 +137,7 @@ PARTITIONED BY SPEC
 )
 STORED AS ICEBERG
 TBLPROPERTIES('iceberg.catalog'='hadoop.catalog',
-'iceberg.catalog_location'='$WAREHOUSE_LOCATION_PREFIX/test_warehouse/$DATABASE/hadoop_catalog_test');
+'iceberg.catalog_location'='$WAREHOUSE_LOCATION_PREFIX/test-warehouse/$DATABASE/hadoop_catalog_test');
 ---- RESULTS
 'Table has been created.'
 ====
@@ -171,13 +171,13 @@ PARTITIONED BY SPEC
 )
 STORED AS ICEBERG
 TBLPROPERTIES('iceberg.catalog'='hadoop.catalog',
-'iceberg.catalog_location'='$WAREHOUSE_LOCATION_PREFIX/test_warehouse/$DATABASE/hadoop_catalog_test');
+'iceberg.catalog_location'='$WAREHOUSE_LOCATION_PREFIX/test-warehouse/$DATABASE/hadoop_catalog_test');
 CREATE EXTERNAL TABLE iceberg_hadoop_cat_external(
   level STRING
 )
 STORED AS ICEBERG
 TBLPROPERTIES('iceberg.catalog'='hadoop.catalog',
-'iceberg.catalog_location'='$WAREHOUSE_LOCATION_PREFIX/test_warehouse/$DATABASE/hadoop_catalog_test',
 'iceberg.table_identifier'='$DATABASE.iceberg_hadoop_catalog');
+'iceberg.catalog_location'='$WAREHOUSE_LOCATION_PREFIX/test-warehouse/$DATABASE/hadoop_catalog_test',
 'iceberg.table_identifier'='$DATABASE.iceberg_hadoop_catalog');
 ---- RESULTS
 'Table has been created.'
 ====
@@ -476,7 +476,7 @@ create table ice_part_hadoop_catalog (
   col_identity
 ) stored as iceberg TBLPROPERTIES(
   'iceberg.catalog' = 'hadoop.catalog',
-  'iceberg.catalog_location' = 
'$WAREHOUSE_LOCATION_PREFIX/test_warehouse/$DATABASE/hadoop_catalog_test'
+  'iceberg.catalog_location' = 
'$WAREHOUSE_LOCATION_PREFIX/test-warehouse/$DATABASE/hadoop_catalog_test'
 );
 ---- RESULTS
 'Table has been created.'
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-negative.test 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-negative.test
index 2474901ca..ff9f2b5aa 100644
--- 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-negative.test
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-negative.test
@@ -266,7 +266,7 @@ CREATE TABLE iceberg_table_hadoop_catalog(
 )
 STORED AS ICEBERG
 TBLPROPERTIES('iceberg.catalog'='hadoop.catalog',
-'iceberg.catalog_location'='$WAREHOUSE_LOCATION_PREFIX/test_warehouse/$DATABASE/hadoop_catalog_test');
+'iceberg.catalog_location'='$WAREHOUSE_LOCATION_PREFIX/test-warehouse/$DATABASE/hadoop_catalog_test');
 ALTER TABLE iceberg_table_hadoop_catalog RENAME TO 
iceberg_table_hadoop_catalog_new;
 ---- CATCH
 UnsupportedOperationException: Cannot rename Iceberg tables that use 
'hadoop.catalog' as catalog.

Reply via email to