This is an automated email from the ASF dual-hosted git repository.

joemcdonnell pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/impala.git


The following commit(s) were added to refs/heads/master by this push:
     new d5f7cc3a0 IMPALA-14138: Addendum test fix
d5f7cc3a0 is described below

commit d5f7cc3a0b39e106ef7c090338e5ab1afdef8617
Author: Zoltan Borok-Nagy <[email protected]>
AuthorDate: Fri Jul 18 12:29:29 2025 +0200

    IMPALA-14138: Addendum test fix
    
    This patch moves a query out of no-block-locations.test so that it
    runs only on HDFS, because the queried Iceberg table has Iceberg
    delete files that contain HDFS-specific URIs.
    
    Change-Id: Iea862dd3b73a9aceceeb848d0ac85ac87627c8c2
    Reviewed-on: http://gerrit.cloudera.org:8080/23189
    Reviewed-by: Daniel Becker <[email protected]>
    Reviewed-by: Csaba Ringhofer <[email protected]>
    Tested-by: Impala Public Jenkins <[email protected]>
---
 .../queries/QueryTest/no-block-locations-hdfs-only.test   | 15 +++++++++++++++
 .../queries/QueryTest/no-block-locations.test             | 12 ------------
 tests/custom_cluster/test_disabled_block_locations.py     |  3 +++
 3 files changed, 18 insertions(+), 12 deletions(-)

diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/no-block-locations-hdfs-only.test
 
b/testdata/workloads/functional-query/queries/QueryTest/no-block-locations-hdfs-only.test
new file mode 100644
index 000000000..b3fc8b3d1
--- /dev/null
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/no-block-locations-hdfs-only.test
@@ -0,0 +1,15 @@
+====
+---- QUERY
+# The following query needs to run against HDFS because 
iceberg_lineitem_sixblocks
+# contains position delete files that contain HDFS-specific URIs.
+# 'iceberg_lineitem_sixblocks' contains a single data file with six HDFS 
blocks. Without
+# block information we schedule the whole data file to a single SCAN operator.
+select count(*) from functional_parquet.iceberg_lineitem_sixblocks where 
l_orderkey % 2 = 0;
+---- RESULTS
+9805
+---- TYPES
+BIGINT
+---- RUNTIME_PROFILE
+# The following should be in the ExecSummary
+row_regex: 00:SCAN [A-Z0-9]+ +1 +1 +.*
+====
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/no-block-locations.test 
b/testdata/workloads/functional-query/queries/QueryTest/no-block-locations.test
index e63848251..9df5d0c94 100644
--- 
a/testdata/workloads/functional-query/queries/QueryTest/no-block-locations.test
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/no-block-locations.test
@@ -36,15 +36,3 @@ BIGINT
 # The following should be in the ExecSummary
 row_regex: 00:SCAN [A-Z0-9]+ +1 +1 +.*
 ====
----- QUERY
-# 'iceberg_lineitem_sixblocks' contains a single data file with six HDFS 
blocks. Without
-# block information we schedule the whole data file to a single SCAN operator.
-select count(*) from functional_parquet.iceberg_lineitem_sixblocks where 
l_orderkey % 2 = 0;
----- RESULTS
-9805
----- TYPES
-BIGINT
----- RUNTIME_PROFILE
-# The following should be in the ExecSummary
-row_regex: 00:SCAN [A-Z0-9]+ +1 +1 +.*
-====
diff --git a/tests/custom_cluster/test_disabled_block_locations.py 
b/tests/custom_cluster/test_disabled_block_locations.py
index 504faac06..172a175ed 100644
--- a/tests/custom_cluster/test_disabled_block_locations.py
+++ b/tests/custom_cluster/test_disabled_block_locations.py
@@ -18,6 +18,7 @@
 from __future__ import absolute_import, division, print_function
 import pytest
 from os import getenv
+from tests.util.filesystem_utils import IS_HDFS
 
 from tests.common.custom_cluster_test_suite import CustomClusterTestSuite
 
@@ -34,3 +35,5 @@ class TestDisabledBlockLocations(CustomClusterTestSuite):
   @CustomClusterTestSuite.with_args(custom_core_site_dir=CORE_SITE_CONFIG_DIR)
   def test_no_block_locations(self, vector):
     self.run_test_case('QueryTest/no-block-locations', vector)
+    if IS_HDFS:
+      self.run_test_case('QueryTest/no-block-locations-hdfs-only', vector)

Reply via email to