This is an automated email from the ASF dual-hosted git repository.

mbutrovich pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git


The following commit(s) were added to refs/heads/main by this push:
     new 48ebd2816 docs: Improve documentation on maven usage for running tests (#3370)
48ebd2816 is described below

commit 48ebd281695552180064c2d022f350f99cb61e2d
Author: Andy Grove <[email protected]>
AuthorDate: Thu Feb 5 07:17:51 2026 -0700

    docs: Improve documentation on maven usage for running tests (#3370)
    
    * docs: remove -pl from mvn test commands and unnecessary mvn install steps
    
    Avoid using -pl spark when running tests since it can cause Maven to
    pick up stale artifacts from the local repository. Without -pl, Maven
    builds all modules from source, eliminating the need for a separate
    mvn install step before running tests or regenerating golden files.
    
    Also documents how to run individual SQL file tests.
    
    Co-Authored-By: Claude Opus 4.5 <[email protected]>
    
    * address feedback
    
    ---------
    
    Co-authored-by: Claude Opus 4.5 <[email protected]>
---
 dev/regenerate-golden-files.sh                  | 17 +++----------
 docs/source/contributor-guide/development.md    | 33 +++++++++----------------
 docs/source/contributor-guide/sql-file-tests.md | 15 +++++++++--
 3 files changed, 28 insertions(+), 37 deletions(-)

diff --git a/dev/regenerate-golden-files.sh b/dev/regenerate-golden-files.sh
index 56bfb2456..4fe2e283f 100755
--- a/dev/regenerate-golden-files.sh
+++ b/dev/regenerate-golden-files.sh
@@ -74,16 +74,6 @@ build_native() {
     cd native && cargo build && cd ..
 }
 
-# Install Comet for a specific Spark version
-install_for_spark_version() {
-    local spark_version=$1
-    echo ""
-    echo "=============================================="
-    echo "[INFO] Installing Comet for Spark $spark_version"
-    echo "=============================================="
-    ./mvnw install -DskipTests -Pspark-$spark_version
-}
-
 # Regenerate golden files for a specific Spark version
 regenerate_golden_files() {
     local spark_version=$1
@@ -94,12 +84,12 @@ regenerate_golden_files() {
     echo "=============================================="
 
     echo "[INFO] Running CometTPCDSV1_4_PlanStabilitySuite..."
-    SPARK_GENERATE_GOLDEN_FILES=1 ./mvnw -pl spark \
+    SPARK_GENERATE_GOLDEN_FILES=1 ./mvnw \
         -Dsuites="org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite" \
         -Pspark-$spark_version -nsu test
 
     echo "[INFO] Running CometTPCDSV2_7_PlanStabilitySuite..."
-    SPARK_GENERATE_GOLDEN_FILES=1 ./mvnw -pl spark \
+    SPARK_GENERATE_GOLDEN_FILES=1 ./mvnw \
         -Dsuites="org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite" \
         -Pspark-$spark_version -nsu test
 }
@@ -158,9 +148,8 @@ main() {
         versions=("3.4" "3.5" "4.0")
     fi
 
-    # Install and regenerate for each version
+    # Regenerate for each version
     for version in "${versions[@]}"; do
-        install_for_spark_version "$version"
         regenerate_golden_files "$version"
     done
 
diff --git a/docs/source/contributor-guide/development.md b/docs/source/contributor-guide/development.md
index 52519e7c6..14a67ff84 100644
--- a/docs/source/contributor-guide/development.md
+++ b/docs/source/contributor-guide/development.md
@@ -191,33 +191,24 @@ Spark version, and runs the plan stability tests with `SPARK_GENERATE_GOLDEN_FIL
 
 Alternatively, you can run the tests manually using the following commands.
 
-First, Comet needs to be installed for each Spark version to be tested:
-
-```sh
-./mvnw install -DskipTests -Pspark-3.4
-./mvnw install -DskipTests -Pspark-3.5
-# note that Spark 4.0 requires JDK 17 or later
-./mvnw install -DskipTests -Pspark-4.0
-```
-
 Note that the output files get written to `$SPARK_HOME`.
 
 The tests can be run with:
 
 ```sh
 export SPARK_HOME=`pwd`
-./mvnw -pl spark -Dsuites="org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite" -Pspark-3.4 -nsu test
-./mvnw -pl spark -Dsuites="org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite" -Pspark-3.5 -nsu test
-./mvnw -pl spark -Dsuites="org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite" -Pspark-4.0 -nsu test
+./mvnw -Dsuites="org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite" -Pspark-3.4 -nsu test
+./mvnw -Dsuites="org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite" -Pspark-3.5 -nsu test
+./mvnw -Dsuites="org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite" -Pspark-4.0 -nsu test
 ```
 
 and
 
 ```sh
 export SPARK_HOME=`pwd`
-./mvnw -pl spark -Dsuites="org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite" -Pspark-3.4 -nsu test
-./mvnw -pl spark -Dsuites="org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite" -Pspark-3.5 -nsu test
-./mvnw -pl spark -Dsuites="org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite" -Pspark-4.0 -nsu test
+./mvnw -Dsuites="org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite" -Pspark-3.4 -nsu test
+./mvnw -Dsuites="org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite" -Pspark-3.5 -nsu test
+./mvnw -Dsuites="org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite" -Pspark-4.0 -nsu test
 ```
 
 If your pull request changes the query plans generated by Comet, you should 
regenerate the golden files.
@@ -225,18 +216,18 @@ To regenerate the golden files, you can run the following commands.
 
 ```sh
 export SPARK_HOME=`pwd`
-SPARK_GENERATE_GOLDEN_FILES=1 ./mvnw -pl spark -Dsuites="org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite" -Pspark-3.4 -nsu test
-SPARK_GENERATE_GOLDEN_FILES=1 ./mvnw -pl spark -Dsuites="org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite" -Pspark-3.5 -nsu test
-SPARK_GENERATE_GOLDEN_FILES=1 ./mvnw -pl spark -Dsuites="org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite" -Pspark-4.0 -nsu test
+SPARK_GENERATE_GOLDEN_FILES=1 ./mvnw -Dsuites="org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite" -Pspark-3.4 -nsu test
+SPARK_GENERATE_GOLDEN_FILES=1 ./mvnw -Dsuites="org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite" -Pspark-3.5 -nsu test
+SPARK_GENERATE_GOLDEN_FILES=1 ./mvnw -Dsuites="org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite" -Pspark-4.0 -nsu test
 ```
 
 and
 
 ```sh
 export SPARK_HOME=`pwd`
-SPARK_GENERATE_GOLDEN_FILES=1 ./mvnw -pl spark -Dsuites="org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite" -Pspark-3.4 -nsu test
-SPARK_GENERATE_GOLDEN_FILES=1 ./mvnw -pl spark -Dsuites="org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite" -Pspark-3.5 -nsu test
-SPARK_GENERATE_GOLDEN_FILES=1 ./mvnw -pl spark -Dsuites="org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite" -Pspark-4.0 -nsu test
+SPARK_GENERATE_GOLDEN_FILES=1 ./mvnw -Dsuites="org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite" -Pspark-3.4 -nsu test
+SPARK_GENERATE_GOLDEN_FILES=1 ./mvnw -Dsuites="org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite" -Pspark-3.5 -nsu test
+SPARK_GENERATE_GOLDEN_FILES=1 ./mvnw -Dsuites="org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite" -Pspark-4.0 -nsu test
 ```
 
 ## Benchmark
diff --git a/docs/source/contributor-guide/sql-file-tests.md b/docs/source/contributor-guide/sql-file-tests.md
index f521cb4ad..b2dee3a3b 100644
--- a/docs/source/contributor-guide/sql-file-tests.md
+++ b/docs/source/contributor-guide/sql-file-tests.md
@@ -25,10 +25,21 @@ way to add expression and operator test coverage without writing Scala test code
 
 ## Running the tests
 
+Run all SQL file tests:
+
+```shell
+./mvnw test -Dsuites="org.apache.comet.CometSqlFileTestSuite" -Dtest=none
+```
+
+Run a single test file by adding the file name (without `.sql` extension) after the suite name:
+
 ```shell
-mvn test -pl spark -Dsuites="org.apache.comet.CometSqlFileTestSuite" -Dtest=none
+./mvnw test -Dsuites="org.apache.comet.CometSqlFileTestSuite create_named_struct" -Dtest=none
 ```
 
+This uses ScalaTest's substring matching, so the argument must match part of the test name.
+Test names follow the pattern `sql-file: expressions/<category>/<file>.sql [<config>]`.
+
 ## Test file location
 
 SQL test files live under:
@@ -208,7 +219,7 @@ SELECT space(n) FROM test_space WHERE n < 0
 6. Run the tests to verify:
 
    ```shell
-   mvn test -pl spark -Dsuites="org.apache.comet.CometSqlFileTestSuite" -Dtest=none
+   ./mvnw test -Dsuites="org.apache.comet.CometSqlFileTestSuite" -Dtest=none
    ```
 
 ### Tips for writing thorough tests


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to