This is an automated email from the ASF dual-hosted git repository.

agrove pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git


The following commit(s) were added to refs/heads/main by this push:
     new 3b18e1ad6 chore: show line of error sql (#3390)
3b18e1ad6 is described below

commit 3b18e1ad629de8c6071a3fab13012dd55488c66b
Author: Peter Lee <[email protected]>
AuthorDate: Thu Feb 5 01:42:51 2026 +0800

    chore: show line of error sql (#3390)
---
 .../org/apache/comet/CometSqlFileTestSuite.scala   | 40 +++++++++++++---------
 .../scala/org/apache/comet/SqlFileTestParser.scala | 30 ++++++++++++----
 2 files changed, 47 insertions(+), 23 deletions(-)

diff --git a/spark/src/test/scala/org/apache/comet/CometSqlFileTestSuite.scala b/spark/src/test/scala/org/apache/comet/CometSqlFileTestSuite.scala
index 80ccf9255..136152ef7 100644
--- a/spark/src/test/scala/org/apache/comet/CometSqlFileTestSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometSqlFileTestSuite.scala
@@ -77,25 +77,31 @@ class CometSqlFileTestSuite extends CometTestBase with AdaptiveSparkPlanHelper {
     "spark.sql.optimizer.excludedRules" ->
       "org.apache.spark.sql.catalyst.optimizer.ConstantFolding")
 
-  private def runTestFile(file: SqlTestFile): Unit = {
+  private def runTestFile(relativePath: String, file: SqlTestFile): Unit = {
     val allConfigs = file.configs ++ constantFoldingExcluded
     withSQLConf(allConfigs: _*) {
       withTable(file.tables: _*) {
         file.records.foreach {
-          case SqlStatement(sql) =>
-            spark.sql(sql)
-          case SqlQuery(sql, mode) =>
-            mode match {
-              case CheckCoverageAndAnswer =>
-                checkSparkAnswerAndOperator(sql)
-              case SparkAnswerOnly =>
-                checkSparkAnswer(sql)
-              case WithTolerance(tol) =>
-                checkSparkAnswerWithTolerance(sql, tol)
-              case ExpectFallback(reason) =>
-                checkSparkAnswerAndFallbackReason(sql, reason)
-              case Ignore(reason) =>
-                logInfo(s"IGNORED query (${reason}): $sql")
+          case SqlStatement(sql, line) =>
+            val location = if (line > 0) s"$relativePath:$line" else relativePath
+            withClue(s"In SQL file $location, executing statement:\n$sql\n") {
+              spark.sql(sql)
+            }
+          case SqlQuery(sql, mode, line) =>
+            val location = if (line > 0) s"$relativePath:$line" else relativePath
+            withClue(s"In SQL file $location, executing query:\n$sql\n") {
+              mode match {
+                case CheckCoverageAndAnswer =>
+                  checkSparkAnswerAndOperator(sql)
+                case SparkAnswerOnly =>
+                  checkSparkAnswer(sql)
+                case WithTolerance(tol) =>
+                  checkSparkAnswerWithTolerance(sql, tol)
+                case ExpectFallback(reason) =>
+                  checkSparkAnswerAndFallbackReason(sql, reason)
+                case Ignore(reason) =>
+                  logInfo(s"IGNORED query (${reason}): $sql")
+              }
             }
         }
       }
@@ -118,7 +124,7 @@ class CometSqlFileTestSuite extends CometTestBase with AdaptiveSparkPlanHelper {
           logInfo(s"SKIPPED (requires Spark ${parsed.minSparkVersion.get}): 
$relativePath")
         } else {
          val effectiveConfigs = parsed.configs ++ combinations.headOption.getOrElse(Seq.empty)
-          runTestFile(parsed.copy(configs = effectiveConfigs))
+          runTestFile(relativePath, parsed.copy(configs = effectiveConfigs))
         }
       }
     } else {
@@ -129,7 +135,7 @@ class CometSqlFileTestSuite extends CometTestBase with AdaptiveSparkPlanHelper {
           if (skip) {
             logInfo(s"SKIPPED (requires Spark ${parsed.minSparkVersion.get}): 
$relativePath")
           } else {
-            runTestFile(parsed.copy(configs = parsed.configs ++ matrixConfigs))
+            runTestFile(relativePath, parsed.copy(configs = parsed.configs ++ matrixConfigs))
           }
         }
       }
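
The withClue wrapper added above relies on ScalaTest prepending the clue to any
assertion failure raised inside it, so a failing check now reads "In SQL file
<path>:<line>, ...". A minimal, self-contained sketch of that behavior (the
suite name, file name, and line number here are made up for illustration, not
taken from the Comet suite):

    import org.scalatest.exceptions.TestFailedException
    import org.scalatest.funsuite.AnyFunSuite

    class WithClueLocationSketch extends AnyFunSuite {
      test("clue prefixes the failure with the SQL location") {
        val relativePath = "group-by.sql" // hypothetical test file
        val line = 12                     // hypothetical 1-based start line
        val location = if (line > 0) s"$relativePath:$line" else relativePath
        val thrown = intercept[TestFailedException] {
          withClue(s"In SQL file $location, executing query:\nSELECT 1\n") {
            assert(1 == 2) // stand-in for a failing answer check
          }
        }
        // withClue prepends the clue to the underlying failure message
        assert(thrown.getMessage.contains("group-by.sql:12"))
      }
    }
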
diff --git a/spark/src/test/scala/org/apache/comet/SqlFileTestParser.scala b/spark/src/test/scala/org/apache/comet/SqlFileTestParser.scala
index 62a349cde..7a98fd57b 100644
--- a/spark/src/test/scala/org/apache/comet/SqlFileTestParser.scala
+++ b/spark/src/test/scala/org/apache/comet/SqlFileTestParser.scala
@@ -26,11 +26,27 @@ import scala.io.Source
 /** A record in a SQL test file: either a statement (DDL/DML) or a query (SELECT). */
 sealed trait SqlTestRecord
 
-/** A SQL statement to execute (CREATE TABLE, INSERT, etc.). */
-case class SqlStatement(sql: String) extends SqlTestRecord
+/**
+ * A SQL statement to execute (CREATE TABLE, INSERT, etc.).
+ *
+ * @param sql
+ *   The SQL text.
+ * @param line
+ *   1-based line number in the original .sql file where the statement starts.
+ */
+case class SqlStatement(sql: String, line: Int) extends SqlTestRecord
 
-/** A SQL query whose results are compared between Spark and Comet. */
-case class SqlQuery(sql: String, mode: QueryAssertionMode = CheckCoverageAndAnswer)
+/**
+ * A SQL query whose results are compared between Spark and Comet.
+ *
+ * @param sql
+ *   The SQL text.
+ * @param mode
+ *   How to validate the query.
+ * @param line
+ *   1-based line number in the original .sql file where the query starts.
+ */
+case class SqlQuery(sql: String, mode: QueryAssertionMode = CheckCoverageAndAnswer, line: Int)
     extends SqlTestRecord
 
 sealed trait QueryAssertionMode
@@ -103,17 +119,19 @@ object SqlFileTestParser {
 
         case "statement" =>
           lineIdx += 1
+          val startLine = lineIdx + 1
           val (sql, nextIdx) = collectSql(lines, lineIdx)
           // Extract table names for cleanup
          CreateTablePattern.findFirstMatchIn(sql).foreach(m => tables += m.group(1))
-          records += SqlStatement(sql)
+          records += SqlStatement(sql, startLine)
           lineIdx = nextIdx
 
         case s if s.startsWith("query") =>
           val mode = parseQueryAssertionMode(s)
           lineIdx += 1
+          val startLine = lineIdx + 1
           val (sql, nextIdx) = collectSql(lines, lineIdx)
-          records += SqlQuery(sql, mode)
+          records += SqlQuery(sql, mode, startLine)
           lineIdx = nextIdx
 
         case _ =>
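
The parser tracks locations against a 0-based index into the file's lines:
after it consumes the "statement" or "query" directive line, lineIdx points at
the first line of the SQL body, so lineIdx + 1 is the 1-based file line
reported on failure. A standalone sketch of that bookkeeping (not the real
parser; the five-line test file below is made up):

    object LineBookkeepingSketch extends App {
      // Made-up test file; indices are 0-based, file lines are 1-based.
      val lines = Vector(
        "statement",             // index 0 -> file line 1
        "CREATE TABLE t(a INT)", // index 1 -> file line 2
        "",                      // index 2 -> file line 3
        "query",                 // index 3 -> file line 4
        "SELECT a FROM t")       // index 4 -> file line 5

      var lineIdx = 0
      while (lineIdx < lines.length) {
        lines(lineIdx).trim match {
          case s if s == "statement" || s.startsWith("query") =>
            lineIdx += 1                // step past the directive line
            val startLine = lineIdx + 1 // 0-based index -> 1-based file line
            println(s"SQL starts at file line $startLine: ${lines(lineIdx)}")
            lineIdx += 1                // single-line SQL, for simplicity
          case _ =>
            lineIdx += 1
        }
      }
      // Prints: SQL starts at file line 2: CREATE TABLE t(a INT)
      //         SQL starts at file line 5: SELECT a FROM t
    }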

