This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 439ad309f4a3 [SPARK-55826][SQL] Rename `_LEGACY_ERROR_TEMP_0006` to 
`MERGE_INSERT_VALUE_COUNT_MISMATCH`
439ad309f4a3 is described below

commit 439ad309f4a3b6fda6a1049cee49d65be62b9e44
Author: ilicmarkodb <[email protected]>
AuthorDate: Thu Mar 5 12:06:35 2026 +0800

    [SPARK-55826][SQL] Rename `_LEGACY_ERROR_TEMP_0006` to 
`MERGE_INSERT_VALUE_COUNT_MISMATCH`
    
    ### What changes were proposed in this pull request?
    
    Rename the legacy error class `_LEGACY_ERROR_TEMP_0006` to 
`MERGE_INSERT_VALUE_COUNT_MISMATCH` and add SQL state `21S01`.
    
    This error is thrown when the number of inserted values does not match the 
number of fields in a MERGE NOT MATCHED INSERT clause.
    
    ### Why are the changes needed?

    Proper error messaging.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes. The error class name changes from `_LEGACY_ERROR_TEMP_0006` to 
`MERGE_INSERT_VALUE_COUNT_MISMATCH`, and the SQL state `21S01` is now included. 
The error message text remains identical.
    
    ### How was this patch tested?
    
    Existing tests.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    Yes, co-authored with Claude Code.
    
    Closes #54610 from ilicmarkodb/rename_LEGACY_ERROR_TEMP_0006.
    
    Authored-by: ilicmarkodb <[email protected]>
    Signed-off-by: Wenchen Fan <[email protected]>
---
 .../utils/src/main/resources/error/error-conditions.json  | 11 ++++++-----
 .../org/apache/spark/sql/errors/QueryParsingErrors.scala  | 11 +++++++++--
 .../org/apache/spark/sql/catalyst/parser/AstBuilder.scala |  3 ++-
 .../apache/spark/sql/catalyst/parser/DDLParserSuite.scala | 15 +++++++++++++++
 4 files changed, 32 insertions(+), 8 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json 
b/common/utils/src/main/resources/error/error-conditions.json
index 3e99d14baeae..b5b4115bf746 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -4544,6 +4544,12 @@
     ],
     "sqlState" : "23K01"
   },
+  "MERGE_INSERT_VALUE_COUNT_MISMATCH" : {
+    "message" : [
+      "The number of provided values (<actualCount>) must match the number of 
expected columns (<expectedCount>) in the INSERT clause of MERGE."
+    ],
+    "sqlState" : "21S01"
+  },
   "MERGE_WITHOUT_WHEN" : {
     "message" : [
       "There must be at least one WHEN clause in a MERGE statement."
@@ -7925,11 +7931,6 @@
       "Empty source for merge: you should specify a source table/subquery in 
merge."
     ]
   },
-  "_LEGACY_ERROR_TEMP_0006" : {
-    "message" : [
-      "The number of inserted values cannot match the fields."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_0012" : {
     "message" : [
       "DISTRIBUTE BY is not supported."
diff --git 
a/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala 
b/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
index 76a20f1d4d83..67f89e996da2 100644
--- 
a/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
+++ 
b/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
@@ -55,8 +55,15 @@ private[sql] object QueryParsingErrors extends 
DataTypeErrorsBase {
     new ParseException(errorClass = "_LEGACY_ERROR_TEMP_0004", ctx.source)
   }
 
-  def insertedValueNumberNotMatchFieldNumberError(ctx: 
NotMatchedClauseContext): Throwable = {
-    new ParseException(errorClass = "_LEGACY_ERROR_TEMP_0006", 
ctx.notMatchedAction())
+  def insertedValueNumberNotMatchColumnNumberError(
+      expectedCount: Int,
+      actualCount: Int,
+      ctx: NotMatchedClauseContext): Throwable = {
+    new ParseException(
+      errorClass = "MERGE_INSERT_VALUE_COUNT_MISMATCH",
+      messageParameters =
+        Map("expectedCount" -> expectedCount.toString, "actualCount" -> 
actualCount.toString),
+      ctx.notMatchedAction())
   }
 
   def mergeStatementWithoutWhenClauseError(ctx: MergeIntoTableContext): 
Throwable = {
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 49e6373a9c6b..a12add91f10e 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -1127,7 +1127,8 @@ class AstBuilder extends DataTypeAstBuilder
                 .asScala.map(attr => 
UnresolvedAttribute(visitMultipartIdentifier(attr)))
             val values = 
clause.notMatchedAction().expression().asScala.map(expression)
             if (columns.size != values.size) {
-              throw 
QueryParsingErrors.insertedValueNumberNotMatchFieldNumberError(clause)
+              throw 
QueryParsingErrors.insertedValueNumberNotMatchColumnNumberError(
+                columns.size, values.size, clause)
             }
             InsertAction(condition, columns.zip(values).map(kv => 
Assignment(kv._1, kv._2)).toSeq)
           }
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index f07e7edff72d..3e216d7ef5c6 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -2289,6 +2289,21 @@ class DDLParserSuite extends AnalysisTest {
         stop = 106))
   }
 
+  test("merge into table: inserted value count must match field count") {
+    val sqlText =
+      """MERGE INTO t1 USING t2 ON t1.id = t2.id
+        |WHEN NOT MATCHED THEN INSERT (col1, col2) VALUES (1)""".stripMargin
+    checkError(
+      exception = parseException(sqlText),
+      condition = "MERGE_INSERT_VALUE_COUNT_MISMATCH",
+      sqlState = "21S01",
+      parameters = Map("expectedCount" -> "2", "actualCount" -> "1"),
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = sqlText.length - 1))
+  }
+
   test("show views") {
     comparePlans(
       parsePlan("SHOW VIEWS"),


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to