davidm-db commented on code in PR #49427:
URL: https://github.com/apache/spark/pull/49427#discussion_r1916326005


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala:
##########
@@ -159,15 +159,97 @@ class AstBuilder extends DataTypeAstBuilder
     script
   }
 
+  private def assertSqlState(sqlState: String): Unit = {
+    val sqlStateRegex = "^[A-Za-z0-9]{5}$".r
+    assert(sqlStateRegex.findFirstIn(sqlState).isDefined,
+      "SQLSTATE must be exactly 5 characters long and contain only A-Z and 
0-9.")
+    assert(!sqlState.startsWith("00") && !sqlState.startsWith("01") && 
!sqlState.startsWith("XX"),
+      "SQLSTATE must not start with '00', '01', or 'XX'.")
+  }
+
+  override def visitConditionValue(ctx: ConditionValueContext): String = {
+    Option(ctx.sqlStateValue())
+      .map { sqlStateValueContext =>
+        val sqlState = sqlStateValueContext.getText.replace("'", "")
+        assertSqlState(sqlState)
+        sqlState
+      }
+      .getOrElse(ctx.getText)
+  }
+
+  override def visitConditionValues(ctx: ConditionValuesContext): Seq[String] = {
+    val buff = scala.collection.mutable.Set[String]()
+    ctx.cvList.forEach { conditionValue =>
+      val elem = visit(conditionValue).asInstanceOf[String]
+      if (buff(elem)) {
+        throw SqlScriptingErrors.duplicateConditionInHandlerDeclaration(CurrentOrigin.get, elem)
+      }
+      buff += elem
+    }
+    buff.toSeq
+  }
+
+  private def visitDeclareConditionStatementImpl(
+      ctx: DeclareConditionStatementContext): ErrorCondition = {
+    val conditionName = ctx.multipartIdentifier().getText
+    val sqlState = Option(ctx.sqlStateValue())
+      .map(_.getText.replace("'", "")).getOrElse("45000")
+
+    assertSqlState(sqlState)
+    ErrorCondition(conditionName, sqlState)
+  }
+
+  private def visitDeclareHandlerStatementImpl(
+      ctx: DeclareHandlerStatementContext,
+      labelCtx: SqlScriptingLabelContext): ErrorHandler = {
+    val conditions = visit(ctx.conditionValues()).asInstanceOf[Seq[String]]
+
+    if (Option(ctx.CONTINUE()).isDefined) {
+      throw SqlScriptingErrors.continueHandlerNotSupported(CurrentOrigin.get)
+    }
+
+    val handlerType = HandlerType.EXIT
+
+    val body = if (Option(ctx.compoundBody()).isDefined) {
+      visitCompoundBodyImpl(
+        ctx.compoundBody(),
+        None,
+        allowVarDeclare = true,
+        labelCtx,
+        isScope = false)
+    } else {
+      val logicalPlan = visitChildren(ctx).asInstanceOf[LogicalPlan]
+      CompoundBody(Seq(SingleStatement(parsedPlan = logicalPlan)), None, isScope = false)
+    }
+
+    ErrorHandler(conditions, body, handlerType)
+  }
+
   private def visitCompoundBodyImpl(
       ctx: CompoundBodyContext,
       label: Option[String],
       allowVarDeclare: Boolean,
       labelCtx: SqlScriptingLabelContext,
       isScope: Boolean): CompoundBody = {
     val buff = ListBuffer[CompoundPlanStatement]()
-    ctx.compoundStatements.forEach(
-      compoundStatement => buff += visitCompoundStatementImpl(compoundStatement, labelCtx))
+
+    val handlers = ListBuffer[ErrorHandler]()
+    val conditions = HashMap[String, String]()
+
+    ctx.compoundStatements.forEach(compoundStatement => {
+      val stmt = visitCompoundStatementImpl(compoundStatement, labelCtx)
+      stmt match {
+        case handler: ErrorHandler => handlers += handler
+        case condition: ErrorCondition =>
+          // Check for duplicate condition names in each scope.
+          if (conditions.contains(condition.conditionName)) {
+            throw SparkException.internalError(
+              s"Duplicate condition name ${condition.conditionName}.")

Review Comment:
   Why is this an internal error? Isn't it a user mistake?
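
   For illustration, a user-facing check could mirror the
   duplicateConditionInHandlerDeclaration error raised earlier in this diff
   for duplicate condition values. The minimal standalone sketch below is not
   the PR's code: DuplicateConditionException and registerCondition are
   hypothetical names standing in for a helper that would live in
   SqlScriptingErrors, and the map is assumed to record
   conditionName -> sqlState.

       import scala.collection.mutable

       // Hypothetical stand-in for a user-facing SqlScriptingErrors helper,
       // analogous to duplicateConditionInHandlerDeclaration above.
       final case class DuplicateConditionException(conditionName: String)
         extends Exception(
           s"Condition '$conditionName' is already declared in this scope.")

       // Register a declared condition, rejecting duplicates as a user error
       // rather than an internal one; the map holds conditionName -> sqlState.
       def registerCondition(
           conditions: mutable.HashMap[String, String],
           conditionName: String,
           sqlState: String): Unit = {
         if (conditions.contains(conditionName)) {
           throw DuplicateConditionException(conditionName)
         }
         conditions += conditionName -> sqlState
       }

   Wired into visitCompoundBodyImpl, such a throw would presumably also carry
   the parser position via CurrentOrigin.get, as the handler path already does.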


