davidm-db commented on code in PR #49427:
URL: https://github.com/apache/spark/pull/49427#discussion_r1918154394


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala:
##########
@@ -159,15 +159,99 @@ class AstBuilder extends DataTypeAstBuilder
     script
   }
 
+  /**
+   * Validates a SQLSTATE value: it must be exactly five alphanumeric characters
+   * and must not belong to one of the reserved classes — '00' (successful
+   * completion), '01' (warning) or 'XX' (internal errors).
+   *
+   * @throws SqlScriptingErrors.invalidSqlStateValue if the value is invalid.
+   */
+  private def assertSqlState(sqlState: String): Unit = {
+    val sqlStateRegex = "^[A-Za-z0-9]{5}$".r
+    // The regex accepts lower-case characters, so compare the two-character
+    // class prefix case-insensitively; otherwise e.g. "xx000" would slip past
+    // the reserved-class checks while "XX000" is rejected.
+    val sqlStateClass = sqlState.take(2).toUpperCase
+    if (sqlStateRegex.findFirstIn(sqlState).isEmpty
+      || sqlStateClass == "00"
+      || sqlStateClass == "01"
+      || sqlStateClass == "XX") {
+      throw SqlScriptingErrors.invalidSqlStateValue(CurrentOrigin.get, sqlState)
+    }
+  }
+
+  /**
+   * Resolves a single condition value: a quoted SQLSTATE literal is unquoted
+   * and validated, any other condition value is returned as its raw text.
+   */
+  override def visitConditionValue(ctx: ConditionValueContext): String = {
+    Option(ctx.sqlStateValue()) match {
+      case Some(stateCtx) =>
+        val sqlState = stateCtx.getText.replace("'", "")
+        assertSqlState(sqlState)
+        sqlState
+      case None =>
+        ctx.getText
+    }
+  }
+
+  /**
+   * Collects the condition values of a handler declaration, rejecting
+   * duplicates.
+   *
+   * Uses a LinkedHashSet so the returned Seq preserves the order in which the
+   * condition values were declared; a plain mutable.Set is hash-based and
+   * would return them in arbitrary order.
+   *
+   * @throws SqlScriptingErrors.duplicateConditionInHandlerDeclaration if the
+   *         same condition value appears more than once.
+   */
+  override def visitConditionValues(ctx: ConditionValuesContext): Seq[String] = {
+    val seen = scala.collection.mutable.LinkedHashSet[String]()
+    ctx.cvList.forEach { conditionValue =>
+      val elem = visit(conditionValue).asInstanceOf[String]
+      // LinkedHashSet.add returns false when the element is already present.
+      if (!seen.add(elem)) {
+        throw SqlScriptingErrors.duplicateConditionInHandlerDeclaration(CurrentOrigin.get, elem)
+      }
+    }
+    seen.toSeq
+  }
+
+  /**
+   * Builds an ErrorCondition from a DECLARE ... CONDITION statement. When no
+   * SQLSTATE is declared, the general user-defined state "45000" is assumed.
+   */
+  private def visitDeclareConditionStatementImpl(
+      ctx: DeclareConditionStatementContext): ErrorCondition = {
+    val conditionName = ctx.multipartIdentifier().getText
+    val sqlState = Option(ctx.sqlStateValue()) match {
+      case Some(stateCtx) => stateCtx.getText.replace("'", "")
+      case None => "45000"
+    }
+    assertSqlState(sqlState)
+    ErrorCondition(conditionName, sqlState)
+  }
+
+  /**
+   * Builds an ErrorHandler from a DECLARE ... HANDLER statement: resolves the
+   * handled condition values and wraps the handler body into a CompoundBody.
+   *
+   * @throws SqlScriptingErrors.continueHandlerNotSupported for CONTINUE
+   *         handlers — only EXIT handlers are supported.
+   */
+  private def visitDeclareHandlerStatementImpl(
+      ctx: DeclareHandlerStatementContext,
+      labelCtx: SqlScriptingLabelContext): ErrorHandler = {
+    val conditions = visit(ctx.conditionValues()).asInstanceOf[Seq[String]]
+
+    if (ctx.CONTINUE() != null) {
+      throw SqlScriptingErrors.continueHandlerNotSupported(CurrentOrigin.get)
+    }
+
+    val handlerType = HandlerType.EXIT
+
+    // The handler body is either an explicit BEGIN ... END compound body or a
+    // single statement, which gets wrapped into a one-element CompoundBody.
+    val body = Option(ctx.compoundBody()) match {
+      case Some(compoundBodyCtx) =>
+        visitCompoundBodyImpl(
+          compoundBodyCtx,
+          None,
+          allowVarDeclare = true,
+          labelCtx,
+          isScope = false)
+      case None =>
+        val singlePlan = visitChildren(ctx).asInstanceOf[LogicalPlan]
+        CompoundBody(Seq(SingleStatement(parsedPlan = singlePlan)), None, isScope = false)
+    }
+
+    ErrorHandler(conditions, body, handlerType)
+  }
+
   private def visitCompoundBodyImpl(
       ctx: CompoundBodyContext,
       label: Option[String],
       allowVarDeclare: Boolean,
       labelCtx: SqlScriptingLabelContext,
       isScope: Boolean): CompoundBody = {
     val buff = ListBuffer[CompoundPlanStatement]()
-    ctx.compoundStatements.forEach(
-      compoundStatement => buff += 
visitCompoundStatementImpl(compoundStatement, labelCtx))
+
+    val handlers = ListBuffer[ErrorHandler]()

Review Comment:
   Since we'll be adding similar constructs in the future, let's think about whether 
we can create a unified approach for checking these constraints, rather than 
handling each one separately — it would get really ugly otherwise, I think.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to