dusantism-db commented on code in PR #49427: URL: https://github.com/apache/spark/pull/49427#discussion_r1917465961
########## sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala: ########## @@ -159,15 +159,99 @@ class AstBuilder extends DataTypeAstBuilder script } + private def assertSqlState(sqlState: String): Unit = { + val sqlStateRegex = "^[A-Za-z0-9]{5}$".r + if (sqlStateRegex.findFirstIn(sqlState).isEmpty + || sqlState.startsWith("00") + || sqlState.startsWith("01") + || sqlState.startsWith("XX")) { + throw SqlScriptingErrors.invalidSqlStateValue(CurrentOrigin.get, sqlState) + } + } + + override def visitConditionValue(ctx: ConditionValueContext): String = { + Option(ctx.sqlStateValue()) + .map { sqlStateValueContext => + val sqlState = sqlStateValueContext.getText.replace("'", "") + assertSqlState(sqlState) + sqlState + } + .getOrElse(ctx.getText) + } + + override def visitConditionValues(ctx: ConditionValuesContext): Seq[String] = { + val buff = scala.collection.mutable.Set[String]() + ctx.cvList.forEach { conditionValue => + val elem = visit(conditionValue).asInstanceOf[String] + if (buff(elem)) { + throw SqlScriptingErrors.duplicateConditionInHandlerDeclaration(CurrentOrigin.get, elem) + } + buff += elem + } + buff.toSeq + } + + private def visitDeclareConditionStatementImpl( + ctx: DeclareConditionStatementContext): ErrorCondition = { + val conditionName = ctx.multipartIdentifier().getText + val sqlState = Option(ctx.sqlStateValue()) + .map(_.getText.replace("'", "")).getOrElse("45000") + + assertSqlState(sqlState) + ErrorCondition(conditionName, sqlState) + } + + private def visitDeclareHandlerStatementImpl( + ctx: DeclareHandlerStatementContext, + labelCtx: SqlScriptingLabelContext): ErrorHandler = { + val conditions = visit(ctx.conditionValues()).asInstanceOf[Seq[String]] + + if (Option(ctx.CONTINUE()).isDefined) { + throw SqlScriptingErrors.continueHandlerNotSupported(CurrentOrigin.get) + } + + val handlerType = HandlerType.EXIT + + val body = if (Option(ctx.compoundBody()).isDefined) { + visitCompoundBodyImpl( + ctx.compoundBody(), 
+ None, + allowVarDeclare = true, + labelCtx, + isScope = false) + } else { + val logicalPlan = visitChildren(ctx).asInstanceOf[LogicalPlan] + CompoundBody(Seq(SingleStatement(parsedPlan = logicalPlan)), None, isScope = false) + } + + ErrorHandler(conditions, body, handlerType) + } + private def visitCompoundBodyImpl( ctx: CompoundBodyContext, label: Option[String], allowVarDeclare: Boolean, labelCtx: SqlScriptingLabelContext, isScope: Boolean): CompoundBody = { val buff = ListBuffer[CompoundPlanStatement]() - ctx.compoundStatements.forEach( - compoundStatement => buff += visitCompoundStatementImpl(compoundStatement, labelCtx)) + + val handlers = ListBuffer[ErrorHandler]() Review Comment: Should we have checks that declarations of handlers/conditions must be at the beginning of CompoundBodies, similar to how it is currently for variables? ########## sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4: ########## @@ -79,6 +81,29 @@ setStatementWithOptionalVarKeyword LEFT_PAREN query RIGHT_PAREN #setVariableWithOptionalKeyword ; +sqlStateValue + : stringLit + ; + +declareConditionStatement + : DECLARE multipartIdentifier CONDITION (FOR SQLSTATE VALUE? sqlStateValue)? + ; + +conditionValue + : sqlStateValue + | multipartIdentifier + | SQLEXCEPTION + | NOT FOUND + ; + +conditionValues + : cvList+=conditionValue (COMMA cvList+=conditionValue)* + ; + +declareHandlerStatement + : DECLARE (CONTINUE | EXIT) HANDLER FOR conditionValues (BEGIN compoundBody END | statement | setStatementWithOptionalVarKeyword) Review Comment: Why do we have `BEGIN compoundBody END` instead of `beginEndCompoundBlock`? Are labels not allowed here? -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org