davidm-db commented on code in PR #49427: URL: https://github.com/apache/spark/pull/49427#discussion_r1925132255
########## sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionContext.scala: ########## @@ -111,15 +124,32 @@ class SqlScriptingExecutionFrame( } } - def findHandler(condition: String, sqlState: String): Option[ErrorHandlerExec] = { - if (scopes.isEmpty) { - throw SparkException.internalError(s"Cannot find handler: no scopes.") - } + def findHandler( + condition: String, + sqlState: String, + firstHandlerScope: Option[String]): Option[ErrorHandlerExec] = { + // Find the most outer scope where an active handler is defined. + // Search for handler should start from the scope that is surrounding that scope. + var found: Boolean = false scopes.reverseIterator.foreach { scope => - val handler = scope.findHandler(condition, sqlState) - if (handler.isDefined) { - return handler + // If there is no active handler or the current frame is a handler, try to find a handler + // in this scope. That is because handlers can have nested handlers defined in their body. + if (firstHandlerScope.isEmpty || frameType == SqlScriptingFrameType.HANDLER) { + found = true + } + + if (found) { + val handler = scope.findHandler(condition, sqlState) + if (handler.isDefined) { + return handler + } + } + + // If there are active handlers, iterate until we reach the most outer scope where + // an active handler is defined. + if (firstHandlerScope.isDefined && scope.label == firstHandlerScope.get) { + found = true } Review Comment: agreed offline on how to refactor this, please don't forget to add TODO comments for the follow-up improvement(s) -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For queries about this service, please contact Infrastructure at: us...@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org