dejankrak-db commented on code in PR #49372:
URL: https://github.com/apache/spark/pull/49372#discussion_r1907103137


##########
sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala:
##########
@@ -42,32 +42,35 @@ class SqlScriptingExecution(
     val ctx = new SqlScriptingExecutionContext()
     val executionPlan = interpreter.buildExecutionPlan(sqlScript, args, ctx)
     // Add frame which represents SQL Script to the context.
-    ctx.frames.addOne(new SqlScriptingExecutionFrame(executionPlan.getTreeIterator))
+    ctx.frames.append(new SqlScriptingExecutionFrame(executionPlan.getTreeIterator))
     // Enter the scope of the top level compound.
     // We don't need to exit this scope explicitly as it will be done automatically
     // when the frame is removed during iteration.
     executionPlan.enterScope()
     ctx
   }
 
-  private var current: Option[DataFrame] = getNextResult
+  private var current: Option[DataFrame] = None
 
-  override def hasNext: Boolean = current.isDefined
+  override def hasNext: Boolean = {
+    current = getNextResult
+    current.isDefined
+  }
 
   override def next(): DataFrame = {
     current match {
       case None => throw SparkException.internalError("No more elements to iterate through.")
-      case Some(result) =>
-        current = getNextResult
-        result
+      case Some(result) => result
     }
   }
 
  /** Helper method to iterate get next statements from the first available frame. */
   private def getNextStatement: Option[CompoundStatementExec] = {
+    // Remove frames that are already executed.
     while (context.frames.nonEmpty && !context.frames.last.hasNext) {
       context.frames.remove(context.frames.size - 1)
     }
+    // If there are still frames available, get the next statement.

Review Comment:
   Thanks for adding comments throughout the code; it helps improve readability a lot!
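
For context on the hasNext/next restructuring in the hunk above, here is a minimal, self-contained Scala sketch of the lazy-fetch iterator pattern it moves toward. The names (LazyFetchIterator, fetch) are purely illustrative, not Spark classes, and the sketch caches the fetched element so that repeated hasNext calls stay idempotent:

    class LazyFetchIterator[T](fetch: () => Option[T]) extends Iterator[T] {
      // Caches the next element between a hasNext call and the next() call.
      private var current: Option[T] = None

      override def hasNext: Boolean = {
        // Fetch only if no unconsumed element is cached, so calling
        // hasNext several times in a row does not drop results.
        if (current.isEmpty) {
          current = fetch()
        }
        current.isDefined
      }

      override def next(): T = current match {
        case Some(result) =>
          current = None // mark the cached element as consumed
          result
        case None =>
          throw new NoSuchElementException("No more elements to iterate through.")
      }
    }

    // Usage: wrap any source that yields Option[T] until exhausted.
    object LazyFetchIteratorExample extends App {
      val source = Iterator("SELECT 1", "SELECT 2")
      val it = new LazyFetchIterator[String](() =>
        if (source.hasNext) Some(source.next()) else None)
      it.foreach(println) // prints SELECT 1, then SELECT 2
    }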


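Similarly, a small hypothetical sketch of the frame-cleanup step in getNextStatement: exhausted frames are popped off the end of a mutable buffer before the next statement is requested. Frame here is a stand-in type, not SqlScriptingExecutionFrame:

    import scala.collection.mutable.ArrayBuffer

    // Stand-in for an execution frame that iterates over statements.
    final class Frame(statements: Iterator[String]) {
      def hasNext: Boolean = statements.hasNext
      def next(): String = statements.next()
    }

    object FrameCleanupExample extends App {
      val frames = ArrayBuffer(
        new Frame(Iterator("SELECT 1")), // still has statements to run
        new Frame(Iterator.empty)        // already executed
      )

      // Remove frames that are already executed (exhausted), last first.
      while (frames.nonEmpty && !frames.last.hasNext) {
        frames.remove(frames.size - 1)
      }

      // If there are still frames available, get the next statement.
      val nextStatement =
        if (frames.nonEmpty) Some(frames.last.next()) else None
      println(nextStatement) // prints Some(SELECT 1)
    }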

-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

