davidm-db commented on code in PR #47609:
URL: https://github.com/apache/spark/pull/47609#discussion_r1704237126
##########
sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala:
##########

@@ -27,29 +29,44 @@ import org.apache.spark.sql.test.SharedSparkSession
  * Output from the interpreter (iterator over executable statements) is then checked - statements
  * are executed and output DataFrames are compared with expected outputs.
  */
-class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
+class SqlScriptingInterpreterSuite extends SparkFunSuite with SharedSparkSession {

   // Helpers
-  private def verifySqlScriptResult(sqlText: String, expected: Seq[Seq[Row]]): Unit = {
-    val interpreter = SqlScriptingInterpreter()
-    val compoundBody = spark.sessionState.sqlParser.parseScript(sqlText)
-    val executionPlan = interpreter.buildExecutionPlan(compoundBody, spark)
-    val result = executionPlan.flatMap {
-      case statement: SingleStatementExec =>
-        if (statement.isExecuted) {
-          None
-        } else {
-          Some(Dataset.ofRows(spark, statement.parsedPlan, new QueryPlanningTracker))
-        }
-      case _ => None
-    }.toArray
-
+  private def verifySqlScriptResult(sqlText: String, expected: Seq[Array[Row]]): Unit = {
+    val interpreter = SqlScriptingInterpreter(spark)
+    val compoundBody = spark.sessionState.sqlParser.parsePlan(sqlText).asInstanceOf[CompoundBody]
+    val result = interpreter.executeInternal(compoundBody).toSeq
     assert(result.length == expected.length)
-    result.zip(expected).foreach { case (df, expectedAnswer) => checkAnswer(df, expectedAnswer) }

Review Comment:
   Sure, this depends on the discussion in the other comment - let's see what folks have to say there, and we'll adjust the tests accordingly.
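For reference, a caller of the updated helper would look roughly like the sketch below. This is illustrative only: the test name, the script body, and the expected rows are made up, while verifySqlScriptResult and the Seq[Array[Row]] shape follow the diff above and may still change depending on the outcome of the other discussion.

   test("single select") {
     // Hypothetical test body; assumes org.apache.spark.sql.Row is in scope, as in the suite.
     val sqlScript =
       """
         |BEGIN
         |  SELECT 1;
         |END
         |""".stripMargin
     // One Array[Row] per statement that produces a result set.
     val expected = Seq(Array(Row(1)))
     verifySqlScriptResult(sqlScript, expected)
   }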