cloud-fan commented on code in PR #49715: URL: https://github.com/apache/spark/pull/49715#discussion_r1950185321
########## sql/core/src/main/scala/org/apache/spark/sql/execution/adaptive/AdaptiveSparkPlanExec.scala: ########## @@ -521,6 +513,66 @@ case class AdaptiveSparkPlanExec( this.inputPlan == obj.asInstanceOf[AdaptiveSparkPlanExec].inputPlan } + /** + * We separate stage creation of result and non-result stages because there are several edge cases + * of result stage creation: + * - existing ResultQueryStage created in previous `withFinalPlanUpdate`. + * - the root node is a non-result query stage and we have to create a result query stage on top of + * it. + * - we create a non-result query stage as root node and the stage is immediately materialized + * due to stage reuse, therefore we have to create a result stage right after. + * + * This method wraps around `createNonResultQueryStages`, the general logic is: + * - Early return if ResultQueryStageExec already created before. + * - Create non result query stage if possible. + * - Try to create result query stage when there is no new non-result query stage created and all + * stages are materialized. + */ + private def createQueryStages( + resultHandler: SparkPlan => Any, + plan: SparkPlan, + firstRun: Boolean): CreateStageResult = { + // 1. Early return if ResultQueryStageExec is already created + plan match { + case resultStage@ResultQueryStageExec(_, optimizedPlan, _) => + assertStageNotFailed(resultStage) + return if (firstRun) { Review Comment: let's avoid early return and make the code more scala-ish ``` plan match { case resultStage ... case _ => // 2. Create non result query stage ... } ``` -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For queries about this service, please contact Infrastructure at: us...@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org