cloud-fan commented on code in PR #49837:
URL: https://github.com/apache/spark/pull/49837#discussion_r1950851713


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/PullOutNondeterministic.scala:
##########
@@ -51,27 +52,12 @@ object PullOutNondeterministic extends Rule[LogicalPlan] {
     // from LogicalPlan, currently we only do it for UnaryNode which has same output
     // schema with its child.
     case p: UnaryNode if p.output == p.child.output && p.expressions.exists(!_.deterministic) =>
-      val nondeterToAttr = getNondeterToAttr(p.expressions)
+      val nondeterToAttr =
+        NondeterministicExpressionCollection.getNondeterministicToAttributes(p.expressions)
       val newPlan = p.transformExpressions { case e =>
         nondeterToAttr.get(e).map(_.toAttribute).getOrElse(e)
       }
       val newChild = Project(p.child.output ++ nondeterToAttr.values, p.child)
       Project(p.output, newPlan.withNewChildren(newChild :: Nil))
   }
-
-  private def getNondeterToAttr(exprs: Seq[Expression]): Map[Expression, NamedExpression] = {
-    exprs.filterNot(_.deterministic).flatMap { expr =>
-      val leafNondeterministic = expr.collect {
-        case n: Nondeterministic => n
-        case udf: UserDefinedExpression if !udf.deterministic => udf
-      }
-      leafNondeterministic.distinct.map { e =>
-        val ne = e match {
-          case n: NamedExpression => n
-          case _ => Alias(e, "_nondeterministic")()
-        }
-        e -> ne
-      }
-    }.toMap

Review Comment:
   shall we also keep the previous behavior of returning an immutable map?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to