mbutrovich commented on code in PR #2840:
URL: https://github.com/apache/datafusion-comet/pull/2840#discussion_r2585966474
##########
spark/src/main/scala/org/apache/comet/rules/CometExecRule.scala:
##########
@@ -394,6 +332,53 @@ case class CometExecRule(session: SparkSession) extends
Rule[SparkPlan] {
}
}
+ private def operator2ProtoIfAllChildrenAreNative(op: SparkPlan):
Option[Operator] = {
+ if (op.children.forall(_.isInstanceOf[CometNativeExec])) {
+ operator2Proto(op,
op.children.map(_.asInstanceOf[CometNativeExec].nativeOp): _*)
+ } else {
+ None
+ }
+ }
+
+ /**
+ * Convert operator to proto and then apply a transformation to wrap the
proto in a new plan.
+ */
+ private def newPlanWithProto(op: SparkPlan, fun: Operator => SparkPlan):
SparkPlan = {
+ operator2ProtoIfAllChildrenAreNative(op).map(fun).getOrElse(op)
+ }
+
+ private def tryNativeShuffle(s: ShuffleExchangeExec): Option[SparkPlan] = {
+ Some(s)
+ .filter(_ => nativeShuffleSupported(s))
+ .flatMap(_ => operator2ProtoIfAllChildrenAreNative(s))
+ .map { nativeOp =>
+ // Switch to use Decimal128 regardless of precision, since Arrow
native execution
+ // doesn't support Decimal32 and Decimal64 yet.
Review Comment:
Unrelated: I'm not sure this is still true. I've seen a lot of code related
to `Decimal32` and `Decimal64` in DataFusion recently, so we might want to
revisit this assumption from the early days of Comet.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]