andygrove commented on code in PR #2873:
URL: https://github.com/apache/datafusion-comet/pull/2873#discussion_r2608034250


##########
spark/src/main/scala/org/apache/comet/CometSparkSessionExtensions.scala:
##########
@@ -149,62 +120,10 @@ object CometSparkSessionExtensions extends Logging {
       "org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager"
   }
 
-  private[comet] def isCometScanEnabled(conf: SQLConf): Boolean = {
-    COMET_NATIVE_SCAN_ENABLED.get(conf)
-  }
-
-  private[comet] def isCometExecEnabled(conf: SQLConf): Boolean = {
-    COMET_EXEC_ENABLED.get(conf)
-  }
-
   def isCometScan(op: SparkPlan): Boolean = {
     op.isInstanceOf[CometBatchScanExec] || op.isInstanceOf[CometScanExec]
   }
 
-  def shouldApplySparkToColumnar(conf: SQLConf, op: SparkPlan): Boolean = {
-    // Only consider converting leaf nodes to columnar currently, so that all the following
-    // operators can have a chance to be converted to columnar. Leaf operators that output
-    // columnar batches, such as Spark's vectorized readers, will also be converted to native
-    // comet batches.
-    val fallbackReasons = new ListBuffer[String]()
-    if (CometSparkToColumnarExec.isSchemaSupported(op.schema, fallbackReasons)) {
-      op match {
-        // Convert Spark DS v1 scan to Arrow format
-        case scan: FileSourceScanExec =>
-          scan.relation.fileFormat match {
-            case _: CSVFileFormat => CometConf.COMET_CONVERT_FROM_CSV_ENABLED.get(conf)
-            case _: JsonFileFormat => CometConf.COMET_CONVERT_FROM_JSON_ENABLED.get(conf)
-            case _: ParquetFileFormat => CometConf.COMET_CONVERT_FROM_PARQUET_ENABLED.get(conf)
-            case _ => isSparkToArrowEnabled(conf, op)
-          }
-        // Convert Spark DS v2 scan to Arrow format
-        case scan: BatchScanExec =>
-          scan.scan match {
-            case _: CSVScan => CometConf.COMET_CONVERT_FROM_CSV_ENABLED.get(conf)
-            case _: JsonScan => CometConf.COMET_CONVERT_FROM_JSON_ENABLED.get(conf)
-            case _: ParquetScan => CometConf.COMET_CONVERT_FROM_PARQUET_ENABLED.get(conf)
-            case _ => isSparkToArrowEnabled(conf, op)
-          }
-        // other leaf nodes
-        case _: LeafExecNode =>
-          isSparkToArrowEnabled(conf, op)
-        case _ =>
-          // TODO: consider converting other intermediate operators to columnar.
-          false
-      }
-    } else {
-      false
-    }
-  }
-
-  private def isSparkToArrowEnabled(conf: SQLConf, op: SparkPlan) = {

Review Comment:
   Moved to `CometExecRule`
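
   For context, here is a rough sketch of how the relocated check could be wired into `CometExecRule`. This is not taken from the PR: the constructor, the `apply` body, and the import path for `CometSparkToColumnarExec` are assumptions for illustration; only the class name `CometExecRule` and the removed helper above come from the diff.

   ```scala
   import org.apache.spark.sql.catalyst.rules.Rule
   import org.apache.spark.sql.execution.{LeafExecNode, SparkPlan}
   import org.apache.spark.sql.internal.SQLConf

   // Assumed import path for the Comet wrapper operator; adjust to the actual package.
   import org.apache.spark.sql.comet.CometSparkToColumnarExec

   // Illustrative sketch only; the real rule in this PR may be wired differently.
   case class CometExecRule() extends Rule[SparkPlan] {

     override def apply(plan: SparkPlan): SparkPlan = {
       plan.transformUp {
         // Only leaf nodes are considered, so every downstream operator still
         // gets a chance to consume the Arrow batches produced by the wrapper.
         case op: LeafExecNode if shouldApplySparkToColumnar(conf, op) =>
           CometSparkToColumnarExec(op)
       }
     }

     // The check removed from CometSparkSessionExtensions (see the hunk above),
     // now living alongside the rest of the exec planning logic. Body elided;
     // it would keep the same per-file-format config dispatch as before.
     private def shouldApplySparkToColumnar(conf: SQLConf, op: SparkPlan): Boolean = {
       false // placeholder
     }
   }
   ```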


