This is an automated email from the ASF dual-hosted git repository.
agrove pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git
The following commit(s) were added to refs/heads/main by this push:
new 87d1524af fix: respect scan impl config for v2 scan (#3357)
87d1524af is described below
commit 87d1524afefc77f5908300de339dd8dfd6cc1ab9
Author: Andy Grove <[email protected]>
AuthorDate: Sun Feb 1 12:38:14 2026 -0700
fix: respect scan impl config for v2 scan (#3357)
---
spark/src/main/scala/org/apache/comet/rules/CometScanRule.scala | 2 +-
spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala | 3 ++-
.../src/test/scala/org/apache/comet/parquet/ParquetReadSuite.scala | 2 +-
.../src/test/scala/org/apache/comet/rules/CometScanRuleSuite.scala | 6 ++++--
4 files changed, 8 insertions(+), 5 deletions(-)
diff --git a/spark/src/main/scala/org/apache/comet/rules/CometScanRule.scala b/spark/src/main/scala/org/apache/comet/rules/CometScanRule.scala
index 68a63b6ae..45faa4d94 100644
--- a/spark/src/main/scala/org/apache/comet/rules/CometScanRule.scala
+++ b/spark/src/main/scala/org/apache/comet/rules/CometScanRule.scala
@@ -228,7 +228,7 @@ case class CometScanRule(session: SparkSession) extends Rule[SparkPlan] with Com
private def transformV2Scan(scanExec: BatchScanExec): SparkPlan = {
scanExec.scan match {
- case scan: ParquetScan =>
+ case scan: ParquetScan if COMET_NATIVE_SCAN_IMPL.get() == SCAN_NATIVE_COMET =>
val fallbackReasons = new ListBuffer[String]()
val schemaSupported =
CometBatchScanExec.isSchemaSupported(scan.readDataSchema, fallbackReasons)
diff --git a/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala b/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala
index 696a12d4a..2d07c03c1 100644
--- a/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala
@@ -382,9 +382,10 @@ class CometExecSuite extends CometTestBase {
}
}
- test("ReusedExchangeExec should work on CometBroadcastExchangeExec") {
+ test("ReusedExchangeExec should work on CometBroadcastExchangeExec with V2 scan") {
withSQLConf(
CometConf.COMET_EXEC_BROADCAST_FORCE_ENABLED.key -> "true",
+ CometConf.COMET_NATIVE_SCAN_IMPL.key -> CometConf.SCAN_NATIVE_COMET,
SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "false",
SQLConf.USE_V1_SOURCE_LIST.key -> "") {
withTempPath { path =>
diff --git a/spark/src/test/scala/org/apache/comet/parquet/ParquetReadSuite.scala b/spark/src/test/scala/org/apache/comet/parquet/ParquetReadSuite.scala
index e4486e940..3da00a240 100644
--- a/spark/src/test/scala/org/apache/comet/parquet/ParquetReadSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/parquet/ParquetReadSuite.scala
@@ -2045,7 +2045,7 @@ class ParquetReadV2Suite extends ParquetReadSuite with AdaptiveSparkPlanHelper {
case (cometEnabled, expectedScanner) =>
testScanner(
cometEnabled,
- CometConf.SCAN_NATIVE_DATAFUSION,
+ CometConf.SCAN_NATIVE_COMET,
scanner = expectedScanner,
v1 = None)
}
diff --git a/spark/src/test/scala/org/apache/comet/rules/CometScanRuleSuite.scala b/spark/src/test/scala/org/apache/comet/rules/CometScanRuleSuite.scala
index d0dfbbb09..c7a07b3f1 100644
--- a/spark/src/test/scala/org/apache/comet/rules/CometScanRuleSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/rules/CometScanRuleSuite.scala
@@ -101,11 +101,13 @@ class CometScanRuleSuite extends CometTestBase {
}
}
- test("CometExecRule should replace BatchScanExec, but only when Comet is enabled") {
+ test("CometScanRule should replace V2 BatchScanExec, but only when Comet is enabled") {
withTempPath { path =>
createTestDataFrame.write.parquet(path.toString)
withTempView("test_data") {
- withSQLConf(SQLConf.USE_V1_SOURCE_LIST.key -> "") {
+ withSQLConf(
+ SQLConf.USE_V1_SOURCE_LIST.key -> "",
CometConf.COMET_NATIVE_SCAN_IMPL.key -> CometConf.SCAN_NATIVE_COMET) {
spark.read.parquet(path.toString).createOrReplaceTempView("test_data")
val sparkPlan =
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]