Kimahriman commented on code in PR #865:
URL: https://github.com/apache/datafusion-comet/pull/865#discussion_r1729322534


##########
spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala:
##########
@@ -2003,10 +2003,17 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
     Seq(true, false).foreach { dictionaryEnabled =>
       withTempDir { dir =>
         val path = new Path(dir.toURI.toString, "test.parquet")
-        makeParquetFileAllTypes(path, dictionaryEnabled = dictionaryEnabled, 10000)
+        makeParquetFileAllTypes(path, dictionaryEnabled = dictionaryEnabled, 1000)
         val df = spark.read.parquet(path.toString)
         checkSparkAnswerAndOperator(df.select(array(col("_2"), col("_3"), col("_4"))))
         checkSparkAnswerAndOperator(df.select(array(col("_4"), col("_11"), lit(null))))
+        checkSparkAnswerAndOperator(
+          df.select(array(array(col("_4")), array(col("_4"), lit(null)))))
+        checkSparkAnswerAndOperator(df.select(array(col("_8"), col("_13"))))
+        // TODO: Some part of this converts the null to an empty string
+        // checkSparkAnswerAndOperator(df.select(array(col("_8"), col("_13"), lit(null))))

Review Comment:
   I was able to recreate this with a test on v41.0.0, but it seems to be fixed on main.
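
   For reference, a minimal sketch of what re-enabling that check could look like once the fix reaches Comet. This simply mirrors the TODO'd assertion in the diff above; `_8` and `_13` are assumed to be the string columns written by `makeParquetFileAllTypes`, and the snippet assumes the surrounding test context in `CometExpressionSuite`:

   ```scala
   // Sketch only: same assertion as the commented-out TODO line above.
   // Assumes `spark`, `path`, and the CometTestBase helpers are in scope.
   val df = spark.read.parquet(path.toString)
   // Expect Spark and Comet to agree that the third array element is null, not "".
   checkSparkAnswerAndOperator(df.select(array(col("_8"), col("_13"), lit(null))))
   ```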



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

