andygrove commented on code in PR #2542:
URL: https://github.com/apache/datafusion-comet/pull/2542#discussion_r2424722661
##########
spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala:
##########
@@ -3017,6 +3017,37 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
}
+ test("ANSI support for round function") {
+ val data = Seq((Integer.MAX_VALUE, Integer.MIN_VALUE, Long.MinValue,
Long.MaxValue))
+ Seq("true", "false").foreach { p =>
+ withSQLConf(SQLConf.ANSI_ENABLED.key -> p) {
+ withParquetTable(data, "tbl") {
+ val res = spark.sql(s"""
+ |SELECT
+ | round(_1, -1) ,
+ | round(_1, -10) ,
+ | round(${Int.MaxValue}, -10)
Review Comment:
It would also be better to test the input values individually. I'd suggest
something like this:
```scala
test("ANSI support for round function") {
  for (value <- Seq((Integer.MAX_VALUE, Integer.MIN_VALUE, Long.MinValue, Long.MaxValue))) {
    val data = Seq(value)
    withParquetTable(data, "tbl") {
      for (scale <- Seq(-10, -1, 0, 1, 10)) {
        for (ansi <- Seq(true, false)) {
          withSQLConf(SQLConf.ANSI_ENABLED.key -> ansi.toString) {
            val res = spark.sql(s"SELECT round(_1, $scale) from tbl")
            checkSparkMaybeThrows(res) match {
              case (Some(sparkException), Some(cometException)) =>
                assert(sparkException.getMessage.contains("ARITHMETIC_OVERFLOW"))
                assert(cometException.getMessage.contains("ARITHMETIC_OVERFLOW"))
              case (None, None) => checkSparkAnswerAndOperator(res)
              case (None, Some(ex)) =>
                fail(
                  "Comet threw an exception but Spark did not. Comet exception: " + ex.getMessage)
              case (Some(sparkException), None) =>
                fail(
                  "Spark threw an exception but Comet did not. Spark exception: " +
                    sparkException.getMessage)
            }
          }
        }
      }
    }
  }
}
```
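
If the goal is to give every boundary value its own single-column table, one possible shape is sketched below. This is only a rough sketch, not the suggested implementation: `checkAllScales` is a hypothetical local helper, and it assumes `withParquetTable` accepts `Seq(Tuple1(v))` as one-column table data; `withSQLConf`, `checkSparkMaybeThrows`, and `checkSparkAnswerAndOperator` are the same suite helpers used above.
```scala
test("ANSI support for round function") {
  // For whatever table is currently registered as "tbl", run every scale under
  // both ANSI settings and require Spark and Comet to agree: either the same
  // answer, or an ARITHMETIC_OVERFLOW error from both engines.
  def checkAllScales(): Unit = {
    for (scale <- Seq(-10, -1, 0, 1, 10); ansi <- Seq(true, false)) {
      withSQLConf(SQLConf.ANSI_ENABLED.key -> ansi.toString) {
        val res = spark.sql(s"SELECT round(_1, $scale) from tbl")
        checkSparkMaybeThrows(res) match {
          case (Some(sparkException), Some(cometException)) =>
            assert(sparkException.getMessage.contains("ARITHMETIC_OVERFLOW"))
            assert(cometException.getMessage.contains("ARITHMETIC_OVERFLOW"))
          case (None, None) =>
            checkSparkAnswerAndOperator(res)
          case (None, Some(ex)) =>
            fail("Comet threw an exception but Spark did not: " + ex.getMessage)
          case (Some(ex), None) =>
            fail("Spark threw an exception but Comet did not: " + ex.getMessage)
        }
      }
    }
  }

  // One single-column table per boundary value (assumes Tuple1 data is accepted).
  for (v <- Seq(Integer.MIN_VALUE, Integer.MAX_VALUE)) {
    withParquetTable(Seq(Tuple1(v)), "tbl") {
      checkAllScales()
    }
  }
  for (v <- Seq(Long.MinValue, Long.MaxValue)) {
    withParquetTable(Seq(Tuple1(v)), "tbl") {
      checkAllScales()
    }
  }
}
```
Keeping the Int and Long loops separate preserves each value's original column type, so both the 32-bit and 64-bit overflow paths get exercised.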
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.