kazuyukitanimura commented on code in PR #1385:
URL: https://github.com/apache/datafusion-comet/pull/1385#discussion_r1966118342
##########
spark/src/test/scala/org/apache/comet/CometCastSuite.scala:
##########

@@ -1126,27 +1129,33 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
       val cometMessage =
         if (cometException.getCause != null) cometException.getCause.getMessage
         else cometException.getMessage
-      if (CometSparkSessionExtensions.isSpark40Plus) {
-        // for Spark 4 we expect that sparkException carries the message
-        assert(
-          sparkException.getMessage
-            .replace(".WITH_SUGGESTION] ", "]")
-            .startsWith(cometMessage))
-      } else if (CometSparkSessionExtensions.isSpark34Plus) {
-        // for Spark 3.4 we expect to reproduce the error message exactly
-        assert(cometMessage == sparkMessage)
+      // Comet decimal conversion throws ArrowError(string) from Arrow; across Spark versions the messages don't match.
+      if (sparkMessage.contains("cannot be represented as")) {
+        cometMessage.contains("cannot be represented as") || cometMessage.contains(
+          "too large to store")
       } else {

Review Comment:
   I think we still need to remove this new `if` block and update the test cases below. The new block may still pass via `cometMessage.contains("cannot be represented as")`, which seems to indicate a Spark cast rather than a native cast.
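   A rough sketch of the shape this suggestion seems to imply (hypothetical, not part of the PR; it simply reuses the branches removed in the diff above, with the expected decimal-cast messages in the test cases below updated to the native error text instead of special-casing it here):

       // Hypothetical sketch of the reviewer's suggestion, reusing the removed branches.
       if (CometSparkSessionExtensions.isSpark40Plus) {
         // Spark 4: the SparkException itself carries the message
         assert(
           sparkException.getMessage
             .replace(".WITH_SUGGESTION] ", "]")
             .startsWith(cometMessage))
       } else if (CometSparkSessionExtensions.isSpark34Plus) {
         // Spark 3.4+: expect the messages to match exactly; the expected strings
         // for the decimal case would be updated to the native cast error
         // (e.g. Arrow's "too large to store" text) in the test cases below.
         assert(cometMessage == sparkMessage)
       }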