robreeves commented on code in PR #50269: URL: https://github.com/apache/spark/pull/50269#discussion_r2001535420
##########
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala:
##########
@@ -2150,4 +2150,66 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
       condition = "INTERNAL_ERROR",
       parameters = Map("message" -> "Cannot evaluate expression: localtimestamp(Some(UTC))"))
   }
+
+  test("creating values of TimeType via make_time") {
+    Seq(true, false).foreach { ansi =>
+      withSQLConf(SQLConf.ANSI_ENABLED.key -> ansi.toString) {
+        // basic case
+        checkEvaluation(
+          MakeTime(Literal(13), Literal.create(2, IntegerType),
+            Literal(Decimal(BigDecimal(23.5), 16, 6))),
+          LocalTime.of(13, 2, 23, 500000000))
+
+        // null cases
+        checkEvaluation(
+          MakeTime(Literal.create(null, IntegerType), Literal(18),
+            Literal(Decimal(BigDecimal(23.5), 16, 6))),
+          null)
+        checkEvaluation(
+          MakeTime(Literal(13), Literal.create(null, IntegerType),
+            Literal(Decimal(BigDecimal(23.5), 16, 6))),
+          null)
+        checkEvaluation(MakeTime(Literal(13), Literal(18),
+          Literal.create(null, DecimalType(16, 6))), null)
+      }
+    }
+
+    withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
+      // Invalid times return null
+      checkEvaluation(
+        MakeTime(Literal(Int.MaxValue), Literal(18),
+          Literal(Decimal(BigDecimal(23.5), 16, 6))),
+        null)
+      checkEvaluation(
+        MakeTime(Literal(13), Literal(Int.MinValue),
+          Literal(Decimal(BigDecimal(23.5), 16, 6))),
+        null)
+      checkEvaluation(MakeTime(Literal(13), Literal(18),
+        Literal(Decimal(BigDecimal(60.1), 16, 6))), null)
+    }
+
+    withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
+      val errorCode = "DATETIME_FIELD_OUT_OF_BOUNDS"
+      val baseErrorParams = Map("ansiConfig" -> "\"spark.sql.ansi.enabled\"")
+
+      checkErrorInExpression[SparkDateTimeException](

Review Comment:
   I don't understand the suggestion. This implementation does use `checkErrorInExpression` and checks the error condition and message params.
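   For context, a minimal sketch of what one such assertion could look like, assuming the scope of the test body above (the `errorCode` and `baseErrorParams` vals and the suite's `checkErrorInExpression` helper, which the surrounding context already calls with named `condition` and `parameters` arguments). The `rangeMessage` key and its message text are illustrative assumptions, not the PR's actual code, which is truncated at the comment anchor.

   ```scala
   // Hypothetical sketch only (not the PR's code): asserting that an
   // out-of-range hour raises DATETIME_FIELD_OUT_OF_BOUNDS under ANSI mode,
   // reusing errorCode and baseErrorParams defined in the test above.
   // The "rangeMessage" key and its text are assumptions for illustration.
   checkErrorInExpression[SparkDateTimeException](
     MakeTime(Literal(Int.MaxValue), Literal(18),
       Literal(Decimal(BigDecimal(23.5), 16, 6))),
     condition = errorCode,
     parameters = baseErrorParams +
       ("rangeMessage" -> "Invalid value for HourOfDay (valid values 0 - 23): 2147483647"))
   ```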