the-sakthi commented on code in PR #50296:
URL: https://github.com/apache/spark/pull/50296#discussion_r2005022335
##########
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TimeExpressionsSuite.scala:
##########
@@ -51,4 +51,37 @@ class TimeExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
       condition = "CANNOT_PARSE_TIME",
       parameters = Map("input" -> "'100:50'", "format" -> "'mm:HH'"))
   }
+
+  test("Minute with TIME type") {
+    // A few test times in microseconds since midnight:
+    // (time in microseconds, expected minute)
+    val testTimes = Seq(
+      (localTime(), 0), // 00:00:00 => 0
+      (localTime(12, 58, 59), 58), // 12:58:59 => 58
+      (localTime(0, 59), 59), // 00:59:00 => 59
+      (localTime(23, 0, 1), 0), // 23:00:01 => 0
+      (localTime(1), 0), // 01:00:00 => 0
+      (localTime(14, 30), 30), // 14:30:00 => 30
+      (localTime(23, 59, 59, 999999), 59) // 23:59:59.999999 => 59
+    )
+
+    // Create a literal with TimeType() for each test microsecond value
+    // evaluate Minute(...), and check that the result matches the expected minute.
+    testTimes.foreach { case (micros, expectedMinute) =>
+      checkEvaluation(
+        MinutesOfTime(Literal(micros, TimeType())),
+        expectedMinute)
+    }
+
+    // Verify NULL handling
+    checkEvaluation(
+      MinutesOfTime(Literal.create(null, TimeType(6))),
+      null
+    )
+
+    // Verify that the expression is consistent in interpreted vs. codegen mode.
+    // TODO: codegen failing. Pending fix
+    checkConsistencyBetweenInterpretedAndCodegen(
+      (child: Expression) => MinutesOfTime(child), TimeType())
+  }

Review Comment:
   I think this part of the testing code isn't how this is supposed to be tested. I'll remove this from the next version of the PR, if that's fine.

-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org