MaxGekk commented on code in PR #50296:
URL: https://github.com/apache/spark/pull/50296#discussion_r1998572198


##########
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala:
##########
@@ -388,6 +388,44 @@ class DateExpressionsSuite extends SparkFunSuite with 
ExpressionEvalHelper {
     }
   }
 
+  test("Minute with TIME type") {
+    // A few test times in microseconds since midnight:
+    //   (time in microseconds, expected minute)
+    val secondsPerHour = 3600L
+    val secondsPerMinute = 60L
+    val testTimes = Seq(
+      (0L, 0), // 00:00:00 => 0
+      ((12L * secondsPerHour + 58L * secondsPerMinute + 59L) * 1000000L, 58), 
// 12:58:59 => 58
+      ((0L * secondsPerHour + 59L * secondsPerMinute + 0L) * 1000000L, 59),   
// 00:59:00 => 59
+      ((23L * secondsPerHour + 0L * secondsPerMinute + 1L) * 1000000L, 0), // 
23:00:01 => 0
+      ((1L * secondsPerHour) * 1000000L, 0), // 01:00:00 => 0
+      ((14L * secondsPerHour + 30L * secondsPerMinute) * 1000000L, 30), // 
14:30:00 => 30
+      ((23L * secondsPerHour + 59L * secondsPerMinute + 59L) * 1000000L
+        + 999999L, 59) // 23:59:59.999999 => 59
+    )
+
+    // Create a literal with TimeType() for each test microsecond value
+    // evaluate Minute(...), and check that the result matches the expected 
minute.
+    testTimes.foreach { case (micros, expectedMinute) =>
+      checkEvaluation(
+        Minute(Literal(micros, TimeType())),
+        expectedMinute)
+    }
+
+    // Verify NULL handling
+    checkEvaluation(
+      Minute(Literal.create(null, TimeType(6))),
+      null
+    )
+
+    // Verify that the expression is consistent in interpreted vs. codegen 
mode.
+    checkConsistencyBetweenInterpretedAndCodegen(
+      (child: Expression) => Minute(child), TimeType())
+  }
+
+
+
+

Review Comment:
   Please remove the blank lines.



##########
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala:
##########
@@ -388,6 +388,44 @@ class DateExpressionsSuite extends SparkFunSuite with 
ExpressionEvalHelper {
     }
   }
 
+  test("Minute with TIME type") {
+    // A few test times in microseconds since midnight:
+    //   (time in microseconds, expected minute)
+    val secondsPerHour = 3600L
+    val secondsPerMinute = 60L
+    val testTimes = Seq(
+      (0L, 0), // 00:00:00 => 0
+      ((12L * secondsPerHour + 58L * secondsPerMinute + 59L) * 1000000L, 58), 
// 12:58:59 => 58

Review Comment:
   You can use the helper function `localTime` to get micros.



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala:
##########
@@ -429,8 +436,18 @@ case class Hour(child: Expression, timeZoneId: 
Option[String] = None) extends Ge
 case class Minute(child: Expression, timeZoneId: Option[String] = None) 
extends GetTimeField {

Review Comment:
   `GetTimeField` is a time-zone-aware expression, but in the case of TIME, `Minute` shouldn't depend on the time zone or require time zone resolution.



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala:
##########
@@ -113,6 +113,15 @@ object DateTimeUtils extends SparkDateTimeUtils {
     getLocalDateTime(micros, zoneId).getMinute
   }
 
+  /**
+   * Returns the minute value of a given TIME (TimeType) value.
+   * The `ZoneId` parameter is ignored because TIME values do not require time 
zones.
+   * It is included only to satisfy the `GetTimeField` function signature.
+   */
+  def getMinutesFromTime(micros: Long, zoneId: ZoneId): Int = {
+    SparkDateTimeUtils.microsToLocalTime(micros).getMinute

Review Comment:
   `microsToLocalTime` should be in the scope:
   ```suggestion
       microsToLocalTime(micros).getMinute
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to