MaxGekk commented on code in PR #50269:
URL: https://github.com/apache/spark/pull/50269#discussion_r1996630578


##########
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala:
##########
@@ -2150,4 +2150,71 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
       condition = "INTERNAL_ERROR",
      parameters = Map("message" -> "Cannot evaluate expression: localtimestamp(Some(UTC))"))
   }
+
+  test("creating values of TimeType via make_time") {
+    Seq(true, false).foreach { ansi =>
+      withSQLConf(SQLConf.ANSI_ENABLED.key -> ansi.toString) {
+        // basic case
+        checkEvaluation(
+          MakeTime(Literal(13), Literal.create(2, IntegerType),
+            Literal(Decimal(BigDecimal(23.5), 16, 6))),
+          LocalTime.of(13, 2, 23, 500000000))
+
+        // Postgres compatibility
+        checkEvaluation(
+          MakeTime(Literal(13), Literal.create(2, IntegerType),
+            Literal(Decimal(BigDecimal(60), 16, 6))),
+          LocalTime.of(13, 3, 0, 0))
+        checkEvaluation(
+          MakeTime(Literal(13), Literal.create(59, IntegerType),
+            Literal(Decimal(BigDecimal(60), 16, 6))),
+          LocalTime.of(14, 0, 0, 0))
+
+        // null cases
+        checkEvaluation(
+          MakeTime(Literal.create(null, IntegerType), Literal(18),
+            Literal(Decimal(BigDecimal(23.5), 16, 6))),
+          null)
+        checkEvaluation(
+          MakeTime(Literal(13), Literal.create(null, IntegerType),
+            Literal(Decimal(BigDecimal(23.5), 16, 6))),
+          null)
+        checkEvaluation(MakeTime(Literal(13), Literal(18),
+          Literal.create(null, DecimalType(16, 6))), null)
+      }
+    }
+
+    withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
+      // Invalid times return null
+      checkEvaluation(
+        MakeTime(Literal(Int.MaxValue), Literal(18),
+          Literal(Decimal(BigDecimal(23.5), 16, 6))),
+        null)
+      checkEvaluation(
+        MakeTime(Literal(13), Literal(Int.MinValue),
+          Literal(Decimal(BigDecimal(23.5), 16, 6))),
+        null)
+      checkEvaluation(MakeTime(Literal(13), Literal(18),
+        Literal(Decimal(BigDecimal(65.1), 16, 6))), null)
+      checkEvaluation(MakeTime(Literal(13), Literal(18),
+        Literal(Decimal(BigDecimal(60.1), 16, 6))), null)
+    }
+
+    withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
+      checkExceptionInExpression[DateTimeException](
+        MakeTime(Literal(25), Literal(2), Literal(Decimal(BigDecimal(23.5), 16, 6))),
+        "Invalid value for HourOfDay")
+      checkExceptionInExpression[DateTimeException](
+        MakeTime(Literal(23), Literal(-1), Literal(Decimal(BigDecimal(23.5), 16, 6))),
+        "Invalid value for MinuteOfHour")
+      checkExceptionInExpression[DateTimeException](
+        MakeTime(Literal(23), Literal(12), Literal(Decimal(BigDecimal(100.5), 16, 6))),
+        "Invalid value for SecondOfMinute")
+
+      // Invalid Postgres compatibility case where seconds = 60 and there is a fractional second
+      checkExceptionInExpression[DateTimeException](
+        MakeTime(Literal(23), Literal(12), Literal(Decimal(BigDecimal(60.5), 16, 6))),
+        "[INVALID_FRACTION_OF_SECOND] Valid range for seconds is [0, 60]")

Review Comment:
   Could you use `checkErrorInExpression`, please? It would be nice to avoid depending on error-message text in tests; that way, error messages can be improved later without touching/repackaging the Spark distro.
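   For the last case above, that might look like the sketch below — assuming the error condition is `INVALID_FRACTION_OF_SECOND` (taken from the message text in the current test) and the thrown type is `SparkDateTimeException`; the parameters map is a placeholder and must match the error's actual definition:
   ```scala
   // Minimal sketch of the suggested rewrite; asserts the error condition
   // rather than the rendered message text.
   checkErrorInExpression[SparkDateTimeException](
     MakeTime(Literal(23), Literal(12), Literal(Decimal(BigDecimal(60.5), 16, 6))),
     condition = "INVALID_FRACTION_OF_SECOND",
     parameters = Map.empty) // placeholder: fill in per the error's declared parameters
   ```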



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala:
##########
@@ -2556,6 +2556,118 @@ case class MakeDate(
     copy(year = newFirst, month = newSecond, day = newThird)
 }
 
+// scalastyle:off line.size.limit
+@ExpressionDescription(
+  usage = "_FUNC_(hour, minute, second) - Create time from hour, minute and 
second fields. If the configuration `spark.sql.ansi.enabled` is false, the 
function returns NULL on invalid inputs. Otherwise, it will throw an error 
instead.",
+  arguments = """
+    Arguments:
+      * hour - the hour to represent, from 0 to 23
+      * minute - the minute to represent, from 0 to 59
+      * second - the second to represent, from 0 to 59.999999
+  """,
+  examples = """
+    Examples:
+      > SELECT _FUNC_(6, 30, 45.887);
+       06:30:45.887
+      > SELECT _FUNC_(NULL, 30, 0);
+       NULL
+  """,
+  group = "datetime_funcs",
+  since = "4.1.0")
+// scalastyle:on line.size.limit
+case class MakeTime(
+                     hours: Expression,
+                     minutes: Expression,
+                     secAndMicros: Expression,
+                     failOnError: Boolean = SQLConf.get.ansiEnabled)

Review Comment:
   Fix the indentation, please; see https://github.com/databricks/scala-style-guide?tab=readme-ov-file#spacing-and-indentation
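   That is, 4-space indentation for the constructor parameters and a 2-space indent for the `extends` clause, something like:
   ```scala
   case class MakeTime(
       hours: Expression,
       minutes: Expression,
       secAndMicros: Expression,
       failOnError: Boolean = SQLConf.get.ansiEnabled)
     extends TernaryExpression with ImplicitCastInputTypes with SecAndNanosExtractor {
   ```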



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala:
##########
@@ -2556,6 +2556,118 @@ case class MakeDate(
     copy(year = newFirst, month = newSecond, day = newThird)
 }
 
+// scalastyle:off line.size.limit
+@ExpressionDescription(
+  usage = "_FUNC_(hour, minute, second) - Create time from hour, minute and 
second fields. If the configuration `spark.sql.ansi.enabled` is false, the 
function returns NULL on invalid inputs. Otherwise, it will throw an error 
instead.",
+  arguments = """
+    Arguments:
+      * hour - the hour to represent, from 0 to 23
+      * minute - the minute to represent, from 0 to 59
+      * second - the second to represent, from 0 to 59.999999
+  """,
+  examples = """
+    Examples:
+      > SELECT _FUNC_(6, 30, 45.887);
+       06:30:45.887
+      > SELECT _FUNC_(NULL, 30, 0);
+       NULL
+  """,
+  group = "datetime_funcs",
+  since = "4.1.0")
+// scalastyle:on line.size.limit
+case class MakeTime(
+                     hours: Expression,
+                     minutes: Expression,
+                     secAndMicros: Expression,
+                     failOnError: Boolean = SQLConf.get.ansiEnabled)
+  extends TernaryExpression with ImplicitCastInputTypes with SecAndNanosExtractor {
+  override def nullIntolerant: Boolean = true
+
+  def this(hours: Expression, minutes: Expression, secAndMicros: Expression) =
+    this(hours, minutes, secAndMicros, SQLConf.get.ansiEnabled)
+
+  override def first: Expression = hours
+  override def second: Expression = minutes
+  override def third: Expression = secAndMicros
+  override def inputTypes: Seq[AbstractDataType] = Seq(IntegerType, IntegerType, DecimalType(16, 6))
+  override def dataType: DataType = TimeType(TimeType.MAX_PRECISION)
+  override def nullable: Boolean = if (failOnError) children.exists(_.nullable) else true
+
+  override protected def nullSafeEval(hours: Any, minutes: Any, secAndMicros: Any): Any = {
+    val (secs, nanos) = toSecondsAndNanos(secAndMicros.asInstanceOf[Decimal])
+
+    try {
+      val lt = if (secs == 60) {
+        if (nanos == 0) {
+          // This case of sec = 60 and nanos = 0 is supported for compatibility with PostgreSQL

Review Comment:
   I believe we don't need this legacy PostgreSQL feature. Let's strictly follow the SQL standard and support seconds only in the range from 0 to 59.999999.
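   For reference, a minimal sketch of `nullSafeEval` without the special case, assuming `toSecondsAndNanos` keeps returning an integral `(secs, nanos)` pair as in the PR (`localTimeToNanos` stands in for whatever conversion the PR uses to produce the internal `TimeType` value):
   ```scala
   override protected def nullSafeEval(hours: Any, minutes: Any, secAndMicros: Any): Any = {
     val (secs, nanos) = toSecondsAndNanos(secAndMicros.asInstanceOf[Decimal])
     try {
       // No secs == 60 branch: LocalTime.of throws a DateTimeException for any
       // second value outside [0, 59], enforcing the SQL-standard range uniformly.
       val lt = LocalTime.of(hours.asInstanceOf[Int], minutes.asInstanceOf[Int], secs, nanos)
       localTimeToNanos(lt) // assumption: convert to the internal nanos value as the PR does
     } catch {
       case _: DateTimeException if !failOnError => null // non-ANSI mode: NULL on invalid input
       // in ANSI mode the guard fails, so the exception propagates (or is mapped
       // to a Spark error condition, as the PR already does)
     }
   }
   ```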


