MaxGekk commented on code in PR #50558:
URL: https://github.com/apache/spark/pull/50558#discussion_r2046160107
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala:
##########
@@ -148,6 +148,24 @@ object DateTimeUtils extends SparkDateTimeUtils {
     Decimal(getMicroseconds(micros, zoneId), 8, 6)
   }
+
+  /**
+   * Returns the second value with fraction from a given TIME (TimeType) value.
+   * @param micros
+   *   The number of microseconds since the epoch.
+   * @param precision
+   *   The time fractional seconds precision, which indicates the number of decimal digits
+   *   maintained.
+   */
+  def getSecondsOfTimeWithFraction(micros: Long, precision: Int): Decimal = {
+    val nanos = micros * NANOS_PER_MICROS

Review Comment:
   What's the reason to convert micros to nanos? Why not just:
   ```scala
   def getSecondsOfTimeWithFraction(micros: Long, precision: Int): Decimal = {
     val seconds = (micros / MICROS_PER_SECOND) % SECONDS_PER_MINUTE
     val scaleFactor = math.pow(10, precision).toLong
     val scaledFraction = (micros % MICROS_PER_SECOND) * scaleFactor / MICROS_PER_SECOND
     val fraction = scaledFraction.toDouble / scaleFactor
     Decimal(seconds + fraction, 8, 6)
   }
   ```

##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala:
##########
@@ -297,6 +312,36 @@ object HourExpressionBuilder extends ExpressionBuilder {
   }
 }
 
+case class SecondsOfTimeWithFraction(child: Expression)
+    extends RuntimeReplaceable
+    with ExpectsInputTypes {
+
+  override def replacement: Expression = {
+
+    StaticInvoke(
+      classOf[DateTimeUtils.type],
+      DecimalType(8, 6),
+      "getSecondsOfTimeWithFraction",
+      Seq(child, Literal(precision)),
+      Seq(child.dataType, IntegerType))
+  }
+  private val precision: Int = child.dataType match {
+    case t: TimeType => t.precision
+    case _ => TimeType.MAX_PRECISION
+  }
+  override def inputTypes: Seq[AbstractDataType] =
+    Seq(TimeType(precision))
+
+  override def children: Seq[Expression] = Seq(child)
+
+  override def prettyName: String = "secondoftime_with_fraction"

Review Comment:
   Could you remove it and leave the default implementation, which is based on the class name? It should not be visible to users in regular cases, and for devs it is much more convenient when grepping the source code.

##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala:
##########
@@ -297,6 +312,36 @@ object HourExpressionBuilder extends ExpressionBuilder {
   }
 }
 
+case class SecondsOfTimeWithFraction(child: Expression)
+    extends RuntimeReplaceable
+    with ExpectsInputTypes {
+
+  override def replacement: Expression = {
+
+    StaticInvoke(
+      classOf[DateTimeUtils.type],
+      DecimalType(8, 6),
+      "getSecondsOfTimeWithFraction",
+      Seq(child, Literal(precision)),
+      Seq(child.dataType, IntegerType))
+  }
+  private val precision: Int = child.dataType match {
+    case t: TimeType => t.precision
+    case _ => TimeType.MAX_PRECISION

Review Comment:
   What's the case when the type is not `TimeType`?

##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala:
##########
@@ -297,6 +312,36 @@ object HourExpressionBuilder extends ExpressionBuilder {
   }
 }
 
+case class SecondsOfTimeWithFraction(child: Expression)
+    extends RuntimeReplaceable
+    with ExpectsInputTypes {

Review Comment:
   Please fix the indentation, see https://github.com/databricks/scala-style-guide?tab=readme-ov-file#spacing-and-indentation
   ```suggestion
     extends RuntimeReplaceable
     with ExpectsInputTypes {
   ```
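For readers following along, here is a minimal, standalone sketch of the arithmetic the first comment proposes: computing the seconds-with-fraction value directly from microseconds, with no detour through nanoseconds. The object name, the locally defined constants, and the use of `BigDecimal` in place of Spark's internal `Decimal` are assumptions made only so the snippet compiles and runs on its own; it is not the actual Catalyst code.

```scala
// Standalone sketch; the constants mirror the usual definitions and
// BigDecimal stands in for Spark's Decimal type.
object SecondsWithFractionSketch {
  private val MICROS_PER_SECOND = 1000000L
  private val SECONDS_PER_MINUTE = 60L

  // Seconds within the minute (0..59) plus the fractional part,
  // truncated to `precision` decimal digits using only Long arithmetic.
  def secondsWithFraction(micros: Long, precision: Int): BigDecimal = {
    val seconds = (micros / MICROS_PER_SECOND) % SECONDS_PER_MINUTE
    val scaleFactor = math.pow(10, precision).toLong
    // micros % MICROS_PER_SECOND < 10^6 and scaleFactor <= 10^6,
    // so the intermediate product stays well within Long range.
    val scaledFraction = (micros % MICROS_PER_SECOND) * scaleFactor / MICROS_PER_SECOND
    BigDecimal(seconds) + BigDecimal(scaledFraction) / BigDecimal(scaleFactor)
  }

  def main(args: Array[String]): Unit = {
    // 01:02:03.123456 expressed as microseconds since midnight.
    val micros = ((1L * 60 + 2) * 60 + 3) * MICROS_PER_SECOND + 123456L
    println(secondsWithFraction(micros, 6)) // 3.123456
    println(secondsWithFraction(micros, 3)) // 3.123 (truncated to 3 digits)
  }
}
```

The key step is `(micros % MICROS_PER_SECOND) * scaleFactor / MICROS_PER_SECOND`, which truncates the sub-second part to `precision` digits with integer arithmetic alone, which is why no conversion to nanoseconds is needed.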
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org