KurtYoung commented on a change in pull request #15136: URL: https://github.com/apache/flink/pull/15136#discussion_r592940636
########## File path: flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/expressions/TemporalTypesTest.scala ########## @@ -262,6 +263,30 @@ class TemporalTypesTest extends ExpressionTestBase { "2018-03-14") } + private def expectExceptionThrown( + sqlExpr: String, + expected: String, Review comment: `expected` is unnecessary if you are expecting an exception ########## File path: flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/expressions/TemporalTypesTest.scala ########## @@ -1122,6 +1154,90 @@ class TemporalTypesTest extends ExpressionTestBase { } + @Test + def testToTimestampLtzShanghai(): Unit = { + config.setLocalTimeZone(ZoneId.of("Asia/Shanghai")) + + //INT -> TIMESTAMP_LTZ + testSqlApi( + "TO_TIMESTAMP_LTZ(100)", + "1970-01-01 08:01:40.000") + + //TINYINT -> TIMESTAMP_LTZ + testSqlApi( + "TO_TIMESTAMP_LTZ(CAST(100 AS TINYINT))", + "1970-01-01 08:01:40.000") + + //BIGINT -> TIMESTAMP_LTZ + testSqlApi( + "TO_TIMESTAMP_LTZ(CAST(100 AS BIGINT))", + "1970-01-01 08:01:40.000") + + //FLOAT -> TIMESTAMP_LTZ + testSqlApi( + "TO_TIMESTAMP_LTZ(CAST(100.01 AS FLOAT))", + "1970-01-01 08:01:40.000") Review comment: the result should be `1970-01-01 08:01:40.010`? 
########## File path: flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/expressions/TemporalTypesTest.scala ########## @@ -262,6 +263,30 @@ class TemporalTypesTest extends ExpressionTestBase { "2018-03-14") } + private def expectExceptionThrown( + sqlExpr: String, + expected: String, + keywords: String, + clazz: Class[_ <: Throwable] = classOf[ValidationException]) + : Unit = { Review comment: nit: align `: Unit` with `try` manually ########## File path: flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/expressions/TemporalTypesTest.scala ########## @@ -1122,6 +1154,90 @@ class TemporalTypesTest extends ExpressionTestBase { } + @Test + def testToTimestampLtzShanghai(): Unit = { Review comment: 1. Please also test the minimum and maximum values for each numeric type this function supports. (I think we might need some boundary protection for timestamp because we can only support 9999-12-31) 2. Add a test case with NULL input 3. Add some tests about unsupported input types 4. 
Add some tests that exceed the precision of timestamp, such as `TO_TIMESTAMP_LTZ(0.01, 3)` ########## File path: flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/expressions/utils/ExpressionTestBase.scala ########## @@ -108,95 +108,100 @@ abstract class ExpressionTestBase { @After def evaluateExprs(): Unit = { - val ctx = CodeGeneratorContext(config) - val inputType = if (containsLegacyTypes) { - fromTypeInfoToLogicalType(typeInfo) - } else { - resolvedDataType.getLogicalType - } - val exprGenerator = new ExprCodeGenerator(ctx, nullableInput = false).bindInput(inputType) - - // cast expressions to String - val stringTestExprs = testExprs.map(expr => relBuilder.cast(expr._2, VARCHAR)) - - // generate code - val resultType = RowType.of(Seq.fill(testExprs.size)( - new VarCharType(VarCharType.MAX_LENGTH)): _*) - - val exprs = stringTestExprs.map(exprGenerator.generateExpression) - val genExpr = exprGenerator.generateResultExpression(exprs, resultType, classOf[BinaryRowData]) - - val bodyCode = - s""" - |${genExpr.code} - |return ${genExpr.resultTerm}; - """.stripMargin - - val genFunc = FunctionCodeGenerator.generateFunction[MapFunction[RowData, BinaryRowData]]( - ctx, - "TestFunction", - classOf[MapFunction[RowData, BinaryRowData]], - bodyCode, - resultType, - inputType) - - val mapper = genFunc.newInstance(getClass.getClassLoader) - - val isRichFunction = mapper.isInstanceOf[RichFunction] - - // call setRuntimeContext method and open method for RichFunction - if (isRichFunction) { - val richMapper = mapper.asInstanceOf[RichMapFunction[_, _]] - val t = new RuntimeUDFContext( - new TaskInfo("ExpressionTest", 1, 0, 1, 1), - classOf[ExpressionTestBase].getClassLoader, - env.getConfig, - Collections.emptyMap(), - Collections.emptyMap(), - null) - richMapper.setRuntimeContext(t) - richMapper.open(new Configuration()) - } - - val testRow = if (containsLegacyTypes) { - val converter = DataFormatConverters - 
.getConverterForDataType(resolvedDataType) - .asInstanceOf[DataFormatConverter[RowData, Row]] - converter.toInternal(testData) - } else { - val converter = DataStructureConverters - .getConverter(resolvedDataType) - .asInstanceOf[DataStructureConverter[RowData, Row]] - converter.toInternalOrNull(testData) - } + try { + val ctx = CodeGeneratorContext(config) + val inputType = if (containsLegacyTypes) { + fromTypeInfoToLogicalType(typeInfo) + } else { + resolvedDataType.getLogicalType + } + val exprGenerator = new ExprCodeGenerator(ctx, nullableInput = false).bindInput(inputType) + + // cast expressions to String + val stringTestExprs = testExprs.map(expr => relBuilder.cast(expr._2, VARCHAR)) + + // generate code + val resultType = RowType.of(Seq.fill(testExprs.size)( + new VarCharType(VarCharType.MAX_LENGTH)): _*) + + val exprs = stringTestExprs.map(exprGenerator.generateExpression) + val genExpr = exprGenerator + .generateResultExpression(exprs, resultType, classOf[BinaryRowData]) + + val bodyCode = + s""" + |${genExpr.code} + |return ${genExpr.resultTerm}; + """.stripMargin + + val genFunc = FunctionCodeGenerator.generateFunction[MapFunction[RowData, BinaryRowData]]( + ctx, + "TestFunction", + classOf[MapFunction[RowData, BinaryRowData]], + bodyCode, + resultType, + inputType) + + val mapper = genFunc.newInstance(getClass.getClassLoader) + + val isRichFunction = mapper.isInstanceOf[RichFunction] + + // call setRuntimeContext method and open method for RichFunction + if (isRichFunction) { + val richMapper = mapper.asInstanceOf[RichMapFunction[_, _]] + val t = new RuntimeUDFContext( + new TaskInfo("ExpressionTest", 1, 0, 1, 1), + classOf[ExpressionTestBase].getClassLoader, + env.getConfig, + Collections.emptyMap(), + Collections.emptyMap(), + null) + richMapper.setRuntimeContext(t) + richMapper.open(new Configuration()) + } - val result = mapper.map(testRow) + val testRow = if (containsLegacyTypes) { + val converter = DataFormatConverters + 
.getConverterForDataType(resolvedDataType) + .asInstanceOf[DataFormatConverter[RowData, Row]] + converter.toInternal(testData) + } else { + val converter = DataStructureConverters + .getConverter(resolvedDataType) + .asInstanceOf[DataStructureConverter[RowData, Row]] + converter.toInternalOrNull(testData) + } - // call close method for RichFunction - if (isRichFunction) { - mapper.asInstanceOf[RichMapFunction[_, _]].close() - } + val result = mapper.map(testRow) - // compare - testExprs - .zipWithIndex - .foreach { - case ((originalExpr, optimizedExpr, expected), index) => - - // adapt string result - val actual = if(!result.asInstanceOf[BinaryRowData].isNullAt(index)) { - result.asInstanceOf[BinaryRowData].getString(index).toString - } else { - null - } - - val original = if (originalExpr == null) "" else s"for: [$originalExpr]" - - assertEquals( - s"Wrong result $original optimized to: [$optimizedExpr]", - expected, - if (actual == null) "null" else actual) + // call close method for RichFunction + if (isRichFunction) { + mapper.asInstanceOf[RichMapFunction[_, _]].close() } + + // compare + testExprs + .zipWithIndex + .foreach { + case ((originalExpr, optimizedExpr, expected), index) => + + // adapt string result + val actual = if(!result.asInstanceOf[BinaryRowData].isNullAt(index)) { + result.asInstanceOf[BinaryRowData].getString(index).toString + } else { + null + } + + val original = if (originalExpr == null) "" else s"for: [$originalExpr]" + + assertEquals( + s"Wrong result $original optimized to: [$optimizedExpr]", + expected, + if (actual == null) "null" else actual) + } + } finally { + testExprs.clear() Review comment: Is this necessary? 
I noticed we will clear this in `@Before` ########## File path: flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/functions/SqlDateTimeUtils.java ########## @@ -230,8 +230,68 @@ public static long toTimestamp(DecimalData v) { } // -------------------------------------------------------------------------------------------- - // String --> String/timestamp conversion + // TO_TIMESTAMP_LTZ(numeric, precision) function supports precision 0 or 3. // -------------------------------------------------------------------------------------------- + public static TimestampData toTimestampData(long v) { + return toTimestampData(v, 0); + } + + public static TimestampData toTimestampData(long v, int precision) { + switch (precision) { + case 0: + return TimestampData.fromEpochMillis(v * MILLIS_PER_SECOND); + case 3: + return TimestampData.fromEpochMillis(v); + default: + throw new TableException( + "The precision value '" + + precision + + "' for function " + + "TO_TIMESTAMP_LTZ(numeric, precision) is unsupported," + + " the supported value is '0' for second or '3' for millisecond."); + } + } + + public static TimestampData toTimestampData(double v) { + return toTimestampData(v, 0); + } + + public static TimestampData toTimestampData(double v, int precision) { + switch (precision) { + case 0: + return TimestampData.fromEpochMillis((long) v * MILLIS_PER_SECOND); Review comment: `(long) v` this might lose precision ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org