[ https://issues.apache.org/jira/browse/FLINK-6226?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Fabian Hueske closed FLINK-6226.
--------------------------------
    Resolution: Cannot Reproduce
 Fix Version/s: 1.3.3
                1.4.0

Could not reproduce in 1.3.3 and 1.4.0.
Added tests to 1.3.3 with 310817035fef1d843b37e00ccd4a32efffaad3dc
Added tests to 1.4.0 with 6e118d1dc97b3a8c0b013d2002fad80219751253

> ScalarFunction and TableFunction do not support parameters of byte, short and float
> ------------------------------------------------------------------------------------
>
>                 Key: FLINK-6226
>                 URL: https://issues.apache.org/jira/browse/FLINK-6226
>             Project: Flink
>          Issue Type: Bug
>          Components: Table API & SQL
>            Reporter: Zhuoluo Yang
>            Assignee: Fabian Hueske
>            Priority: Major
>             Fix For: 1.4.0, 1.3.3
>
>
> It seems that ScalarFunction and TableFunction do not support parameters of type byte, short, or float.
> Calling a function with such parameters fails with an exception like the following:
> {panel}
> org.apache.flink.table.api.ValidationException: Given parameters of function 'org$apache$flink$table$expressions$utils$Func18$$98a126fbdab73f43d640516da603291a' do not match any signature.
> Actual: (java.lang.String, java.lang.Integer, java.lang.Integer, java.lang.Integer, java.lang.Long)
> Expected: (java.lang.String, byte, short, int, long)
> 	at org.apache.flink.table.functions.utils.ScalarSqlFunction$$anon$1.inferReturnType(ScalarSqlFunction.scala:82)
> 	at org.apache.calcite.sql.SqlOperator.inferReturnType(SqlOperator.java:469)
> 	at org.apache.calcite.rex.RexBuilder.deriveReturnType(RexBuilder.java:271)
> 	at org.apache.calcite.tools.RelBuilder.call(RelBuilder.java:518)
> 	at org.apache.flink.table.expressions.ScalarFunctionCall.toRexNode(call.scala:68)
> 	at org.apache.flink.table.expressions.Alias.toRexNode(fieldExpression.scala:76)
> 	at org.apache.flink.table.plan.logical.Project$$anonfun$construct$1.apply(operators.scala:95)
> 	at org.apache.flink.table.plan.logical.Project$$anonfun$construct$1.apply(operators.scala:95)
> 	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
> 	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
> 	at scala.collection.immutable.List.foreach(List.scala:318)
> 	at scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:32)
> 	at scala.collection.mutable.ListBuffer.foreach(ListBuffer.scala:45)
> 	at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
> 	at scala.collection.AbstractTraversable.map(Traversable.scala:105)
> 	at org.apache.flink.table.plan.logical.Project.construct(operators.scala:95)
> 	at org.apache.flink.table.plan.logical.LogicalNode.toRelNode(LogicalNode.scala:77)
> 	at org.apache.flink.table.api.Table.getRelNode(table.scala:72)
> 	at org.apache.flink.table.expressions.utils.ExpressionTestBase.addTableApiTestExpr(ExpressionTestBase.scala:215)
> 	at org.apache.flink.table.expressions.utils.ExpressionTestBase.testAllApis(ExpressionTestBase.scala:241)
> 	at org.apache.flink.table.expressions.UserDefinedScalarFunctionTest.testVariableArgs(UserDefinedScalarFunctionTest.scala:240)
> {panel}
> The test code looks like the following:
> {code:java}
> import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, TypeInformation}
> import org.apache.flink.api.java.typeutils.RowTypeInfo
> import org.apache.flink.table.functions.{ScalarFunction, TableFunction}
> import org.apache.flink.types.Row
>
> object Func18 extends ScalarFunction {
>   def eval(a: String, b: Byte, c: Short, d: Int, e: Long): String = {
>     a + "," + b + "," + c + "," + d + "," + e
>   }
> }
>
> class TableFunc4 extends TableFunction[Row] {
>   def eval(data: String, tinyInt: Byte, smallInt: Short, int: Int, long: Long): Unit = {
>     val row = new Row(5)
>     row.setField(0, data)
>     row.setField(1, tinyInt)
>     row.setField(2, smallInt)
>     row.setField(3, int)
>     row.setField(4, long)
>     collect(row)
>   }
>
>   override def getResultType: TypeInformation[Row] = {
>     new RowTypeInfo(
>       BasicTypeInfo.STRING_TYPE_INFO,
>       BasicTypeInfo.BYTE_TYPE_INFO,
>       BasicTypeInfo.SHORT_TYPE_INFO,
>       BasicTypeInfo.INT_TYPE_INFO,
>       BasicTypeInfo.LONG_TYPE_INFO
>     )
>   }
> }
> {code}
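>
> For illustration, here is a minimal sketch of how Func18 might be invoked from the Scala Table API. The streaming environment, example data, and field names below are assumptions made for this sketch; only the Byte/Short arguments correspond to the reported failure.
> {code:java}
> import org.apache.flink.streaming.api.scala._
> import org.apache.flink.table.api.TableEnvironment
> import org.apache.flink.table.api.scala._
>
> object CallFunc18Example {
>   def main(args: Array[String]): Unit = {
>     val env = StreamExecutionEnvironment.getExecutionEnvironment
>     val tEnv = TableEnvironment.getTableEnvironment(env)
>
>     // Hypothetical input with a STRING column followed by TINYINT, SMALLINT,
>     // INT, and BIGINT columns (assumes the Func18 object above is in scope).
>     val stream = env.fromElements(("hello", 1.toByte, 2.toShort, 3, 4L))
>     val table = stream.toTable(tEnv, 'a, 'b, 'c, 'd, 'e)
>
>     // Passing the Byte and Short fields is the call pattern the issue reports
>     // as failing with "do not match any signature".
>     val result = table.select(Func18('a, 'b, 'c, 'd, 'e))
>
>     result.toAppendStream[String].print()
>     env.execute()
>   }
> }
> {code}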

--
This message was sent by Atlassian JIRA
(v6.4.14#64029)