allisonwang-db commented on code in PR #49471:
URL: https://github.com/apache/spark/pull/49471#discussion_r1985588345


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala:
##########
@@ -1675,6 +1676,86 @@ class SessionCatalog(
     }
   }
 
+  /**
+   * Constructs a SQL table function plan.
+   * This function should be invoked with the captured SQL configs from the 
function.
+   *
+   * Example SQL table function:
+   *
+   *   CREATE FUNCTION foo(x INT) RETURNS TABLE(a INT) RETURN SELECT x + 1 AS 
x1
+   *
+   * Query:
+   *
+   *   SELECT * FROM foo(1);
+   *
+   * Plan:
+   *
+   *   Project [CAST(x1 AS INT) AS a]
+   *   +- LateralJoin lateral-subquery [x]
+   *      :  +- Project [(outer(x) + 1) AS x1]
+   *      :     +- OneRowRelation
+   *      +- Project [CAST(1 AS INT) AS x]
+   *         +- OneRowRelation
+   */
+  def makeSQLTableFunctionPlan(
+      name: String,
+      function: SQLFunction,
+      input: Seq[Expression],
+      outputAttrs: Seq[Attribute]): LogicalPlan = {
+    assert(function.isTableFunc)
+    val funcName = function.name.funcName
+    val inputParam = function.inputParam
+    val returnParam = function.getTableFuncReturnCols
+    val (_, query) = function.getExpressionAndQuery(parser, isTableFunc = true)
+    assert(query.isDefined)
+
+    // Check function arguments
+    val paramSize = inputParam.map(_.size).getOrElse(0)
+    if (input.size > paramSize) {
+      throw QueryCompilationErrors.wrongNumArgsError(
+        name, paramSize.toString, input.size)
+    }
+
+    val body = if (inputParam.isDefined) {
+      val param = inputParam.get
+      // Attributes referencing the input parameters inside the function can 
use the
+      // function name as a qualifier.
+      val qualifier = Seq(funcName)
+      val paddedInput = input ++
+        param.takeRight(paramSize - input.size).map { p =>

Review Comment:
   This `defaultRearrange` is only invoked when there are named arguments. 
The `rearrangeArguments` method will simply return the input expressions if there are no 
named arguments:
   
https://github.com/apache/spark/blob/1c14b06f8224d0f5db072bdeabb1f36d7359b05a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala#L1792-L1796



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala:
##########
@@ -1675,6 +1676,86 @@ class SessionCatalog(
     }
   }
 
+  /**
+   * Constructs a SQL table function plan.
+   * This function should be invoked with the captured SQL configs from the 
function.
+   *
+   * Example SQL table function:
+   *
+   *   CREATE FUNCTION foo(x INT) RETURNS TABLE(a INT) RETURN SELECT x + 1 AS 
x1
+   *
+   * Query:
+   *
+   *   SELECT * FROM foo(1);
+   *
+   * Plan:
+   *
+   *   Project [CAST(x1 AS INT) AS a]
+   *   +- LateralJoin lateral-subquery [x]
+   *      :  +- Project [(outer(x) + 1) AS x1]
+   *      :     +- OneRowRelation
+   *      +- Project [CAST(1 AS INT) AS x]
+   *         +- OneRowRelation
+   */
+  def makeSQLTableFunctionPlan(
+      name: String,
+      function: SQLFunction,
+      input: Seq[Expression],
+      outputAttrs: Seq[Attribute]): LogicalPlan = {
+    assert(function.isTableFunc)
+    val funcName = function.name.funcName
+    val inputParam = function.inputParam
+    val returnParam = function.getTableFuncReturnCols
+    val (_, query) = function.getExpressionAndQuery(parser, isTableFunc = true)
+    assert(query.isDefined)
+
+    // Check function arguments
+    val paramSize = inputParam.map(_.size).getOrElse(0)
+    if (input.size > paramSize) {
+      throw QueryCompilationErrors.wrongNumArgsError(
+        name, paramSize.toString, input.size)
+    }
+
+    val body = if (inputParam.isDefined) {
+      val param = inputParam.get
+      // Attributes referencing the input parameters inside the function can 
use the
+      // function name as a qualifier.
+      val qualifier = Seq(funcName)
+      val paddedInput = input ++
+        param.takeRight(paramSize - input.size).map { p =>

Review Comment:
   Actually I realized that default arguments are not properly supported: 
   ```
   scala> spark.sql("""create function foo(x int default 0) returns int return 
x""")
   org.apache.spark.sql.catalyst.parser.ParseException:
   [PARSE_SYNTAX_ERROR] Syntax error at or near 'default'. SQLSTATE: 42601
   ```
   I will create a separate PR to make this work.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to