vladimirg-db commented on code in PR #49351:
URL: https://github.com/apache/spark/pull/49351#discussion_r1919754080
##########
sql/core/src/main/scala/org/apache/spark/sql/execution/ExplainUtils.scala:
##########

@@ -297,6 +297,7 @@ object ExplainUtils extends AdaptiveSparkPlanHelper {
   /**
    * Generate detailed field string with different format based on type of input value
    */
+  // TODO(nemanja.petro...@databricks.com) Delete method as it is duplicated in QueryPlan.scala.

Review Comment:
   Are you planning to do that here or in the follow-up PR?


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala:
##########

@@ -4311,4 +4311,12 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with CompilationErrors {
       origin = origin
     )
   }
+
+  def recursiveCteError(error: String): Throwable = {

Review Comment:
   How about
   ```suggestion
     def invalidRecursiveCteError(error: String): Throwable = {
   ```


##########
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveRecursiveCTESuite.scala:
##########

@@ -0,0 +1,144 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.spark.sql.catalyst.analysis
+
+import org.apache.spark.sql.catalyst.analysis.SimpleAnalyzer.ResolveSubqueryColumnAliases
+import org.apache.spark.sql.catalyst.expressions.{Alias, Literal}
+import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.catalyst.rules.RuleExecutor
+
+class ResolveRecursiveCTESuite extends AnalysisTest {
+  // Motivated by:
+  // WITH RECURSIVE t AS (SELECT 1 UNION ALL SELECT * FROM t) SELECT * FROM t;
+  test("ResolveWithCTE rule on recursive CTE without UnresolvedSubqueryColumnAliases") {
+    // The analyzer will repeat ResolveWithCTE rule twice.
+    val rules = Seq(ResolveWithCTE, ResolveWithCTE)
+    val analyzer = new RuleExecutor[LogicalPlan] {
+      override val batches = Seq(Batch("Resolution", Once, rules: _*))
+    }
+    // Since cteDef IDs need to be the same, cteDef for each case will be created by copying
+    // this one with its child replaced.
+    val cteDef = CTERelationDef(OneRowRelation())
+
+    def getBeforePlan(cteDef: CTERelationDef): LogicalPlan = {
+      val anchor = Project(Seq(Alias(Literal(1), "1")()), OneRowRelation())
+
+      val recursionPart = Project(anchor.output,

Review Comment:
   The way the tree is decomposed here (using local variables) is very hard to read. Maybe we can actually have a solid tree structure instead of this decomposition?
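To illustrate the kind of "solid tree structure" being asked for, here is a minimal sketch that builds the test plan as one nested expression rather than a chain of local variables. It is not the fixture from the PR: the spelling of the recursive self-reference via `UnresolvedRelation`/`UnresolvedStar` is an assumption about the pre-analysis plan shape, used only to show the inlined style.

```scala
import org.apache.spark.sql.catalyst.analysis.{UnresolvedRelation, UnresolvedStar}
import org.apache.spark.sql.catalyst.expressions.{Alias, Literal}
import org.apache.spark.sql.catalyst.plans.logical._

// Shared definition so the CTE id stays stable when building "before" and "after" plans.
val cteDef = CTERelationDef(OneRowRelation())

// WITH RECURSIVE t AS (SELECT 1 UNION ALL SELECT * FROM t) SELECT * FROM t,
// written as a single nested tree instead of intermediate local variables.
val beforePlan: LogicalPlan = WithCTE(
  // Main query: SELECT * FROM t
  Project(Seq(UnresolvedStar(None)), UnresolvedRelation(Seq("t"))),
  Seq(cteDef.copy(child =
    Union(
      // Anchor: SELECT 1
      Project(Seq(Alias(Literal(1), "1")()), OneRowRelation()),
      // Recursive step: SELECT * FROM t (assumed pre-analysis self-reference)
      Project(Seq(UnresolvedStar(None)), UnresolvedRelation(Seq("t")))))))
```

Whether the recursive leg should be an `UnresolvedRelation` or a `CTERelationRef` at this point depends on how the PR wires up resolution, so treat the shape above as illustrative only; the point is that the whole plan reads top-down as one tree.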