mihailotim-db commented on code in PR #50555:
URL: https://github.com/apache/spark/pull/50555#discussion_r2036834585


##########
sql/core/src/test/scala/org/apache/spark/sql/analysis/resolver/MetadataResolverSuite.scala:
##########
@@ -22,18 +22,12 @@ import scala.collection.mutable
 import org.apache.spark.sql.QueryTest
 import org.apache.spark.sql.catalyst.{AliasIdentifier, TableIdentifier}
 import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
-import org.apache.spark.sql.catalyst.analysis.resolver.{
-  AnalyzerBridgeState,
-  BridgedRelationMetadataProvider,
-  MetadataResolver,
-  RelationId,
-  Resolver
-}
+import org.apache.spark.sql.catalyst.analysis.resolver.{AnalyzerBridgeState, BridgedRelationId, BridgedRelationMetadataProvider, MetadataResolver, RelationId, Resolver}
 import org.apache.spark.sql.catalyst.catalog.UnresolvedCatalogRelation
 import org.apache.spark.sql.catalyst.expressions.{Expression, PlanExpression}
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
 import org.apache.spark.sql.execution.datasources.{FileResolver, HadoopFsRelation, LogicalRelation}
-import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
+import org.apache.spark.sql.test.{SQLTestUtils, SharedSparkSession}

Review Comment:
   Unnecessary change



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/ViewResolver.scala:
##########
@@ -90,6 +90,14 @@ class ViewResolver(resolver: Resolver, catalogManager: CatalogManager)
     unresolvedView.copy(child = resolvedChild)
   }
 
+  def getCatalogAndNamespace: Option[Seq[String]] =

Review Comment:
   Personal pref, but I think getters should be right below class fields
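
   For illustration, a minimal self-contained sketch of the suggested layout (the class and field here are placeholders, not the actual `ViewResolver` members):
   ```scala
   // Hypothetical sketch: the getter sits immediately below the field it exposes.
   class ExampleResolver {
     private var catalogAndNamespace: Option[Seq[String]] = None

     def getCatalogAndNamespace: Option[Seq[String]] = catalogAndNamespace

     // The resolution methods would follow further down in the class body.
     def resolve(nameParts: Seq[String]): Unit = {
       catalogAndNamespace = Some(nameParts.dropRight(1))
     }
   }
   ```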



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/BridgedRelationMetadataProvider.scala:
##########
@@ -29,13 +29,13 @@ import org.apache.spark.sql.connector.catalog.CatalogManager
 * fixed-point [[Analyzer]] run. We strictly rely on the [[AnalyzerBridgeState]] to avoid any
  * blocking calls here.
  */
-class BridgedRelationMetadataProvider(
+case class BridgedRelationMetadataProvider(

Review Comment:
   Why are we changing this to case class?
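
   For context, a simplified sketch (not the real `BridgedRelationMetadataProvider`) of what switching to a case class changes: structural equality, `hashCode`, `copy` and an `apply` constructor are derived automatically:
   ```scala
   // Simplified illustration, not Spark's actual provider classes.
   class PlainProvider(val state: Map[String, String])
   case class CaseProvider(state: Map[String, String])

   object EqualitySemantics extends App {
     val state = Map("db.tbl" -> "relation")
     println(new PlainProvider(state) == new PlainProvider(state)) // false: reference equality
     println(CaseProvider(state) == CaseProvider(state))           // true: structural equality
   }
   ```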



##########
sql/core/src/test/scala/org/apache/spark/sql/analysis/resolver/MetadataResolverSuite.scala:
##########
@@ -22,18 +22,12 @@ import scala.collection.mutable
 import org.apache.spark.sql.QueryTest
 import org.apache.spark.sql.catalyst.{AliasIdentifier, TableIdentifier}
 import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
-import org.apache.spark.sql.catalyst.analysis.resolver.{
-  AnalyzerBridgeState,
-  BridgedRelationMetadataProvider,
-  MetadataResolver,
-  RelationId,
-  Resolver
-}
+import org.apache.spark.sql.catalyst.analysis.resolver.{AnalyzerBridgeState, BridgedRelationId, BridgedRelationMetadataProvider, MetadataResolver, RelationId, Resolver}

Review Comment:
   Unnecessary change



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/ViewResolver.scala:
##########
@@ -136,8 +145,12 @@ class ViewResolver(resolver: Resolver, catalogManager: CatalogManager)
 * @param nestedViewDepth Current nested view depth. Cannot exceed the `maxNestedViewDepth`.
 * @param maxNestedViewDepth Maximum allowed nested view depth. Configured in the upper context
  *   based on [[SQLConf.MAX_NESTED_VIEW_DEPTH]].
+ * @param catalogAndNamespace Catalog and namespace under which the [[View]] was created
  */
-case class ViewResolutionContext(nestedViewDepth: Int, maxNestedViewDepth: Int) {
+case class ViewResolutionContext(
+    nestedViewDepth: Int,
+    maxNestedViewDepth: Int,
+    catalogAndNamespace: Seq[String] = Seq()) {

Review Comment:
   Should this be a mandatory field? Or at least an `Option`? An empty Seq doesn't feel right.
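
   A sketch of the `Option`-based variant this comment suggests (simplified and renamed, not the actual `ViewResolutionContext` definition):
   ```scala
   // Hypothetical simplification: an Option instead of a default empty Seq,
   // so "no enclosing view" is represented explicitly.
   case class ViewContextSketch(
       nestedViewDepth: Int,
       maxNestedViewDepth: Int,
       catalogAndNamespace: Option[Seq[String]] = None)

   object ViewContextSketchUsage extends App {
     val topLevel = ViewContextSketch(nestedViewDepth = 0, maxNestedViewDepth = 100)
     val insideView = topLevel.copy(
       nestedViewDepth = 1,
       catalogAndNamespace = Some(Seq("spark_catalog", "default")))
     println(insideView)
   }
   ```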



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/ViewResolver.scala:
##########
@@ -136,8 +145,12 @@ class ViewResolver(resolver: Resolver, catalogManager: CatalogManager)
 * @param nestedViewDepth Current nested view depth. Cannot exceed the `maxNestedViewDepth`.
 * @param maxNestedViewDepth Maximum allowed nested view depth. Configured in the upper context
  *   based on [[SQLConf.MAX_NESTED_VIEW_DEPTH]].
+ * @param catalogAndNamespace Catalog and namespace under which the [[View]] was created

Review Comment:
   ```suggestion
   * @param catalogAndNamespace Catalog and namespace under which the [[View]] was created.
   ```



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/Resolver.scala:
##########
@@ -154,6 +148,23 @@ class Resolver(
     resolve(planAfterSubstitution)
   }
 
+  def recreateRelationMetadataProvider(analyzerBridgeState: Option[AnalyzerBridgeState]) : Unit =

Review Comment:
   This method should be private and should have a small scaladoc
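
   A self-contained sketch of the suggested shape (the types and the body here are stand-ins, not the PR's actual implementation):
   ```scala
   // Stand-in types: BridgeState and the metadata field are placeholders,
   // not Spark's AnalyzerBridgeState or the real relation metadata provider.
   class ResolverSketch {
     private case class BridgeState(relations: Map[String, String])

     private var relationMetadata: Map[String, String] = Map.empty

     /** Rebuilds the relation metadata from the bridge state, if one is available. */
     private def recreateRelationMetadataProvider(bridgeState: Option[BridgeState]): Unit = {
       relationMetadata = bridgeState.map(_.relations).getOrElse(Map.empty)
     }
   }
   ```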



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
