cloud-fan commented on code in PR #49466:
URL: https://github.com/apache/spark/pull/49466#discussion_r1912789362


##########
sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/DescribeTableSuite.scala:
##########
@@ -488,107 +391,64 @@ class DescribeTableSuite extends DescribeTableSuiteBase with CommandSuiteBase {
         bucket_columns = Some(Nil),
         sort_columns = Some(Nil),
         comment = Some("table_comment"),
-        serde_library = Some("org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe"),
-        inputformat = Some("org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat"),
-        outputformat = Some("org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat"),
+        serde_library = if (getProvider() == "hive") {
+          Some("org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe")
+        } else {
+          None
+        },
         table_properties = None
       )
-      if (getProvider() == "hive") {
-        assert(
-          expectedOutput ==
-            parsedOutput.copy(location = None, created_time = None, owner = None)
-        )
-      } else {
-        assert(
-          expectedOutput.copy(inputformat = None, outputformat = None, serde_library = None) ==
-            parsedOutput.copy(location = None, created_time = None, owner = None)
-        )
-      }
-    }
-  }
-
-  test("DESCRIBE AS JSON temp view") {
-    withNamespaceAndTable("ns", "table") { t =>
-      withTempView("temp_view") {
-        val tableCreationStr =
-          s"""
-             |CREATE TABLE $t (id INT, name STRING, created_at TIMESTAMP)
-             |  USING parquet
-             |  OPTIONS ('compression' 'snappy')
-             |  CLUSTERED BY (id, name) SORTED BY (created_at) INTO 4 BUCKETS
-             |  COMMENT 'test temp view'
-             |  TBLPROPERTIES ('parquet.encryption' = 'true')
-             |""".stripMargin
-        spark.sql(tableCreationStr)
-        spark.sql(s"CREATE TEMPORARY VIEW temp_view AS SELECT * FROM $t")
-        val descriptionDf = spark.sql(s"DESCRIBE EXTENDED temp_view AS JSON")
-        val firstRow = descriptionDf.select("json_metadata").head()
-        val jsonValue = firstRow.getString(0)
-        val parsedOutput = parse(jsonValue).extract[DescribeTableJson]
-
-        val expectedOutput = DescribeTableJson(
-          columns = Some(List(
-            TableColumn("id", Type("integer")),
-            TableColumn("name", Type("string")),
-            TableColumn("created_at", Type("timestamp_ltz"))
-          ))
-        )
-
-        assert(expectedOutput == parsedOutput)
-      }
+      assert(parsedOutput.location.isDefined)
+      assert(expectedOutput == parsedOutput.copy(location = None))
     }
   }
 
-  test("DESCRIBE AS JSON persistent view") {
-    withNamespaceAndTable("ns", "table") { t =>
-      withView("view") {
-        val tableCreationStr =
-          s"""
-             |CREATE TABLE $t (id INT, name STRING, created_at TIMESTAMP)
-             |  USING parquet
-             |  OPTIONS ('compression' 'snappy')
-             |  CLUSTERED BY (id, name) SORTED BY (created_at) INTO 4 BUCKETS
-             |  COMMENT 'test temp view'
-             |  TBLPROPERTIES ('parquet.encryption' = 'true')
-             |""".stripMargin
-        spark.sql(tableCreationStr)
-        spark.sql(s"CREATE VIEW view AS SELECT * FROM $t")
-        val descriptionDf = spark.sql(s"DESCRIBE EXTENDED view AS JSON")
-        val firstRow = descriptionDf.select("json_metadata").head()
-        val jsonValue = firstRow.getString(0)
-        val parsedOutput = parse(jsonValue).extract[DescribeTableJson]
-
-        val expectedOutput = DescribeTableJson(
-          table_name = Some("view"),
-          catalog_name = Some("spark_catalog"),
-          namespace = Some(List("default")),
-          schema_name = Some("default"),
-          columns = Some(List(
-            TableColumn("id", Type("integer")),
-            TableColumn("name", Type("string")),
-            TableColumn("created_at", Type("timestamp_ltz"))
-          )),
-          serde_library = Some("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"),
-          inputformat = Some("org.apache.hadoop.mapred.SequenceFileInputFormat"),
-          outputformat = Some("org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"),
-          storage_properties = Some(Map("serialization.format" -> "1")),
-          last_access = Some("UNKNOWN"),
-          created_by = Some(s"Spark $SPARK_VERSION"),
-          `type` = Some("VIEW"),
-          view_text = Some("SELECT * FROM spark_catalog.ns.table"),
-          view_original_text = Some("SELECT * FROM spark_catalog.ns.table"),
-          view_schema_mode = Some("COMPENSATION"),
-          view_catalog_and_namespace = Some("spark_catalog.default"),
-          view_query_output_columns = Some(List("id", "name", "created_at"))
-        )
+  test("DESCRIBE AS JSON view") {
+    Seq(true, false).foreach { isTemp =>
+      withNamespaceAndTable("ns", "table") { t =>
+        withView("view") {
+          val tableCreationStr =
+            s"""
+               |CREATE TABLE $t (id INT, name STRING, created_at TIMESTAMP)
+               |  USING parquet
+               |  OPTIONS ('compression' 'snappy')
+               |  CLUSTERED BY (id, name) SORTED BY (created_at) INTO 4 BUCKETS
+               |  COMMENT 'test temp view'
+               |  TBLPROPERTIES ('parquet.encryption' = 'true')
+               |""".stripMargin
+          spark.sql(tableCreationStr)
+          val viewType = if (isTemp) "TEMP VIEW" else "VIEW"
+          spark.sql(s"CREATE $viewType view AS SELECT * FROM $t")
+          val descriptionDf = spark.sql(s"DESCRIBE EXTENDED view AS JSON")
+          val firstRow = descriptionDf.select("json_metadata").head()
+          val jsonValue = firstRow.getString(0)
+          val parsedOutput = parse(jsonValue).extract[DescribeTableJson]
+
+          val expectedOutput = DescribeTableJson(
+            table_name = Some("view"),
+            catalog_name = if (isTemp) Some("system") else Some("spark_catalog"),
+            namespace = if (isTemp) Some(List("session")) else Some(List("default")),
+            schema_name = if (isTemp) Some("session") else Some("default"),
+            columns = Some(List(
+              TableColumn("id", Type("integer")),
+              TableColumn("name", Type("string")),
+              TableColumn("created_at", Type("timestamp_ltz"))
+            )),
+            last_access = Some("UNKNOWN"),
+            created_by = Some(s"Spark $SPARK_VERSION"),
+            `type` = Some("VIEW"),
+            view_text = Some("SELECT * FROM spark_catalog.ns.table"),
+            view_original_text = Some("SELECT * FROM spark_catalog.ns.table"),
+            // TODO: this is unexpected and temp view should also use COMPENSATION mode.

Review Comment:
   Will fix it in a follow-up
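   
   For context, a minimal sketch of reading back the `view_schema_mode` field that the TODO refers to. This assumes a running SparkSession named `spark`, json4s on the classpath (as in the suite above), and a view named `view` created as in the test; it is illustrative only, not part of the PR:
   
   ```scala
   import org.json4s._
   import org.json4s.jackson.JsonMethods.parse
   
   implicit val formats: Formats = DefaultFormats
   
   // DESCRIBE ... AS JSON returns a single row with one string column, json_metadata.
   val json = spark.sql("DESCRIBE EXTENDED view AS JSON")
     .select("json_metadata")
     .head()
     .getString(0)
   
   // Persistent views currently report COMPENSATION here; per the TODO above,
   // temp views should report the same once the follow-up lands.
   val viewSchemaMode = (parse(json) \ "view_schema_mode").extractOpt[String]
   ```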



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

