This is an automated email from the ASF dual-hosted git repository.

leesf pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 7a6eb0f6e1 [HUDI-4309] Spark3.2 custom parser should not throw exception (#5947)
7a6eb0f6e1 is described below

commit 7a6eb0f6e1cb14890506713d169b203ba87d6f42
Author: cxzl25 <[email protected]>
AuthorDate: Mon Jun 27 09:37:23 2022 +0800

    [HUDI-4309] Spark3.2 custom parser should not throw exception (#5947)
---
 .../org/apache/spark/sql/hudi/TestTimeTravelTable.scala     | 12 ++++++++++++
 .../spark/sql/parser/HoodieSpark3_2ExtendedSqlParser.scala  | 13 ++++++++++---
 2 files changed, 22 insertions(+), 3 deletions(-)

diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestTimeTravelTable.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestTimeTravelTable.scala
index ce0f17c3f5..4a871a82dd 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestTimeTravelTable.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestTimeTravelTable.scala
@@ -238,4 +238,16 @@ class TestTimeTravelTable extends HoodieSparkSqlTestBase {
       }
     }
   }
+
+  test("Test Unsupported syntax can be parsed") {
+    if (HoodieSparkUtils.gteqSpark3_2) {
+      checkAnswer("select 1 distribute by 1")(Seq(1))
+      withTempDir { dir =>
+        val path = dir.toURI.getPath
+        spark.sql(s"insert overwrite local directory '$path' using parquet select 1")
+        // Requires enable hive support, so didn't test it
+        // spark.sql(s"insert overwrite local directory '$path' stored as orc select 1")
+      }
+    }
+  }
 }
diff --git a/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/parser/HoodieSpark3_2ExtendedSqlParser.scala b/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/parser/HoodieSpark3_2ExtendedSqlParser.scala
index 59ef8dfe09..2b8931ace3 100644
--- a/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/parser/HoodieSpark3_2ExtendedSqlParser.scala
+++ b/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/parser/HoodieSpark3_2ExtendedSqlParser.scala
@@ -32,6 +32,8 @@ import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
 import org.apache.spark.sql.types._
 import org.apache.spark.sql.{AnalysisException, SparkSession}
 
+import scala.util.control.NonFatal
+
 class HoodieSpark3_2ExtendedSqlParser(session: SparkSession, delegate: ParserInterface)
   extends ParserInterface with Logging {
 
@@ -39,9 +41,14 @@ class HoodieSpark3_2ExtendedSqlParser(session: SparkSession, delegate: ParserInt
   private lazy val builder = new HoodieSpark3_2ExtendedSqlAstBuilder(conf, delegate)
 
   override def parsePlan(sqlText: String): LogicalPlan = parse(sqlText) { parser =>
-    builder.visit(parser.singleStatement()) match {
-      case plan: LogicalPlan => plan
-      case _=> delegate.parsePlan(sqlText)
+    try {
+      builder.visit(parser.singleStatement()) match {
+        case plan: LogicalPlan => plan
+        case _=> delegate.parsePlan(sqlText)
+      }
+    } catch {
+      case NonFatal(_) =>
+        delegate.parsePlan(sqlText)
     }
   }
 

Reply via email to