HeartSaVioR commented on code in PR #50015:
URL: https://github.com/apache/spark/pull/50015#discussion_r1963061299


##########
sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/ForeachBatchSinkSuite.scala:
##########
@@ -215,6 +215,46 @@ class ForeachBatchSinkSuite extends StreamTest {
     assert(ex.getCause == sparkEx)
   }
 
+  test("SPARK-51265 Running eagerlyExecuteCommand with streaming source in 
foreachBatch " +
+    "should give an user facing error") {
+    val mem = MemoryStream[Int]
+    val ds = mem.toDS().map(_ + 1)
+
+    def foreachBatchFn(df: Dataset[Int], batchId: Long): Unit = {
+      withTempView("param", "s") {
+        df.createOrReplaceTempView("param")
+        val streamDf = df.sparkSession.readStream.format("rate").load()
+        streamDf.createOrReplaceTempView("s")
+        withTable("output") {
+          val ex = intercept[AnalysisException] {
+            // Creates a table from streaming source with batch query. This should fail.
+            df.sparkSession.sql("CREATE TABLE output AS SELECT * FROM s")

Review Comment:
   Same reasoning as above, because this is a batch query.
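
   For context, a minimal standalone sketch of the expected behavior (spark-shell style, outside foreachBatch; the `rate` source and view name `s` mirror the test, while `spark` and the table name `output` are assumed here): a streaming source registered as a temp view cannot be consumed by an eagerly executed batch command such as CTAS, and the analyzer should reject it with an AnalysisException, typically along the lines of "Queries with streaming sources must be executed with writeStream.start()".

   ```scala
   // Hypothetical sketch, not the PR's test code.
   import org.apache.spark.sql.{AnalysisException, SparkSession}

   val spark = SparkSession.builder().master("local[2]").appName("sketch").getOrCreate()

   // A streaming source registered as a temp view, as the test does with "s".
   spark.readStream.format("rate").load().createOrReplaceTempView("s")

   // CTAS is an eagerly executed *batch* command, so referencing the streaming
   // view here should be rejected by the analyzer rather than silently planned.
   try {
     spark.sql("CREATE TABLE output AS SELECT * FROM s")
   } catch {
     case e: AnalysisException => println(s"Rejected as expected: ${e.getMessage}")
   } finally {
     spark.stop()
   }
   ```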



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

