XuQianJin-Stars commented on code in PR #5620:
URL: https://github.com/apache/hudi/pull/5620#discussion_r875842634


##########
hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCompactionProcedure.scala:
##########
@@ -98,25 +124,37 @@ class TestCompactionProcedure extends 
HoodieSparkSqlTestBase {
       spark.sql(s"insert into $tableName values(3, 'a3', 10, 1000)")
       spark.sql(s"update $tableName set price = 11 where id = 1")
 
-      spark.sql(s"call run_compaction(op => 'run', path => 
'${tmp.getCanonicalPath}')")
+      spark.sql(s"call run_compaction(op => 'run', path => 
'${tmp.getCanonicalPath}')").show()
       checkAnswer(s"select id, name, price, ts from $tableName order by id")(
         Seq(1, "a1", 11.0, 1000),
         Seq(2, "a2", 10.0, 1000),
         Seq(3, "a3", 10.0, 1000)
       )
       assertResult(0)(spark.sql(s"call show_compaction(path => 
'${tmp.getCanonicalPath}')").collect().length)
-      // schedule compaction first
+
       spark.sql(s"update $tableName set price = 12 where id = 1")
-      spark.sql(s"call run_compaction(op=> 'schedule', path => 
'${tmp.getCanonicalPath}')")
 
-      // schedule compaction second
+      // Schedule the first compaction
+      val firstResult = spark.sql(s"call run_compaction(op=> 'schedule', path 
=> '${tmp.getCanonicalPath}')")
+        .collect()
+        .map(row => Seq(row.getString(0), row.getInt(1), row.getString(2)))
+
       spark.sql(s"update $tableName set price = 12 where id = 2")
-      spark.sql(s"call run_compaction(op => 'schedule', path => 
'${tmp.getCanonicalPath}')")
 
-      // show compaction
-      assertResult(2)(spark.sql(s"call show_compaction(path => 
'${tmp.getCanonicalPath}')").collect().length)
-      // run compaction for all the scheduled compaction
-      spark.sql(s"call run_compaction(op => 'run', path => 
'${tmp.getCanonicalPath}')")
+      // Schedule the second compaction
+      val secondResult = spark.sql(s"call run_compaction(op => 'schedule', 
path => '${tmp.getCanonicalPath}')")
+        .collect()
+        .map(row => Seq(row.getString(0), row.getInt(1), row.getString(2)))
+
+      assertResult(1)(firstResult.length)
+      assertResult(1)(secondResult.length)
+      checkAnswer(s"call show_compaction(path => '${tmp.getCanonicalPath}')")(
+        firstResult(0),
+        secondResult(0)
+      )
+
+      // Run compaction for all the scheduled compaction
+      spark.sql(s"call run_compaction(op => 'run', path => 
'${tmp.getCanonicalPath}')").show()

Review Comment:
   Ditto



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to the mailing list to respond to this message.