yihua commented on a change in pull request #4078:
URL: https://github.com/apache/hudi/pull/4078#discussion_r780750369



##########
File path: hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/io/TestHoodieTimelineArchiveLog.java
##########
@@ -174,6 +205,82 @@ public void testArchiveTableWithArchival(boolean enableMetadata) throws Exceptio
     }
   }
 
+  @Test
+  public void testArchiveTableWithArchivalSmallFileMergeEnableRecoverFromDeleteFailed() throws Exception {
+    HoodieWriteConfig writeConfig = initTestTableAndGetWriteConfig(false, 2, 3, 2, true, 3, 209715200);
+    for (int i = 1; i < 8; i++) {
+      testTable.doWriteOperation("0000000" + i, WriteOperationType.UPSERT, i == 1 ? Arrays.asList("p1", "p2") : Collections.emptyList(), Arrays.asList("p1", "p2"), 2);
+      archiveAndGetCommitsList(writeConfig);
+    }
+
+    HoodieTable table = HoodieSparkTable.create(writeConfig, context, metaClient);
+    HoodieTimelineArchiveLog archiveLog = new HoodieTimelineArchiveLog(writeConfig, table);
+    FileStatus[] fsStatuses = metaClient.getFs().globStatus(
+        new Path(metaClient.getArchivePath() + "/.commits_.archive*"));
+    List<String> candidateFiles = Arrays.stream(fsStatuses).map(fs -> fs.getPath().toString()).collect(Collectors.toList());
+
+    archiveLog.reOpenWriter();
+
+    archiveLog.buildArchiveMergePlan(candidateFiles, new Path(metaClient.getArchivePath(), archiveLog.getMergeArchivePlanName()), ".commits_.archive.3_1-0-1");
+    archiveLog.mergeArchiveFiles(Arrays.stream(fsStatuses).collect(Collectors.toList()));
+    archiveLog.reOpenWriter();
+
+    metaClient.getFs().delete(fsStatuses[0].getPath());
+
+
+    HoodieActiveTimeline rawActiveTimeline = new HoodieActiveTimeline(metaClient, false);
+    HoodieArchivedTimeline archivedTimeLine = metaClient.getArchivedTimeline().reload();
+    assertEquals(7 * 3, rawActiveTimeline.countInstants() + archivedTimeLine.countInstants());
+
+
+    for (int i = 1; i < 10; i++) {
+      testTable.doWriteOperation("1000000" + i, WriteOperationType.UPSERT, i == 1 ? Arrays.asList("p1", "p2") : Collections.emptyList(), Arrays.asList("p1", "p2"), 2);
+      archiveAndGetCommitsList(writeConfig);
+    }
+
+    HoodieActiveTimeline rawActiveTimeline1 = new HoodieActiveTimeline(metaClient, false);
+    HoodieArchivedTimeline archivedTimeLine1 = metaClient.getArchivedTimeline().reload();
+
+    assertEquals(16 * 3, archivedTimeLine1.countInstants() + rawActiveTimeline1.countInstants());
+  }
+
+  @Test
+  public void testArchiveTableWithArchivalSmallFileMergeEnableRecoverFromMergeFailed() throws Exception {
+    HoodieWriteConfig writeConfig = initTestTableAndGetWriteConfig(false, 2, 3, 2, true, 3, 209715200);
+    for (int i = 1; i < 8; i++) {
+      testTable.doWriteOperation("0000000" + i, WriteOperationType.UPSERT, i == 1 ? Arrays.asList("p1", "p2") : Collections.emptyList(), Arrays.asList("p1", "p2"), 2);
+      archiveAndGetCommitsList(writeConfig);
+    }
+
+    HoodieTable table = HoodieSparkTable.create(writeConfig, context, metaClient);
+    HoodieTimelineArchiveLog archiveLog = new HoodieTimelineArchiveLog(writeConfig, table);
+    FileStatus[] fsStatuses = metaClient.getFs().globStatus(
+        new Path(metaClient.getArchivePath() + "/.commits_.archive*"));
+    List<String> candidateFiles = Arrays.stream(fsStatuses).map(fs -> fs.getPath().toString()).collect(Collectors.toList());
+    archiveLog.reOpenWriter();
+
+

Review comment:
       nit: extra empty line
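
       A minimal sketch of the fix (an inferred suggestion, not the author's change; the nit appears to point at the doubled trailing blank line after the last `archiveLog.reOpenWriter();` in the quoted hunk), keeping a single blank line:

           +    List<String> candidateFiles = Arrays.stream(fsStatuses).map(fs -> fs.getPath().toString()).collect(Collectors.toList());
           +    archiveLog.reOpenWriter();
           +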

##########
File path: hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/io/TestHoodieTimelineArchiveLog.java
##########
@@ -174,6 +205,82 @@ public void testArchiveTableWithArchival(boolean enableMetadata) throws Exceptio
     }
   }
 
+  @Test
+  public void testArchiveTableWithArchivalSmallFileMergeEnableRecoverFromDeleteFailed() throws Exception {
+    HoodieWriteConfig writeConfig = initTestTableAndGetWriteConfig(false, 2, 3, 2, true, 3, 209715200);
+    for (int i = 1; i < 8; i++) {
+      testTable.doWriteOperation("0000000" + i, WriteOperationType.UPSERT, i == 1 ? Arrays.asList("p1", "p2") : Collections.emptyList(), Arrays.asList("p1", "p2"), 2);
+      archiveAndGetCommitsList(writeConfig);
+    }
+
+    HoodieTable table = HoodieSparkTable.create(writeConfig, context, metaClient);
+    HoodieTimelineArchiveLog archiveLog = new HoodieTimelineArchiveLog(writeConfig, table);
+    FileStatus[] fsStatuses = metaClient.getFs().globStatus(
+        new Path(metaClient.getArchivePath() + "/.commits_.archive*"));
+    List<String> candidateFiles = Arrays.stream(fsStatuses).map(fs -> fs.getPath().toString()).collect(Collectors.toList());
+
+    archiveLog.reOpenWriter();
+
+    archiveLog.buildArchiveMergePlan(candidateFiles, new Path(metaClient.getArchivePath(), archiveLog.getMergeArchivePlanName()), ".commits_.archive.3_1-0-1");
+    archiveLog.mergeArchiveFiles(Arrays.stream(fsStatuses).collect(Collectors.toList()));
+    archiveLog.reOpenWriter();
+
+    metaClient.getFs().delete(fsStatuses[0].getPath());
+
+

Review comment:
       nit: extra empty line
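
       Likewise, a sketch of the fix here (inferred: collapse the doubled blank line after the delete call to a single one; the first hunk shows the timeline re-reads follow it):

           +    metaClient.getFs().delete(fsStatuses[0].getPath());
           +
           +    HoodieActiveTimeline rawActiveTimeline = new HoodieActiveTimeline(metaClient, false);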




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@hudi.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

