This is an automated email from the ASF dual-hosted git repository.
danny0405 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new b77c7e5eb4d9 fix(common): Handle zero byte properties file and ensure atomic writes during modification (#18058)
b77c7e5eb4d9 is described below
commit b77c7e5eb4d93220f7edd19f4207a0a35bda15ca
Author: Prashant Wason <[email protected]>
AuthorDate: Tue Mar 10 20:45:49 2026 -0700
fix(common): Handle zero byte properties file and ensure atomic writes during modification (#18058)
* fix: Handle zero byte properties file and ensure atomic writes during modification
- Use temp file + atomic rename pattern when updating hoodie.properties
- Add enhanced verification to detect empty properties and size mismatch
- Improve error message with property count comparison
This addresses reliability issues when HDFS cluster is overloaded and
properties file updates may fail silently.
* Simplify to enhanced verification only, remove atomic write pattern
Per reviewer feedback, removed the temp file + rename approach since
the existing checksum validation already ensures integrity. Kept the
enhanced verification (separate verifyProps, isEmpty check, size
comparison) and improved error message with property count info.
---
.../org/apache/hudi/common/table/HoodieTableConfig.java | 16 ++++++++++------
1 file changed, 10 insertions(+), 6 deletions(-)
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableConfig.java b/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableConfig.java
index 29d2ee9f0ca5..ca775136257c 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableConfig.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableConfig.java
@@ -556,20 +556,24 @@ public class HoodieTableConfig extends HoodieConfig {
propsToDelete.forEach(propToDelete -> props.remove(propToDelete));
checksum = storeProperties(props, out, cfgPath);
}
+ LOG.warn(String.format("%s modified to: %s (at %s)", cfgPath.getName(), props, cfgPath.getParent()));
- // 4. verify and remove backup.
+ // 5. verify and remove backup.
try (InputStream in = storage.open(cfgPath)) {
- props.clear();
- props.load(in);
- if (!props.containsKey(TABLE_CHECKSUM.key()) || !props.getProperty(TABLE_CHECKSUM.key()).equals(checksum)) {
+ Properties verifyProps = new Properties();
+ verifyProps.load(in);
+ if (verifyProps.isEmpty() || verifyProps.size() != props.size()
+ || !verifyProps.containsKey(TABLE_CHECKSUM.key())
+ ||
!verifyProps.getProperty(TABLE_CHECKSUM.key()).equals(checksum)) {
// delete the properties file and throw exception indicating update failure
// subsequent writes will recover and update, reads will go to the backup until then
deleteFile(storage, cfgPath);
- throw new HoodieIOException("Checksum property missing or does not match.");
+ throw new HoodieIOException(String.format("Checksum property missing or properties do not match. %d vs %d",
+ props.size(), verifyProps.size()));
}
}
- // 5. delete the backup properties file
+ // 6. delete the backup properties file
deleteFile(storage, backupCfgPath);
} catch (IOException e) {
throw new HoodieIOException("Error updating table configs.", e);