This is an automated email from the ASF dual-hosted git repository.
bhavanisudha pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/asf-site by this push:
new 799f1f5fea [DOCS] Add externalized configs
799f1f5fea is described below
commit 799f1f5fea5e90fc8152f3f8bd2654cec59649dd
Author: Bhavani Sudha Saktheeswaran <[email protected]>
AuthorDate: Thu Jun 9 00:02:42 2022 -0700
[DOCS] Add externalized configs
---
.../org/apache/hudi/utils/HoodieConfigDocGenerator.java | 15 ++++++++++++++-
1 file changed, 14 insertions(+), 1 deletion(-)
diff --git a/hudi-utils/src/main/java/org/apache/hudi/utils/HoodieConfigDocGenerator.java b/hudi-utils/src/main/java/org/apache/hudi/utils/HoodieConfigDocGenerator.java
index 48ce43072c..1533599e93 100644
--- a/hudi-utils/src/main/java/org/apache/hudi/utils/HoodieConfigDocGenerator.java
+++ b/hudi-utils/src/main/java/org/apache/hudi/utils/HoodieConfigDocGenerator.java
@@ -71,6 +71,11 @@ public class HoodieConfigDocGenerator {
"At a high level, you can control behaviour at few levels.";
private static final String FLINK_CONFIG_CLASS_NAME = "org.apache.hudi.configuration.FlinkOptions";
private static final String CONFIG_PATH = "/tmp/configurations.md";
+  private static final String EXTERNALIZED_CONFIGS = "## Externalized Config File\n" +
+      "Instead of directly passing configuration settings to every Hudi job, you can also centrally set them in a configuration\n" +
+      "file `hudi-default.conf`. By default, Hudi would load the configuration file under `/etc/hudi/conf` directory. You can\n" +
+      "specify a different configuration directory location by setting the `HUDI_CONF_DIR` environment variable. This can be\n" +
+      "useful for uniformly enforcing repeated configs (like Hive sync or write/index tuning), across your entire data lake.";
public static void main(String[] args) {
Reflections reflections = new Reflections("org.apache.hudi");
@@ -127,6 +132,7 @@ public class HoodieConfigDocGenerator {
try {
LOG.info("Generating markdown file");
mainDocBuilder.append(contentTableBuilder.build()).append(DOUBLE_NEWLINE);
+ mainDocBuilder.append(generateExternalizedConfigs());
contentMap.forEach((k, v) -> mainDocBuilder.append(v));
Files.write(Paths.get(CONFIG_PATH), mainDocBuilder.toString().getBytes(StandardCharsets.UTF_8));
} catch (IOException e) {
@@ -147,7 +153,7 @@ public class HoodieConfigDocGenerator {
*/
LocalDateTime now = LocalDateTime.now();
builder.append(new HorizontalRule()).append(NEWLINE)
- .append("title: ").append("Configurations").append(NEWLINE)
+ .append("title: ").append("All Configurations").append(NEWLINE)
.append("keywords: [ configurations, default, flink options, spark, configs, parameters ] ").append(NEWLINE)
.append("permalink: /docs/configurations.html").append(NEWLINE)
.append("summary: " + SUMMARY).append(NEWLINE)
@@ -178,6 +184,13 @@ public class HoodieConfigDocGenerator {
return contentMap;
}
+ private static StringBuilder generateExternalizedConfigs() {
+ StringBuilder stringBuilder = new StringBuilder();
+ stringBuilder.append(EXTERNALIZED_CONFIGS);
+ stringBuilder.append(DOUBLE_NEWLINE);
+ return stringBuilder;
+ }
+
private static void populateSparkConfigs(Map<ConfigGroups.Names, StringBuilder> contentMap) {
StringBuilder configParamsBuilder = contentMap.get(ConfigGroups.Names.SPARK_DATASOURCE);