This is an automated email from the ASF dual-hosted git repository.
yihua pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new 31f9c153fa8 [MINOR] Improve config docs to avoid build errors (#12444)
31f9c153fa8 is described below
commit 31f9c153fa893f688bc2d5bca534195a69988035
Author: Y Ethan Guo <[email protected]>
AuthorDate: Sat Dec 7 10:26:09 2024 -0800
[MINOR] Improve config docs to avoid build errors (#12444)
---
.../org/apache/hudi/utilities/config/CloudSourceConfig.java | 10 ++++++----
.../hudi/utilities/config/S3EventsHoodieIncrSourceConfig.java | 4 ++--
2 files changed, 8 insertions(+), 6 deletions(-)
diff --git
a/hudi-utilities/src/main/java/org/apache/hudi/utilities/config/CloudSourceConfig.java
b/hudi-utilities/src/main/java/org/apache/hudi/utilities/config/CloudSourceConfig.java
index 396309cebfb..70be2e8acce 100644
---
a/hudi-utilities/src/main/java/org/apache/hudi/utilities/config/CloudSourceConfig.java
+++
b/hudi-utilities/src/main/java/org/apache/hudi/utilities/config/CloudSourceConfig.java
@@ -117,7 +117,7 @@ public class CloudSourceConfig extends HoodieConfig {
.withAlternatives(DELTA_STREAMER_CONFIG_PREFIX +
"source.cloud.data.datasource.options")
.markAdvanced()
.withDocumentation("A JSON string passed to the Spark DataFrameReader
while loading the dataset. "
- + "Example:
hoodie.streamer.gcp.spark.datasource.options={\"header\":\"true\",\"encoding\":\"UTF-8\"}\n");
+ + "Example:
`hoodie.streamer.gcp.spark.datasource.options={\"header\":\"true\",\"encoding\":\"UTF-8\"}`\n");
public static final ConfigProperty<String> CLOUD_DATAFILE_EXTENSION =
ConfigProperty
.key(STREAMER_CONFIG_PREFIX + "source.cloud.data.select.file.extension")
@@ -146,9 +146,11 @@ public class CloudSourceConfig extends HoodieConfig {
.defaultValue(false)
.markAdvanced()
.sinceVersion("0.14.1")
- .withDocumentation("Boolean value for specifying path format in load
args of spark.read.format(\"..\").load(\"a.xml,b.xml,c.xml\"),\n"
- + " * set true if path format needs to be comma separated string
value, if false it's passed as array of strings like\n"
- + " * spark.read.format(\"..\").load(new
String[]{a.xml,b.xml,c.xml})");
+ .withDocumentation("Boolean value for specifying path format in load
args of "
+ + "`spark.read.format(\"..\").load(\"a.xml,b.xml,c.xml\")`. "
+ + "Set true if path format needs to be comma separated string value;
"
+ + "false if it's passed as array of strings like "
+ + "`spark.read.format(\"..\").load(new
String[]{a.xml,b.xml,c.xml})`");
public static final ConfigProperty<String> SOURCE_MAX_BYTES_PER_PARTITION =
ConfigProperty
.key(STREAMER_CONFIG_PREFIX + "source.cloud.data.partition.max.size")
diff --git
a/hudi-utilities/src/main/java/org/apache/hudi/utilities/config/S3EventsHoodieIncrSourceConfig.java
b/hudi-utilities/src/main/java/org/apache/hudi/utilities/config/S3EventsHoodieIncrSourceConfig.java
index 58a7bc957d3..031d2cb41fc 100644
---
a/hudi-utilities/src/main/java/org/apache/hudi/utilities/config/S3EventsHoodieIncrSourceConfig.java
+++
b/hudi-utilities/src/main/java/org/apache/hudi/utilities/config/S3EventsHoodieIncrSourceConfig.java
@@ -89,6 +89,6 @@ public class S3EventsHoodieIncrSourceConfig extends
HoodieConfig {
.noDefaultValue()
.withAlternatives(DELTA_STREAMER_CONFIG_PREFIX +
"source.s3incr.spark.datasource.options")
.markAdvanced()
- .withDocumentation("Json string, passed to the reader while loading
dataset. Example Hudi Streamer conf \n"
- + " --hoodie-conf
hoodie.streamer.source.s3incr.spark.datasource.options={\"header\":\"true\",\"encoding\":\"UTF-8\"}");
+ .withDocumentation("Json string, passed to the reader while loading
dataset. Example Hudi Streamer conf "
+ + "`--hoodie-conf
hoodie.streamer.source.s3incr.spark.datasource.options={\"header\":\"true\",\"encoding\":\"UTF-8\"}`");
}