rdblue commented on a change in pull request #7: Allow custom hadoop properties to be loaded in the Spark data source
URL: https://github.com/apache/incubator-iceberg/pull/7#discussion_r240717846
########## File path: spark/src/main/java/com/netflix/iceberg/spark/source/IcebergSource.java ##########
@@ -89,30 +92,51 @@ public DataSourceReader createReader(DataSourceOptions options) {
           .toUpperCase(Locale.ENGLISH));
     }
 
-    return Optional.of(new Writer(table, lazyConf(), format));
+    return Optional.of(new Writer(table, conf, format));
   }
 
-  protected Table findTable(DataSourceOptions options) {
+  protected Table findTable(DataSourceOptions options, Configuration conf) {
     Optional<String> location = options.get("path");
     Preconditions.checkArgument(location.isPresent(),
         "Cannot open table without a location: path is not set");
 
-    HadoopTables tables = new HadoopTables(lazyConf());
+    HadoopTables tables = new HadoopTables(conf);
 
     return tables.load(location.get());
   }
 
-  protected SparkSession lazySparkSession() {
+  private SparkSession lazySparkSession() {
     if (lazySpark == null) {
       this.lazySpark = SparkSession.builder().getOrCreate();
     }
     return lazySpark;
   }
 
-  protected Configuration lazyConf() {
+  private Configuration lazyBaseConf() {
     if (lazyConf == null) {
       this.lazyConf = lazySparkSession().sparkContext().hadoopConfiguration();
     }
     return lazyConf;
   }
+
+  private Table getTableAndResolveHadoopConfiguration(
+      DataSourceOptions options, Configuration conf) {
+    // Overwrite configurations from the Spark Context with configurations from the options.
+    mergeIcebergHadoopConfs(conf, options.asMap(), true);
+    Table table = findTable(options, conf);
+    // Set confs from table properties, but do not overwrite options from the Spark Context with
+    // configurations from the table
+    mergeIcebergHadoopConfs(conf, table.properties(), false);
+    // Re-overwrite values set in options and table properties but were not in the environment.
+    mergeIcebergHadoopConfs(conf, options.asMap(), true);
+    return table;
+  }
+
+  private static void mergeIcebergHadoopConfs(
+      Configuration baseConf, Map<String, String> options, boolean overwrite) {
+    options.keySet().stream()
+        .filter(key -> key.startsWith("iceberg.hadoop"))
+        .filter(key -> overwrite || baseConf.get(key.replaceFirst("iceberg.hadoop", "")) == null)

Review comment:
   Doesn't `overwrite` discard all keys? I don't think it matters now, because it isn't needed anymore.
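For reference, the precedence the three merge calls appear to aim at is: data source options first, then table properties for any Hadoop keys that are still unset, on top of whatever the Spark Context's Hadoop configuration already carries. Below is a minimal standalone sketch of that merge step, not the PR's code: the class and method names are hypothetical, and it assumes the prefix is stripped including its trailing dot (note that the diff's `replaceFirst("iceberg.hadoop", "")` would leave a leading '.' in the Hadoop key).

import java.util.Map;
import org.apache.hadoop.conf.Configuration;

class IcebergHadoopConfMerge {
  // Hypothetical constant; the trailing dot keeps stripped keys from starting with '.'
  private static final String PREFIX = "iceberg.hadoop.";

  // Copies iceberg.hadoop.*-prefixed entries into baseConf with the prefix stripped.
  // With overwrite=false, keys already set in baseConf (e.g. from the Spark Context
  // or an earlier merge pass) are left untouched.
  static void merge(Configuration baseConf, Map<String, String> options, boolean overwrite) {
    options.forEach((key, value) -> {
      if (key.startsWith(PREFIX)) {
        String hadoopKey = key.substring(PREFIX.length());
        if (overwrite || baseConf.get(hadoopKey) == null) {
          baseConf.set(hadoopKey, value);
        }
      }
    });
  }
}

Under that reading, the non-overwriting second pass means table properties can never clobber values from the options or the environment, which is also why the final overwrite pass with the options looks redundant; that is the behavior the review comment is probing.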