YannByron commented on a change in pull request #4714: URL: https://github.com/apache/hudi/pull/4714#discussion_r798478214
########## File path: hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java ########## @@ -759,6 +780,11 @@ public PropertyBuilder fromMetaClient(HoodieTableMetaClient metaClient) { public PropertyBuilder fromProperties(Properties properties) { HoodieConfig hoodieConfig = new HoodieConfig(properties); + + for (String key : hoodieConfig.getProps().stringPropertyNames()) { Review comment: you're right. We should set the key-value only if the key configured in `hoodieConfig` is in `PERSISTED_CONFIG_LIST`. ########## File path: hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestMORDataSource.scala ########## @@ -770,4 +775,79 @@ class TestMORDataSource extends HoodieClientTestBase { .load(basePath + "/*/*/*/*") assertEquals(numRecords - numRecordsToDelete, snapshotDF2.count()) } + + /** + * This tests the case that query by with a specified partition condition on hudi table which is + * different between the value of the partition field and the actual partition path, + * like hudi table written by TimestampBasedKeyGenerator. + * + * For MOR table, test all the three query modes. 
+ */ + @Test + def testPrunePartitionForTimestampBasedKeyGenerator(): Unit = { + val options = commonOpts ++ Map( + "hoodie.compact.inline" -> "false", + DataSourceWriteOptions.TABLE_TYPE.key -> DataSourceWriteOptions.MOR_TABLE_TYPE_OPT_VAL, + DataSourceWriteOptions.KEYGENERATOR_CLASS_NAME.key -> "org.apache.hudi.keygen.TimestampBasedKeyGenerator", + Config.TIMESTAMP_TYPE_FIELD_PROP -> "DATE_STRING", + Config.TIMESTAMP_OUTPUT_DATE_FORMAT_PROP -> "yyyy/MM/dd", + Config.TIMESTAMP_TIMEZONE_FORMAT_PROP -> "GMT+8:00", + Config.TIMESTAMP_INPUT_DATE_FORMAT_PROP -> "yyyy-MM-dd" + ) + + val dataGen1 = new HoodieTestDataGenerator(Array("2022-01-01")) + val records1 = recordsToStrings(dataGen1.generateInserts("001", 50)).toList + val inputDF1 = spark.read.json(spark.sparkContext.parallelize(records1, 2)) + inputDF1.write.format("org.apache.hudi") + .options(options) + .mode(SaveMode.Overwrite) + .save(basePath) + metaClient = HoodieTableMetaClient.builder() + .setBasePath(basePath) + .setConf(spark.sessionState.newHadoopConf) + .build() + val commit1Time = metaClient.getActiveTimeline.lastInstant().get().getTimestamp + + val dataGen2 = new HoodieTestDataGenerator(Array("2022-01-02")) + val records2 = recordsToStrings(dataGen2.generateInserts("002", 50)).toList Review comment: ok -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: commits-unsubscr...@hudi.apache.org For queries about this service, please contact Infrastructure at: us...@infra.apache.org