[ https://issues.apache.org/jira/browse/HUDI-3879?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
ASF GitHub Bot updated HUDI-3879:
---------------------------------
    Labels: pull-request-available  (was: )

Suppress exceptions that are not fatal in HoodieMetadataTableValidator
-----------------------------------------------------------------------

                Key: HUDI-3879
                URL: https://issues.apache.org/jira/browse/HUDI-3879
            Project: Apache Hudi
         Issue Type: Improvement
           Reporter: Ethan Guo
           Assignee: Yue Zhang
           Priority: Blocker
             Labels: pull-request-available
            Fix For: 0.12.0

If there is no partition available yet, only print a warning message and continue, without printing the exception.
{code:java}
org.apache.hudi.exception.HoodieException: Unable to do hoodie metadata table validation in file:/Users/ethan/Work/scripts/mt_rollout_testing/deploy_c_multi_writer/c5_mor_09mt_011mt/test_table
    at org.apache.hudi.utilities.HoodieMetadataTableValidator.run(HoodieMetadataTableValidator.java:364)
    at org.apache.hudi.utilities.HoodieMetadataTableValidator.main(HoodieMetadataTableValidator.java:345)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
    at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:955)
    at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
    at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
    at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
    at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1043)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1052)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.IllegalArgumentException: Positive number of partitions required
    at org.apache.spark.rdd.ParallelCollectionRDD$.slice(ParallelCollectionRDD.scala:118)
    at org.apache.spark.rdd.ParallelCollectionRDD.getPartitions(ParallelCollectionRDD.scala:96)
    at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:300)
    at scala.Option.getOrElse(Option.scala:189)
    at org.apache.spark.rdd.RDD.partitions(RDD.scala:296)
    at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
    at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:300)
    at scala.Option.getOrElse(Option.scala:189)
    at org.apache.spark.rdd.RDD.partitions(RDD.scala:296)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2279)
    at org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:1030)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:414)
    at org.apache.spark.rdd.RDD.collect(RDD.scala:1029)
    at org.apache.spark.api.java.JavaRDDLike.collect(JavaRDDLike.scala:362)
    at org.apache.spark.api.java.JavaRDDLike.collect$(JavaRDDLike.scala:361)
    at org.apache.spark.api.java.AbstractJavaRDDLike.collect(JavaRDDLike.scala:45)
    at org.apache.hudi.data.HoodieJavaRDD.collectAsList(HoodieJavaRDD.java:157)
    at org.apache.hudi.utilities.HoodieMetadataTableValidator.doMetadataTableValidation(HoodieMetadataTableValidator.java:451)
    at org.apache.hudi.utilities.HoodieMetadataTableValidator.doHoodieMetadataTableValidationOnce(HoodieMetadataTableValidator.java:375)
    at org.apache.hudi.utilities.HoodieMetadataTableValidator.run(HoodieMetadataTableValidator.java:361)
    ... 13 more
{code}
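As an illustration of the "warn and continue" handling described above (a minimal sketch, not the change in the linked pull request; the class, method, and parameter names are invented for the example and an SLF4J logger is assumed), the idea is to skip the validation round when the partition listing comes back empty instead of handing an empty collection to Spark, which is what raises the "Positive number of partitions required" failure:
{code:java}
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Illustrative sketch only: shows "warn and continue" for an empty partition
 * list, instead of letting Spark fail while parallelizing an empty collection.
 */
public class EmptyPartitionGuardSketch {

  private static final Logger LOG = LoggerFactory.getLogger(EmptyPartitionGuardSketch.class);

  /**
   * Runs the given validation only if at least one partition exists.
   *
   * @param allPartitions partitions listed from storage (assumed input)
   * @param validation    the validation work to run when partitions exist
   * @return true if the validation ran, false if it was skipped
   */
  public static boolean validateIfPartitionsExist(List<String> allPartitions, Runnable validation) {
    if (allPartitions == null || allPartitions.isEmpty()) {
      // Expected for a brand-new table: log a short warning, no stack trace.
      LOG.warn("No partitions found in the table yet; skipping metadata table validation for this round.");
      return false;
    }
    validation.run();
    return true;
  }
}
{code}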
Suppress the TableNotFoundException if the metadata table is not yet available to read:
{code:java}
22/04/11 17:05:57 WARN HoodieMetadataTableValidator: Metadata table is not available to ready for now,
org.apache.hudi.exception.TableNotFoundException: Hoodie table not found in path file:/Users/ethan/Work/scripts/mt_rollout_testing/deploy_c_multi_writer/c5_mor_09mt_011mt/test_table/.hoodie/metadata/.hoodie
    at org.apache.hudi.exception.TableNotFoundException.checkTableValidity(TableNotFoundException.java:57)
    at org.apache.hudi.common.table.HoodieTableMetaClient.<init>(HoodieTableMetaClient.java:125)
    at org.apache.hudi.common.table.HoodieTableMetaClient.<init>(HoodieTableMetaClient.java:79)
    at org.apache.hudi.common.table.HoodieTableMetaClient$Builder.build(HoodieTableMetaClient.java:670)
    at org.apache.hudi.utilities.HoodieMetadataTableValidator.checkMetadataTableIsAvailable(HoodieMetadataTableValidator.java:473)
    at org.apache.hudi.utilities.HoodieMetadataTableValidator.doMetadataTableValidation(HoodieMetadataTableValidator.java:402)
    at org.apache.hudi.utilities.HoodieMetadataTableValidator.doHoodieMetadataTableValidationOnce(HoodieMetadataTableValidator.java:375)
    at org.apache.hudi.utilities.HoodieMetadataTableValidator.run(HoodieMetadataTableValidator.java:361)
    at org.apache.hudi.utilities.HoodieMetadataTableValidator.main(HoodieMetadataTableValidator.java:345)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
    at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:955)
    at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
    at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
    at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
    at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1043)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1052)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.io.FileNotFoundException: File file:/Users/ethan/Work/scripts/mt_rollout_testing/deploy_c_multi_writer/c5_mor_09mt_011mt/test_table/.hoodie/metadata/.hoodie does not exist
    at org.apache.hadoop.fs.RawLocalFileSystem.deprecatedGetFileStatus(RawLocalFileSystem.java:779)
    at org.apache.hadoop.fs.RawLocalFileSystem.getFileLinkStatusInternal(RawLocalFileSystem.java:1100)
    at org.apache.hadoop.fs.RawLocalFileSystem.getFileStatus(RawLocalFileSystem.java:769)
    at org.apache.hadoop.fs.FilterFileSystem.getFileStatus(FilterFileSystem.java:462)
    at org.apache.hudi.common.fs.HoodieWrapperFileSystem.lambda$getFileStatus$17(HoodieWrapperFileSystem.java:394)
    at org.apache.hudi.common.fs.HoodieWrapperFileSystem.executeFuncWithTimeMetrics(HoodieWrapperFileSystem.java:101)
    at org.apache.hudi.common.fs.HoodieWrapperFileSystem.getFileStatus(HoodieWrapperFileSystem.java:388)
    at org.apache.hudi.exception.TableNotFoundException.checkTableValidity(TableNotFoundException.java:51)
    ... 20 more
{code}
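Likewise, a minimal sketch of downgrading the missing-metadata-table case to a warning (illustrative only; aside from org.apache.hudi.exception.TableNotFoundException, which appears in the stack trace above, the names and the SLF4J logger are assumptions, and the Supplier stands in for the validator's real meta client construction): catch the exception, log a short message without the stack trace, and report the metadata table as unavailable so the caller can skip the comparison for this round.
{code:java}
import java.util.function.Supplier;

import org.apache.hudi.exception.TableNotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Illustrative sketch only: treats TableNotFoundException as "metadata table
 * not ready yet" and downgrades it to a warning instead of rethrowing.
 */
public class MetadataTableAvailabilitySketch {

  private static final Logger LOG = LoggerFactory.getLogger(MetadataTableAvailabilitySketch.class);

  /**
   * @param metaClientFactory stands in for building the meta client on the
   *                          metadata table base path (assumed input)
   * @return true if the metadata table can be read, false if it does not exist yet
   */
  public static boolean isMetadataTableAvailable(Supplier<?> metaClientFactory) {
    try {
      metaClientFactory.get();
      return true;
    } catch (TableNotFoundException e) {
      // Expected while the metadata table is still being bootstrapped:
      // warn briefly without the exception so the log stays readable.
      LOG.warn("Metadata table is not available to read yet; skipping validation against it for this round.");
      return false;
    }
  }
}
{code}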
--
This message was sent by Atlassian Jira
(v8.20.1#820001)