This is an automated email from the ASF dual-hosted git repository.

xushiyan pushed a commit to branch rc3-patched-for-test
in repository https://gitbox.apache.org/repos/asf/hudi.git

commit cbf1a1585ece155b3ba631dfca1eaea28910ad23
Author: Raymond Xu <2701446+xushi...@users.noreply.github.com>
AuthorDate: Thu Apr 21 20:45:22 2022 +0800

    use tail to get VectorizedParquetRecordReader ctor
---
 .../datasources/parquet/Spark32HoodieParquetFileFormat.scala        | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/Spark32HoodieParquetFileFormat.scala b/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/Spark32HoodieParquetFileFormat.scala
index ccd93b6fd3..5d9d2a737d 100644
--- a/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/Spark32HoodieParquetFileFormat.scala
+++ b/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/Spark32HoodieParquetFileFormat.scala
@@ -412,7 +412,11 @@ object Spark32HoodieParquetFileFormat {
   }
 
   private def createVectorizedParquetRecordReader(args: Any*): VectorizedParquetRecordReader = {
-    val ctor = classOf[VectorizedParquetRecordReader].getConstructors.head
+    // NOTE: ParquetReadSupport ctor args contain Scala enum, therefore we can't look it
+    //       up by arg types, and have to instead rely on relative order of ctors
+    // NOTE: VectorizedParquetRecordReader has 2 ctors and the one we need is 2nd in the array
+    //       This is a hacky workaround for this patched version of the class.
+    val ctor = classOf[VectorizedParquetRecordReader].getConstructors.tail.head
     ctor.newInstance(args.map(_.asInstanceOf[AnyRef]): _*)
       .asInstanceOf[VectorizedParquetRecordReader]
   }
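
For context, here is a sketch of a less order-dependent lookup (not part of this commit, and the helper/exception wording below is illustrative only): since getConstructors gives no guarantee about declaration order, one could instead pick the constructor whose parameter count matches the number of supplied args, assuming the two ctors of the Spark 3.2 reader differ in arity.

    import org.apache.spark.sql.execution.datasources.parquet.VectorizedParquetRecordReader

    private def createVectorizedParquetRecordReader(args: Any*): VectorizedParquetRecordReader = {
      // Pick the ctor by arity instead of by relative position in getConstructors,
      // since the JVM does not guarantee any particular ordering of that array.
      val ctor = classOf[VectorizedParquetRecordReader].getConstructors
        .find(_.getParameterCount == args.length)
        .getOrElse(throw new IllegalStateException(
          s"No VectorizedParquetRecordReader ctor taking ${args.length} args"))
      ctor.newInstance(args.map(_.asInstanceOf[AnyRef]): _*)
        .asInstanceOf[VectorizedParquetRecordReader]
    }

Whether this is preferable to the positional pick above depends on the two ctors never sharing an arity, which is an assumption rather than something verified here.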
