[ 
https://issues.apache.org/jira/browse/HUDI-4532?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

jimmyz closed HUDI-4532.
------------------------
    Resolution: Duplicate

> Got Exception when running 'call run_clean(table => xx)'
> --------------------------------------------------------
>
>                 Key: HUDI-4532
>                 URL: https://issues.apache.org/jira/browse/HUDI-4532
>             Project: Apache Hudi
>          Issue Type: Bug
>          Components: spark-sql
>            Reporter: jimmyz
>            Assignee: jimmyz
>            Priority: Major
>              Labels: pull-request-available
>
> {code:java}
> Caused by: java.lang.ArrayIndexOutOfBoundsException: 0  
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.expressions.GenericInternalRow.genericGet(rows.scala:201)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.expressions.BaseGenericInternalRow.getAs(rows.scala:35)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.expressions.BaseGenericInternalRow.isNullAt(rows.scala:36)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.expressions.BaseGenericInternalRow.isNullAt$(rows.scala:36)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.expressions.GenericInternalRow.isNullAt(rows.scala:195)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.InternalRow$.$anonfun$getAccessor$16(InternalRow.scala:152)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.InternalRow$.$anonfun$getAccessor$16$adapted(InternalRow.scala:151)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.expressions.BoundReference.eval(BoundAttribute.scala:41)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.expressions.UnaryExpression.eval(Expression.scala:472)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.expressions.Alias.eval(namedExpressions.scala:160)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.expressions.InterpretedMutableProjection.apply(InterpretedMutableProjection.scala:97)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation$$anonfun$apply$19.$anonfun$applyOrElse$71(Optimizer.scala:1591)
>   
> 2022-08-03 20:05:49 INFO      at 
> scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)  
> 2022-08-03 20:05:49 INFO      at 
> scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)  
> 2022-08-03 20:05:49 INFO      at 
> scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)  
> 2022-08-03 20:05:49 INFO      at 
> scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)  
> 2022-08-03 20:05:49 INFO      at 
> scala.collection.TraversableLike.map(TraversableLike.scala:238)  
> 2022-08-03 20:05:49 INFO      at 
> scala.collection.TraversableLike.map$(TraversableLike.scala:231)  
> 2022-08-03 20:05:49 INFO      at 
> scala.collection.AbstractTraversable.map(Traversable.scala:108)  
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation$$anonfun$apply$19.applyOrElse(Optimizer.scala:1591)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation$$anonfun$apply$19.applyOrElse(Optimizer.scala:1586)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$1(TreeNode.scala:318)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:74)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:318)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:171)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:169)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:323)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:408)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:244)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:406)  
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:359)  
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:323)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:171)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:169)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:323)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:408)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:244)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:406)  
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:359)  
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:323)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:171)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:169)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:307)  
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation$.apply(Optimizer.scala:1586)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation$.apply(Optimizer.scala:1585)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$2(RuleExecutor.scala:216)
>   
> 2022-08-03 20:05:49 INFO      at 
> scala.collection.IndexedSeqOptimized.foldLeft(IndexedSeqOptimized.scala:60)  
> 2022-08-03 20:05:49 INFO      at 
> scala.collection.IndexedSeqOptimized.foldLeft$(IndexedSeqOptimized.scala:68)  
> 2022-08-03 20:05:49 INFO      at 
> scala.collection.mutable.WrappedArray.foldLeft(WrappedArray.scala:38)  
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:213)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1$adapted(RuleExecutor.scala:205)
>   
> 2022-08-03 20:05:49 INFO      at 
> scala.collection.immutable.List.foreach(List.scala:392)  
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:205)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$executeAndTrack$1(RuleExecutor.scala:183)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:88)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:183)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.QueryExecution.$anonfun$optimizedPlan$1(QueryExecution.scala:87)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:143)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:798)  
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:143)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.QueryExecution.optimizedPlan$lzycompute(QueryExecution.scala:84)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.QueryExecution.optimizedPlan(QueryExecution.scala:84)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.QueryExecution.assertOptimized(QueryExecution.scala:95)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:113)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:110)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.QueryExecution.$anonfun$simpleString$2(QueryExecution.scala:161)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.ExplainUtils$.processPlan(ExplainUtils.scala:115)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.QueryExecution.simpleString(QueryExecution.scala:161)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.QueryExecution.org$apache$spark$sql$execution$QueryExecution$$explainString(QueryExecution.scala:206)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.QueryExecution.explainString(QueryExecution.scala:175)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:172)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:92)
>   
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:798)  
> 2022-08-03 20:05:49 INFO      at 
> org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:66)
>   
> {code}



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

Reply via email to the sender.