CYDL0414 opened a new issue, #6178:
URL: https://github.com/apache/hudi/issues/6178

   Summary: executing a Flink SQL statement (writing to a Hudi table) fails because the code-generated `StreamExecCalc` operator cannot be compiled — Janino's flow analysis recurses until it hits a `java.lang.StackOverflowError` (generated class and full stack trace below).

   /* 1 */
   /* 2 */      public class StreamExecCalc$718 extends 
org.apache.flink.table.runtime.operators.TableStreamOperator
   /* 3 */          implements 
org.apache.flink.streaming.api.operators.OneInputStreamOperator {
   /* 4 */
   /* 5 */        private final Object[] references;
   /* 6 */        
   /* 7 */        private final 
org.apache.flink.table.data.binary.BinaryStringData str$0 = 
org.apache.flink.table.data.binary.BinaryStringData.fromString("_");
   /* 8 */                   
   /* 9 */        private transient 
org.apache.flink.table.runtime.typeutils.StringDataSerializer typeSerializer$2;
   /* 10 */        org.apache.flink.table.data.BoxedWrapperRowData out = new 
org.apache.flink.table.data.BoxedWrapperRowData(358);
   /* 11 */        private final 
org.apache.flink.streaming.runtime.streamrecord.StreamRecord outElement = new 
org.apache.flink.streaming.runtime.streamrecord.StreamRecord(null);
   /* 12 */
   /* 13 */        public StreamExecCalc$718(
   /* 14 */            Object[] references,
   /* 15 */            org.apache.flink.streaming.runtime.tasks.StreamTask task,
   /* 16 */            org.apache.flink.streaming.api.graph.StreamConfig config,
   /* 17 */            org.apache.flink.streaming.api.operators.Output output,
   /* 18 */            
org.apache.flink.streaming.runtime.tasks.ProcessingTimeService 
processingTimeService) throws Exception {
   /* 19 */          this.references = references;
   /* 20 */          typeSerializer$2 = 
(((org.apache.flink.table.runtime.typeutils.StringDataSerializer) 
references[0]));
   /* 21 */          this.setup(task, config, output);
   /* 22 */          if (this instanceof 
org.apache.flink.streaming.api.operators.AbstractStreamOperator) {
   /* 23 */            
((org.apache.flink.streaming.api.operators.AbstractStreamOperator) this)
   /* 24 */              .setProcessingTimeService(processingTimeService);
   /* 25 */          }
   /* 26 */        }
   /* 27 */
   /* 28 */        @Override
   /* 29 */        public void open() throws Exception {
   /* 30 */          super.open();
   /* 31 */          
   /* 32 */        }
   /* 33 */
   /* 34 */        @Override
   /* 35 */        public void 
processElement(org.apache.flink.streaming.runtime.streamrecord.StreamRecord 
element) throws Exception {
   /* 36 */          org.apache.flink.table.data.RowData in1 = 
(org.apache.flink.table.data.RowData) element.getValue();
   /* 37 */          
   /* 38 */          org.apache.flink.table.data.binary.BinaryStringData 
field$1;
   /* 39 */          boolean isNull$1;
   /* 40 */          org.apache.flink.table.data.binary.BinaryStringData 
field$3;
   /* 41 */          org.apache.flink.table.data.binary.BinaryStringData 
field$4;
   /* 42 */          boolean isNull$4;
   /* 43 */          org.apache.flink.table.data.binary.BinaryStringData 
field$5;
   /* 44 */          org.apache.flink.table.data.binary.BinaryStringData 
field$6;
   /* 45 */          boolean isNull$6;
   /* 46 */          org.apache.flink.table.data.binary.BinaryStringData 
field$7;
   /* 47 */          boolean isNull$8;
   /* 48 */          org.apache.flink.table.data.binary.BinaryStringData 
result$9;
   /* 49 */          org.apache.flink.table.data.binary.BinaryStringData 
field$10;
   /* 50 */          boolean isNull$10;
   
   Exception in thread "main" org.apache.flink.table.api.TableException: Failed 
to execute sql
        at 
org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:777)
        at 
org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:742)
        at 
org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:856)
        at 
org.apache.flink.table.api.internal.TableEnvironmentImpl.executeSql(TableEnvironmentImpl.java:730)
        at 
cn.cy.app.FlinkHudiWideTableDriver.main(FlinkHudiWideTableDriver.java:82)
   Caused by: org.apache.flink.util.FlinkRuntimeException: 
org.apache.flink.api.common.InvalidProgramException: Table program cannot be 
compiled. This is a bug. Please file an issue.
        at 
org.apache.flink.table.runtime.generated.CompileUtils.compile(CompileUtils.java:76)
        at 
org.apache.flink.table.runtime.generated.GeneratedClass.compile(GeneratedClass.java:77)
        at 
org.apache.flink.table.runtime.generated.GeneratedClass.getClass(GeneratedClass.java:95)
        at 
org.apache.flink.table.runtime.operators.CodeGenOperatorFactory.getStreamOperatorClass(CodeGenOperatorFactory.java:51)
        at 
org.apache.flink.streaming.api.graph.StreamingJobGraphGenerator.preValidate(StreamingJobGraphGenerator.java:249)
        at 
org.apache.flink.streaming.api.graph.StreamingJobGraphGenerator.createJobGraph(StreamingJobGraphGenerator.java:159)
        at 
org.apache.flink.streaming.api.graph.StreamingJobGraphGenerator.createJobGraph(StreamingJobGraphGenerator.java:114)
        at 
org.apache.flink.streaming.api.graph.StreamGraph.getJobGraph(StreamGraph.java:959)
        at 
org.apache.flink.client.StreamGraphTranslator.translateToJobGraph(StreamGraphTranslator.java:50)
        at 
org.apache.flink.client.FlinkPipelineTranslationUtil.getJobGraph(FlinkPipelineTranslationUtil.java:39)
        at 
org.apache.flink.client.deployment.executors.PipelineExecutorUtils.getJobGraph(PipelineExecutorUtils.java:56)
        at 
org.apache.flink.client.deployment.executors.LocalExecutor.getJobGraph(LocalExecutor.java:104)
        at 
org.apache.flink.client.deployment.executors.LocalExecutor.execute(LocalExecutor.java:82)
        at 
org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.executeAsync(StreamExecutionEnvironment.java:1956)
        at 
org.apache.flink.table.planner.delegation.ExecutorBase.executeAsync(ExecutorBase.java:55)
        at 
org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:759)
        ... 4 more
   Caused by: 
org.apache.flink.shaded.guava18.com.google.common.util.concurrent.UncheckedExecutionException:
 org.apache.flink.api.common.InvalidProgramException: Table program cannot be 
compiled. This is a bug. Please file an issue.
        at 
org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2203)
        at 
org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache.get(LocalCache.java:3937)
        at 
org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4739)
        at 
org.apache.flink.table.runtime.generated.CompileUtils.compile(CompileUtils.java:74)
        ... 19 more
   Caused by: org.apache.flink.api.common.InvalidProgramException: Table 
program cannot be compiled. This is a bug. Please file an issue.
        at 
org.apache.flink.table.runtime.generated.CompileUtils.doCompile(CompileUtils.java:89)
        at 
org.apache.flink.table.runtime.generated.CompileUtils.lambda$compile$1(CompileUtils.java:74)
        at 
org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4742)
        at 
org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3527)
        at 
org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2319)
        at 
org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2282)
        at 
org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2197)
        ... 22 more
   Caused by: java.lang.StackOverflowError
        at 
org.codehaus.janino.CodeContext.extract16BitValue(CodeContext.java:734)
        at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:512)
        at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:591)
        at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:591)
        at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:591)
        at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:591)
        at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:591)
        at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:591)
        at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:591)
        at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:591)
        at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:591)
        at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:591)
        at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:591)
        at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:591)
        at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:591)
   Disconnected from the target VM, address: '127.0.0.1:57748', transport: 
'socket'


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to