[ https://issues.apache.org/jira/browse/FLINK-29556?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Luochg updated FLINK-29556:
---------------------------
Description: 
Zeppelin Flink Interpreter

Create some temporary views first:
{code:scala}
%flink
var table = btenv.sqlQuery("""
select a.agentcode,b.upagent
from b_186_LAAGENT a,b_186_LATREE b
where a.agentcode = b.agentcode and b.agentgrade = 'P2'
""");
btenv.dropTemporaryView("b_186_P2")
btenv.createTemporaryView("b_186_P2", table)

table = btenv.sqlQuery("""SELECT _t.* from b_186_P2 _t""");
btenv.dropTemporaryView("b_186_P2_1")
btenv.createTemporaryView("b_186_P2_1", table)

table = btenv.sqlQuery("""SELECT _t.*,(select sum(attrate) from b_186_LAATTENDANCE where agentcode = _t.agentcode and indexcalno = '202209') as ccccc from b_186_P2_1 _t """);
btenv.dropTemporaryView("b_186_P2_2")
btenv.createTemporaryView("b_186_P2_2", table)
{code}

An error occurs when querying the view b_186_P2_2:
{code:java}
%flink.bsql
SELECT * from b_186_P2_2

Fail to run sql command: SELECT * from b_186_P2_2
org.apache.flink.table.api.TableException: implicit type conversion between BIGINT and VARCHAR(2147483647) is not supported on join's condition now
  at org.apache.flink.table.planner.plan.rules.logical.JoinConditionTypeCoerceRule$$anonfun$onMatch$1.apply(JoinConditionTypeCoerceRule.scala:76)
  at org.apache.flink.table.planner.plan.rules.logical.JoinConditionTypeCoerceRule$$anonfun$onMatch$1.apply(JoinConditionTypeCoerceRule.scala:65)
  at scala.collection.Iterator$class.foreach(Iterator.scala:891)
  at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
  at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
  at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
  at org.apache.flink.table.planner.plan.rules.logical.JoinConditionTypeCoerceRule.onMatch(JoinConditionTypeCoerceRule.scala:65)
  at org.apache.calcite.plan.AbstractRelOptPlanner.fireRule(AbstractRelOptPlanner.java:333)
  at org.apache.calcite.plan.hep.HepPlanner.applyRule(HepPlanner.java:542)
  at org.apache.calcite.plan.hep.HepPlanner.applyRules(HepPlanner.java:407)
  at org.apache.calcite.plan.hep.HepPlanner.executeInstruction(HepPlanner.java:243)
  at org.apache.calcite.plan.hep.HepInstruction$RuleInstance.execute(HepInstruction.java:127)
  at org.apache.calcite.plan.hep.HepPlanner.executeProgram(HepPlanner.java:202)
  at org.apache.calcite.plan.hep.HepPlanner.findBestExp(HepPlanner.java:189)
  at org.apache.flink.table.planner.plan.optimize.program.FlinkHepProgram.optimize(FlinkHepProgram.scala:69)
  at org.apache.flink.table.planner.plan.optimize.program.FlinkHepRuleSetProgram.optimize(FlinkHepRuleSetProgram.scala:87)
  at org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram$$anonfun$optimize$1.apply(FlinkChainedProgram.scala:62)
  at org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram$$anonfun$optimize$1.apply(FlinkChainedProgram.scala:58)
  at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
  at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
  at scala.collection.Iterator$class.foreach(Iterator.scala:891)
  at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
  at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
  at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
  at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157)
  at scala.collection.AbstractTraversable.foldLeft(Traversable.scala:104)
  at org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram.optimize(FlinkChainedProgram.scala:57)
  at org.apache.flink.table.planner.plan.optimize.BatchCommonSubGraphBasedOptimizer.optimizeTree(BatchCommonSubGraphBasedOptimizer.scala:87)
  at org.apache.flink.table.planner.plan.optimize.BatchCommonSubGraphBasedOptimizer.org$apache$flink$table$planner$plan$optimize$BatchCommonSubGraphBasedOptimizer$$optimizeBlock(BatchCommonSubGraphBasedOptimizer.scala:58)
  at org.apache.flink.table.planner.plan.optimize.BatchCommonSubGraphBasedOptimizer$$anonfun$doOptimize$1.apply(BatchCommonSubGraphBasedOptimizer.scala:46)
  at org.apache.flink.table.planner.plan.optimize.BatchCommonSubGraphBasedOptimizer$$anonfun$doOptimize$1.apply(BatchCommonSubGraphBasedOptimizer.scala:46)
  at scala.collection.immutable.List.foreach(List.scala:392)
  at org.apache.flink.table.planner.plan.optimize.BatchCommonSubGraphBasedOptimizer.doOptimize(BatchCommonSubGraphBasedOptimizer.scala:46)
  at org.apache.flink.table.planner.plan.optimize.CommonSubGraphBasedOptimizer.optimize(CommonSubGraphBasedOptimizer.scala:77)
  at org.apache.flink.table.planner.delegation.PlannerBase.optimize(PlannerBase.scala:279)
  at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:163)
  at org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:1518)
  at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeQueryOperation(TableEnvironmentImpl.java:791)
  at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:1225)
  at org.apache.flink.table.api.internal.TableImpl.execute(TableImpl.java:577)
  at org.apache.zeppelin.flink.Flink113Shims.collectToList(Flink113Shims.java:227)
  at org.apache.zeppelin.flink.FlinkZeppelinContext.showData(FlinkZeppelinContext.scala:110)
  at org.apache.zeppelin.interpreter.ZeppelinContext.showData(ZeppelinContext.java:67)
  at org.apache.zeppelin.flink.FlinkBatchSqlInterpreter.callInnerSelect(FlinkBatchSqlInterpreter.java:60)
  at org.apache.zeppelin.flink.FlinkSqlInterpreter.callSelect(FlinkSqlInterpreter.java:494)
  at org.apache.zeppelin.flink.FlinkSqlInterpreter.callCommand(FlinkSqlInterpreter.java:257)
  at org.apache.zeppelin.flink.FlinkSqlInterpreter.runSqlList(FlinkSqlInterpreter.java:151)
  at org.apache.zeppelin.flink.FlinkSqlInterpreter.internalInterpret(FlinkSqlInterpreter.java:109)
  at org.apache.zeppelin.interpreter.AbstractInterpreter.interpret(AbstractInterpreter.java:55)
  at org.apache.zeppelin.interpreter.LazyOpenInterpreter.interpret(LazyOpenInterpreter.java:110)
  at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:860)
  at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:752)
  at org.apache.zeppelin.scheduler.Job.run(Job.java:172)
  at org.apache.zeppelin.scheduler.AbstractScheduler.runJob(AbstractScheduler.java:132)
  at org.apache.zeppelin.scheduler.ParallelScheduler.lambda$runJobInScheduler$0(ParallelScheduler.java:46)
  at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
  at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
  at java.lang.Thread.run(Thread.java:750)
{code}
========================================================================
*table b_186_P2:*
!image-2022-10-10-00-47-38-627.png!
*table b_186_P2_1:*
!image-2022-10-10-00-47-58-672.png!
*Error Info:*
*!image-2022-10-10-00-40-03-422.png!*
*Zeppelin Interpreter configuration:*
!screenshot-1.png!
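The exception comes from JoinConditionTypeCoerceRule, and the only construct in b_186_P2_2 that is planned as a new join is the correlated scalar subquery (agentcode = _t.agentcode), so the two agentcode columns most likely resolve to different types, one BIGINT and one VARCHAR. The sketch below is a possible workaround, not a planner fix: it assumes (the attached schema screenshots would confirm this) that agentcode is BIGINT in b_186_LAATTENDANCE and STRING in the derived views, prints the resolved schemas to verify, and then redefines the last view with an explicit CAST so the join condition never needs implicit coercion. Flip the cast to the other side if the types turn out to be reversed.
{code:scala}
%flink
// Confirm where the type mismatch comes from before changing anything.
btenv.from("b_186_LAATTENDANCE").printSchema()
btenv.from("b_186_P2_1").printSchema()

// ASSUMPTION: agentcode is BIGINT in b_186_LAATTENDANCE and STRING in the
// b_186_P2* views; the explicit CAST aligns both sides of the correlated
// predicate so JoinConditionTypeCoerceRule has nothing to coerce.
table = btenv.sqlQuery("""
SELECT _t.*,
       (SELECT SUM(attrate)
        FROM b_186_LAATTENDANCE
        WHERE CAST(agentcode AS STRING) = _t.agentcode
          AND indexcalno = '202209') AS ccccc
FROM b_186_P2_1 _t
""")
btenv.dropTemporaryView("b_186_P2_2")
btenv.createTemporaryView("b_186_P2_2", table)
{code}
The same idea works if the views are defined purely in a %flink.bsql paragraph; what matters is that both sides of the correlated predicate end up with the same type.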
was: (previous revision of the description; same content as the updated description above, lacking only the colon after "Zeppelin Interpreter configuration")
> Flink Sql: implicit type conversion between BIGINT and VARCHAR(2147483647) is not supported on join's condition now
> -------------------------------------------------------------------------------------------------------------------
>
>                 Key: FLINK-29556
>                 URL: https://issues.apache.org/jira/browse/FLINK-29556
>             Project: Flink
>          Issue Type: Bug
>          Components: Table SQL / Planner
>    Affects Versions: 1.13.6
>         Environment: Flink 1.13.6, Apache Zeppelin, Mysql
>            Reporter: Luochg
>            Priority: Blocker
>         Attachments: image-2022-10-10-00-40-03-422.png, image-2022-10-10-00-41-18-313.png, image-2022-10-10-00-42-29-986.png, image-2022-10-10-00-47-38-627.png, image-2022-10-10-00-47-58-672.png, screenshot-1.png
>


--
This message was sent by Atlassian Jira
(v8.20.10#820010)