[ 
https://issues.apache.org/jira/browse/FLINK-11484?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

pj updated FLINK-11484:
-----------------------
    Comment: was deleted

(was:  source code:
{code:java}
// code placeholder
package com.blinktest.main;

import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.table.sinks.TableSink;
import org.apache.flink.table.sinks.csv.CsvTableSink;
import org.apache.flink.types.Row;

import java.io.File;

/**
 * Created by dell on 2019/2/13.
 * ./flink run -m yarn-cluster -ynm BLINKTEST -ys 4 -yn 10 -ytm 5120 -p 40 -c 
com.blinktest.main.BlinkMain ~/blinktest-1.0-SNAPSHOT-jar-with-dependencies.jar
 */
public class BlinkMain {

   public void startApp(){
      try {
         StreamExecutionEnvironment env = 
StreamExecutionEnvironment.getExecutionEnvironment();
         DataStream<Tuple4<String, String, Integer, Integer>> ds1 = 
env.fromElements(
               new Tuple4<String, String, Integer, Integer>("1.1.1.1", 
"1.1.1.2", 13888, 80),
               new Tuple4<String, String, Integer, Integer>("1.1.1.3", 
"1.1.1.4", 13888, 80));
         StreamTableEnvironment tableEnvironment = 
TableEnvironment.getTableEnvironment(env);

         tableEnvironment.registerDataStream("tb_test", ds1, 
"sip,dip,sport,dport,proctime.proctime");

         //假设执行2个sql
         Table tb1 = tableEnvironment.sqlQuery("select sip,count(1) as num from 
tb_test group by sip,tumble(proctime,interval '10' second)");
         Table tb2 = tableEnvironment.sqlQuery("select dip,count(1) as num from 
tb_test group by dip,tumble(proctime,interval '10' second)");

         String path = System.getProperty("java.io.tmpdir");
         String path1 = path + "/1.csv";
         String path2 = path + "/2.csv";

         tb1.writeToSink(new CsvTableSink(
               path1,
               "|",
               1,
               FileSystem.WriteMode.OVERWRITE));
         tb2.writeToSink(new CsvTableSink(
               path2,
               "|",
               1,
               FileSystem.WriteMode.OVERWRITE));

         env.execute("blinkmain");
      } catch (Exception e) {
         e.printStackTrace();
      }
   }

   public static void main(String[] args){
      new BlinkMain().startApp();
   }
}

{code}
You can run this jar on Blink with the command:

./flink run -m yarn-cluster -ynm BLINKTEST -ys 4 -yn 10 -ytm 5120 -p 40 -c com.blinktest.main.BlinkMain ~/blinktest-1.0-SNAPSHOT-jar-with-dependencies.jar

and reproduce the bug.)

> Blink java.util.concurrent.TimeoutException
> -------------------------------------------
>
>                 Key: FLINK-11484
>                 URL: https://issues.apache.org/jira/browse/FLINK-11484
>             Project: Flink
>          Issue Type: Bug
>          Components: Table API & SQL
>    Affects Versions: 1.5.5
>         Environment: The link of blink source code: 
> [github.com/apache/flink/tree/blink|https://github.com/apache/flink/tree/blink]
>            Reporter: pj
>            Priority: Major
>              Labels: blink
>         Attachments: 1.png, code.png, dashboard.png, error.png, 
> image-2019-02-13-10-54-16-880.png
>
>
> *If I run the Blink application on YARN with a parallelism larger than 1.*
> *Following is the command :*
> ./flink run -m yarn-cluster -ynm FLINK_NG_ENGINE_1 -ys 4 -yn 10 -ytm 5120 -p 
> 40 -c XXMain ~/xx.jar
> *Following is the code:*
> {{DataStream outputStream = tableEnv.toAppendStream(curTable, Row.class); 
> outputStream.print();}}
> *All subtasks of the application hang for a long time, and finally the
> {{toAppendStream()}} call throws an exception like the one below:*
> {{org.apache.flink.client.program.ProgramInvocationException: Job failed. 
> (JobID: f5e4f7243d06035202e8fa250c364304) at 
> org.apache.flink.client.program.rest.RestClusterClient.submitJob(RestClusterClient.java:276)
>  at org.apache.flink.client.program.ClusterClient.run(ClusterClient.java:482) 
> at 
> org.apache.flink.streaming.api.environment.StreamContextEnvironment.executeInternal(StreamContextEnvironment.java:85)
>  at 
> org.apache.flink.streaming.api.environment.StreamContextEnvironment.executeInternal(StreamContextEnvironment.java:37)
>  at 
> org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.execute(StreamExecutionEnvironment.java:1893)
>  at com.ngengine.main.KafkaMergeMain.startApp(KafkaMergeMain.java:352) at 
> com.ngengine.main.KafkaMergeMain.main(KafkaMergeMain.java:94) at 
> sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
> at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  at java.lang.reflect.Method.invoke(Method.java:498) at 
> org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:561)
>  at 
> org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:445)
>  at org.apache.flink.client.program.ClusterClient.run(ClusterClient.java:419) 
> at 
> org.apache.flink.client.cli.CliFrontend.executeProgram(CliFrontend.java:786) 
> at org.apache.flink.client.cli.CliFrontend.runProgram(CliFrontend.java:280) 
> at org.apache.flink.client.cli.CliFrontend.run(CliFrontend.java:215) at 
> org.apache.flink.client.cli.CliFrontend.parseParameters(CliFrontend.java:1029)
>  at 
> org.apache.flink.client.cli.CliFrontend.lambda$main$9(CliFrontend.java:1105) 
> at java.security.AccessController.doPrivileged(Native Method) at 
> javax.security.auth.Subject.doAs(Subject.java:422) at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1692)
>  at 
> org.apache.flink.runtime.security.HadoopSecurityContext.runSecured(HadoopSecurityContext.java:41)
>  at org.apache.flink.client.cli.CliFrontend.main(CliFrontend.java:1105) 
> Caused by: java.util.concurrent.TimeoutException at 
> org.apache.flink.runtime.concurrent.FutureUtils$Timeout.run(FutureUtils.java:834)
>  at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) 
> at java.util.concurrent.FutureTask.run(FutureTask.java:266) at 
> java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180)
>  at 
> java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293)
>  at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>  at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>  at java.lang.Thread.run(Thread.java:745)}}{{}}



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

Reply via email to