Dear Roman,

This is my pom.xml file, which is the file from the template project from the 
official Flink website. Only the main class has been changed.

<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied.  See the License for the
specific language governing permissions and limitations
under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>

   <groupId>org.myorg.quickstart</groupId>
   <artifactId>quickstart</artifactId>
   <version>0.1</version>
   <packaging>jar</packaging>

   <name>Flink Quickstart Job</name>
   <url>http://www.myorganization.org</url>

   <repositories>
      <repository>
         <id>apache.snapshots</id>
         <name>Apache Development Snapshot Repository</name>
         
<url>https://repository.apache.org/content/repositories/snapshots/</url>
         <releases>
            <enabled>false</enabled>
         </releases>
         <snapshots>
            <enabled>true</enabled>
         </snapshots>
      </repository>
   </repositories>

   <properties>
      <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
      <flink.version>1.10.0</flink.version>
      <scala.binary.version>2.11</scala.binary.version>
      <scala.version>2.11.12</scala.version>
   </properties>

   <dependencies>
      <!-- Apache Flink dependencies -->
      <!-- These dependencies are provided, because they should not be packaged 
into the JAR file. -->
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-scala_${scala.binary.version}</artifactId>
         <version>${flink.version}</version>
         <scope>provided</scope>
      </dependency>
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-streaming-scala_${scala.binary.version}</artifactId>
         <version>${flink.version}</version>
         <scope>provided</scope>
      </dependency>

      <!-- Scala Library, provided by Flink as well. -->
      <dependency>
         <groupId>org.scala-lang</groupId>
         <artifactId>scala-library</artifactId>
         <version>${scala.version}</version>
         <scope>provided</scope>
      </dependency>

      <!-- Add connector dependencies here. They must be in the default scope 
(compile). -->

      <!-- Example:

      <dependency>
         <groupId>org.apache.flink</groupId>
         
<artifactId>flink-connector-kafka-0.10_${scala.binary.version}</artifactId>
         <version>${flink.version}</version>
      </dependency>
      -->

      <!-- Add logging framework, to produce console output when running in the 
IDE. -->
      <!-- These dependencies are excluded from the application JAR by default. 
-->
      <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-log4j12</artifactId>
         <version>1.7.7</version>
         <scope>runtime</scope>
      </dependency>
      <dependency>
         <groupId>log4j</groupId>
         <artifactId>log4j</artifactId>
         <version>1.2.17</version>
         <scope>runtime</scope>
      </dependency>
   </dependencies>

   <build>
      <plugins>
         <!-- We use the maven-shade plugin to create a fat jar that contains 
all necessary dependencies. -->
         <!-- Change the value of <mainClass>...</mainClass> if your program 
entry point changes. -->
         <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-shade-plugin</artifactId>
            <version>3.1.1</version>
            <executions>
               <!-- Run shade goal on package phase -->
               <execution>
                  <phase>package</phase>
                  <goals>
                     <goal>shade</goal>
                  </goals>
                  <configuration>
                     <artifactSet>
                        <excludes>
                           <exclude>org.apache.flink:force-shading</exclude>
                           <exclude>com.google.code.findbugs:jsr305</exclude>
                           <exclude>org.slf4j:*</exclude>
                           <exclude>log4j:*</exclude>
                        </excludes>
                     </artifactSet>
                     <filters>
                        <filter>
                           <!-- Do not copy the signatures in the META-INF 
folder.
                           Otherwise, this might cause SecurityExceptions when 
using the JAR. -->
                           <artifact>*:*</artifact>
                           <excludes>
                              <exclude>META-INF/*.SF</exclude>
                              <exclude>META-INF/*.DSA</exclude>
                              <exclude>META-INF/*.RSA</exclude>
                           </excludes>
                        </filter>
                     </filters>
                     <transformers>
                        <transformer 
implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                           <mainClass>org.myorg.quickstart.BatchJob</mainClass>
                        </transformer>
                     </transformers>
                  </configuration>
               </execution>
            </executions>
         </plugin>

         <!-- Java Compiler -->
         <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-compiler-plugin</artifactId>
            <version>3.1</version>
            <configuration>
               <source>1.8</source>
               <target>1.8</target>
            </configuration>
         </plugin>

         <!-- Scala Compiler -->
         <plugin>
            <groupId>net.alchim31.maven</groupId>
            <artifactId>scala-maven-plugin</artifactId>
            <version>3.2.2</version>
            <executions>
               <execution>
                  <goals>
                     <goal>compile</goal>
                     <goal>testCompile</goal>
                  </goals>
               </execution>
            </executions>
            <configuration>
               <args>
                  <arg>-nobootcp</arg>
               </args>
            </configuration>
         </plugin>

         <!-- Eclipse Scala Integration -->
         <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-eclipse-plugin</artifactId>
            <version>2.8</version>
            <configuration>
               <downloadSources>true</downloadSources>
               <projectnatures>
                  
<projectnature>org.scala-ide.sdt.core.scalanature</projectnature>
                  <projectnature>org.eclipse.jdt.core.javanature</projectnature>
               </projectnatures>
               <buildcommands>
                  
<buildcommand>org.scala-ide.sdt.core.scalabuilder</buildcommand>
               </buildcommands>
               <classpathContainers>
                  
<classpathContainer>org.scala-ide.sdt.launching.SCALA_CONTAINER</classpathContainer>
                  
<classpathContainer>org.eclipse.jdt.launching.JRE_CONTAINER</classpathContainer>
               </classpathContainers>
               <excludes>
                  <exclude>org.scala-lang:scala-library</exclude>
                  <exclude>org.scala-lang:scala-compiler</exclude>
               </excludes>
               <sourceIncludes>
                  <sourceInclude>**/*.scala</sourceInclude>
                  <sourceInclude>**/*.java</sourceInclude>
               </sourceIncludes>
            </configuration>
         </plugin>
         <plugin>
            <groupId>org.codehaus.mojo</groupId>
            <artifactId>build-helper-maven-plugin</artifactId>
            <version>1.7</version>
            <executions>
               <!-- Add src/main/scala to eclipse build path -->
               <execution>
                  <id>add-source</id>
                  <phase>generate-sources</phase>
                  <goals>
                     <goal>add-source</goal>
                  </goals>
                  <configuration>
                     <sources>
                        <source>src/main/scala</source>
                     </sources>
                  </configuration>
               </execution>
               <!-- Add src/test/scala to eclipse build path -->
               <execution>
                  <id>add-test-source</id>
                  <phase>generate-test-sources</phase>
                  <goals>
                     <goal>add-test-source</goal>
                  </goals>
                  <configuration>
                     <sources>
                        <source>src/test/scala</source>
                     </sources>
                  </configuration>
               </execution>
            </executions>
         </plugin>
      </plugins>
   </build>
</project>

> Am 02.06.2020 um 19:22 schrieb Khachatryan Roman 
> <khachatryan.ro...@gmail.com>:
> 
> Dear Tom,
> 
> This is likely a scala version issue.
> Can you post your pom.xml?
> 
> Regards,
> Roman
> 
> 
> On Tue, Jun 2, 2020 at 6:34 PM Tom Burgert <tom.burg...@gmx.de 
> <mailto:tom.burg...@gmx.de>> wrote:
> Dear all, 
> 
> I am trying to set up flink and after hours I still fail to make a simple 
> program run even though I follow every recommended step in the tutorials.
> 
> My operating system is OSX (therefore everything was installed via brew) and 
> I am using Maven as a build tool. I used the quick start script for scala to 
> set up a new project.
> Then, I only did two things:
> 1) pasted the code from the word count object into the BatchJob.scala file 
> (https://ci.apache.org/projects/flink/flink-docs-release-1.10/dev/batch/ 
> <https://ci.apache.org/projects/flink/flink-docs-release-1.10/dev/batch/>)
> 2) altered the MainClass in the pom.xml file from StreamJob to BatchJob
> 
> Then, I built via "mvn clean package" and ran the .jar file.
> 
> When compiling, I receive the following warning: 
> 
> ————————————————————————————————————————————————————————————————————
> Expected all dependencies to require Scala version: 2.11.12
>  org.scala-lang:scala-reflect:2.11.12 requires scala version: 2.11.12
>  org.apache.flink:flink-scala_2.11:1.10.0 requires scala version: 2.11.12
>  org.apache.flink:flink-scala_2.11:1.10.0 requires scala version: 2.11.12
>  org.scala-lang:scala-compiler:2.11.12 requires scala version: 2.11.12
>  org.scala-lang.modules:scala-xml_2.11:1.0.5 requires scala version: 2.11.7
> Multiple versions of scala libraries detected!
> ————————————————————————————————————————————————————————————————————
> 
> Which is weird, since the pom.xml file should keep all versions the same (at 
> least the structures seems like it) and by playing around with the pom.xml 
> file I could not prevent this warning. Either way, I could not find any 
> information on the internet if the warning might be the reason for the error 
> I am getting when running the jar file.
> 
> The error I get when I run my .jar file is the following:
> 
> ————————————————————————————————————————————————————————————————————
>  The program finished with the following exception:
> 
> org.apache.flink.client.program.ProgramInvocationException: The main method 
> caused an error: org.apache.flink.client.program.ProgramInvocationException: 
> Job failed (JobID: 32d249e2b67ef0ecbc72bd164fe1dfb9)
>       at 
> org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:335)
>       at 
> org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:205)
>       at 
> org.apache.flink.client.ClientUtils.executeProgram(ClientUtils.java:138)
>       at 
> org.apache.flink.client.cli.CliFrontend.executeProgram(CliFrontend.java:662)
>       at org.apache.flink.client.cli.CliFrontend.run(CliFrontend.java:210)
>       at 
> org.apache.flink.client.cli.CliFrontend.parseParameters(CliFrontend.java:893)
>       at 
> org.apache.flink.client.cli.CliFrontend.lambda$main$10(CliFrontend.java:966)
>       at 
> org.apache.flink.runtime.security.NoOpSecurityContext.runSecured(NoOpSecurityContext.java:30)
>       at org.apache.flink.client.cli.CliFrontend.main(CliFrontend.java:966)
> Caused by: java.util.concurrent.ExecutionException: 
> org.apache.flink.client.program.ProgramInvocationException: Job failed 
> (JobID: 32d249e2b67ef0ecbc72bd164fe1dfb9)
>       at 
> java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357)
>       at 
> java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1895)
>       at 
> org.apache.flink.client.program.ContextEnvironment.execute(ContextEnvironment.java:73)
>       at 
> org.apache.flink.api.java.ExecutionEnvironment.execute(ExecutionEnvironment.java:844)
>       at org.apache.flink.api.java.DataSet.collect(DataSet.java:413)
>       at org.apache.flink.api.java.DataSet.print(DataSet.java:1652)
>       at org.apache.flink.api.scala.DataSet.print(DataSet.scala:1864)
>       at org.myorg.quickstart.BatchJob$.main(BatchJob.scala:19)
>       at org.myorg.quickstart.BatchJob.main(BatchJob.scala)
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>       at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:498)
>       at 
> org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:321)
>       ... 8 more
> Caused by: org.apache.flink.client.program.ProgramInvocationException: Job 
> failed (JobID: 32d249e2b67ef0ecbc72bd164fe1dfb9)
>       at 
> org.apache.flink.client.deployment.ClusterClientJobClientAdapter.lambda$null$6(ClusterClientJobClientAdapter.java:112)
>       at 
> java.util.concurrent.CompletableFuture.uniApply(CompletableFuture.java:602)
>       at 
> java.util.concurrent.CompletableFuture$UniApply.tryFire(CompletableFuture.java:577)
>       at 
> java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
>       at 
> java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1962)
>       at 
> org.apache.flink.client.program.rest.RestClusterClient.lambda$pollResourceAsync$21(RestClusterClient.java:565)
>       at 
> java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:760)
>       at 
> java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:736)
>       at 
> java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
>       at 
> java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1962)
>       at 
> org.apache.flink.runtime.concurrent.FutureUtils.lambda$retryOperationWithDelay$8(FutureUtils.java:291)
>       at 
> java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:760)
>       at 
> java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:736)
>       at 
> java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
>       at 
> java.util.concurrent.CompletableFuture.postFire(CompletableFuture.java:561)
>       at 
> java.util.concurrent.CompletableFuture$UniCompose.tryFire(CompletableFuture.java:929)
>       at 
> java.util.concurrent.CompletableFuture$Completion.run(CompletableFuture.java:442)
>       at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>       at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>       at java.lang.Thread.run(Thread.java:745)
> Caused by: org.apache.flink.runtime.client.JobExecutionException: Job 
> execution failed.
>       at 
> org.apache.flink.runtime.jobmaster.JobResult.toJobExecutionResult(JobResult.java:147)
>       at 
> org.apache.flink.client.deployment.ClusterClientJobClientAdapter.lambda$null$6(ClusterClientJobClientAdapter.java:110)
>       ... 19 more
> Caused by: org.apache.flink.runtime.JobException: Recovery is suppressed by 
> NoRestartBackoffTimeStrategy
>       at 
> org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:110)
>       at 
> org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:76)
>       at 
> org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:192)
>       at 
> org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:186)
>       at 
> org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:180)
>       at 
> org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:496)
>       at 
> org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:380)
>       at sun.reflect.GeneratedMethodAccessor47.invoke(Unknown Source)
>       at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:498)
>       at 
> org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:284)
>       at 
> org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:199)
>       at 
> org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:74)
>       at 
> org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:152)
>       at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:26)
>       at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:21)
>       at scala.PartialFunction.applyOrElse(PartialFunction.scala:123)
>       at scala.PartialFunction.applyOrElse$(PartialFunction.scala:122)
>       at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:21)
>       at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
>       at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172)
>       at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172)
>       at akka.actor.Actor.aroundReceive(Actor.scala:517)
>       at akka.actor.Actor.aroundReceive$(Actor.scala:515)
>       at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:225)
>       at akka.actor.ActorCell.receiveMessage(ActorCell.scala:592)
>       at akka.actor.ActorCell.invoke(ActorCell.scala:561)
>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:258)
>       at akka.dispatch.Mailbox.run(Mailbox.scala:225)
>       at akka.dispatch.Mailbox.exec(Mailbox.scala:235)
>       at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>       at 
> akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>       at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>       at 
> akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> Caused by: java.lang.NoSuchMethodError: 
> scala.Predef$.refArrayOps([Ljava/lang/Object;)Lscala/collection/mutable/ArrayOps;
>       at org.myorg.quickstart.BatchJob$$anonfun$1.apply(BatchJob.scala:14)
>       at org.myorg.quickstart.BatchJob$$anonfun$1.apply(BatchJob.scala:14)
>       at org.apache.flink.api.scala.DataSet$$anon$5.flatMap(DataSet.scala:609)
>       at 
> org.apache.flink.runtime.operators.chaining.ChainedFlatMapDriver.collect(ChainedFlatMapDriver.java:80)
>       at 
> org.apache.flink.runtime.operators.util.metrics.CountingCollector.collect(CountingCollector.java:35)
>       at 
> org.apache.flink.runtime.operators.DataSourceTask.invoke(DataSourceTask.java:196)
>       at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:708)
>       at org.apache.flink.runtime.taskmanager.Task.run(Task.java:533)
>       at java.lang.Thread.run(Thread.java:745)
> 
> ————————————————————————————————————————————————————————————————————
> 
> 
> I would be very glad if someone can help me out with this issue.
> 
> Best,
> Tom

Reply via email to