[ https://issues.apache.org/jira/browse/FLINK-12493?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
ASF GitHub Bot updated FLINK-12493:
-----------------------------------
    Labels: pull-request-available  (was: )

> AWS EMR instructions lead to ClassNotFoundException
> ---------------------------------------------------
>
>                 Key: FLINK-12493
>                 URL: https://issues.apache.org/jira/browse/FLINK-12493
>             Project: Flink
>          Issue Type: Bug
>          Components: Documentation
>         Environment: AWS EMR 5.19.0
>            Reporter: Alberto Romero
>            Priority: Major
>              Labels: pull-request-available
>
> Running jobs as described in the AWS EMR section
> ([https://ci.apache.org/projects/flink/flink-docs-stable/ops/deployment/aws.html])
> leads to a ClassNotFoundException:
> {code:java}
> $ HADOOP_CONF_DIR=/etc/hadoop/conf ./bin/flink run -m yarn-cluster -yn 1 examples/streaming/WordCount.jar
> 2019-05-12 18:14:15,386 INFO org.apache.flink.yarn.cli.FlinkYarnSessionCli - Found Yarn properties file under /tmp/.yarn-properties-hadoop.
> 2019-05-12 18:14:15,386 INFO org.apache.flink.yarn.cli.FlinkYarnSessionCli - Found Yarn properties file under /tmp/.yarn-properties-hadoop.
> java.lang.NoClassDefFoundError: javax/ws/rs/ext/MessageBodyReader
>     at java.lang.ClassLoader.defineClass1(Native Method)
>     at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
>     at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
>     at java.net.URLClassLoader.defineClass(URLClassLoader.java:468)
>     at java.net.URLClassLoader.access$100(URLClassLoader.java:74)
>     at java.net.URLClassLoader$1.run(URLClassLoader.java:369)
>     at java.net.URLClassLoader$1.run(URLClassLoader.java:363)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at java.net.URLClassLoader.findClass(URLClassLoader.java:362)
>     at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>     at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:349)
>     at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>     at java.lang.ClassLoader.defineClass1(Native Method)
>     at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
>     at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
>     at java.net.URLClassLoader.defineClass(URLClassLoader.java:468)
>     at java.net.URLClassLoader.access$100(URLClassLoader.java:74)
>     at java.net.URLClassLoader$1.run(URLClassLoader.java:369)
>     at java.net.URLClassLoader$1.run(URLClassLoader.java:363)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at java.net.URLClassLoader.findClass(URLClassLoader.java:362)
>     at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>     at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:349)
>     at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>     at java.lang.ClassLoader.defineClass1(Native Method)
>     at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
>     at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
>     at java.net.URLClassLoader.defineClass(URLClassLoader.java:468)
>     at java.net.URLClassLoader.access$100(URLClassLoader.java:74)
>     at java.net.URLClassLoader$1.run(URLClassLoader.java:369)
>     at java.net.URLClassLoader$1.run(URLClassLoader.java:363)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at java.net.URLClassLoader.findClass(URLClassLoader.java:362)
>     at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>     at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:349)
>     at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>     at org.apache.hadoop.yarn.util.timeline.TimelineUtils.<clinit>(TimelineUtils.java:50)
>     at org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.serviceInit(YarnClientImpl.java:179)
>     at org.apache.hadoop.service.AbstractService.init(AbstractService.java:163)
>     at org.apache.flink.yarn.cli.FlinkYarnSessionCli.getClusterDescriptor(FlinkYarnSessionCli.java:1012)
>     at org.apache.flink.yarn.cli.FlinkYarnSessionCli.createDescriptor(FlinkYarnSessionCli.java:274)
>     at org.apache.flink.yarn.cli.FlinkYarnSessionCli.createClusterDescriptor(FlinkYarnSessionCli.java:454)
>     at org.apache.flink.yarn.cli.FlinkYarnSessionCli.createClusterDescriptor(FlinkYarnSessionCli.java:97)
>     at org.apache.flink.client.cli.CliFrontend.runProgram(CliFrontend.java:224)
>     at org.apache.flink.client.cli.CliFrontend.run(CliFrontend.java:213)
>     at org.apache.flink.client.cli.CliFrontend.parseParameters(CliFrontend.java:1050)
>     at org.apache.flink.client.cli.CliFrontend.lambda$main$11(CliFrontend.java:1126)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1836)
>     at org.apache.flink.runtime.security.HadoopSecurityContext.runSecured(HadoopSecurityContext.java:41)
>     at org.apache.flink.client.cli.CliFrontend.main(CliFrontend.java:1126)
> Caused by: java.lang.ClassNotFoundException: javax.ws.rs.ext.MessageBodyReader
>     at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
>     at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>     at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:349)
>     at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>     ... 52 more
> {code}
>
> This is due to the Hadoop classes missing from the classpath, and requires exporting the relevant variable for it to work, i.e.:
> {code:java}
> $ export HADOOP_CLASSPATH=`hadoop classpath`
> $ HADOOP_CONF_DIR=/etc/hadoop/conf ./bin/flink run -m yarn-cluster -yn 1 examples/streaming/WordCount.jar
> ...
> 2019-05-12 18:19:19,607 INFO org.apache.hadoop.yarn.client.api.impl.YarnClientImpl - Submitted application application_1543518955410_85464
> 2019-05-12 18:19:19,608 INFO org.apache.flink.yarn.AbstractYarnClusterDescriptor - Waiting for the cluster to be allocated
> 2019-05-12 18:19:19,611 INFO org.apache.flink.yarn.AbstractYarnClusterDescriptor - Deploying cluster, current state ACCEPTED
> 2019-05-12 18:19:24,130 INFO org.apache.flink.yarn.AbstractYarnClusterDescriptor - YARN application has been deployed successfully.
> Starting execution of program
> Executing WordCount example with default input data set.
> Use --input to specify file input.
> Printing result to stdout. Use --output to specify output path.
> Program execution finished
> Job with JobID xxxxxxx has finished.
> Job Runtime: 9640 ms
> {code}
>
> A reference to this should be added to the documentation, even if it is just a link to:
> [https://ci.apache.org/projects/flink/flink-docs-stable/ops/deployment/hadoop.html]



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)
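For reference, a rough sketch of the note the reporter is asking for on the AWS EMR page could look like the following; the commands are taken from the workaround shown in the description above, while the surrounding comments and wording are only a suggestion:

{code}
# On the EMR master node, put the pre-installed Hadoop jars on the classpath
# before submitting to YARN; without this the YARN client fails with
# NoClassDefFoundError: javax/ws/rs/ext/MessageBodyReader.
export HADOOP_CLASSPATH=`hadoop classpath`

# Then submit the job as usual:
HADOOP_CONF_DIR=/etc/hadoop/conf ./bin/flink run -m yarn-cluster -yn 1 examples/streaming/WordCount.jar
{code}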