Was there any error prior to 'LifecycleExecutionException'?
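If nothing else shows up in the log, running the build with Maven's error/debug flags, for example

    mvn -e -X clean compile

should print the forked scalac invocation and its output just before that stack trace; that is usually where the real compile error is. (Just a suggestion for surfacing it; use whatever goals you normally run.)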
On Fri, Sep 30, 2016 at 2:43 PM, satyajit vegesna <satyajit.apas...@gmail.com> wrote:

>> I am trying to compile code using Maven. It was working with Spark 1.6.2,
>> but when I try Spark 2.0.0 I get the error below:
>>
>> org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal
>> net.alchim31.maven:scala-maven-plugin:3.2.2:compile (default) on project
>> NginxLoads-repartition: wrap: org.apache.commons.exec.ExecuteException:
>> Process exited with an error: 1 (Exit value: 1)
>>     at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:212)
>>     at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:153)
>>     at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:145)
>>     at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:116)
>>     at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:80)
>>     at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build(SingleThreadedBuilder.java:51)
>>     at org.apache.maven.lifecycle.internal.LifecycleStarter.execute(LifecycleStarter.java:128)
>>     at org.apache.maven.DefaultMaven.doExecute(DefaultMaven.java:307)
>>     at org.apache.maven.DefaultMaven.doExecute(DefaultMaven.java:193)
>>     at org.apache.maven.DefaultMaven.execute(DefaultMaven.java:106)
>>     at org.apache.maven.cli.MavenCli.execute(MavenCli.java:863)
>>     at org.apache.maven.cli.MavenCli.doMain(MavenCli.java:288)
>>     at org.apache.maven.cli.MavenCli.main(MavenCli.java:199)
>>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>>     at java.lang.reflect.Method.invoke(Method.java:606)
>>     at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced(Launcher.java:289)
>>     at org.codehaus.plexus.classworlds.launcher.Launcher.launch(Launcher.java:229)
>>     at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode(Launcher.java:415)
>>     at org.codehaus.plexus.classworlds.launcher.Launcher.main(Launcher.java:356)
>> Caused by: org.apache.maven.plugin.MojoExecutionException: wrap:
>> org.apache.commons.exec.ExecuteException: Process exited with an error:
>> 1 (Exit value: 1)
>>     at scala_maven.ScalaMojoSupport.execute(ScalaMojoSupport.java:490)
>>     at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo(DefaultBuildPluginManager.java:134)
>>     at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:207)
>>     ... 20 more
>> Caused by: org.apache.commons.exec.ExecuteException: Process exited with
>> an error: 1 (Exit value: 1)
>>     at org.apache.commons.exec.DefaultExecutor.executeInternal(DefaultExecutor.java:377)
>>     at org.apache.commons.exec.DefaultExecutor.execute(DefaultExecutor.java:160)
>>     at org.apache.commons.exec.DefaultExecutor.execute(DefaultExecutor.java:147)
>>     at scala_maven_executions.JavaMainCallerByFork.run(JavaMainCallerByFork.java:100)
>>     at scala_maven.ScalaCompilerSupport.compile(ScalaCompilerSupport.java:161)
>>     at scala_maven.ScalaCompilerSupport.doExecute(ScalaCompilerSupport.java:99)
>>     at scala_maven.ScalaMojoSupport.execute(ScalaMojoSupport.java:482)
>>     ... 22 more
>>
>> PFB the pom.xml I am using; any help would be appreciated.
>> <?xml version="1.0" encoding="UTF-8"?>
>> <project xmlns="http://maven.apache.org/POM/4.0.0"
>>          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
>>          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
>>   <modelVersion>4.0.0</modelVersion>
>>
>>   <groupId>NginxLoads-repartition</groupId>
>>   <artifactId>NginxLoads-repartition</artifactId>
>>   <version>1.1-SNAPSHOT</version>
>>   <name>${project.artifactId}</name>
>>   <description>This is a boilerplate maven project to start using Spark in Scala</description>
>>   <inceptionYear>2010</inceptionYear>
>>
>>   <properties>
>>     <maven.compiler.source>1.6</maven.compiler.source>
>>     <maven.compiler.target>1.6</maven.compiler.target>
>>     <encoding>UTF-8</encoding>
>>     <scala.tools.version>2.11</scala.tools.version>
>>     <scalaCompatVersion>2.11</scalaCompatVersion>
>>     <!-- Put the Scala version of the cluster -->
>>     <scala.version>2.11.8</scala.version>
>>   </properties>
>>
>>   <!-- repository to add org.apache.spark -->
>>   <repositories>
>>     <repository>
>>       <id>cloudera-repo-releases</id>
>>       <url>https://repository.cloudera.com/artifactory/repo/</url>
>>     </repository>
>>   </repositories>
>>
>>   <build>
>>     <sourceDirectory>src/main/scala</sourceDirectory>
>>     <testSourceDirectory>src/test/scala</testSourceDirectory>
>>     <plugins>
>>       <!-- any other plugins -->
>>       <plugin>
>>         <artifactId>maven-assembly-plugin</artifactId>
>>         <executions>
>>           <execution>
>>             <phase>package</phase>
>>             <goals>
>>               <goal>single</goal>
>>             </goals>
>>           </execution>
>>         </executions>
>>         <configuration>
>>           <descriptorRefs>
>>             <descriptorRef>jar-with-dependencies</descriptorRef>
>>           </descriptorRefs>
>>         </configuration>
>>       </plugin>
>>       <plugin>
>>         <groupId>org.apache.maven.plugins</groupId>
>>         <artifactId>maven-compiler-plugin</artifactId>
>>         <version>3.5.1</version>
>>         <configuration>
>>           <source>1.7</source>
>>           <target>1.7</target>
>>         </configuration>
>>       </plugin>
>>       <plugin>
>>         <!-- see http://davidb.github.com/scala-maven-plugin -->
>>         <groupId>net.alchim31.maven</groupId>
>>         <artifactId>scala-maven-plugin</artifactId>
>>         <version>3.2.2</version>
>>         <configuration>
>>           <!--<recompileMode>incremental</recompileMode>-->
>>           <!--<useZincServer>true</useZincServer>-->
>>         </configuration>
>>         <executions>
>>           <execution>
>>             <goals>
>>               <goal>compile</goal>
>>               <goal>testCompile</goal>
>>             </goals>
>>             <configuration>
>>               <args>
>>                 <arg>-make:transitive</arg>
>>                 <arg>dependencyfile</arg>
>>                 <arg>${project.build.directory}/.scala_dependencies</arg>
>>               </args>
>>             </configuration>
>>           </execution>
>>         </executions>
>>       </plugin>
>>
>>       <plugin>
>>         <artifactId>maven-antrun-plugin</artifactId>
>>         <executions>
>>           <execution>
>>             <phase>deploy</phase>
>>             <configuration>
>>               <!-- <tasks>
>>                 <resources>
>>                   <resource>
>>                     <directory>src/main/resources</directory>
>>                     <includes>
>>                       <include>regexes.yaml</include>
>>                       <include>patterns.txt</include>
>>                     </includes>
>>                   </resource>
>>                 </resources>
>>               </tasks>-->
>>             </configuration>
>>             <goals>
>>               <goal>run</goal>
>>             </goals>
>>           </execution>
>>         </executions>
>>       </plugin>
>>
>>       <!-- "package" command plugin -->
>>       <plugin>
>>         <artifactId>maven-assembly-plugin</artifactId>
>>         <version>2.4.1</version>
>>         <configuration>
>>           <descriptorRefs>
>>             <descriptorRef>jar-with-dependencies</descriptorRef>
>>           </descriptorRefs>
>>         </configuration>
>>         <executions>
>>           <execution>
>>             <id>make-assembly</id>
>>             <phase>package</phase>
>>             <goals>
>>               <goal>single</goal>
>>             </goals>
>>           </execution>
>>         </executions>
>>       </plugin>
>>     </plugins>
>>     <!-- <resources>
>>       <resource>
>>         <directory>src/main/resources</directory>
>>         <includes>
>>           <include>regexes.yaml</include>
>>           <include>patterns.txt</include>
>>         </includes>
>>       </resource>
>>     </resources>-->
>>   </build>
>>   <pluginRepositories>
>>     <pluginRepository>
>>       <id>scala-tools.org</id>
>>       <name>Scala-tools Maven2 Repository</name>
>>       <url>http://scala-tools.org/repo-releases</url>
>>     </pluginRepository>
>>   </pluginRepositories>
>>   <dependencies>
>>     <dependency>
>>       <groupId>org.scala-lang</groupId>
>>       <artifactId>scala-library</artifactId>
>>       <version>${scala.version}</version>
>>     </dependency>
>>     <dependency>
>>       <groupId>org.apache.commons</groupId>
>>       <artifactId>commons-csv</artifactId>
>>       <version>1.2</version>
>>     </dependency>
>>     <dependency>
>>       <groupId>org.apache.spark</groupId>
>>       <artifactId>spark-streaming_2.11</artifactId>
>>       <version>2.0.0</version>
>>     </dependency>
>>     <dependency>
>>       <groupId>org.apache.spark</groupId>
>>       <artifactId>spark-core_2.11</artifactId>
>>       <version>2.0.0</version>
>>     </dependency>
>>     <dependency>
>>       <groupId>org.apache.spark</groupId>
>>       <artifactId>spark-sql_2.11</artifactId>
>>       <version>2.0.0</version>
>>     </dependency>
>>     <dependency>
>>       <groupId>org.apache.hadoop</groupId>
>>       <artifactId>hadoop-hdfs</artifactId>
>>       <version>2.6.0</version>
>>     </dependency>
>>     <dependency>
>>       <groupId>org.apache.hadoop</groupId>
>>       <artifactId>hadoop-auth</artifactId>
>>       <version>2.6.0</version>
>>     </dependency>
>>     <dependency>
>>       <groupId>org.apache.hadoop</groupId>
>>       <artifactId>hadoop-common</artifactId>
>>       <version>2.6.0</version>
>>     </dependency>
>>     <dependency>
>>       <groupId>org.apache.hadoop</groupId>
>>       <artifactId>hadoop-yarn-api</artifactId>
>>       <version>2.7.2</version>
>>     </dependency>
>>     <dependency>
>>       <groupId>org.apache.hadoop</groupId>
>>       <artifactId>hadoop-core</artifactId>
>>       <version>1.2.1</version>
>>     </dependency>
>>     <dependency>
>>       <groupId>org.yaml</groupId>
>>       <artifactId>snakeyaml</artifactId>
>>       <version>1.17</version>
>>     </dependency>
>>     <dependency>
>>       <groupId>com.twitter</groupId>
>>       <artifactId>util-collection_2.10</artifactId>
>>       <version>6.23.0</version>
>>     </dependency>
>>     <dependency>
>>       <groupId>org.specs2</groupId>
>>       <artifactId>specs2-core_2.10</artifactId>
>>       <version>2.4.15</version>
>>     </dependency>
>>     <dependency>
>>       <groupId>org.apache.hive</groupId>
>>       <artifactId>hive-jdbc</artifactId>
>>       <version>1.2.1</version>
>>     </dependency>
>>     <dependency>
>>       <groupId>io.thekraken</groupId>
>>       <artifactId>grok</artifactId>
>>       <version>0.1.3</version>
>>     </dependency>
>>   </dependencies>
>>
>> </project>
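One thing that stands out in the scala-maven-plugin section, though I can't confirm it from the log above, so treat it as a guess: the <args> block passes -make:transitive, which the Scala 2.11 compiler no longer accepts, and dependencyfile is missing its leading dash, so scalac would see it as a stray argument. Either of those can make the forked compiler exit with status 1 before Maven reports anything more specific. If that turns out to be the cause, an args block along these lines should get past it (-make has no 2.11 replacement and can simply be dropped):

    <args>
      <arg>-dependencyfile</arg>
      <arg>${project.build.directory}/.scala_dependencies</arg>
    </args>

If the -e/-X output shows a different scalac error instead, please post that part of the log as well.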