nirav patel created ZEPPELIN-1380:
-------------------------------------

             Summary: Inconsistent error reporting on "Zeppelin Tutorial" notebook
                 Key: ZEPPELIN-1380
                 URL: https://issues.apache.org/jira/browse/ZEPPELIN-1380
             Project: Zeppelin
          Issue Type: Bug
    Affects Versions: 0.6.1
            Reporter: nirav patel
I built Zeppelin using the following:

    mvn clean -Pmapr41 -Pyarn -Pbuild-distr -Pspark-1.5 -Phadoop-2.6 -Ppyspark package -DskipTests -B

The Tutorial notebook is failing with the errors below. Besides this, Zeppelin also reports errors inconsistently in the UI: as you can see from the screenshot, the stack trace is shown for only one paragraph (the last one) instead of appearing for every paragraph. (A way to check for conflicting Netty versions is sketched after the logs below.)

ERROR [2016-08-26 14:06:32,166] ({sparkDriver-akka.actor.default-dispatcher-5} Slf4jLogger.scala[apply$mcV$sp]:66) - Uncaught fatal error from thread [sparkDriver-akka.remote.default-remote-dispatcher-6] shutting down ActorSystem [sparkDriver]
java.lang.VerifyError: (class: org/jboss/netty/channel/socket/nio/NioWorkerPool, method: createWorker signature: (Ljava/util/concurrent/Executor;)Lorg/jboss/netty/channel/socket/nio/AbstractNioWorker;) Wrong return type in function
    at akka.remote.transport.netty.NettyTransport.<init>(NettyTransport.scala:283)
    at akka.remote.transport.netty.NettyTransport.<init>(NettyTransport.scala:240)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
    at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
    at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
    at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
    at scala.util.Success.flatMap(Try.scala:200)
    at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:84)
    at akka.remote.EndpointManager$$anonfun$9.apply(Remoting.scala:711)
    at akka.remote.EndpointManager$$anonfun$9.apply(Remoting.scala:703)
    at scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:722)
    at scala.collection.Iterator$class.foreach(Iterator.scala:727)
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
    at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
    at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
    at scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:721)
    at akka.remote.EndpointManager.akka$remote$EndpointManager$$listens(Remoting.scala:703)
    at akka.remote.EndpointManager$$anonfun$receive$2.applyOrElse(Remoting.scala:491)
    at akka.actor.Actor$class.aroundReceive(Actor.scala:467)
    at akka.remote.EndpointManager.aroundReceive(Remoting.scala:394)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:516)
    at akka.actor.ActorCell.invoke(ActorCell.scala:487)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:238)
    at akka.dispatch.Mailbox.run(Mailbox.scala:220)
    at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:397)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
INFO [2016-08-26 14:06:32,167] ({sparkDriver-akka.actor.default-dispatcher-5} Slf4jLogger.scala[apply$mcV$sp]:74) - Shutting down remote daemon.
INFO [2016-08-26 14:06:32,168] ({sparkDriver-akka.actor.default-dispatcher-5} Slf4jLogger.scala[apply$mcV$sp]:74) - Remote daemon shut down; proceeding with flushing remote transports.
ERROR [2016-08-26 14:06:32,168] ({sparkDriver-akka.actor.default-dispatcher-5} Slf4jLogger.scala[apply$mcV$sp]:65) - Remoting system has been terminated abrubtly. Attempting to shut down transports
ERROR [2016-08-26 14:06:42,165] ({pool-1-thread-6} Logging.scala[logError]:96) - Error initializing SparkContext.
java.util.concurrent.TimeoutException: Futures timed out after [10000 milliseconds]
    at scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:219)
    at scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:223)
    at scala.concurrent.Await$$anonfun$result$1.apply(package.scala:107)
    at scala.concurrent.BlockContext$DefaultBlockContext$.blockOn(BlockContext.scala:53)
    at scala.concurrent.Await$.result(package.scala:107)
    at akka.remote.Remoting.start(Remoting.scala:179)
    at akka.remote.RemoteActorRefProvider.init(RemoteActorRefProvider.scala:184)
    at akka.actor.ActorSystemImpl.liftedTree2$1(ActorSystem.scala:620)
    at akka.actor.ActorSystemImpl._start$lzycompute(ActorSystem.scala:617)
    at akka.actor.ActorSystemImpl._start(ActorSystem.scala:617)
    at akka.actor.ActorSystemImpl.start(ActorSystem.scala:634)
    at akka.actor.ActorSystem$.apply(ActorSystem.scala:142)
    at akka.actor.ActorSystem$.apply(ActorSystem.scala:119)
    at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
    at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
    at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:52)
    at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
    at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
    at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:55)
    at org.apache.spark.rpc.akka.AkkaRpcEnvFactory.create(AkkaRpcEnv.scala:253)
    at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:53)
    at org.apache.spark.SparkEnv$.create(SparkEnv.scala:254)
    at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:194)
    at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:277)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:450)
    at org.apache.zeppelin.spark.SparkInterpreter.createSparkContext_1(SparkInterpreter.java:442)
    at org.apache.zeppelin.spark.SparkInterpreter.createSparkContext(SparkInterpreter.java:356)
    at org.apache.zeppelin.spark.SparkInterpreter.getSparkContext(SparkInterpreter.java:139)
    at org.apache.zeppelin.spark.SparkInterpreter.open(SparkInterpreter.java:745)
    at org.apache.zeppelin.interpreter.LazyOpenInterpreter.open(LazyOpenInterpreter.java:69)
    at org.apache.zeppelin.spark.SparkSqlInterpreter.getSparkInterpreter(SparkSqlInterpreter.java:77)
    at org.apache.zeppelin.spark.SparkSqlInterpreter.getProgress(SparkSqlInterpreter.java:150)
    at org.apache.zeppelin.interpreter.LazyOpenInterpreter.getProgress(LazyOpenInterpreter.java:111)
    at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer.getProgress(RemoteInterpreterServer.java:447)
    at org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService$Processor$getProgress.getResult(RemoteInterpreterService.java:1701)
    at org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService$Processor$getProgress.getResult(RemoteInterpreterService.java:1686)
    at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
    at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
    at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:285)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:744)
INFO [2016-08-26 14:06:42,165] ({pool-1-thread-6} Logging.scala[logInfo]:59) - Successfully stopped SparkContext
ERROR [2016-08-26 14:06:42,166] ({pool-1-thread-6} TThreadPoolServer.java[run]:296) - Error occurred during processing of message.
java.util.concurrent.TimeoutException: Futures timed out after [10000 milliseconds]
    at scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:219)
    at scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:223)
    at scala.concurrent.Await$$anonfun$result$1.apply(package.scala:107)
    at scala.concurrent.BlockContext$DefaultBlockContext$.blockOn(BlockContext.scala:53)
    at scala.concurrent.Await$.result(package.scala:107)
    at akka.remote.Remoting.start(Remoting.scala:179)
    at akka.remote.RemoteActorRefProvider.init(RemoteActorRefProvider.scala:184)
    at akka.actor.ActorSystemImpl.liftedTree2$1(ActorSystem.scala:620)
    at akka.actor.ActorSystemImpl._start$lzycompute(ActorSystem.scala:617)
    at akka.actor.ActorSystemImpl._start(ActorSystem.scala:617)
    at akka.actor.ActorSystemImpl.start(ActorSystem.scala:634)
    at akka.actor.ActorSystem$.apply(ActorSystem.scala:142)
    at akka.actor.ActorSystem$.apply(ActorSystem.scala:119)
    at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
    at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
    at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:52)
    at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
    at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
    at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:55)
    at org.apache.spark.rpc.akka.AkkaRpcEnvFactory.create(AkkaRpcEnv.scala:253)
    at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:53)
    at org.apache.spark.SparkEnv$.create(SparkEnv.scala:254)
    at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:194)
    at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:277)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:450)
    at org.apache.zeppelin.spark.SparkInterpreter.createSparkContext_1(SparkInterpreter.java:442)
    at org.apache.zeppelin.spark.SparkInterpreter.createSparkContext(SparkInterpreter.java:356)
    at org.apache.zeppelin.spark.SparkInterpreter.getSparkContext(SparkInterpreter.java:139)
    at org.apache.zeppelin.spark.SparkInterpreter.open(SparkInterpreter.java:745)
    at org.apache.zeppelin.interpreter.LazyOpenInterpreter.open(LazyOpenInterpreter.java:69)
    at org.apache.zeppelin.spark.SparkSqlInterpreter.getSparkInterpreter(SparkSqlInterpreter.java:77)
    at org.apache.zeppelin.spark.SparkSqlInterpreter.getProgress(SparkSqlInterpreter.java:150)
    at org.apache.zeppelin.interpreter.LazyOpenInterpreter.getProgress(LazyOpenInterpreter.java:111)
    at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer.getProgress(RemoteInterpreterServer.java:447)
    at org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService$Processor$getProgress.getResult(RemoteInterpreterService.java:1701)
    at org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService$Processor$getProgress.getResult(RemoteInterpreterService.java:1686)
    at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
    at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
    at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:285)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:744)
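
The java.lang.VerifyError on org/jboss/netty/channel/socket/nio/NioWorkerPool typically appears when two incompatible Netty 3.x jars end up on the Spark interpreter classpath (for example one pulled in by the mapr41/hadoop-2.6 profiles and the one Akka was compiled against). As a minimal sketch, and assuming the same checkout and profiles as the build command above, the Maven dependency tree shows which Netty versions the build resolves (Netty 3.x has been published under both the org.jboss.netty and io.netty group IDs, so both are included in the filter):

    mvn dependency:tree -Dincludes=org.jboss.netty,io.netty \
        -Pmapr41 -Pyarn -Pbuild-distr -Pspark-1.5 -Phadoop-2.6 -Ppyspark

If the output shows more than one Netty 3.x version, aligning them (or excluding the older jar from the dependency that drags it in) is the usual way to clear this class of VerifyError; which MapR/Hadoop artifact brings in the older jar is not shown in the logs above and would need to be confirmed from the tree output.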