Hello,
I am trying to debug a PySpark program and quite frankly, I am stumped.
I see the following error in the logs. I verified the input parameters — all 
appear to be in order. The driver and executors also appear to be healthy — only 
about 3MB of the 7GB heap is in use on each node.
I do see that the DAG plan being created is huge. Could the huge DAG plan be the 
cause of this error?
Thanks!
Vinay

18/02/17 00:59:02 ERROR Utils: throw uncaught fatal error in thread 
SparkListenerBus
java.lang.OutOfMemoryError: Java heap space
        at java.util.Arrays.copyOfRange(Arrays.java:3664)
        at java.lang.String.<init>(String.java:207)
        at java.lang.StringBuilder.toString(StringBuilder.java:407)
        at 
com.fasterxml.jackson.core.util.TextBuffer.contentsAsString(TextBuffer.java:356)
        at 
com.fasterxml.jackson.core.json.ReaderBasedJsonParser.getText(ReaderBasedJsonParser.java:235)
        at 
org.json4s.jackson.JValueDeserializer.deserialize(JValueDeserializer.scala:20)
        at 
org.json4s.jackson.JValueDeserializer.deserialize(JValueDeserializer.scala:42)
        at 
org.json4s.jackson.JValueDeserializer.deserialize(JValueDeserializer.scala:35)
        at 
com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:3736)
        at 
com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2726)
        at org.json4s.jackson.JsonMethods$class.parse(JsonMethods.scala:20)
        at org.json4s.jackson.JsonMethods$.parse(JsonMethods.scala:50)
        at 
org.apache.spark.util.JsonProtocol$.sparkEventToJson(JsonProtocol.scala:103)
        at 
org.apache.spark.scheduler.EventLoggingListener.logEvent(EventLoggingListener.scala:134)
        at 
org.apache.spark.scheduler.EventLoggingListener.onOtherEvent(EventLoggingListener.scala:202)
        at 
org.apache.spark.scheduler.SparkListenerBus$class.doPostEvent(SparkListenerBus.scala:67)
        at 
org.apache.spark.scheduler.LiveListenerBus.doPostEvent(LiveListenerBus.scala:36)
        at 
org.apache.spark.scheduler.LiveListenerBus.doPostEvent(LiveListenerBus.scala:36)
        at 
org.apache.spark.util.ListenerBus$class.postToAll(ListenerBus.scala:63)
        at 
org.apache.spark.scheduler.LiveListenerBus.postToAll(LiveListenerBus.scala:36)
        at 
org.apache.spark.scheduler.LiveListenerBus$$anon$1$$anonfun$run$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(LiveListenerBus.scala:94)
        at 
org.apache.spark.scheduler.LiveListenerBus$$anon$1$$anonfun$run$1$$anonfun$apply$mcV$sp$1.apply(LiveListenerBus.scala:79)
        at 
org.apache.spark.scheduler.LiveListenerBus$$anon$1$$anonfun$run$1$$anonfun$apply$mcV$sp$1.apply(LiveListenerBus.scala:79)
        at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
        at 
org.apache.spark.scheduler.LiveListenerBus$$anon$1$$anonfun$run$1.apply$mcV$sp(LiveListenerBus.scala:78)
        at org.apache.spark.util.Utils$.tryOrStopSparkContext(Utils.scala:1245)
        at 
org.apache.spark.scheduler.LiveListenerBus$$anon$1.run(LiveListenerBus.scala:77)
Exception in thread "SparkListenerBus" java.lang.OutOfMemoryError: Java heap 
space
        at java.util.Arrays.copyOfRange(Arrays.java:3664)
        at java.lang.String.<init>(String.java:207)
        at java.lang.StringBuilder.toString(StringBuilder.java:407)
        at 
com.fasterxml.jackson.core.util.TextBuffer.contentsAsString(TextBuffer.java:356)
        at 
com.fasterxml.jackson.core.json.ReaderBasedJsonParser.getText(ReaderBasedJsonParser.java:235)
        at 
org.json4s.jackson.JValueDeserializer.deserialize(JValueDeserializer.scala:20)
        at 
org.json4s.jackson.JValueDeserializer.deserialize(JValueDeserializer.scala:42)
        at 
org.json4s.jackson.JValueDeserializer.deserialize(JValueDeserializer.scala:35)
        at 
com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:3736)
        at 
com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2726)
        at org.json4s.jackson.JsonMethods$class.parse(JsonMethods.scala:20)
        at org.json4s.jackson.JsonMethods$.parse(JsonMethods.scala:50)
        at 
org.apache.spark.util.JsonProtocol$.sparkEventToJson(JsonProtocol.scala:103)
        at 
org.apache.spark.scheduler.EventLoggingListener.logEvent(EventLoggingListener.scala:134)
        at 
org.apache.spark.scheduler.EventLoggingListener.onOtherEvent(EventLoggingListener.scala:202)
        at 
org.apache.spark.scheduler.SparkListenerBus$class.doPostEvent(SparkListenerBus.scala:67)
        at 
org.apache.spark.scheduler.LiveListenerBus.doPostEvent(LiveListenerBus.scala:36)
        at 
org.apache.spark.scheduler.LiveListenerBus.doPostEvent(LiveListenerBus.scala:36)
        at 
org.apache.spark.util.ListenerBus$class.postToAll(ListenerBus.scala:63)
        at 
org.apache.spark.scheduler.LiveListenerBus.postToAll(LiveListenerBus.scala:36)
        at 
org.apache.spark.scheduler.LiveListenerBus$$anon$1$$anonfun$run$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(LiveListenerBus.scala:94)
        at 
org.apache.spark.scheduler.LiveListenerBus$$anon$1$$anonfun$run$1$$anonfun$apply$mcV$sp$1.apply(LiveListenerBus.scala:79)
        at 
org.apache.spark.scheduler.LiveListenerBus$$anon$1$$anonfun$run$1$$anonfun$apply$mcV$sp$1.apply(LiveListenerBus.scala:79)
        at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
        at 
org.apache.spark.scheduler.LiveListenerBus$$anon$1$$anonfun$run$1.apply$mcV$sp(LiveListenerBus.scala:78)
        at org.apache.spark.util.Utils$.tryOrStopSparkContext(Utils.scala:1245)
        at 
org.apache.spark.scheduler.LiveListenerBus$$anon$1.run(LiveListenerBus.scala:77)

Reply via email to