I've reduced the code to the snippet below: no streaming, no Kafka, no
checkpoint. Unfortunately the end result is the same, "broadcastVar is
null" is printed in the worker log. Any suggestion on what I'm missing
would be very much appreciated!


import org.slf4j.LoggerFactory
import org.apache.spark.{SparkConf, SparkContext}

object BroadcastTest extends App {
  val logger = LoggerFactory.getLogger("OinkSparkMain")
  logger.info("OinkSparkMain - Setup Logger")

  val sparkConf = new SparkConf().setAppName("OinkSparkMain")
  val sc: SparkContext = new SparkContext(sparkConf)

  val rdd = sc.parallelize(Array(1, 2, 3))

  // Broadcast a small array and read it back inside the map closure
  val arr = Array(1, 2, 3)
  val broadcastVar = sc.broadcast(arr)

  val mappedEvents = rdd.map { e =>
    // Logger is obtained inside the closure so it is created on the worker
    val l = LoggerFactory.getLogger("OinkSparkMain1")

    if (broadcastVar == null) {
      l.info("broadcastVar is null")
      (e, "empty")
    } else {
      val str = broadcastVar.value.mkString(" | ")
      l.info("broadcastVar is " + str)
      (e, str)
    }
  }

  logger.info("****** Total reduced count: " + mappedEvents.collect().length)
}
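For comparison, here is a minimal sketch of the same logic using an explicit
main method instead of extends App, with the broadcast created as a local val
rather than an object field. The object name BroadcastTestMain is just
something I made up for this sketch, and I have not confirmed it behaves any
differently; it only takes the App trait's delayed initialization out of the
picture:

import org.slf4j.LoggerFactory
import org.apache.spark.{SparkConf, SparkContext}

object BroadcastTestMain {
  def main(args: Array[String]): Unit = {
    val logger = LoggerFactory.getLogger("OinkSparkMain")

    val sc = new SparkContext(new SparkConf().setAppName("OinkSparkMain"))

    // Local val: the closure captures the Broadcast handle directly
    // instead of reaching it through the enclosing object
    val broadcastVar = sc.broadcast(Array(1, 2, 3))

    val mappedEvents = sc.parallelize(Array(1, 2, 3)).map { e =>
      val l = LoggerFactory.getLogger("OinkSparkMain1")
      if (broadcastVar == null) {
        l.info("broadcastVar is null")
        (e, "empty")
      } else {
        val str = broadcastVar.value.mkString(" | ")
        l.info("broadcastVar is " + str)
        (e, str)
      }
    }

    logger.info("****** Total reduced count: " + mappedEvents.collect().length)
  }
}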