Hi All, figured it out. I had set the master to local instead of local[2]; Spark Streaming needs at least two threads (one for the receiver and one to process the batches), so with a single local thread no output ever appears.
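In case it helps anyone else, the only change needed was the master URL. A minimal sketch of just that line (the full job follows below):

  // Wrong: the single local thread is consumed by the Kafka receiver,
  // so no thread is left to process batches and nothing is printed
  val conf = new SparkConf().setMaster("local").setAppName("KafkaStreaming")

  // Right: at least two threads, one for the receiver and one for processing
  val conf = new SparkConf().setMaster("local[2]").setAppName("KafkaStreaming")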
package com.examples

/**
 * Created by kalit_000 on 19/09/2015.
 */

import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Minutes, Seconds, StreamingContext}
import org.apache.spark.streaming.kafka.KafkaUtils

object SparkStreamingKafka {

  def main(args: Array[String]): Unit = {

    // Quiet the framework logging so the streaming output is easy to see
    Logger.getLogger("org").setLevel(Level.WARN)
    Logger.getLogger("akka").setLevel(Level.WARN)

    // local[2]: at least two threads are required, one for the Kafka
    // receiver and one to process the received batches
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("KafkaStreaming")
      .set("spark.executor.memory", "1g")
    val sc  = new SparkContext(conf)
    val ssc = new StreamingContext(sc, Seconds(2))

    val zkQuorum   = "localhost:2181"
    val group      = "test-group"
    val topics     = "first"
    val numThreads = 1
    val topicMap   = topics.split(",").map((_, numThreads)).toMap

    // The receiver yields (key, message) pairs; keep only the message
    val lineMap = KafkaUtils.createStream(ssc, zkQuorum, group, topicMap)
    val lines   = lineMap.map(_._2)
    lines.print()

    //val words     = lines.flatMap(_.split(" "))
    //val pair      = words.map(x => (x, 1))
    //val wordcount = pair.reduceByKeyAndWindow(_ + _, _ - _, Minutes(1), Seconds(2), 2)
    //wordcount.print()

    //ssc.checkpoint("hdfs://localhost:9000/user/hduser/checkpoint")
    ssc.checkpoint("C:\\scalatutorials\\sparkstreaming_checkpoint_folder")

    ssc.start()
    ssc.awaitTermination()
  }
}

Thanks
Sri
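To verify the stream, you can publish a few messages to the "first" topic and watch them show up in the driver console every two seconds, e.g. with the console producer that ships with Kafka (assuming a single local broker on the default port 9092):

  bin/kafka-console-producer.sh --broker-list localhost:9092 --topic first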