intln("Usage: JavaWordCount ");
> System.exit(1);
> }
>
> //boiler plate needed to run locally
> SparkConf conf = new SparkConf().setAppName("Word Count
> Application").setMaster("local[*]");
&g
>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
&
uot;).setMaster("local[*]");
> JavaSparkContext sc = new JavaSparkContext(conf);
>
> SparkSession spark = SparkSession
> .builder()
> .appName("Word Count")
> .getOrCreate()
>
t;>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>> public final class JavaWordCount {
>> private static final Pattern SPACE = Pattern.compile(" ");
>>
>> public static void main(String[] args) throws Exception {
>>
>> if (args.length < 1) {
>> System.err.println("Usage: JavaWordCount ");
>> System.exit(1);
>> }
>>
>> //boiler plate needed to run locally
>> SparkConf conf = new SparkConf().setAppName("Word Count
>> Application").setMaster("local[*]");
>> JavaSparkContext sc = new JavaSparkContext(conf);
>>
>> SparkSession spark = SparkSession
>> .builder()
>> .appName("Word Count")
>> .getOrCreate()
>> .newSession();
>>
>>
>> JavaRDD lines = spark.read().textFile(args[0]).javaRDD();
>>
>>
>> JavaRDD words = lines.flatMap(new FlatMapFunction> String>() {
>> @Override
>> public Iterator call(String s) {
>> return Arrays.asList(SPACE.split(s)).iterator();
>> }
>> });
>>
>> JavaPairRDD ones = words.mapToPair(
>> new PairFunction() {
>> @Override
>> public Tuple2 call(String s) {
>> return new Tuple2<>(s, 1);
>> }
>> });
>>
>> JavaPairRDD counts = ones.reduceByKey(
>> new Function2() {
>> @Override
>> public Integer call(Integer i1, Integer i2) {
>> return i1 + i2;
>> }
>> });
>>
>> List> output = counts.collect();
>> for (Tuple2 tuple : output) {
>> System.out.println(tuple._1() + ": " + tuple._2());
>> }
>> spark.stop();
>> }
>> }
>>
>>
>>
>>
>> --
>> View this message in context: http://apache-spark-user-list.1001560.n3.nabble.com/Running-spark-from-Eclipse-and-then-Jar-tp28182.html
>> Sent from the Apache Spark User List mailing list archive at Nabble.com.
>>
>> -
>> To unsubscribe e-mail: user-unsubscribe@spark.apache.org
>>
>>
>>
ication").setMaster("local[*]");
> JavaSparkContext sc = new JavaSparkContext(conf);
>
> SparkSession spark = SparkSession
> .builder()
> .appName("Word Count")
> .getOrCrea
vaRDD words = lines.flatMap(new FlatMapFunction String>() {
> @Override
> public Iterator call(String s) {
> return Arrays.asList(SPACE.split(s)).iterator();
> }
> });
>
> JavaPairRDD ones = words.mapToPair(
> new PairFuncti
DD lines = spark.read().textFile(args[0]).javaRDD();
>
>
>JavaRDD words = lines.flatMap(new FlatMapFunction String>() {
> @Override
> public Iterator call(String s) {
>return Arrays.asList(SPACE.split(s)).iterator();
> }
ByKey(
new Function2() {
@Override
public Integer call(Integer i1, Integer i2) {
return i1 + i2;
}
});
List> output = counts.collect();
for (Tuple2 tuple : output) {
System.out.println(tuple._1() + ": " + tuple._2());
}
spark.stop();
}
}