package com.tjetc;

import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import scala.Tuple2;

import java.util.Arrays;
import java.util.Iterator;

public class WordCount {

    /**
     * Spark word-count driver: reads a text file, splits each line on single
     * spaces, counts occurrences of each token, and writes the (word, count)
     * pairs as text output.
     *
     * <p>Usage: {@code WordCount <inputPath> [outputPath]}
     * — {@code outputPath} defaults to {@code "resultjava/"} so existing
     * invocations keep working.
     *
     * @param args args[0] = input file/directory path (required);
     *             args[1] = output directory path (optional)
     */
    public static void main(String[] args) {
        if (args.length < 1) {
            System.err.println("Usage: WordCount <inputPath> [outputPath]");
            System.exit(1);
        }
        String inputPath = args[0];
        // Preserve the original hard-coded output dir as the default.
        String outputPath = args.length > 1 ? args[1] : "resultjava/";

        // try-with-resources: JavaSparkContext is AutoCloseable; the original
        // never stopped the context, leaking the Spark application on exit.
        try (JavaSparkContext jsc = new JavaSparkContext("local", "wordcount")) {
            JavaRDD<String> lines = jsc.textFile(inputPath);

            // One element per space-separated token.
            JavaRDD<String> words =
                    lines.flatMap(line -> Arrays.asList(line.split(" ")).iterator());

            // Pair each word with 1, then sum counts per word.
            JavaPairRDD<String, Integer> ones = words.mapToPair(w -> new Tuple2<>(w, 1));
            JavaPairRDD<String, Integer> counts = ones.reduceByKey(Integer::sum);

            counts.saveAsTextFile(outputPath);
        }
    }

}
