package com.haozhen;

import java.security.Principal;
import java.util.Arrays;

import javafx.util.Pair;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import scala.Tuple2;

/**
 * @author haozhen
 * @email haozh@ync1.com
 * @date 2021/1/3  1:01
 */
/**
 * Classic Spark word-count driver: reads {@code data/words.txt}, splits each
 * line into whitespace-separated words, and prints each distinct word with
 * its occurrence count.
 */
public class WordCount {

    public static void main(String[] args) {
        // Run locally using all available cores; the app name appears in the Spark UI.
        SparkConf javaConf = new SparkConf().setAppName("javaWorkdCount")
                .setMaster("local[*]");

        JavaSparkContext javaSparkContext = new JavaSparkContext(javaConf);

        // Reduce log noise so the printed counts are visible on the console.
        javaSparkContext.setLogLevel("WARN");

        // textFile yields one RDD element per LINE of the input file.
        JavaRDD<String> stringJavaRDD = javaSparkContext.textFile("data/words.txt");

        // FIX: the original paired whole lines with 1, so it counted lines,
        // not words. Tokenize each line on whitespace first, dropping the
        // empty tokens that leading whitespace can produce.
        JavaRDD<String> words = stringJavaRDD
                .flatMap(line -> Arrays.asList(line.split("\\s+")).iterator())
                .filter(word -> !word.isEmpty());

        // Pair every word with an initial count of 1.
        JavaPairRDD<String, Integer> wordMap = words.mapToPair(word -> new Tuple2<>(word, 1));

        // Sum the per-word 1s to get total occurrence counts.
        JavaPairRDD<String, Integer> results = wordMap.reduceByKey(Integer::sum);

        results.foreach(System.out::println);

        javaSparkContext.stop();
    }
}
