package com.catmiao.spark.core;

import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.rdd.RDD;
import scala.Tuple2;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Classic Spark word-count example: reads text files from the "datas"
 * directory, counts occurrences of each whitespace-separated word, and
 * prints the results on the driver.
 *
 * @author ChengMiao
 * @title: WordCount
 * @projectName spark_study
 * @date 2023/12/5 15:49
 */
public class WordCount {

    /**
     * Entry point: connects to the Spark cluster, counts how often each
     * whitespace-separated word appears in the text files under "datas",
     * prints each word with its count, and shuts the context down.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {

        SparkConf sparkConf = new SparkConf();
        sparkConf.setMaster("spark://hadoop102:7077");
        sparkConf.setAppName("wordCount");

        // Establish the connection to the Spark framework.
        JavaSparkContext sparkContext = new JavaSparkContext(sparkConf);

        try {
            // Read the input: one RDD element per line, single partition.
            JavaRDD<String> lines = sparkContext.textFile("datas", 1);

            // Flatten: split each line into individual words.
            JavaRDD<String> words = lines.flatMap(
                    (FlatMapFunction<String, String>) s -> Stream.of(s.split(" ")).iterator());

            // Pair each word with an initial count of 1, then sum the counts
            // per word. This replaces the previous map-to-singleton-HashMap +
            // groupBy approach, which allocated a Map per word and never
            // aggregated or output any counts.
            JavaPairRDD<String, Integer> wordCounts = words
                    .mapToPair(word -> new Tuple2<>(word, 1))
                    .reduceByKey(Integer::sum);

            // Collect the aggregated counts to the driver and print them.
            Map<String, Integer> wordCountsMap = wordCounts.collectAsMap();
            for (Map.Entry<String, Integer> entry : wordCountsMap.entrySet()) {
                System.out.println(entry.getKey() + " ：" + entry.getValue());
            }
        } finally {
            // Always release the connection to the cluster, even on failure.
            sparkContext.stop();
        }
    }
}
