package com.sub.spark.core.rdd.demo;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.Arrays;

/**
 * @ClassName SubWordCount
 * @Description: Demo: count the number of occurrences of each word in a text file
 * @Author Submerge.
 * @Since 2025/5/18 23:51
 * @Version 1.0
 */
public class SubWordCount {

    /**
     * Entry point: echoes the program arguments, then runs the word-count
     * job against {@code data/demo/spark/wordcount.txt} in local mode.
     *
     * @param args program arguments; printed to stdout for debugging
     */
    public static void main(String[] args) {

        // Print the arguments.
        Arrays.asList(args).forEach(System.out::println);

        // Spark conf: run locally, using all available cores.
        SparkConf conf = new SparkConf();
        conf.setMaster("local[*]");
        conf.setAppName("SubWordCount");

        // JavaSparkContext is Closeable: try-with-resources guarantees the
        // context is stopped even if the job throws. (Previously the context
        // was stored in a static field and never stopped — a resource leak.)
        try (JavaSparkContext javaSparkContext = new JavaSparkContext(conf)) {
            // Run the word count.
            wordCount(javaSparkContext);
        }
    }

    /**
     * Reads the input file, counts occurrences of each space-separated word,
     * and prints the {@code (word, count)} pairs sorted by count in
     * descending order.
     *
     * @param javaSparkContext the Spark context used to build the RDD
     */
    private static void wordCount(JavaSparkContext javaSparkContext) {
        // Read the file.
        javaSparkContext.textFile("data/demo/spark/wordcount.txt")
                // Split each line on single spaces and flatten into words.
                .flatMap(line -> Arrays.asList(line.split(" ")).iterator())
                // Map each word to a (word, 1) pair.
                .mapToPair(word -> new Tuple2<>(word, 1))
                // Sum the 1s per word.
                .reduceByKey(Integer::sum)
                // Swap to (count, word) so we can sort by count...
                .mapToPair(tuple2 -> new Tuple2<>(tuple2._2, tuple2._1))
                // ...descending...
                .sortByKey(false)
                // ...then swap back to (word, count).
                .mapToPair(tuple2 -> new Tuple2<>(tuple2._2, tuple2._1))
                // Collect the results to the driver and print them.
                .collect()
                .forEach(System.out::println);
    }

}
