package spark_core;

import java.util.Arrays;
import java.util.List;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

/**
 * @author shihb
 * @date 2020/1/6 10:15
 */
/**
 * Classic Spark word-count example: reads a text file, splits each line on
 * spaces, counts occurrences of every word, and prints the totals.
 */
public class WordCount {

  /** Fallback input file used when no path is supplied on the command line. */
  private static final String DEFAULT_INPUT =
      "D:\\SHBData\\IDEAProjects\\spark-parent\\SparkTestDemo\\src\\main\\resources\\words";

  /**
   * Runs the word count job.
   *
   * @param args optional; {@code args[0]} may supply the input file path,
   *             otherwise {@link #DEFAULT_INPUT} is used (keeps the original
   *             behavior for no-arg invocations)
   */
  public static void main(String[] args) {
    String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;

    // Local mode: configure the Spark deployment environment.
    SparkConf sparkConf = new SparkConf().setAppName("wordCount").setMaster("local[*]");

    // try-with-resources guarantees the context is stopped even if the job
    // throws (the original called sc.stop() only on the success path).
    try (JavaSparkContext sc = new JavaSparkContext(sparkConf)) {
      // Read the input file as one RDD element per line.
      JavaRDD<String> lines = sc.textFile(inputPath, 1);
      // Split each line into individual words.
      JavaRDD<String> words = lines.flatMap(s -> Arrays.asList(s.split(" ")).iterator());
      // Pair each word with an initial count of 1 for aggregation.
      JavaPairRDD<String, Integer> pairs = words.mapToPair(s -> new Tuple2<>(s, 1));
      // Sum the counts per word.
      JavaPairRDD<String, Integer> wordsCount = pairs.reduceByKey(Integer::sum);
      // Collect the results to the driver and print them.
      List<Tuple2<String, Integer>> result = wordsCount.collect();
      result.forEach(System.out::println);
    }
  }

}
