package com.atguigu1.core.wc

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Introductory word-count example (counts word occurrences via groupBy).
 *
 * @author baojinlong
 * @since 2021-03-11 15:40
 */
object Spark02_WordCount {

  /**
   * Word-count driver: reads text input, splits each line on single spaces,
   * counts occurrences per word via a groupBy, and prints the results.
   *
   * @param args optional; args(0) overrides the input path (defaults to "datas",
   *             preserving the original hard-coded behavior)
   */
  def main(args: Array[String]): Unit = {
    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("WordCount")
    val sc = new SparkContext(sparkConf)
    try {
      // Input path is now configurable from the command line; default unchanged.
      val inputPath = args.headOption.getOrElse("datas")

      // 1. Read the input, one line per record, e.g. "hello world".
      val lines: RDD[String] = sc.textFile(inputPath)

      // 2. Split each line into individual words: hello, world, hello, world
      val words: RDD[String] = lines.flatMap(_.split(" "))
      val wordToOne: RDD[(String, Int)] = words.map(word => (word, 1))

      // 3. Group the (word, 1) pairs by word so each word's occurrences sit together.
      //    NOTE: groupBy shuffles every record; reduceByKey would pre-aggregate
      //    map-side, but this example intentionally demonstrates the groupBy approach.
      val wordGroup: RDD[(String, Iterable[(String, Int)])] = wordToOne.groupBy(_._1)

      // 4. Collapse each group to (word, count) by summing the attached 1s,
      //    e.g. hello -> 2, world -> 2.
      val wordToCount: RDD[(String, Int)] = wordGroup.map {
        case (word, pairs) => (word, pairs.map(_._2).sum)
      }

      // Action: pull the results to the driver and print them.
      wordToCount.collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if the job above throws;
      // the original code leaked it on failure.
      sc.stop()
    }
  }

}
