import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * WordCountDemo: a minimal Spark word-count example.
 *
 * Reads text files, splits lines into words, and prints each word
 * with its total occurrence count.
 *
 * Date: 2020/8/4 9:41
 *
 * @author Hesion
 * @since JDK 1.8
 */
object WordCountDemo {

  /**
   * Entry point: runs a local word count over `input/word*.txt` and prints
   * each (word, count) pair to stdout.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // 1. Create the SparkContext (local mode, all available cores).
    val conf = new SparkConf().setMaster("local[*]").setAppName("word_count")
    val sc = new SparkContext(conf)

    // Ensure the SparkContext is always stopped, even if processing fails;
    // the original code leaked it by never calling sc.stop().
    try {
      // 2. Load the input files (one RDD element per line).
      val lines: RDD[String] = sc.textFile("input/word*.txt")

      // 3.1 Split each line into words on single spaces.
      val words: RDD[String] = lines.flatMap(_.split(" "))
      // 3.2 Pair every word with an initial count of 1.
      val pairs: RDD[(String, Int)] = words.map(word => (word, 1))
      // 3.3 Sum the counts per word. Parameter names reflect the actual
      //     roles: reduceByKey folds two partial counts together.
      val counts: RDD[(String, Int)] = pairs.reduceByKey((a, b) => a + b)

      // 4. Collect the result to the driver (safe here: demo-sized data).
      val result: Array[(String, Int)] = counts.collect()

      // 5. Print each (word, count) pair.
      result.foreach(println)
    } finally {
      // Release Spark resources.
      sc.stop()
    }
  }
}
