package com.doit.spark.practise


import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

/**
 * Spark word-count job: reads text from the path in args(0) and writes
 * per-word counts to the path in args(1).
 *
 * @author MDK
 * @since 2022-01-08 22:12
 * @version 2021.2.2
 */
object C03_WordCount {

  /**
   * Entry point. Expects two CLI arguments: the input text path and the
   * output directory path. Counts word occurrences and saves the result
   * as text files.
   *
   * @param args args(0) = input path, args(1) = output path
   */
  def main(args: Array[String]): Unit = {

    // Fail fast with a clear message instead of ArrayIndexOutOfBoundsException.
    require(args.length >= 2, "usage: C03_WordCount <inputPath> <outputPath>")
    val inPath  = args(0)
    val outPath = args(1)

    val conf = new SparkConf().setAppName("wc")
    val sc = new SparkContext(conf)
    try {
      // Split each line on runs of whitespace, pair every word with 1, and
      // sum per word. reduceByKey aggregates map-side before the shuffle,
      // unlike the groupBy(...).mapValues(...) pattern, which ships every
      // (word, 1) pair across the network and buffers them per key.
      val counts: RDD[(String, Int)] = sc
        .textFile(inPath)
        .flatMap(_.split("\\s+"))
        // split("\\s+") yields an empty first token on lines with leading
        // whitespace; drop it so "" is never counted as a word.
        .filter(_.nonEmpty)
        .map((_, 1))
        .reduceByKey(_ + _)

      counts.saveAsTextFile(outPath)
    } finally {
      // Always release the SparkContext, even if the job throws.
      sc.stop()
    }
  }
}
