package com.atguigu.bigdata.spark.core.wc

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable


/**
  * create by undeRdoG on  2021-06-13  17:57
  * 凡心所向，素履以往，生如逆旅，一苇以航。
  */
object Spark04_WordCount {

  /**
    * Entry point: builds a local SparkContext, runs one word-count variant,
    * and always stops the context.
    */
  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setMaster("local").setAppName("WordCount")
    val sc = new SparkContext(sparkConf)
    try {
      wordCount1(sc)
    } finally {
      // Fix: the original never stopped the context, leaking its resources.
      sc.stop()
    }
  }


  /**
    * Word count via groupBy: group identical words together, then the count
    * of each word is simply the size of its group.
    */
  def wordCount1(sc: SparkContext): Unit = {

    val rdd: RDD[String] = sc.makeRDD(List("Hello scala", "Hello Spark"))

    val words: RDD[String] = rdd.flatMap(_.split(" "))

    // groupBy keeps every occurrence in the group; size gives the count.
    val group: RDD[(String, Iterable[String])] = words.groupBy(word => word)

    val res: RDD[(String, Int)] = group.mapValues(iter => iter.size)

    res.collect().foreach(println)
  }


  /**
    * Word count via reduceByKey: map each word to (word, 1), then sum the
    * ones per key. Combines map-side before shuffling.
    */
  def wordCount2(sc: SparkContext): Unit = {

    val rdd: RDD[String] = sc.makeRDD(List("Hello scala", "Hello Spark"))

    val words: RDD[String] = rdd.flatMap(_.split(" "))

    val wordOne: RDD[(String, Int)] = words.map((_, 1))

    val res: RDD[(String, Int)] = wordOne.reduceByKey(_ + _)

    // Fix: reduceByKey is a lazy transformation; without an action the job
    // never ran. collect() triggers execution so the demo actually does work.
    res.collect().foreach(println)
  }


  /**
    * Word count via aggregateByKey: zero value 0, with the same addition used
    * both within a partition and across partitions.
    */
  def wordCount3(sc: SparkContext): Unit = {

    val rdd: RDD[String] = sc.makeRDD(List("Hello scala", "Hello Spark"))

    val words: RDD[String] = rdd.flatMap(_.split(" "))

    val wordOne: RDD[(String, Int)] = words.map((_, 1))

    val res: RDD[(String, Int)] = wordOne.aggregateByKey(0)(_ + _, _ + _)

    // Fix: trigger the lazy pipeline (original computed nothing).
    res.collect().foreach(println)
  }


  /**
    * Word count via foldByKey: shorthand for aggregateByKey when the
    * intra-partition and inter-partition functions are identical.
    */
  def wordCount4(sc: SparkContext): Unit = {

    val rdd: RDD[String] = sc.makeRDD(List("Hello scala", "Hello Spark"))

    val words: RDD[String] = rdd.flatMap(_.split(" "))

    val wordOne: RDD[(String, Int)] = words.map((_, 1))

    val res: RDD[(String, Int)] = wordOne.foldByKey(0)(_ + _)

    // Fix: trigger the lazy pipeline (original computed nothing).
    res.collect().foreach(println)
  }

  /**
    * Word count via combineByKey. Its three arguments are:
    *   1. how to transform the first value seen for a key (identity here),
    *   2. how to merge a value into the accumulator within a partition,
    *   3. how to merge accumulators across partitions.
    */
  def wordCount5(sc: SparkContext): Unit = {

    val rdd: RDD[String] = sc.makeRDD(List("Hello scala", "Hello Spark"))

    val words: RDD[String] = rdd.flatMap(_.split(" "))

    val wordOne: RDD[(String, Int)] = words.map((_, 1))

    val res: RDD[(String, Int)] = wordOne.combineByKey(
      v => v,                      // first value for a key is used as-is
      (x: Int, y: Int) => x + y,   // merge within a partition
      (x: Int, y: Int) => x + y    // merge across partitions
    )

    // Fix: trigger the lazy pipeline (original computed nothing).
    res.collect().foreach(println)
  }

  /**
    * Word count via countByKey — an action that returns the per-key counts
    * to the driver as a local Map.
    */
  def wordCount6(sc: SparkContext): Unit = {

    val rdd: RDD[String] = sc.makeRDD(List("Hello scala", "Hello Spark"))

    val words: RDD[String] = rdd.flatMap(_.split(" "))

    val wordOne: RDD[(String, Int)] = words.map((_, 1))

    val res: collection.Map[String, Long] = wordOne.countByKey()

    // Fix: the original silently discarded the result.
    res.foreach(println)
  }


  /**
    * Word count via countByValue — no (word, 1) pairing needed: each distinct
    * word is counted directly, returning a local Map on the driver.
    */
  def wordCount7(sc: SparkContext): Unit = {

    val rdd: RDD[String] = sc.makeRDD(List("Hello scala", "Hello Spark"))

    val words: RDD[String] = rdd.flatMap(_.split(" "))

    val res: collection.Map[String, Long] = words.countByValue()

    // Fix: the original silently discarded the result.
    res.foreach(println)
  }



}
