package com.wuji1626.spark.cases

import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

/**
 * Demonstrates nine ways to implement word count on a Spark RDD:
 * groupBy, groupByKey, reduceByKey, aggregateByKey, foldByKey,
 * combineByKey, countByKey, countByValue and a driver-side reduce.
 *
 * Each variant prints its results to stdout; all operate on the same
 * two-sentence sample data.
 */
object RDD_WordCount {
  def main(args: Array[String]): Unit = {
    // Step 1: prepare the environment (local mode using all available cores).
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("Operator")
    val sc = new SparkContext(sparkConf)

    // Step 2: run each word-count variant in turn.
    wordCount1(sc)
    wordCount2(sc)
    wordCount3(sc)
    wordCount4(sc)
    wordCount5(sc)
    wordCount6(sc)
    wordCount7(sc)
    wordCount8(sc)
    wordCount9(sc)

    // Step 3: release Spark resources.
    sc.stop()
  }

  /** Builds the sample RDD of individual words shared by every variant. */
  private def sampleWords(sc: SparkContext) =
    sc.makeRDD(List("Hello World", "Hello Spark")).flatMap(_.split(" "))

  // groupBy: group identical words together, then count each group's size.
  def wordCount1(sc: SparkContext): Unit = {
    println("start--------------1")
    val words = sampleWords(sc)
    val wordCount = words.groupBy(word => word).mapValues(_.size)
    wordCount.collect().foreach(println)
    println("end--------------")
  }

  // groupByKey: less efficient — all values are shuffled before counting.
  def wordCount2(sc: SparkContext): Unit = {
    println("start--------------2")
    val wordOne = sampleWords(sc).map((_, 1))
    val wordCount = wordOne.groupByKey().mapValues(_.size)
    wordCount.collect().foreach(println)
    println("end--------------")
  }

  // reduceByKey: combines values map-side before the shuffle (preferred).
  def wordCount3(sc: SparkContext): Unit = {
    println("start--------------3")
    val wordOne = sampleWords(sc).map((_, 1))
    val wordCount = wordOne.reduceByKey(_ + _)
    wordCount.collect().foreach(println)
    println("end--------------")
  }

  // aggregateByKey: a zero value plus separate intra-/inter-partition functions.
  def wordCount4(sc: SparkContext): Unit = {
    println("start--------------4")
    val wordOne = sampleWords(sc).map((_, 1))
    val wordCount = wordOne.aggregateByKey(zeroValue = 0)(_ + _, _ + _)
    wordCount.collect().foreach(println)
    println("end--------------")
  }

  // foldByKey: aggregateByKey where both combine functions are the same.
  def wordCount5(sc: SparkContext): Unit = {
    println("start--------------5")
    val wordOne = sampleWords(sc).map((_, 1))
    val wordCount = wordOne.foldByKey(zeroValue = 0)(_ + _)
    wordCount.collect().foreach(println)
    println("end--------------")
  }

  // combineByKey: the most general per-key aggregation.
  def wordCount6(sc: SparkContext): Unit = {
    println("start--------------6")
    val wordOne = sampleWords(sc).map((_, 1))
    val wordCount = wordOne.combineByKey(
      (v: Int) => v,             // createCombiner: first value seen for a key
      (x: Int, y: Int) => x + y, // mergeValue: fold a value into the combiner
      (x: Int, y: Int) => x + y  // mergeCombiners: merge combiners across partitions
    )
    wordCount.collect().foreach(println)
    println("end--------------")
  }

  // countByKey: action returning a Map[String, Long] to the driver.
  def wordCount7(sc: SparkContext): Unit = {
    println("start--------------7")
    val wordOne = sampleWords(sc).map((_, 1))
    val wordCount = wordOne.countByKey()
    wordCount.foreach(println)
    println("end--------------")
  }

  // countByValue: counts each distinct element directly, no pairing needed.
  def wordCount8(sc: SparkContext): Unit = {
    println("start--------------8")
    val wordCount = sampleWords(sc).countByValue()
    wordCount.foreach(println)
    println("end--------------")
  }

  // reduce: map each word to a single-entry Map, then merge maps pairwise
  // on the driver/executors via a commutative, associative merge.
  def wordCount9(sc: SparkContext): Unit = {
    println("start--------------9")
    val mapWord = sampleWords(sc).map(word => mutable.Map[String, Long]((word, 1)))
    val wordCount = mapWord.reduce { (map1, map2) =>
      // Merge map2 into map1; mutating map1 is acceptable here because each
      // map is private to this reduce step.
      map2.foreach { case (word, count) =>
        map1.update(word, map1.getOrElse(word, 0L) + count)
      }
      map1
    }
    wordCount.foreach(println)
    println("end--------------")
  }
}
