package com.at.bigdata.spark.core.wc

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

/**
 *
 * @author cdhuangchao3
 * @date 2023/3/6 8:40 PM
 */
object Spark04_WordCount {

  /**
   * Runs every word-count variant in sequence against the same tiny
   * two-line dataset, printing each result so the operators can be
   * compared side by side, then shuts the context down.
   */
  def main(args: Array[String]): Unit = {
    // TODO Establish the connection to the Spark framework.
    val conf = new SparkConf().setMaster("local").setAppName("WordCount")
    val sc = new SparkContext(conf)

    groupBy(sc)
    groupByKey(sc)
    reduceByKey(sc)
    aggregateByKey(sc)
    foldByKey(sc)
    combineByKey(sc)

    countByKey(sc)
    countByValue(sc)

    reduce(sc)
    aggregate(sc)
    fold(sc)
    sc.stop()
  }

  /** Word count via `groupBy`: shuffle whole words into groups, then take each group's size. */
  def groupBy(sc: SparkContext): Unit = {
    val lines = sc.makeRDD(List("Hello Scala", "Hello spark"))
    val tokens = lines.flatMap(line => line.split(" "))
    val grouped = tokens.groupBy(identity)
    val counts = grouped.mapValues(_.size)
    println(counts.collect().mkString(","))
  }

  /** Word count via `groupByKey`: pair each word with 1, group by key, count the values. */
  def groupByKey(sc: SparkContext): Unit = {
    val lines = sc.makeRDD(List("Hello Scala", "Hello spark"))
    val tokens = lines.flatMap(line => line.split(" "))
    val pairs = tokens.map(word => (word, 1))
    val grouped = pairs.groupByKey()
    val counts = grouped.mapValues(_.size)
    println(counts.collect().mkString(","))
  }

  /** Word count via `reduceByKey`: sum the 1s per key (combines map-side before the shuffle). */
  def reduceByKey(sc: SparkContext): Unit = {
    val lines = sc.makeRDD(List("Hello Scala", "Hello spark"))
    val tokens = lines.flatMap(line => line.split(" "))
    val pairs = tokens.map(word => (word, 1))
    val counts = pairs.reduceByKey((a, b) => a + b)
    println(counts.collect().mkString(","))
  }

  /** Word count via `aggregateByKey`: zero 0, addition for both the in-partition and cross-partition op. */
  def aggregateByKey(sc: SparkContext): Unit = {
    val lines = sc.makeRDD(List("Hello Scala", "Hello spark"))
    val tokens = lines.flatMap(line => line.split(" "))
    val pairs = tokens.map(word => (word, 1))
    val counts = pairs.aggregateByKey(0)((acc, v) => acc + v, (a, b) => a + b)
    println(counts.collect().mkString(","))
  }

  /** Word count via `foldByKey`: aggregateByKey specialised to a single op plus zero value. */
  def foldByKey(sc: SparkContext): Unit = {
    val lines = sc.makeRDD(List("Hello Scala", "Hello spark"))
    val tokens = lines.flatMap(line => line.split(" "))
    val pairs = tokens.map(word => (word, 1))
    val counts = pairs.foldByKey(0)((acc, v) => acc + v)
    println(counts.collect().mkString(","))
  }

  /** Word count via `combineByKey`: explicit create / merge-value / merge-combiner functions. */
  def combineByKey(sc: SparkContext): Unit = {
    val lines = sc.makeRDD(List("Hello Scala", "Hello spark"))
    val tokens = lines.flatMap(line => line.split(" "))
    val pairs = tokens.map(word => (word, 1))
    val counts = pairs.combineByKey(
      (v: Int) => v,              // first value for a key becomes the combiner
      (acc: Int, v: Int) => acc + v, // fold further values within a partition
      (a: Int, b: Int) => a + b      // merge combiners across partitions
    )
    println(counts.collect().mkString(","))
  }

  /** Word count via the `countByKey` action: returns a local Map of key -> occurrence count. */
  def countByKey(sc: SparkContext): Unit = {
    val lines = sc.makeRDD(List("Hello Scala", "Hello spark"))
    val tokens = lines.flatMap(line => line.split(" "))
    val pairs = tokens.map(word => (word, 1))
    val counts = pairs.countByKey()
    println(counts)
  }

  /** Word count via the `countByValue` action: counts distinct elements directly, no pairing needed. */
  def countByValue(sc: SparkContext): Unit = {
    val lines = sc.makeRDD(List("Hello Scala", "Hello spark"))
    val tokens = lines.flatMap(line => line.split(" "))
    val counts = tokens.countByValue()
    println(counts)
  }

  // Whole-RDD actions: reduce / aggregate / fold over per-word mutable maps.

  /** Word count via `reduce`: each word becomes a one-entry map, maps are merged in place. */
  def reduce(sc: SparkContext): Unit = {
    val lines = sc.makeRDD(List("Hello Scala", "Hello spark"))
    val tokens = lines.flatMap(line => line.split(" "))
    val perWord = tokens.map(word => mutable.Map(word -> 1))
    val counts = perWord.reduce { (acc, other) =>
      // Merge `other` into `acc` in place and hand `acc` back.
      for ((word, cnt) <- other) {
        acc(word) = acc.getOrElse(word, 0) + cnt
      }
      acc
    }
    println(counts)
  }

  /** Word count via `aggregate`: empty map as zero, the same in-place merge for both ops. */
  def aggregate(sc: SparkContext): Unit = {
    val lines = sc.makeRDD(List("Hello Scala", "Hello spark"))
    val tokens = lines.flatMap(line => line.split(" "))
    val perWord = tokens.map(word => mutable.Map(word -> 1))
    val counts = perWord.aggregate(mutable.Map.empty[String, Int])(
      (acc, other) => {
        // Within a partition: fold each single-word map into the accumulator.
        for ((word, cnt) <- other) {
          acc(word) = acc.getOrElse(word, 0) + cnt
        }
        acc
      },
      (acc, other) => {
        // Across partitions: merge the partial maps the same way.
        for ((word, cnt) <- other) {
          acc(word) = acc.getOrElse(word, 0) + cnt
        }
        acc
      }
    )
    println(counts)
  }

  /** Word count via `fold`: like aggregate, but one merge function serves both phases. */
  def fold(sc: SparkContext): Unit = {
    val lines = sc.makeRDD(List("Hello Scala", "Hello spark"))
    val tokens = lines.flatMap(line => line.split(" "))
    val perWord = tokens.map(word => mutable.Map(word -> 1))
    val counts = perWord.fold(mutable.Map.empty[String, Int]) { (acc, other) =>
      for ((word, cnt) <- other) {
        acc(word) = acc.getOrElse(word, 0) + cnt
      }
      acc
    }
    println(counts)
  }
}
