package com.atguigu.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.util.AccumulatorV2
import org.apache.spark.{SparkConf, SparkContext}

import scala.Console.println
import scala.collection.mutable

object WordCount {
  /**
   * Demonstrates nine ways to compute a word count over a text file with Spark.
   * Approaches 1-8 are kept as commented-out one-liners for reference; the ninth,
   * a custom accumulator (MyAcc), is the one actually executed.
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setMaster("local").setAppName("WordCount")
    // Work around the local-mode memory check (needs >= ~471 MB of testing memory).
    conf.set("spark.testing.memory", "2147480000")
    val sc: SparkContext = new SparkContext(conf)
    val data: RDD[String] = sc.textFile("spark-core/data/word.txt")
    val words: RDD[String] = data.flatMap(_.split(" "))
    val wordToOne: RDD[(String, Int)] = words.map((_, 1))

    /* Approach 1: groupBy */
    //println(wordToOne.groupBy(_._1).map(a => (a._1, a._2.size)).collect().mkString(" , "))

    /* Approach 2: groupByKey */
    //println(wordToOne.groupByKey().map(x => (x._1, x._2.size)).collect().mkString(" , "))

    /* Approach 3: aggregateByKey */
    //println(wordToOne.aggregateByKey(0)(_ + _, _ + _).collect().mkString(" , "))

    /* Approach 4: foldByKey */
    //println(wordToOne.foldByKey(0)(_ + _).collect().mkString(" , "))

    /* Approach 5: reduceByKey */
    //println(wordToOne.reduceByKey(_ + _).collect().mkString(" , "))

    /* Approach 6: combineByKey */
//    println(wordToOne.combineByKey(v => v,
//      (left: Int, right: Int) => {
//        left + right
//      },
//      (left: Int, right: Int) => {
//        left + right
//      }
//    ).collect().mkString(" , "))

    /* Approach 7: countByKey */
    //println(wordToOne.countByKey())

    /* Approach 8: countByValue */
    //println(words.countByValue())

    /* Approach 9: custom accumulator (MyAcc) */
    val acc: MyAcc = new MyAcc
    sc.register(acc, "wordCount")
    // foreach runs on executors; each executor's accumulator copy is merged on the driver.
    words.foreach(x => acc.add(x))
    println(acc.value)
    sc.stop()
  }
}
/**
 * Custom Spark accumulator that counts occurrences per word.
 *
 * Input type: a single word (String).
 * Output type: a mutable map of word -> occurrence count.
 */
class MyAcc extends AccumulatorV2[String, mutable.Map[String, Long]] {
  var map: mutable.Map[String, Long] = mutable.Map()

  /** Zero state is an empty map (required so copyAndReset yields a zero accumulator). */
  override def isZero: Boolean = {
    map.isEmpty
  }

  /**
   * Returns a copy carrying the CURRENT value, per the AccumulatorV2 contract.
   * The original returned `new MyAcc` (an empty accumulator), which silently
   * discards state for any caller of copy() other than copyAndReset.
   */
  override def copy(): AccumulatorV2[String, mutable.Map[String, Long]] = {
    val acc = new MyAcc
    acc.map = map.clone()
    acc
  }

  override def reset(): Unit = {
    map.clear()
  }

  /** Executor-side: increment the count for one word. */
  override def add(word: String): Unit = {
    val old: Long = map.getOrElse(word, 0L)
    map.update(word, old + 1L)
  }

  /** Driver-side: fold another accumulator's counts into this one. */
  override def merge(other: AccumulatorV2[String, mutable.Map[String, Long]]): Unit = {
    other.value.foreach {
      case (word, count) =>
        val oldCnt: Long = map.getOrElse(word, 0L)
        map.update(word, oldCnt + count)
    }
  }

  override def value: mutable.Map[String, Long] = {
    map
  }
}