package com.bigdata.core.action

import org.apache.spark.{SparkConf, SparkContext}

/**
 * countByValue demo.
 *
 * `countByValue` is an action that counts how many times each distinct
 * element occurs in the RDD and returns the result map to the driver.
 * Unlike `countByKey`, the RDD does not have to be a (K, V) pair RDD:
 * for a pair RDD the whole (K, V) tuple — not just V — is the "value"
 * being counted.
 */
object Demo9_countByValue {
  def main(args: Array[String]): Unit = {
    // App name corrected to match this demo (was copy-pasted as "countByKey").
    val conf = new SparkConf().setAppName("countByValue").setMaster("local")
    val sc = new SparkContext(conf)
    sc.setLogLevel("ERROR")

    try {
      // Pair RDD: each (key, value) tuple as a whole is counted,
      // e.g. ("a", 1) appears twice, so the result contains (("a", 1), 2).
      val rdd = sc.makeRDD(List[(String, Integer)](("a", 1), ("a", 1),
        ("a", 1000), ("b", 2), ("b", 200), ("c", 3), ("c", 3)))

      val result1: collection.Map[(String, Integer), Long] = rdd.countByValue()
      result1.foreach(println)

      println("=" * 100)

      // Non-pair RDD: countByValue still works, which countByKey would not.
      val rdd1 = sc.makeRDD(List[String]("zs", "ls", "zs", "ls", "tq", "nicai"))
      val result2: collection.Map[String, Long] = rdd1.countByValue()
      result2.foreach(println)
    } finally {
      // Release the SparkContext; the original leaked it by never calling stop().
      sc.stop()
    }
  }
}
