package com.fwmagic.spark.core.action

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demonstrates `countByKey` on a pair RDD: for an RDD of (K, V) pairs it
  * returns a `collection.Map[K, Long]` on the driver, mapping each distinct
  * key to the number of elements that carry it.
  */
object CountByKeyDemo {
    def main(args: Array[String]): Unit = {
        // Run Spark locally, using all available cores.
        val sparkConf = new SparkConf()
                .setAppName(this.getClass.getSimpleName)
                .setMaster("local[*]")
        val sc = new SparkContext(sparkConf)

        // Split the single comma-separated line into words and pair
        // each word with a dummy value of 1, yielding a pair RDD.
        val wordPairs: RDD[(String, Int)] = sc
                .makeRDD(List("spark,flink,spark,flink,scala,flink"))
                .flatMap(line => line.split(","))
                .map(word => (word, 1))

        // countByKey collects the per-key element counts to the driver:
        // Map(flink -> 3, spark -> 2, scala -> 1)
        val keyCounts: collection.Map[String, Long] = wordPairs.countByKey()
        println(keyCounts)

        sc.stop()
    }
}
