package com.atbeijing.bigdata.spark.core.rdd.operator.action

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo of Spark action operators: `countByKey` and `countByValue`.
 *
 * Runs locally with all available cores, prints the resulting count maps,
 * then shuts the context down.
 */
object Spark05_Oper_Action {

    def main(args: Array[String]): Unit = {

        val conf = new SparkConf().setMaster("local[*]").setAppName("ActionOperator")
        val sc = new SparkContext(conf)

        // TODO operators - actions

        // TODO countByKey (7 / 10)
        // Counts how many times each key appears; values are ignored entirely.
        val pairRdd: RDD[(String, Int)] = sc.makeRDD(
            List(("a",1), ("a",1) , ("b",1), ("c",3), ("c",2)),
            2
        )
        val keyCounts: collection.Map[String, Long] = pairRdd.countByKey()
        println(keyCounts) // Map(b -> 1, a -> 2, c -> 2)

        // TODO countByValue (8 / 10)
        // Counts how many times each distinct element appears in the RDD.
        val wordRdd = sc.makeRDD(
            List("Hello", "Hello", "Hello","scala","scala","spark")
        )
        val wordCounts: collection.Map[String, Long] = wordRdd.countByValue()
        println(wordCounts) // Map(Hello -> 3, spark -> 1, scala -> 2)

        sc.stop()

    }
}
