package com.dxf.bigdata.D05_spark_again.action

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates the `countByKey` action: counts how many elements exist
 * for each key in a pair RDD and returns the result to the driver.
 */
object countByKey {

  def main(args: Array[String]): Unit = {

    // Local driver configuration; bump port retries so several local
    // Spark apps can run side by side without UI-port collisions.
    val conf = new SparkConf().setMaster("local[*]").setAppName("app")
    conf.set("spark.port.maxRetries", "100")
    val sc = new SparkContext(conf)

    // A small pair RDD spread across 2 partitions; key "a" appears twice.
    val pairs: RDD[(String, Int)] = sc.makeRDD(List(("a", 1), ("b", 2), ("a", 3)), 2)

    // countByKey is an ACTION: it triggers the job and brings a
    // key -> occurrence-count map back to the driver (values are ignored).
    val countsPerKey: collection.Map[String, Long] = pairs.countByKey()

    println(countsPerKey) // Map(b -> 1, a -> 2)

    sc.stop()

  }

}
