package hfy

import org.apache.spark.{SparkConf, SparkContext}

/** Minimal Spark driver: sums integer values per character key and prints the result.
  *
  * Input: a hard-coded list of (Char, Int) pairs.
  * Output: one (key, summedValue) line per distinct key, printed to stdout
  * (e.g. (a,3), (b,1), (c,1) — ordering across partitions is not guaranteed).
  */
object optobj {
  def main(args: Array[String]): Unit = {
    // NOTE(review): app name says "EmployeeSalaryAnalysis" but the job sums
    // per-character counts — confirm the intended name with the author.
    val conf = new SparkConf().setAppName("EmployeeSalaryAnalysis").setMaster("local[*]")
    val sc = new SparkContext(conf)

    // try/finally guarantees the SparkContext is released even if an action throws.
    try {
      val pairs = sc.parallelize(List(('a', 1), ('a', 2), ('b', 1), ('c', 1)))
      // reduceByKey combines values per key on each partition before shuffling.
      val sums = pairs.reduceByKey(_ + _)
      sums.collect().foreach(println)
    } finally {
      sc.stop()
    }
  }
}
