package com.cqk.bigdata.spark

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Minimal Spark example: parallelizes a local array of ints and counts the
 * occurrences of each value with a map + reduceByKey word-count pattern.
 *
 * @author cqk
 * @since 2018/4/13 9:43
 */
object Test {

  /**
   * Entry point. Builds a local SparkContext, counts occurrences of each
   * integer in a hard-coded sample array, and prints `(value, count)` pairs.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // local[*] runs Spark in-process using all available cores.
    val conf = new SparkConf().setAppName("Test").setMaster("local[*]")
    val sc = new SparkContext(conf)
    try {
      val data = Array(
        1, 2, 3, 4, 5, 6, 7, 8, 9, 0,
        1, 2, 3, 4, 5, 6, 7, 8, 9, 0,
        1, 3, 5, 7, 9,
        2, 4, 6, 8)
      val distData = sc.parallelize(data)
      // Classic word-count pattern: tag each element with 1, then sum per key.
      val pairs  = distData.map(x => (x, 1))
      val counts = pairs.reduceByKey(_ + _)
      counts.collect().foreach(println)
    } finally {
      // Always release the driver's resources, even if the job above fails.
      sc.stop()
    }
  }
}
