package com._51doit.spark02

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

//val d1 = Array(("bj",28.1), ("sh",28.7), ("gz",32.0), ("sz", 33.1))
//val d2 = Array(("bj",27.3), ("sh",30.1), ("gz",33.3))
//val d3 = Array(("bj",28.2), ("sh",29.1), ("gz",32.0), ("sz", 30.5))


object AugWd {

  /**
   * Computes the average temperature per city from three days of
   * (city, temperature) samples and prints one (city, avg) pair per line.
   */
  def main(args: Array[String]): Unit = {

    // Three days of (city, temperature) readings; some days miss a city.
    val d1 = Array(("bj", 28.1), ("sh", 28.7), ("gz", 32.0), ("sz", 33.1))
    val d2 = Array(("bj", 27.3), ("sh", 30.1), ("gz", 33.3))
    val d3 = Array(("bj", 28.2), ("sh", 29.1), ("gz", 32.0), ("sz", 30.5))

    val conf: SparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc: SparkContext = new SparkContext(conf)

    try {
      val readings: RDD[(String, Double)] = sc.makeRDD(d1 ++ d2 ++ d3)

      // Reduce to (sum, count) per city in a single shuffle. This replaces
      // the original mapValues(Array(_)) + reduceByKey(_ ++ _) approach,
      // which built a per-key Array[Double] and shipped every individual
      // reading across the network (same cost profile as groupByKey).
      val avgByCity: RDD[(String, Double)] = readings
        .mapValues(t => (t, 1))
        .reduceByKey { case ((s1, c1), (s2, c2)) => (s1 + s2, c1 + c2) }
        .mapValues { case (sum, count) => sum / count }

      avgByCity.foreach(println)
    } finally {
      // Always release the SparkContext, even if the job above fails.
      sc.stop()
    }
  }
}
