package com.doit.day02

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: Learn big data at Duoyi Education
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description:
 */
/**
 * Demo of `RDD.groupBy`: grouping by a derived key, by the element itself,
 * by a composite string key, and by a tuple key — each followed by an
 * aggregation over the grouped values.
 *
 * Note: `groupBy` shuffles ALL values for a key to one executor; for pure
 * aggregations (sum/count) `reduceByKey`/`aggregateByKey` are preferred in
 * production because they combine map-side first.
 */
object Demo04GroupBy {
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc = SparkContext.getOrCreate(conf)

    val ls  = List(1, 2, 3, 4, 5, 6)
    val rdd = sc.parallelize(ls)

    // Group by a derived key: partition the numbers into even/odd buckets.
    rdd.groupBy(e => {
      if (e % 2 == 0) {
        "even"
      } else {
        "odd"
      }
    }).foreach(println)

    // Group by the element itself (each distinct value becomes its own key).
    rdd.groupBy(e => e).foreach(println)

    val ls2  = List("a", "a", "a", "b", "c", "b")
    val rdd2 = sc.parallelize(ls2)

    // Word count via groupBy: key -> number of occurrences.
    val grouped: RDD[(String, Iterable[String])] = rdd2.groupBy(e => e)
    // FIX: the original built this map but never ran an action on it, so the
    // lazy transformation was dead code — terminate it with foreach(println).
    grouped.map(tp => (tp._1, tp._2.size)).foreach(println)

    // (city, category, amount) records to aggregate per city+category.
    val arr = Array[(String, String, Double)](
      ("BJ", "A", 100),
      ("BJ", "A", 100),
      ("BJ", "A", 100),
      ("BJ", "B", 100),
      ("SH", "B", 100),
      ("SH", "A", 100)
    )

    val rdd3: RDD[(String, String, Double)] = sc.makeRDD(arr)

    // Variant 1: composite STRING key "city:category" -> total amount.
    val res: RDD[(String, Iterable[(String, String, Double)])] =
      rdd3.groupBy(tp3 => tp3._1 + ":" + tp3._2)

    res.map(tp => {
      val cityCategory = tp._1
      val money        = tp._2.map(_._3).sum
      (cityCategory, money)
    }).foreach(println)

    // Variant 2: composite TUPLE key (city, category) -> (sum, count).
    val res2: RDD[((String, String), Iterable[(String, String, Double)])] =
      rdd3.groupBy(tp3 => (tp3._1, tp3._2))

    // FIX: the original lambda discarded every expression except the last
    // (`tp._2.size`) and the resulting RDD was never acted on. Deconstruct
    // the key, return a proper tuple, and terminate with an action.
    res2.map { case ((city, category), records) =>
      val totalMoney = records.map(_._3).sum
      val count      = records.size
      (city, category, totalMoney, count)
    }.foreach(println)

    // FIX: release the SparkContext instead of leaking it.
    sc.stop()
  }

}
