package com.zhaosc.spark.core

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import scala.util.Random

object DoubleAggregate {

  /** Demonstrates the two-phase ("salted") aggregation pattern used to
    * mitigate data skew: each key is prefixed with a random salt so the
    * first `reduceByKey` spreads a hot key across several groups, then the
    * salt is stripped and a second `reduceByKey` produces the true totals.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("DoubleAggregate").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      // Twelve identical ("hello", 1) pairs — a deliberately skewed key.
      val list = List.fill(12)(("hello", 1))

      val listRdd = sc.parallelize(list)

      val random = new Random
      listRdd
        .map { case (word, count) =>
          // Salt the key with a 0-2 prefix so the skewed key is split
          // across up to three partial-reduce groups.
          (s"${random.nextInt(3)}_$word", count)
        }
        .reduceByKey(_ + _)
        .map { case (saltedKey, count) =>
          // Strip the salt: "2_hello" -> "hello". Substring-after-first-'_'
          // is safer than split("_")(1), which would truncate a word that
          // itself contains an underscore.
          (saltedKey.substring(saltedKey.indexOf('_') + 1), count)
        }
        .reduceByKey(_ + _)
        .collect()
        .foreach(println)
    } finally {
      // Release the SparkContext even if the job throws.
      sc.stop()
    }
  }

}