package com.linys.scala.qf.day06_spark

import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

/**
  * Other operators: demonstrates `combineByKey`.
  *
  * For each first-component key in the input, collects the distinct second
  * components and the distinct counts into a pair of sets, then prints the
  * result of each key to stdout.
  */
object ExeciseAggregate_07 {

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("ExeciseAggregate07").setMaster("local[*]")
    val sc = new SparkContext(conf)

    println("-------------------------------------------------------------------------------------------")
    // Sample ((key1, key2), count) records.
    val initialScores = Array((("1", "011"), 1), (("1", "012"), 1), (("2", "011"), 1), (("2", "013"), 1), (("2", "014"), 1))
    val d1 = sc.parallelize(initialScores)
    // Re-key by the first component, then aggregate per key into
    // (Set of second components, Set of counts).
    d1.map { case ((k1, k2), cnt) => (k1, (k2, cnt)) }.combineByKey(
      // createCombiner: seed both sets from the first value seen for a key.
      // Immutable Set replaces the original mutable.Set — the original only
      // ever used the non-mutating `+`/`++`, which allocate a new set anyway,
      // so immutable is the correct idiom and equally efficient here.
      (v: (String, Int)) => (Set(v._1), Set(v._2)),
      // mergeValue: fold one more value into a partition-local accumulator.
      (acc: (Set[String], Set[Int]), v: (String, Int)) => (acc._1 + v._1, acc._2 + v._2),
      // mergeCombiners: union partial results from different partitions.
      (acc1: (Set[String], Set[Int]), acc2: (Set[String], Set[Int])) => (acc1._1 ++ acc2._1, acc1._2 ++ acc2._2)
    ).collect().foreach(println)
    println("-------------------------------------------------------------------------------------------")

    // stop() is side-effecting, so keep the parentheses per Scala convention.
    sc.stop()

  }

}
