package com.linys.scala.qf.day06_spark

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demonstrates Spark's `combineByKey` on pair RDDs.
  *
  * `combineByKey` takes three functions:
  *   1. createCombiner — called for the FIRST value of each key seen in a partition;
  *      turns that value into the initial combiner of the result type
  *   2. mergeValue — folds further values of the same key into the partition-local combiner
  *   3. mergeCombiners — merges combiners from different partitions (global aggregation)
  */
object ExeciseAggregate_04 {

  /** Tags every element with the id of the partition it lives in; meant to be
    * passed to `mapPartitionsWithIndex` when inspecting data placement. */
  private def tagWithPartition[T](index: Int, iter: Iterator[T]): Iterator[String] =
    iter.map(x => "[partID:" +  index + ", val: " + x + "]")

  /** Partition-tagging helper for RDDs of Int. */
  def func1(index: Int, iter: Iterator[(Int)]) : Iterator[String] =
    tagWithPartition(index, iter)

  /** Partition-tagging helper for RDDs of String. */
  def func2(index: Int, iter: Iterator[(String)]) : Iterator[String] =
    tagWithPartition(index, iter)

  /** Partition-tagging helper for pair RDDs of (String, Int). */
  def func3(index: Int, iter: Iterator[(String, Int)]) : Iterator[String] =
    tagWithPartition(index, iter)

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("ExeciseAggregate").setMaster("local[*]")
    val sc = new SparkContext(conf)

    // Word count: each word becomes (word, 1) and combineByKey sums the 1s.
    val rdd1 = sc.parallelize(List(
      "JAVA C# SCALA SCALA JAVA JAVA",
      "C++ SCALA JAVA JAVA",
      "C++ SCALA JAVA PYTHON",
      "SCALA")).flatMap(_.split(" ")).map((_, 1))
    println(rdd1.collect.toBuffer)
    val rdd2 = rdd1.combineByKey(
      x => x,                    // first occurrence: the 1 itself is the initial count
      (a: Int, b: Int) => a + b, // partition-local sum
      (m: Int, n: Int) => m + n) // cross-partition sum
    println(rdd2.collect.toBuffer)
    println(rdd2.partitions.length)

    //val rdd3 = rdd1.combineByKey(x => x + 10, (a: Int, b: Int) => a + b, (m: Int, n: Int) => m + n)
    //println(rdd3.collect.toBuffer)


    println("-----------求平均成绩-----------")
    // Average score per key: the combiner carries (count, sum of scores).
    val rdd8 = sc.parallelize(Array(("tom", 88), ("dog", 95), ("cat", 99), ("tom", 95),
      ("tom", 100), ("cat", 90), ("tom", 92), ("cat", 94), ("dog", 97)))
    println(rdd8.collect.toBuffer)
    val rdd9 = rdd8.combineByKey(
      score => (1, score),                                          // first score: count 1, running sum = score
      (x: (Int, Int), y: Int) => (x._1 + 1, x._2 + y),              // fold one more score into the partition combiner
      (m: (Int, Int), n: (Int, Int)) => (m._1 + n._1, m._2 + n._2)) // merge (count, sum) pairs across partitions
    println(rdd9.collect.toBuffer)
    // BUGFIX: use floating-point division — integer division truncated the averages
    // (e.g. tom: 375 / 4 printed 93 instead of 93.75).
    val rdd10 = rdd9.map { case (name, (num, scores)) => (name, scores.toDouble / num) }
    println(rdd10.collect.toBuffer)

    // Same aggregation with the combiner ordered as (sum, count) instead of (count, sum).
    val rdd11 = rdd8.combineByKey(
      score => (score, 1),
      (x: (Int, Int), y: Int) => (x._1 + y, x._2 + 1),
      (m: (Int, Int), n: (Int, Int)) => (m._1 + n._1, m._2 + n._2))
    println(rdd11.collect.toBuffer)

    println("----------------------")
    // Group values into a List per key: the combiner is the per-key list itself.
    val rdd4 = sc.parallelize(List("dog", "cat", "gnu", "salmon", "rabbit", "turkey", "wolf", "bear", "bee"), 3)
    val rdd5 = sc.parallelize(List(1, 1, 2, 2, 2, 1, 2, 2, 2), 3)
    val rdd6 = rdd5.zip(rdd4) // pairwise zip: (1, "dog"), (1, "cat"), (2, "gnu"), ...
    println(rdd6.collect.toBuffer)
    val rdd7 = rdd6.combineByKey(
      List(_),                                      // first value starts a singleton list
      (x: List[String], y: String) => x :+ y,       // append within a partition
      (m: List[String], n: List[String]) => m ++ n) // concatenate partition-local lists
    println(rdd7.collect.toBuffer)

    // Plain-Scala reminder of the list operators used above: ++ concatenates, :+ appends.
    val l1 = List(1, 2, 3, 5)
    val l2 = List(1, 2, 3, 8)
    val l3 = l1 ++ l2
    val l4 = l3 :+ 23 :+ 34
    println(l4.toBuffer)

    sc.stop()

  }

}
