package chapter03

import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates `reduceByKey` on a pair RDD:
 *   1. summing values per key,
 *   2. taking the per-key maximum (two equivalent formulations),
 *   3. a classic word count over a text file.
 */
object Test21_reduceByKey {
  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO logging for readable console output.
    val logger = Logger.getLogger("org.apache.spark")
    logger.setLevel(Level.WARN)
    // App name now matches what this example actually demonstrates
    // (was "partitionBy", a copy-paste leftover from another example).
    val conf = new SparkConf().setMaster("local[*]").setAppName("reduceByKey")
    val sc = new SparkContext(conf)
    try {
      val pairs =
        sc.makeRDD(List(("a", 1), ("b", 2), ("a", 3), ("b", 4)))
      // Sum the values of each key.
      val summed = pairs.reduceByKey(_ + _)
      // Per-key maximum, written two equivalent ways.
      val maxByIf = pairs.reduceByKey((x, y) => if (x > y) x else y)
      val maxByMath = pairs.reduceByKey(math.max)
      println(maxByIf.collect().toList)
      println(maxByMath.collect().toList)
      println(summed.collect().mkString("Array(", ", ", ")"))
      // Word count implemented with reduceByKey.
      val lines = sc.textFile("input/word.txt")
      println(lines.flatMap(_.split(" ").toList)
        .map((_, 1))
        .reduceByKey(_ + _)
        .collect().toList)
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }
}
