package com.offcn.spark.p3

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates word counting with Spark's `combineByKey` operator, spelling out
 * the three functions (`createCombiner`, `mergeValue`, `mergeCombiners`) that
 * higher-level operators such as `reduceByKey` are built on.
 *
 * @author BigData-LGW
 * @since 2020/12/7
 * @version 1.0
 */
object CombineByKey {

    /**
     * Entry point: builds a local SparkContext, runs the combineByKey word-count
     * demo, and guarantees the context is stopped even if the job fails.
     */
    def main(args: Array[String]): Unit = {
        val conf = new SparkConf()
            .setMaster("local[*]")
            .setAppName("CombineByKey")
        val sc = new SparkContext(conf)
        try {
            reduceByKey(sc)
        } finally {
            // Always release cluster resources, even on failure.
            sc.stop()
        }
    }

    /**
     * Word count implemented directly with `combineByKey`.
     *
     * NOTE: the method name is historical/misleading — it does not call
     * `reduceByKey`; rather it shows the `combineByKey` machinery that
     * `reduceByKey` is built on. Kept as-is for source compatibility.
     *
     * @param sc the SparkContext used to create the demo RDD
     */
    def reduceByKey(sc: SparkContext): Unit = {
        val lines = sc.parallelize(Array(
            "hello you",
            "hello me",
            "hello you",
            "hello you",
            "hello me",
            "hello you"
        ), 2)
        // Split each line on whitespace and pair every word with an initial count of 1.
        val pairs = lines.flatMap(_.split("\\s+")).map((_, 1))
        val counts = pairs.combineByKey(createCombiner, mergeValue, mergeCombiners)
        // collect() first: a bare RDD.foreach(println) executes on the executors,
        // so outside local mode the output never reaches the driver's stdout.
        counts.collect().foreach {
            case (key, count) =>
                println(s"key: ${key}, count: ${count}")
        }
    }

    /** Creates the initial per-partition accumulator from the first value seen for a key. */
    def createCombiner(num: Int): Int = {
        num
    }

    /** Folds one more value into a key's per-partition accumulator. */
    def mergeValue(sumi: Int, num: Int): Int = {
        sumi + num
    }

    /** Merges two per-partition accumulators for the same key across partitions. */
    def mergeCombiners(sum: Int, sumi: Int): Int = {
        sum + sumi
    }
}
