package com.spark.cust.lesson

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Computes, for each company, the total and average of its income values
 * using `combineByKey`, then writes the result to `output/rdd/combine`
 * as a single text file of `(company, sum, average)` tuples.
 *
 * (Homework exercise: "output the result of each step; saving to a file is fine".)
 *
 * @author lhy
 * @since 2020/10/12
 */
object Combine {
    def main(args: Array[String]): Unit = {

        val conf = new SparkConf().setAppName("combine").setMaster("local")
        val sc = new SparkContext(conf)
        try {
            // Three partitions so the mergeCombiners path below is actually exercised.
            val data = sc.parallelize(Array(("company-1",92),("company-1",85),
                                            ("company-1",82),("company-2",78),
                                            ("company-2",96),("company-2",85),
                                            ("company-3",88),("company-3",94),
                                            ("company-3",80)),3)
            val res = data.combineByKey(
                // createCombiner: first value seen for a key -> (runningSum, count)
                (income: Int) => (income, 1),
                // mergeValue: fold another value into a partition-local accumulator
                (acc: (Int, Int), income: Int) => (acc._1 + income, acc._2 + 1),
                // mergeCombiners: merge accumulators built on different partitions
                (acc1: (Int, Int), acc2: (Int, Int)) => (acc1._1 + acc2._1, acc1._2 + acc2._2)
            ).map { case (key, (sum, count)) => (key, sum, sum / count.toFloat) }
            // coalesce(1) yields a single output file without the full shuffle
            // that repartition(1) would trigger.
            res.coalesce(1).saveAsTextFile("output/rdd/combine")
        } finally {
            sc.stop() // release the SparkContext even if the job fails
        }
    }
}
