package cn.dmp.report

import cn.dmp.utils.AdRptKpi
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object RptAreaAnalysisCore {

    /**
     * Regional report job: reads ad-log data from parquet, aggregates the KPI
     * counters produced by [[AdRptKpi]] per region key, and writes the result
     * as comma-separated text.
     *
     * Expected arguments:
     *   args(0) dataInputPath - parquet input path
     *   args(1) outputPath    - text output path
     */
    def main(args: Array[String]): Unit = {
        // Validate arguments; exit non-zero so callers/schedulers can detect the failure
        // (the original sys.exit() returned status 0 even on bad arguments).
        if (args.length != 2) {
            println(
                """
                  |cn.dmp.report.RptAreaAnalysisCore
                  |参数：dataInputPath, outputPath
                """.stripMargin)
            sys.exit(1)
        }

        val Array(dataInputPath, outputPath) = args

        // Build the SparkContext. setIfMissing lets a master supplied via
        // spark-submit (--master) take precedence, while local[*] remains the
        // default for local runs — preserving the original standalone behavior.
        val sparkConf = new SparkConf()
            .setIfMissing("spark.master", "local[*]")
            .setAppName("地域报表")
        val sc = new SparkContext(sparkConf)
        val sqlc = new SQLContext(sc)

        try {
            // Load the input data as a DataFrame.
            val parquet = sqlc.read.parquet(dataInputPath)

            // AdRptKpi maps each row to (key, kpiList); reduceByKey then sums the
            // KPI lists element-wise per key. The key is a pair (t._1._1, t._1._2
            // below) — presumably (province, city); confirm against AdRptKpi.
            parquet.map(AdRptKpi(_))
                .reduceByKey((list1, list2) => list1.zip(list2).map(t => t._1 + t._2))
                .map(t => t._1._1 + "," + t._1._2 + "," + t._2.mkString(","))
                .saveAsTextFile(outputPath)
        } finally {
            // Always release the SparkContext, even if the job above fails.
            sc.stop()
        }
    }

}
