package com.bkd.report

import com.bkd.beans.Log
import com.bkd.util.RptUtils
import org.apache.spark.{SparkConf, SparkContext}

/**
  * 广告在每个地域的投放情况统计 (per-region ad delivery statistics)
  *
  * 实现方式---Spark Core
  *
  */
/**
  * Per-region ad delivery report, implemented with Spark Core (RDD API).
  *
  * Reads raw comma-separated ad log lines, computes the request / RTB /
  * show-click metric lists per (province, city) pair, sums them element-wise,
  * and writes one CSV line per region to the output path.
  *
  * Usage: AreaRpt2 <logInputPath> <resultOutputPath>
  */
object AreaRpt2 {

  def main(args: Array[String]): Unit = {
    // 0. Validate argument count; exit non-zero so schedulers see the failure.
    if (args.length != 2) {
      println(
        """
          |com.bkd.report.AreaRpt2
          |Arguments:
          |logInputPath
          |resultOutputPath
        """.stripMargin)
      sys.exit(1)
    }

    // Destructure the program arguments.
    val Array(logInputPath, resultOutputPath) = args

    // Build SparkConf -> SparkContext.
    val conf = new SparkConf()
    conf.setAppName(this.getClass.getSimpleName)
    conf.setMaster("local[*]")

    // Use Kryo for serializing RDD data shuffled between workers
    // (smaller and faster than default Java serialization).
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    // Register the record class so Kryo writes a compact class id
    // instead of the full class name with every record.
    conf.registerKryoClasses(Array(classOf[Log]))

    val sc = new SparkContext(conf)

    sc.textFile(logInputPath)
      // -1 keeps trailing empty fields so the column count is stable.
      .map(_.split(",", -1))
      // Drop malformed lines that lack the expected 85 columns.
      .filter(_.length >= 85)
      .map(arr => {
        val log = Log(arr)

        // Each helper returns a fixed-length list of metric counters.
        val req = RptUtils.caculateReq(log.requestmode, log.processnode)
        val rtb = RptUtils.caculateRtb(log.iseffective, log.isbilling, log.isbid,
          log.adorderid, log.iswin, log.winprice, log.adpayment)
        val showClick = RptUtils.caculateShowClick(log.requestmode, log.iseffective)

        // Key: (province, city); value: the 9 concatenated metric counters.
        ((log.provincename, log.cityname), req ++ rtb ++ showClick)
      })
      // Element-wise sum of the metric lists for each region.
      .reduceByKey((list1, list2) => list1.zip(list2).map(t => t._1 + t._2))
      // Format: province,city,metric1,...,metric9
      .map(t => t._1._1 + "," + t._1._2 + "," + t._2.mkString(","))
      .saveAsTextFile(resultOutputPath)

    sc.stop()
  }
}
