package dmp.beans.sparkcore

import dmp.beans.Log
import dmp.utils.RptUtil
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
  * author:CN.CDG
  * Date:2019/2/15
  * Time:10:33
  **/
object MediaCt_Disc {

  /**
    * Spark batch job: aggregates per-app report metrics from raw ad logs.
    *
    * args(0) inputPath    — raw log file, comma-separated, >= 85 fields per record
    * args(1) midInputPath — app dictionary file, tab-separated; field 4 = appid, field 1 = appname
    * args(2) outputPath   — output directory for the aggregated CSV lines
    */
  def main(args: Array[String]): Unit = {
    if (args.length != 3) {
      println(
        """
          |参数不合法，退出程序
          |参数：
          |inputPath
          |midInputPath
          |outputPath
        """.stripMargin)
      // Invalid arguments: exit with a non-zero status so callers/schedulers see the failure.
      sys.exit(1)
    }
    val Array(inputPath, midInputPath, outputPath) = args

    val conf = new SparkConf()
      .setAppName("MediaCt_Disc")
      // NOTE(review): hard-coded local master — remove and pass --master via spark-submit for cluster runs.
      .setMaster("local[*]")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = new SparkContext(conf)

    // Build the appid -> appname dictionary from the mapping file.
    // Fields are tab-separated; index 4 is presumably the appid, index 1 the appname — confirm with data owner.
    val mapData = sc.textFile(midInputPath)
      .map(_.split("\t", -1))
      .filter(_.length > 5)
      .map(arr => (arr(4), arr(1)))
      .collect()
      .toMap

    // Broadcast the dictionary to executors so each task shares a single read-only copy.
    val broadCast = sc.broadcast(mapData)

    // Read the raw logs, aggregate metric lists per app name, and save as CSV lines.
    sc.textFile(inputPath)
      .map(_.split(",", -1))
      .filter(_.length >= 85)
      .map(Log(_))
      // Keep records that identify the app by at least one of appid / appname.
      .filter(log => !log.appid.isEmpty || !log.appname.isEmpty)
      .map { log =>
        // Fall back to the broadcast dictionary when the log carries no app name;
        // "未知" ("unknown") when the appid is missing from the dictionary as well.
        val appname =
          if (log.appname.isEmpty) broadCast.value.getOrElse(log.appid, "未知")
          else log.appname
        val reqList = RptUtil.caculateReq(log.requestmode, log.processnode)
        val rtbList = RptUtil.caculateRtb(log.iseffective, log.isbilling, log.isbid, log.adorderid, log.iswin, log.winprice, log.adpayment)
        val showClickList = RptUtil.caculateShow(log.requestmode, log.iseffective)
        (appname, reqList ++ rtbList ++ showClickList)
      }
      // Element-wise sum of the metric lists for each app.
      .reduceByKey((left, right) => left.zip(right).map { case (a, b) => a + b })
      // mkString(",") renders the metric list as one CSV line per app.
      .map { case (name, metrics) => name + "," + metrics.mkString(",") }
      .saveAsTextFile(outputPath)

    // The job is finished: release the broadcast's memory before shutting down.
    broadCast.destroy()
    sc.stop()
  }

}
