package cn.dmp.report

import cn.dmp.util.AdApi
import org.apache.commons.lang.StringUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by Administrator on 2018/4/23.
  */
/**
 * Offline report job: aggregates ad metrics per application name.
 *
 * Args:
 *   args(0) — input path of the parquet log data
 *   args(1) — input path of the app dictionary file (tab-separated; col 4 = appid, col 1 = appname)
 *   args(2) — output path for the aggregated CSV-style text result
 *
 * App-name resolution order per record:
 *   1. the record's own `appname` field, if non-empty;
 *   2. lookup of `appid` in the broadcast dictionary (falling back to the raw appid);
 *   3. the literal "未知" ("unknown") when both fields are empty.
 */
object RepAppAnalyze {
  def main(args: Array[String]): Unit = {
    // Exactly three arguments are required; bail out early with usage otherwise.
    if (args.length != 3){
      println(
        """
          |dataInputPath,InputPath1,outputPath2....
          |参数不合法，请输入正确参数个数：dataInputPath
        """.stripMargin)
      sys.exit(1)
    }
    val conf = new SparkConf()
      .setMaster("local[*]").setAppName("RepAppAnalyze")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = new SparkContext(conf) // entry point of the offline Spark job

    // Destructure the three input parameters.
    val Array(dataInputPath, dataInputPath2, dataInputPath3) = args

    val sQLContext = new SQLContext(sc)
    val dataFrame = sQLContext.read.parquet(dataInputPath)

    // Read the dictionary file and build an appid -> appname map.
    // Split with limit -1 to keep trailing empty fields, require at least 5
    // columns, then collect to the driver (a local Map is needed to broadcast).
    val appDict = sc.textFile(dataInputPath2)
      .map(_.split("\t", -1))
      .filter(_.length >= 5)
      .map(fields => (fields(4), fields(1)))
      .collect()
      .toMap

    // Broadcast the dictionary so every executor gets one read-only copy.
    val broadcast = sc.broadcast(appDict)

    dataFrame.map { t =>
      val appid = t.getAs[String]("appid")
      val rawName = t.getAs[String]("appname")

      // Resolve the aggregation key. Bug fix: the original used `appname == "未知"`
      // (a discarded comparison) instead of an assignment, so records with both
      // fields empty kept an empty key; they now correctly fall back to "未知".
      val appname =
        if (StringUtils.isNotEmpty(rawName)) rawName
        else if (StringUtils.isNotEmpty(appid)) broadcast.value.getOrElse(appid, appid)
        else "未知"

      (appname, AdApi(t)._2)
    }
      // Element-wise sum of the metric lists sharing the same app name.
      .reduceByKey((list1, list2) => list1.zip(list2).map(t => t._1 + t._2))
      // Render each result row as "appname,metric1,metric2,..."
      .map(t => t._1 + "," + t._2.mkString(","))
      .saveAsTextFile(dataInputPath3)

    sc.stop()
  }
}
