package com.need3

import com.typesafe.config.{Config, ConfigFactory}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{DoubleType, StringType, StructField, StructType}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{DataFrame, Row, SQLContext}

import scala.collection.mutable.ListBuffer

/**
  * Created by zhuang on 2018/3/4.
  */
object DealAppName extends App {
  // Silence Spark's verbose INFO logging; only WARN and above are shown.
  Logger.getLogger("org").setLevel(Level.WARN)

  private val load: Config = ConfigFactory.load()

  // NOTE(review): `extends App` relies on DelayedInit and can interact badly
  // with Spark closure serialization; a `def main` would be safer, but the
  // public vals below are kept as object members for compatibility.
  val conf = new SparkConf().setMaster("local[*]").setAppName(this.getClass.getSimpleName)
    .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
  val sc = new SparkContext(conf)
  // SQLContext is needed to read the parquet input and build the result DataFrame.
  val context: SQLContext = new SQLContext(sc)
  // Implicit conversions (e.g. DataFrame.map returning an RDD in Spark 1.x).
  import context.implicits._

  // App dictionary file: tab-separated lines where column 4 is the app id
  // and column 1 is the app name.
  private val file: RDD[String] = sc.textFile(load.getString("AppNameById"))

  // Build the id -> name map on the driver. split("\t", -1) keeps trailing
  // empty fields, so the arity check does not silently drop rows whose last
  // columns are empty (the original split("\t") discarded such rows).
  private val idRules: Map[String, String] =
    file.map(_.split("\t", -1))
      .filter(_.length >= 5)
      .map(fields => (fields(4), fields(1)))
      .collect()
      .toMap

  // Accumulating into a driver-side Map from inside rdd.foreach does NOT
  // work: the closure runs on executors, each one mutating its own
  // deserialized copy, and the driver's map never sees the updates. Hence
  // the collect/toMap above, followed by a broadcast so every executor gets
  // a single read-only copy instead of one per task.
  private val bc = sc.broadcast(idRules)

  // Ad-log records in parquet form.
  private val parquet: DataFrame = context.read.parquet(load.getString("DataForParquet"))

  // Per-record metric vector keyed by the resolved app name:
  // (raw requests, valid requests, ad requests, bids entered, bids won,
  //  impressions, clicks, ad cost, ad spend)
  private val metricsByApp: RDD[(String, List[Double])] = parquet.map(row => {
    // Null-safe reads: parquet string columns are nullable, and the original
    // `appname.equals("")` would throw NPE on a null value.
    val appid = Option(row.getAs[String]("appid")).getOrElse("")
    val appname = Option(row.getAs[String]("appname")).getOrElse("")

    // Prefer the record's own name; otherwise look the id up in the
    // broadcast dictionary (falling back to the raw id, since the dictionary
    // may not cover every id), and finally mark as unknown ("未知") when
    // both fields are missing.
    val resolvedName =
      if (appname.nonEmpty) appname
      else if (appid.nonEmpty) bc.value.getOrElse(appid, appid)
      else "未知"

    val requestmode = row.getAs[Int]("requestmode")
    val processnode = row.getAs[Int]("processnode")
    val iseffective = row.getAs[Int]("iseffective")
    val isbilling = row.getAs[Int]("isbilling")
    val isbid = row.getAs[Int]("isbid")
    val iswin = row.getAs[Int]("iswin")
    val adorderid = row.getAs[Int]("adorderid")
    // Prices are per-mille; divided by 1000 below to get the unit amount.
    val winprice = row.getAs[Double]("winprice")
    val adpayment = row.getAs[Double]("adpayment")

    // 1.0 when the condition holds, 0.0 otherwise.
    def flag(cond: Boolean): Double = if (cond) 1.0 else 0.0

    // Shared "bid won" condition (used for the win count and both amounts).
    val won = iseffective == 1 && isbilling == 1 && iswin == 1

    // NOTE(review): `adorderid != 1` is kept as written, but verify whether
    // `!= 0` was intended (0 usually means "no ad order").
    val metrics = List(
      flag(requestmode == 1 && processnode >= 1),                               // raw requests
      flag(requestmode == 1 && processnode >= 2),                               // valid requests
      flag(requestmode == 1 && processnode == 3),                               // ad requests
      flag(iseffective == 1 && isbilling == 1 && isbid == 1 && adorderid != 1), // bids entered
      flag(won),                                                                // bids won
      flag(requestmode == 2 && iseffective == 1),                               // impressions
      flag(requestmode == 3 && iseffective == 1),                               // clicks
      if (won) winprice / 1000 else 0.0,                                        // ad cost
      if (won) adpayment / 1000 else 0.0                                        // ad spend
    )
    (resolvedName, metrics)
  })

  // Element-wise sum of the nine-metric vectors, per app name.
  private val aggregated: RDD[(String, List[Double])] =
    metricsByApp.reduceByKey((a, b) => a.zip(b).map { case (x, y) => x + y })

  // One Row per app: the name followed by the nine aggregated metrics.
  private val rows = aggregated.map { case (name, metrics) => Row.fromSeq(name +: metrics) }

  var schema =
    StructType(
      List(
        StructField("媒体类别", StringType),
        StructField("原始请求", DoubleType),
        StructField("有效请求", DoubleType),
        StructField("广告请求", DoubleType),
        StructField("参与竞价数", DoubleType),
        StructField("竞价成功数", DoubleType),
        StructField("展示量", DoubleType),
        StructField("点击量", DoubleType),
        StructField("广告成本", DoubleType),
        StructField("广告消费", DoubleType)
      ))

  private val df = context.createDataFrame(rows, schema)
  // foreach(println) executes on the executors; with local[*] the output
  // appears in the driver console.
  df.rdd.foreach(println)
  sc.stop()
}
