package com.rz.spark.report

import com.rz.spark.beans.Log
import com.rz.spark.utils.{JedisPools, RptUtil}
import org.apache.commons.lang3.StringUtils
import org.apache.spark.{SparkConf, SparkContext}

/**
 * App-dimension report job: reads CSV logs, resolves missing app names from
 * Redis (appid -> appname), aggregates per-app metric vectors, and writes the
 * result as "appname,metric1,metric2,..." text files.
 *
 * Usage: AppAnalyzeRptV2 <inputPath> [dictFilePath] <outputPath>
 */
object AppAnalyzeRptV2 {
  def main(args: Array[String]): Unit = {
    // Bug fix: the original checked args.length != 2 but destructured THREE
    // elements, so a two-argument invocation printed usage and then crashed
    // with a MatchError (it also never exited after printing usage).
    // Accept either (input, output) or (input, dictFile, output); the
    // dictionary-file argument is currently unused by this job.
    val (inputPath, outputPath) = args match {
      case Array(in, out)    => (in, out)
      case Array(in, _, out) => (in, out)
      case _ =>
        println(
          """
            |com.rz.spark.report.AppAnalyzeRptV2
            |参数：
            | 输入路径
            | 输出路径
          """.stripMargin)
        sys.exit(1)
    }

    // Build SparkConf -> SparkContext.
    val sparkConf = new SparkConf()
    sparkConf.setAppName(this.getClass.getSimpleName)
    // Default to local[*] only when no master was supplied (e.g. via
    // spark-submit --master), so the job also runs on a cluster unchanged.
    sparkConf.setIfMissing("spark.master", "local[*]")
    // Kryo serialization for RDD shuffle/disk transfer between workers.
    sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val sc = new SparkContext(sparkConf)

    try {
      sc.textFile(inputPath)
        .map(_.split(",", -1))
        .filter(_.length >= 85) // drop malformed rows with too few columns
        .map(Log(_))
        .filter(log => !log.appid.isEmpty || !log.appname.isEmpty)
        .mapPartitions { itr =>
          // One Jedis connection per partition; return it to the pool even
          // if processing a record throws (the original leaked on failure).
          val jedis = JedisPools.getJedis
          val parResult = new collection.mutable.ListBuffer[(String, List[Double])]()
          try {
            itr.foreach { log =>
              // Resolve an empty app name via the appid -> appname mapping in Redis.
              val appName =
                if (StringUtils.isEmpty(log.appname)) jedis.get(log.appid)
                else log.appname

              val req       = RptUtil.calculateReq(log.requestmode, log.processnode)
              val rtb       = RptUtil.calculateRtb(log.iseffective, log.isbilling, log.isbid, log.adorderid, log.iswin, log.winprice, log.adpayment)
              val showClick = RptUtil.calculateShowClick(log.requestmode, log.iseffective)

              parResult += ((appName, req ++ rtb ++ showClick))
            }
          } finally {
            jedis.close()
          }
          parResult.iterator
        }
        // Element-wise sum of the per-app metric vectors.
        .reduceByKey((a, b) => a.zip(b).map { case (x, y) => x + y })
        .map { case (app, metrics) => app + "," + metrics.mkString(",") }
        .saveAsTextFile(outputPath)
    } finally {
      // Always release the SparkContext, even when the job fails.
      sc.stop()
    }
  }
}
