package com.wei.violet.app.media

import com.wei.violet.config.ConfigHandle
import com.wei.violet.utils.{BaseDataKpi, Jpools}
import org.apache.commons.lang.StringUtils
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}

object SparkCoreRedisMedia {

  // Silence noisy Spark internals; keep WARN and above.
  Logger.getLogger("org").setLevel(Level.WARN)

  /**
   * Reads terminal-device log records from a local parquet file, resolves each
   * record's app name (non-empty `appname` column first, then a Redis hash lookup
   * `appdict[appid]`, then the literal fallback "未知"), sums the per-record KPI
   * vectors produced by [[BaseDataKpi]] per app name, and overwrites the result
   * into the `sparkcoreMediaRedis` MySQL table via JDBC.
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setAppName("将终端设备统计结果添加到mysql数据库中")
      .setMaster("local[*]")
      // FIX: the key was misspelled "spark.serizlizer", so Kryo was silently
      // never enabled and Spark kept using Java serialization.
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val sc = new SparkContext(conf)
    val sQLContext = new SQLContext(sc)
    val dataFrame: DataFrame = sQLContext.read.parquet("F:\\bigdataFile\\parquet")

    // One pooled Jedis connection per partition.
    val basedata = dataFrame.mapPartitions { rows =>
      val jedis = Jpools.getJedis
      try {
        // FIX: Iterator.map is lazy, so the original code returned the unconsumed
        // iterator and closed jedis first — every hget then ran against a
        // closed/returned connection. Materialize the partition before closing.
        rows.map { row =>
          val appid = row.getAs[String]("appid")
          val rawName = row.getAs[String]("appname")
          // Fallback chain expressed as a single expression instead of a var.
          val appname =
            if (StringUtils.isNotEmpty(rawName)) rawName
            else if (StringUtils.isNotEmpty(appid)) {
              val fromDict = jedis.hget("appdict", appid)
              if (StringUtils.isNotEmpty(fromDict)) fromDict else "未知"
            } else "未知"
          (appname, BaseDataKpi(row))
        }.toList.iterator
      } finally {
        jedis.close() // return the connection to the pool even on failure
      }
    }

    // Element-wise sum of each app's KPI vector.
    // (The original identity map `d => (d._1, d._2)` was a no-op and is removed.)
    val media = basedata.reduceByKey((a, b) => a.zip(b).map(t => t._1 + t._2))

    import sQLContext.implicits._
    // First seven KPI slots are counts (cast to Int); the last two are expenses.
    val result = media.map { case (appname, kpi) =>
      (appname, kpi(0).toInt, kpi(1).toInt, kpi(2).toInt, kpi(3).toInt,
        kpi(4).toInt, kpi(5).toInt, kpi(6).toInt, kpi(7), kpi(8))
    }.toDF("appname", "InitialRequest", "EffectiveRequest", "ADRequests", "BiddingCount",
      "BiddingSuccess", "ADShow", "ADHits", "DSPADExpense", "DSPADCosting")

    result.write.mode(SaveMode.Overwrite).jdbc(
      ConfigHandle.url,
      "sparkcoreMediaRedis",
      ConfigHandle.conn
    )

    sc.stop()
  }

}
