package report

import Configer.Configer
import org.apache.commons.lang.StringUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}
import scalikejdbc.{DB, SQL}
import utils.{JedisPools, KPITotal}

//用redis实现媒体报表
// Media (app) report implemented with a Redis-backed app-name dictionary.
object APPAnalysisRedis {
  def main(args: Array[String]): Unit = {
    // Input path may be supplied as the first CLI argument; falls back to the
    // original hard-coded location for backward compatibility.
    val inputPath = if (args.nonEmpty) args(0) else "E:\\小牛项目\\DMP广告项目34期\\资料PDF\\parquet"

    // SparkContext: local mode with the project-configured serializer.
    val conf = new SparkConf().setAppName(s"${this.getClass.getName}")
      .setMaster("local[*]").set("spark.serializer", Configer.serializer)
    val sc = new SparkContext(conf)
    // SQLContext used to read the parquet source.
    val sQLContext = new SQLContext(sc)
    // Load the source data.
    val frame = sQLContext.read.parquet(inputPath)

    // Analyse: aggregate KPI lists keyed by app name. Missing app names are
    // resolved through the Redis hash "appDict" (appId -> appName).
    val result: RDD[(String, List[Double])] = frame.mapPartitions(partition => {
      // One Jedis connection per partition (DB index 4).
      val jedis = JedisPools.getJedis(4)
      // BUG FIX: Iterator.map is lazy — the original code closed the Jedis
      // connection before the iterator was consumed, so every hget would have
      // run against a closed connection. Materialize the rows with .toList
      // BEFORE closing, then hand back a fresh iterator.
      val rows = partition.map(row => {
        var appName = row.getAs[String]("appname")
        val appId = row.getAs[String]("appid")
        if (StringUtils.isEmpty(appName)) {
          if (StringUtils.isNotEmpty(appId)) {
            // Look up a readable name in Redis; fall back to the raw id.
            appName = jedis.hget("appDict", appId)
            if (StringUtils.isEmpty(appName)) appName = appId
          } else {
            // Neither name nor id available: use the generic placeholder.
            appName = "某app"
          }
        }
        (appName, KPITotal.KPI(row))
      }).toList
      jedis.close()
      rows.iterator
    }).reduceByKey((li1, li2) => li1.zip(li2).map { case (a, b) => a + b })

    // Store: one local transaction per partition to amortize commit cost.
    result.foreachPartition(partition => {
      DB.localTx { implicit session =>
        partition.foreach(arr => {
          // NOTE(review): the bind order (0-4, then 7, 8, 5, 6) appears to
          // reorder KPI columns to match the area34 table layout — confirm
          // against the table schema.
          SQL("insert into area34 values (?,?,?,?,?,?,?,?,?,?)")
            .bind(arr._1, arr._2(0), arr._2(1), arr._2(2), arr._2(3), arr._2(4),
              arr._2(7), arr._2(8), arr._2(5), arr._2(6))
            .update().apply()
        })
      }
    })
    // Release resources.
    sc.stop()
  }
}
