package hou.report

import java.util.Properties

import hou.config.ConfigHelper
import hou.utils.ReprotKPi
import org.apache.commons.lang.StringUtils
import org.apache.spark.sql.{SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}

// Media report analysis: reads per-event logs from parquet, resolves each
// record to an app display name, sums the per-app KPI vectors, and writes
// the aggregated report to a JDBC table.
object AppCore {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setAppName(s"${this.getClass.getName}")
    // NOTE(review): hard-coded local master — should come from spark-submit/config
    // before this job can run on a cluster.
    conf.setMaster("local[*]")
    conf.set("spark.serializer", ConfigHelper.serializer)

    val sc = new SparkContext(conf)
    val sQLContext = new SQLContext(sc)

    // Source event data (parquet) and the raw app dictionary file.
    // NOTE(review): hard-coded Windows path — move to ConfigHelper or args.
    val dataFrame = sQLContext.read.parquet(ConfigHelper.parquetPath)
    val appDict = sc.textFile("C:\\Users\\44323\\Desktop\\资料PDF\\app_dict.txt")

    // Dictionary line layout (tab-separated): column 4 = app id, column 1 = app name.
    // split with limit -1 keeps trailing empty fields so the length check is reliable.
    val mapDict = appDict
      .map(_.split("\t", -1))
      .filter(_.length >= 5)
      .map(arr => (arr(4), arr(1)))
      .collectAsMap()
    // Broadcast the (small) id->name map so each executor holds a single copy.
    val dictBroadcast = sc.broadcast(mapDict)

    val result = dataFrame.map(row => {
      val rawName = row.getAs[String]("appname")
      val appId = row.getAs[String]("appid")

      // Resolve a display name: prefer the logged name, then a dictionary
      // lookup by app id (falling back to the id itself), then a placeholder.
      val appName =
        if (StringUtils.isNotEmpty(rawName)) rawName
        else if (StringUtils.isNotEmpty(appId)) dictBroadcast.value.getOrElse(appId, appId)
        else "某软件"

      (appName, ReprotKPi.ReporKpi(row))
    }).reduceByKey((list1, list2) => list1.zip(list2).map(tp => tp._1 + tp._2))


    import sQLContext.implicits._
    // Flatten the summed KPI vector into fixed report columns.
    // assumes ReprotKPi.ReporKpi always yields at least 10 elements — TODO confirm
    val frame = result
      .map(row => (row._1, row._2(0), row._2(1), row._2(2), row._2(3),
        row._2(4), row._2(5), row._2(6), row._2(7), row._2(8)))
      .toDF("appname", "ss", "dd", "aa", "ff", "ssw", "ds", "das", "da", "fadas")

    // JDBC sink configuration; Overwrite replaces the target table on each run.
    val props = new Properties()
    props.setProperty("driver", ConfigHelper.driver)
    props.setProperty("user", ConfigHelper.user)
    props.setProperty("password", ConfigHelper.password)
    frame.write.mode(SaveMode.Overwrite).jdbc(ConfigHelper.url, ConfigHelper.table, props)
    sc.stop()
  }
}
