package hou.report

import java.util.Properties

import hou.beans.AreaResult
import hou.config.ConfigHelper
import hou.utils.{JedisPools, ReprotKPi}
import org.apache.commons.lang.StringUtils
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SQLContext, SaveMode}

object AppCoreRedis {

  /**
   * Batch job: reads event rows from a parquet source, resolves missing app
   * names through the Redis hash "appdict" (keyed by appid), sums the per-row
   * KPI vectors by app name, and overwrites the result table via JDBC.
   *
   * All endpoints (parquet path, JDBC url/credentials, serializer) come from
   * [[ConfigHelper]].
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName(s"${this.getClass.getName}")
      .setMaster("local[*]") // NOTE(review): master is hard-coded for local runs; drop this for cluster submits
      .set("spark.serializer", ConfigHelper.serializer)

    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    val dataFrame = sqlContext.read.parquet(ConfigHelper.parquetPath)

    val result = dataFrame.mapPartitions { partition =>
      // One Redis connection per partition, shared by all rows in it.
      val jedis = JedisPools.getRedis(1)
      try {
        // BUG FIX: Iterator.map is lazy. The original code returned the
        // un-consumed iterator and closed the connection first, so every
        // jedis.hget ran on a closed connection when Spark later consumed
        // the iterator. Materialize the partition BEFORE closing.
        val rows = partition.map { row =>
          val rawName = row.getAs[String]("appname")
          val appId = row.getAs[String]("appid")

          // Resolution order: row's own appname -> Redis dictionary lookup
          // -> raw appid -> sentinel for rows with neither name nor id.
          val appName =
            if (StringUtils.isNotEmpty(rawName)) rawName
            else if (StringUtils.isNotEmpty(appId)) {
              val fromRedis = jedis.hget("appdict", appId)
              if (StringUtils.isEmpty(fromRedis)) appId else fromRedis
            } else "moruanjian"

          (appName, ReprotKPi.ReporKpi(row))
        }.toList
        rows.iterator
      } finally {
        // Close even if a row lookup throws, to avoid leaking pool connections.
        jedis.close()
      }
    }.reduceByKey((list1, list2) => list1.zip(list2).map(tp => tp._1 + tp._2))

    import sqlContext.implicits._
    // Fan the 9-element KPI vector out into one column per metric.
    // NOTE(review): column names taken verbatim from the existing sink schema.
    val frame = result
      .map(row => (row._1, row._2(0), row._2(1), row._2(2), row._2(3), row._2(4), row._2(5), row._2(6), row._2(7), row._2(8)))
      .toDF("appname", "ss", "dd", "aa", "ff", "ssw", "ds", "das", "da", "fadas")

    val props = new Properties()
    props.setProperty("driver", ConfigHelper.driver)
    props.setProperty("user", ConfigHelper.user)
    props.setProperty("password", ConfigHelper.password)
    frame.write.mode(SaveMode.Overwrite).jdbc(ConfigHelper.url, ConfigHelper.table, props)
    sc.stop()
  }
}
