package hou.report

import java.util.Properties
import hou.utils.ReprotKPi
import hou.beans.AreaResult
import hou.config.ConfigHelper
import org.apache.spark.sql.{Row, SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}

// Area-level report statistics implemented with the Spark Core (RDD) API
object AreaAnalysisCore {

  /**
   * Entry point: reads the parquet log data, aggregates the report KPIs per
   * (province, city) pair using the RDD API, and persists the result both as
   * a single local JSON file and into the configured JDBC table.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Build the Spark context (local mode, serializer taken from project config).
    val conf = new SparkConf()
    conf.setMaster("local[*]")
    conf.setAppName(s"${this.getClass.getName}")
    conf.set("spark.serializer", ConfigHelper.serializer)
    val sc = new SparkContext(conf)
    val sQLContext = new SQLContext(sc)

    // Ensure the SparkContext is released even if the job fails mid-way
    // (original code leaked it on any exception before sc.stop()).
    try {
      // Load the source data.
      val dataFrame = sQLContext.read.parquet(ConfigHelper.parquetPath)

      // Key each row by (province, city) and element-wise sum the KPI
      // sequences, so each area ends up with its accumulated metrics.
      // NOTE(review): assumes ReprotKPi.ReporKpi returns a fixed-length
      // numeric sequence of at least 9 elements — confirm against its source.
      val aggregated = dataFrame.map(row => {
        val kpis = ReprotKPi.ReporKpi(row)
        val pname = row.getAs[String]("provincename")
        val cname = row.getAs[String]("cityname")
        ((pname, cname), kpis)
      }).reduceByKey((left, right) => left.zip(right).map(tp => tp._1 + tp._2))

      // Convert the aggregates into a DataFrame of AreaResult rows.
      import sQLContext.implicits._
      val resultDF = aggregated.map { case ((province, city), m) =>
        AreaResult(province, city, m(0), m(1), m(2), m(3), m(4), m(5), m(6), m(7), m(8))
      }.toDF()

      // coalesce(1) forces a single JSON output file at the configured path.
      resultDF.coalesce(1).write.mode(SaveMode.Overwrite).json("C:\\Users\\44323\\Desktop\\资料PDF\\地区指标统计")

      // Mirror the same result into the JDBC table from project config.
      val props = new Properties()
      props.setProperty("driver", ConfigHelper.driver)
      props.setProperty("user", ConfigHelper.user)
      props.setProperty("password", ConfigHelper.password)
      resultDF.write.mode(SaveMode.Overwrite).jdbc(ConfigHelper.url, ConfigHelper.table, props)
    } finally {
      // Release cluster resources.
      sc.stop()
    }
  }

}
