package report

import java.util.Properties

import Configer.Config
import bean.AreaClass
import org.apache.spark.sql.{DataFrame, Row, SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}
import util.KpiPublic

//地区指标统计
// Regional KPI statistics: aggregate ad KPIs per (province, city) and persist to JDBC.
object AreaKpi {

  /**
   * Entry point. Reads the parquet source configured in [[Configer.Config]],
   * sums per-row KPI lists grouped by (provincename, cityname), maps the
   * aggregates into [[bean.AreaClass]] rows, and overwrites the target JDBC table.
   */
  def main(args: Array[String]): Unit = {
    // Spark setup. NOTE(review): master is hard-coded to local[*] — fine for
    // local runs, but for cluster deployment it should come from spark-submit.
    val conf = new SparkConf()
      .setAppName(this.getClass.getName)
      .setMaster("local[*]")
      .set("spark.serializer", Config.serializer)

    val sc = new SparkContext(conf)
    try {
      val sqlContext = new SQLContext(sc)

      // Read the source data (parquet).
      val dataFrame = sqlContext.read.parquet(Config.parquetPath)

      // Key each row by (province, city); KpiPublic.KpiPublic extracts the
      // per-row KPI list, which is then summed element-wise per key.
      val aggregated = dataFrame.map { row =>
        val kpiList = KpiPublic.KpiPublic(row)
        val pname = row.getAs[String]("provincename")
        val cname = row.getAs[String]("cityname")
        ((pname, cname), kpiList)
      }.reduceByKey((l1, l2) => l1.zip(l2).map { case (a, b) => a + b })

      import sqlContext.implicits._
      // NOTE(review): the index order (0-4, then 7, 8, then 5, 6) must match the
      // constructor field order of AreaClass — confirm against its definition.
      val frame: DataFrame = aggregated
        .map(r => AreaClass(r._1._1, r._1._2,
          r._2(0), r._2(1), r._2(2), r._2(3), r._2(4),
          r._2(7), r._2(8), r._2(5), r._2(6)))
        .toDF()

      // JDBC connection properties; Overwrite replaces the target table contents.
      val props = new Properties()
      props.setProperty("driver", Config.driver)
      props.setProperty("user", Config.user)
      props.setProperty("password", Config.password)
      frame.write.mode(SaveMode.Overwrite).jdbc(Config.url, Config.table, props)
    } finally {
      // Release resources even if the job fails (was previously unconditional
      // only on the success path, leaking the context on error).
      sc.stop()
    }
  }
}
