package com.itcast.report

import com.itcast.beans.ReportAreaAnalysis
import com.itcast.utils.{ConfigHandler, MysqlHandler, RptKpiTools}
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

//Regional (province/city) distribution report — Spark core job
//Implemented with the DataFrame/SQL API
object RptAreaAnalysis {

  /**
   * Entry point for the regional (province/city) distribution report.
   *
   * Reads the pre-processed log data from Parquet, aggregates the offline
   * KPI vector per (province, city) pair, and persists the resulting
   * report rows into the MySQL table configured by `ConfigHandler.areatable`.
   */
  def main(args: Array[String]): Unit = {
    // App name now matches the object so the job is identifiable in the Spark UI
    // (the original "LogDataAnalysis" looked like a copy-paste from another job).
    val sparkConf = new SparkConf()
      .setAppName("RptAreaAnalysis")
      .setMaster("local[*]")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val sparkContext = new SparkContext(sparkConf)

    // Guarantee the context is released even when the pipeline throws;
    // the original only stopped it on the success path.
    try {
      val sqlContext = new SQLContext(sparkContext)
      import sqlContext.implicits._

      // Pre-processed log data, written earlier as Parquet.
      val rawDataFrame = sqlContext.read.parquet(ConfigHandler.parquetPath)

      val resultDF = rawDataFrame
        .map { row =>
          // Key by (province, city); value is the per-row KPI vector.
          val provinceName = row.getAs[String]("provincename")
          val cityName = row.getAs[String]("cityname")
          ((provinceName, cityName), RptKpiTools.offLineKpi(row))
        }
        // Element-wise sum of the KPI vectors for each region.
        .reduceByKey((left, right) => left.zip(right).map { case (a, b) => a + b })
        // NOTE(review): assumes offLineKpi yields at least 9 elements per row —
        // confirm against RptKpiTools before changing the KPI layout.
        .map { case ((province, city), kpi) =>
          ReportAreaAnalysis(province, city,
            kpi(0), kpi(1), kpi(2), kpi(3), kpi(4),
            kpi(5), kpi(6), kpi(7), kpi(8))
        }
        .toDF

      MysqlHandler.save2db(resultDF, ConfigHandler.areatable)
    } finally {
      sparkContext.stop()
    }
  }

}
