package cn.zhang.violet.report

import cn.zhang.violet.bean.ProCity
import cn.zhang.violet.config.ConfigHandler
import com.google.gson.Gson
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

object ProCityAnalysisCore {

  /**
   * Spark (core API) job: counts ad records per (province, city) pair read from
   * the configured parquet file and writes one JSON line per pair.
   *
   * BUG FIX vs. original: the original called `gson.toJson(...)` but discarded the
   * result and saved the `ProCity` case class itself, so the output files contained
   * `ProCity(...)` toString text rather than JSON. The JSON string is now actually saved.
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
    conf.setAppName("省市数据分析统计--core/")
    conf.setMaster("local[*]")
    // Use Kryo serialization instead of default Java serialization for performance.
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val sc = new SparkContext(conf)
    try {
      val sqlContext = new SQLContext(sc)
      // Read the source records from the configured parquet location.
      val dataFrame = sqlContext.read.parquet(ConfigHandler.parquetFilePath)

      // Key every record by (province, city) with an initial count of 1.
      val keyedByProCity = dataFrame.map { row =>
        val provinceName = row.getAs[String]("provincename")
        val cityName = row.getAs[String]("cityname")
        ((provinceName, cityName), 1)
      }

      // Sum counts per (province, city), then serialize each result to JSON.
      // mapPartitions creates a single Gson instance per partition instead of
      // one per record (Gson is not serializable, so it must be built on the executor).
      keyedByProCity
        .reduceByKey(_ + _)
        .mapPartitions { iter =>
          val gson = new Gson()
          iter.map { case ((province, city), count) =>
            gson.toJson(ProCity(province, city, count))
          }
        }
        .saveAsTextFile("G:\\data\\log\\output1")
    } finally {
      // Always release Spark resources, even if the job fails mid-way.
      sc.stop()
    }
  }
}
