package com.oracle.violet.first.report

import com.google.gson.Gson
import com.oracle.violet.first.bean.ProCity
import com.oracle.violet.first.config.ConfigHandler
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Province/city distribution report (RDD "core" variant).
 *
 * Reads the log data from the parquet path configured in [[ConfigHandler]],
 * counts records per (province, city) pair, serializes each count as a JSON
 * line via Gson, and writes the result out as text files.
 */
object ProCityAnalyCore {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("省市分布统计core")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = new SparkContext(sparkConf)
    val sQLContext = new SQLContext(sc)
    try {
      // Load the source data.
      val dataFrame: DataFrame = sQLContext.read.parquet(ConfigHandler.parquetFilePath)
      dataFrame.map(row => {
        val pname = row.getAs[String]("provincename")
        val cname = row.getAs[String]("cityname")
        ((pname, cname), 1)
      })
        .reduceByKey(_ + _)
        // Serialize each (province, city, count) as one JSON line.
        // Gson is not serializable, so build it once per partition
        // (inside the closure) rather than once per record.
        .mapPartitions(iter => {
          val gson = new Gson()
          iter.map { case ((pname, cname), count) =>
            // BUG FIX: the original computed gson.toJson(...) and discarded
            // the result, writing ProCity.toString instead of JSON.
            gson.toJson(ProCity(pname, cname, count))
          }
        })
        .saveAsTextFile("H:\\甲骨文培训\\大数据\\Linux\\大纲\\项目二\\VIOLET\\report\\core")
    } finally {
      // Release cluster resources even if the job fails.
      sc.stop()
    }
  }

}
