package report

import java.util.Properties

import Configer.Config
import bean.ProCityClass
import com.google.gson.Gson
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

//统计各省市数据分布情况  sparkCore
// Aggregates record counts per (province, city) pair — Spark Core implementation.
// Reads a parquet dataset and writes the aggregate to three sinks:
// a JSON file (DataFrame writer), a Gson-serialized text file, and a JDBC table.
object ProCityTotalCore {
  def main(args: Array[String]): Unit = {
    // Build the Spark context; local[*] uses all available cores.
    val conf = new SparkConf()
    conf.setAppName(s"${this.getClass.getName}")
    conf.setMaster("local[*]")
    conf.set("spark.serializer",Config.serializer)
    val sc = new SparkContext(conf)
    val sQLContext = new SQLContext(sc)

    // Load the source data.
    val dataFrame = sQLContext.read.parquet(Config.parquetPath)

    // Count records keyed by (provincename, cityname).
    // cache(): this RDD feeds three downstream actions (two file writes and
    // the JDBC write via `frame`); without caching the parquet scan and the
    // shuffle would be recomputed for each action.
    val result = dataFrame.map(row => {
      val pname = row.getAs[String]("provincename")
      val cname = row.getAs[String]("cityname")
      ((pname, cname), 1)
    }).reduceByKey(_ + _).cache()

    // Sink 1: JSON via the DataFrame writer (coalesce(1) -> single output file).
    import sQLContext.implicits._
    val frame = result.map(tp=>(tp._1._1,tp._1._2,tp._2)).toDF("pname","cname","count")
    frame.coalesce(1).write.json("C:\\Users\\44323\\Desktop\\资料PDF\\json1")

    // Sink 2: JSON text via Gson.
    // mapPartitions so a single Gson instance is built per partition instead of
    // one per record, which the original per-record `map` allocated needlessly.
    result.mapPartitions(iter => {
      val gson = new Gson()
      iter.map(row => gson.toJson(ProCityClass(row._1._1, row._1._2, row._2)))
    }).coalesce(1).saveAsTextFile("C:\\Users\\44323\\Desktop\\资料PDF\\json2")

    // Sink 3: relational table over JDBC, credentials supplied via Properties.
    val props = new Properties()
    props.setProperty("driver",Config.driver)
    props.setProperty("user",Config.user)
    props.setProperty("password",Config.password)
    frame.write.jdbc(Config.url,Config.table,props)

    // Release resources.
    sc.stop()
  }
}
