package cn.sheep.dmp.report

import com.typesafe.config.ConfigFactory
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
  * 地域数据分布统计
  * Sheep.Old @ 64341393
  * Created 2018/3/28
  */
/**
  * One row of the per-region report: province name, city name and the
  * number of records observed for that (province, city) pair.
  *
  * Marked `final`: case classes should not be extended.
  */
final case class ReportArea(pName: String, cName: String, cnt: Int)

object AreaAnalysisCore {

    // Original hard-coded destination, kept as the default so existing
    // invocations (no CLI args) behave exactly as before.
    private val DefaultOutputPath = "F:\\dmp\\report_area1"

    /**
      * Entry point: reads the parquet data set located at config key
      * `parquet.path`, counts records per (province, city) pair and
      * writes the aggregated rows as JSON.
      *
      * Usage: AreaAnalysisCore [outputPath]
      *   outputPath - optional output directory; defaults to the
      *                previously hard-coded path for compatibility.
      */
    def main(args: Array[String]): Unit = {

        val load = ConfigFactory.load()

        // Output path was hard-coded; now overridable via the first CLI argument.
        val outputPath = if (args.nonEmpty) args(0) else DefaultOutputPath

        val sparkConf = new SparkConf().setAppName("地域数据分布统计")
          .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
          // Register the report class so Kryo does not fall back to
          // writing fully-qualified class names with every record.
          .registerKryoClasses(Array(classOf[ReportArea]))

        // Respect an externally supplied master (e.g. spark-submit --master);
        // keep local[*] as the default so local runs are unchanged.
        if (!sparkConf.contains("spark.master")) {
            sparkConf.setMaster("local[*]")
        }

        val sc = new SparkContext(sparkConf)
        try {
            val sqlc = new SQLContext(sc)

            // Read the source data; "parquet.path" must exist in the loaded config.
            val dataFrame = sqlc.read.parquet(load.getString("parquet.path"))

            import sqlc.implicits._

            // row -> ((province, city), 1) -> sum counts -> ReportArea rows -> JSON
            dataFrame.map(row => ((row.getAs[String]("provincename"), row.getAs[String]("cityname")), 1))
              .reduceByKey(_ + _)
              .map { case ((province, city), cnt) => ReportArea(province, city, cnt) }
              .toDF().write.json(outputPath)
        } finally {
            // Always release the SparkContext, even when the job fails.
            sc.stop()
        }
    }

}
