package cn.dmp.report

import java.util.Properties

import com.typesafe.config.ConfigFactory
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Computes the distribution of records across provinces and cities
  * (record count per province/city pair).
  */

case class PReport(provinceName: String, cityName: String, ct: Int)

/**
  * Spark batch job: reads ad-log records from a parquet dataset, counts them per
  * (province, city) pair, and persists the aggregated report into MySQL
  * (table `PReport_28`). JDBC url/credentials come from Typesafe Config
  * (`mysql.url`, `mysql.user`, `mysql.password` in application.conf).
  *
  * Usage: ProAnalyse <dataInputPath> <outPutPath>
  */
object ProAnalyse {

    def main(args: Array[String]): Unit = {

        // Validate command-line arguments before doing any work.
        if (args.length != 2) {
            println(
                """
                  |cn.dmp.report.ProAnalyse
                  |参数：<dataInputPath> <outPutPath>
                """.stripMargin)
            // Exit with a non-zero status so shells/schedulers see the usage
            // error as a failure (the original sys.exit() returned 0).
            sys.exit(1)
        }

        val config = ConfigFactory.load()

        // Unpack the arguments.
        // NOTE(review): outPutPath is currently unused — the result goes to
        // MySQL below and the JSON file export is commented out. Confirm
        // whether the file output is still wanted before removing the argument.
        val Array(dataInputPath, outPutPath) = args

        // Build the Spark job configuration.
        // NOTE(review): the master is hard-coded to local[*]; for cluster
        // deployment it should come from spark-submit instead.
        val sparkConf = new SparkConf()
          .setAppName(this.getClass.getSimpleName)
          .setMaster("local[*]")

        val sc = new SparkContext(sparkConf)
        val sQLContext = new SQLContext(sc)

        // Load the source data (parquet format).
        val dataFrame = sQLContext.read.parquet(dataInputPath)

        // Count records per (province, city) pair.
        // In Spark 1.x, DataFrame.map yields an RDD[Row], so reduceByKey is available.
        val reduced = dataFrame.map(row => {
            val pname = row.getAs[String]("provincename")
            val cname = row.getAs[String]("cityname")
            ((pname, cname), 1)
        }).reduceByKey(_ + _)

        import sQLContext.implicits._
        // Convert the aggregated counts into a typed DataFrame for persistence.
        val df = reduced.map(t => PReport(t._1._1, t._1._2, t._2)).toDF

        // Alternative file-based output, currently disabled:
        // df.write.json(outPutPath)

        // JDBC credentials read from application.conf.
        val props = new Properties()
        props.setProperty("user", config.getString("mysql.user"))
        props.setProperty("password", config.getString("mysql.password"))

        // Persist the report into MySQL.
        df.write.jdbc(
            config.getString("mysql.url"),
            "PReport_28",
            props
        )

        // Release Spark resources.
        sc.stop()
    }

}
