package cn.sheep.dmp.report

import java.util.Properties

import cn.sheep.dmp.utils.Tools
import com.typesafe.config.ConfigFactory
import org.apache.spark.sql.{SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Regional (province/city) data-distribution statistics job.
  * Sheep.Old @ 64341393
  * Created 2018/3/28
  */
object AreaAnalysis2DB {

    /**
      * Entry point: aggregates ad-log records per (province, city) and appends
      * the counts to the `report_area` table via JDBC.
      *
      * Input parquet path and DB credentials/URL are read from the application
      * config through `Tools.load` (keys: parquet.path, db.user, db.password, db.url).
      */
    def main(args: Array[String]): Unit = {


        val sparkConf = new SparkConf().setAppName("地域数据分布统计")
          // Only default to local[*] when no master was supplied; this keeps the
          // IDE/local behaviour identical while allowing `spark-submit --master ...`
          // to take effect on a cluster (a hard-coded setMaster would override it).
          .setIfMissing("spark.master", "local[*]")
          .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

        val sc = new SparkContext(sparkConf)
        val sqlc = new SQLContext(sc)

        try {
            // Load the raw log data set; path comes from the application config.
            val dataFrame = sqlc.read.parquet(Tools.load.getString("parquet.path"))

            // Expose the DataFrame to Spark SQL.
            // NOTE(review): registerTempTable is deprecated since Spark 2.0 —
            // migrate to createOrReplaceTempView if/when the job moves to 2.x.
            dataFrame.registerTempTable("logs")

            // Count records per (province, city).
            val result = sqlc.sql(
                """
                  |select provincename, cityname, count(*) cnt
                  |from logs group by provincename, cityname
                """.stripMargin)

            // Append the aggregates to the report_area table.
            val props = new Properties()
            props.setProperty("user", Tools.load.getString("db.user"))
            props.setProperty("password", Tools.load.getString("db.password"))
            result.write.mode(SaveMode.Append).jdbc(Tools.load.getString("db.url"),"report_area",props)
        } finally {
            // Always release the SparkContext, even when the read/write fails.
            sc.stop()
        }
    }

}
