package cn.sheep.violet.report

import cn.sheep.violet.config.ConfigHandler
import org.apache.spark.sql.{SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * author: old sheep
  * QQ: 64341393 
  * Created 2018/10/15
  */
object ProCityAnalysis {

    /**
      * Entry point: reads the ad-log parquet dataset, counts records per
      * (province, city) pair via Spark SQL, and writes the resulting report
      * to the `orc_report_province` JDBC table.
      */
    def main(args: Array[String]): Unit = {

        val sparkConf = new SparkConf()
        sparkConf.setAppName("省市数据分布统计-sql")
        // Default to local mode only when no master was supplied externally,
        // so `spark-submit --master ...` is not silently overridden.
        if (!sparkConf.contains("spark.master")) {
            sparkConf.setMaster("local[*]")
        }
        // Use Kryo serialization (more compact/faster than Java serialization).
        sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        val sc = new SparkContext(sparkConf)

        try {
            val sqlContext = new SQLContext(sc)

            // Load the ad-log data (parquet path comes from external config).
            val dataFrame = sqlContext.read.parquet(ConfigHandler.parquetFilePath)

            // Expose the DataFrame to SQL queries under the name "adlogs".
            dataFrame.registerTempTable("adlogs")

            // Count records per (province, city) and persist the report.
            sqlContext.sql(
                """
                  |select provincename, cityname, count(1) ct from adlogs group by provincename, cityname
                """.stripMargin)
                .coalesce(4) // cap output partitions => at most 4 concurrent JDBC connections
                .write
                .mode(SaveMode.Overwrite) // make the job re-runnable: replace any previous report table
                //.json("f:/violet/report/json") // alternative sink: dump to disk as json
                .jdbc(ConfigHandler.url, "orc_report_province", ConfigHandler.props)
        } finally {
            // Always release cluster resources, even when the job fails.
            sc.stop()
        }

    }

}
