package cn.zhang.violet.report

import cn.zhang.violet.config.ConfigHandler
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

object ProCityAnalysis {

  /**
   * Batch report job: counts ad-log records per (province, city) and
   * persists the result to a relational database.
   *
   * Input : parquet data set at [[ConfigHandler.parquetFilePath]]
   * Output: JDBC table "orc_report_province" at [[ConfigHandler.url]]
   *         (connection properties from [[ConfigHandler.props]])
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
    conf.setAppName("省市数据分析统计")
    conf.setMaster("local[*]")
    // Use Kryo serialization: faster and more compact than the default
    // Java serialization for shuffled data.
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val sc = new SparkContext(conf)

    // FIX: wrap the job in try/finally so the SparkContext is stopped and its
    // resources released even when the read/SQL/write pipeline throws.
    try {
      val sQLContext = new SQLContext(sc)
      // Load the ad-log data set.
      val dataFrame = sQLContext.read.parquet(ConfigHandler.parquetFilePath)
      // Register the DataFrame so it can be queried with SQL below.
      // NOTE(review): registerTempTable is deprecated since Spark 2.0 in
      // favor of createOrReplaceTempView — upgrade when moving off SQLContext.
      dataFrame.registerTempTable("adlog")
      // Count records per (province, city) and write the report table.
      sQLContext.sql(
        """
          |select provincename ,cityname,count(1) ct from adlog group by provincename,cityname
          |
        """.stripMargin)
        .coalesce(4) // limit to 4 partitions => at most 4 concurrent JDBC connections
        .write.jdbc(ConfigHandler.url,"orc_report_province",ConfigHandler.props)
    } finally {
      sc.stop()
    }
  }
}
