package hou.report

import java.util.Properties

import hou.beans.Logs
import hou.config.ConfigHelper
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

/**
  * Spark SQL batch job: counts log records per (province, city) and writes
  * the distribution as a single JSON file.
  *
  * Usage: ProCityAnalysis [inputParquetPath] [outputJsonPath]
  * Both arguments are optional; the original hard-coded local paths are used
  * as defaults, so existing invocations keep working unchanged.
  */
object ProCityAnalysis {
  def main(args: Array[String]): Unit = {
    // Paths may now be supplied on the command line; fall back to the
    // original hard-coded paths for backward compatibility.
    val inputPath  = if (args.length > 0) args(0) else "E:\\DMP项目\\第一天\\资料PDF\\parquet"
    val outputPath = if (args.length > 1) args(1) else "C:\\Users\\44323\\Desktop\\资料PDF\\省市分布"

    val conf = new SparkConf()
      .setAppName(this.getClass.getName)
      .setMaster("local[*]") // local mode, as in the original job
      .set("spark.serializer", ConfigHelper.serializer)

    val sc = new SparkContext(conf)
    // Ensure the context is always stopped, even if the job fails partway.
    try {
      val sqlContext = new SQLContext(sc)

      // Load the log data stored as Parquet.
      val dataframe = sqlContext.read.parquet(inputPath)

      // Register a temp table so the aggregation can be expressed in SQL.
      // (registerTempTable kept for compatibility with the Spark version
      // this project targets; newer APIs use createOrReplaceTempView.)
      dataframe.registerTempTable("log")

      // Count records per (province, city).
      val result = sqlContext.sql(
        """
          |select
          |provincename,cityname,count(*) cnt
          |from log
          |group by provincename,cityname
        """.stripMargin)

      // coalesce(1): the aggregate is small, so write a single output file.
      result.coalesce(1).write.json(outputPath)

      // show() prints on the driver. The original `result.foreach(println)`
      // ran println on the executors, so its output would be lost in any
      // non-local deployment.
      result.show()

      // Optional JDBC sink, disabled in the original job — kept for reference:
      //    val props = new Properties()
      //    props.setProperty("driver",ConfigHelper.driver)
      //    props.setProperty("user",ConfigHelper.user)
      //    props.setProperty("password",ConfigHelper.password)
      //    result.write.jdbc(ConfigHelper.url,ConfigHelper.table,props)
    } finally {
      sc.stop()
    }
  }
}
