package cn.pengpeng.dmp.report

import cn.pengpeng.dmp.utils.ConfigHandler
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
  * Aggregates ad-log records by province and city.
  *
  * Reads parquet data produced by the upstream ETL step, runs a
  * GROUP BY count per (provincename, cityname), and writes the result
  * as a single JSON file to the path configured in [[ConfigHandler]].
  *
  * Usage: an optional first CLI argument overrides the input path;
  * otherwise the local default below is used.
  */
object LogDataAnalysis {

  // Default parquet input produced by the previous job stage (local dev path).
  private val DefaultInputPath = "d:\\data\\spark\\out1"

  def main(args: Array[String]): Unit = {

    // Allow the input location to be supplied on the command line;
    // fall back to the hard-coded development default.
    val inputPath = args.headOption.getOrElse(DefaultInputPath)

    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("logDataAnalysis")
    val spark: SparkSession = SparkSession
      .builder()
      .config(conf)
      //.config("spark.debug.maxToStringFields", "100")
      .getOrCreate()

    // Ensure the SparkSession is always released, even if the job fails.
    try {
      // Read the parquet input.
      val parquet: DataFrame = spark.read.parquet(inputPath)
      // Register as a temp view; createOrReplaceTempView is safe to call
      // even if a view named "log" already exists in this session.
      parquet.createOrReplaceTempView("log")

      // Count records per province/city.
      val result: DataFrame = spark.sql(
        """
          |select count(*) ct,provincename,cityname
          |from log group by provincename,cityname
        """.stripMargin)

      // coalesce(1) collapses the result to a single partition so the
      // output directory contains one JSON file.
      result.coalesce(1).write.mode(SaveMode.Overwrite).json(ConfigHandler.logdataAnalysisResultJsonPath)
    } finally {
      // Release cluster resources.
      spark.stop()
    }
  }

}
