package com.bkd.report

import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

object ProcityRpt {

  /**
   * Province/city report job.
   *
   * Reads parquet-formatted log data, counts records grouped by
   * (provincename, cityname) via Spark SQL, and writes the aggregated
   * result as JSON.
   *
   * Expects exactly two program arguments:
   *   args(0) = logInputPath      — parquet input path
   *   args(1) = resultOutputPath  — JSON output path
   */
  def main(args: Array[String]): Unit = {
    if(args.length != 2){
      println(
        """
          |com.bkd.report.ProcityRpt
          |参数
          |logInputPath
          |resultOutputPath
        """.stripMargin)
      // Exit non-zero so shells/schedulers can detect the usage error
      // (sys.exit() with no argument would report success).
      sys.exit(1)
    }

    // Destructure the two expected arguments.
    val Array(logInputPath, resultOutputPath) = args

    val conf = new SparkConf()
    conf.setAppName(s"${this.getClass.getSimpleName}")
    // Default to local mode only when no master was supplied (e.g. by
    // spark-submit --master), so the same jar also runs on a cluster
    // without code changes.
    conf.setIfMissing("spark.master", "local[*]")
    // Use Kryo serialization (faster and more compact than Java serialization).
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val sc = new SparkContext(conf)
    try {
      val sqlcontext = new SQLContext(sc)
      // Read the parquet log data.
      val df: DataFrame = sqlcontext.read.parquet(logInputPath)

      // Register as a temp table so it can be queried with SQL.
      df.registerTempTable("log")
      val result: DataFrame = sqlcontext.sql("select count(*) cn,provincename,cityname from log group by provincename,cityname")

      // coalesce(1) yields a single output file; acceptable here because the
      // grouped result (one row per province/city pair) is small.
      result.coalesce(1).write.json(resultOutputPath)
    } finally {
      // Always release the SparkContext, even if the job fails midway.
      sc.stop()
    }
  }
}
