package dmp.beans.sparksql

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * Counts records per province/city and stores the result as a JSON file.
  * (统计省市结果，将结果存成json文件)
  *
  * @author CN.CDG
  * @since 2019/2/14 14:20
  **/
object LocalTestV2_Disc {
  /**
    * Entry point: reads a parquet dataset from `args(0)`, counts records per
    * (provincename, cityname) pair via Spark SQL, and writes the result as a
    * single JSON file under `args(1)`.
    *
    * @param args expects exactly two arguments: inputPath outputPath;
    *             anything else prints usage and exits with a non-zero status
    */
  def main(args: Array[String]): Unit = {
    if (args.length != 2) {
      println(
        """
          |输入的参数不合法
          |inputPath
          |outputPath
          |程序退出
        """.stripMargin)
      // Exit with non-zero status so shells/schedulers (cron, YARN, Oozie)
      // can detect the failure; bare sys.exit() reports success (status 0).
      sys.exit(1)
    }
    val Array(inputPath, outputPath) = args
    val conf = new SparkConf()
      .setAppName("LocalTestV2")
      .setMaster("local[*]") // hard-coded local mode; overrides any --master from spark-submit
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val spark = SparkSession
      .builder()
      .config(conf)
      .getOrCreate()
    // Ensure the session is stopped even if the read/write fails,
    // releasing executors and the Spark UI port.
    try {
      val dfData: DataFrame = spark.read.parquet(inputPath)
      dfData.createOrReplaceTempView("temp")
      // Count rows per province/city pair.
      val dfResult: DataFrame = spark.sql(
        """
          |select provincename, cityname, count(*) as ct
          |from temp
          |group by provincename, cityname
        """.stripMargin)
      // coalesce(1) so the output directory contains a single JSON part file.
      dfResult.coalesce(1).write.json(outputPath)
    } finally {
      spark.stop()
    }
  }
}
