package com.bkd.report

import java.util.Properties

import com.typesafe.config.{Config, ConfigFactory}
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Province/city report job: reads a parquet log dataset, counts records
 * grouped by (provincename, cityname), and writes the result to a JDBC table.
 *
 * Usage: com.bkd.report.ProcityRpt2 <logInputPath>
 *
 * JDBC connection settings are read from the Typesafe config
 * (keys: jdbc.url, jdbc.tableName, jdbc.user, jdbc.password).
 */
object ProcityRpt2 {

  def main(args: Array[String]): Unit = {
    if (args.length != 1) {
      println(
        """
          |com.bkd.report.ProcityRpt2
          |参数
          |logInputPath
        """.stripMargin)

      // Exit with a nonzero status so schedulers/callers see the usage error.
      // (The original `sys.exit()` exited with 0, signalling success.)
      sys.exit(1)
    }

    // Extract the expected argument.
    val Array(logInputPath) = args

    // Build the SparkConf. Note: getSimpleName on a Scala object class is
    // "ProcityRpt2$"; strip the trailing '$' for a clean app name.
    val conf = new SparkConf()
    conf.setAppName(this.getClass.getSimpleName.stripSuffix("$"))
    conf.setMaster("local[*]") // NOTE(review): hard-coded local master; remove for cluster submission
    // Use Kryo serialization for better performance than Java serialization.
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val sc = new SparkContext(conf)
    try {
      val sqlcontext = new SQLContext(sc)

      // Read the parquet input and expose it as a temp table for SQL.
      val df: DataFrame = sqlcontext.read.parquet(logInputPath)
      df.registerTempTable("log")

      // Count records per (province, city).
      val result: DataFrame = sqlcontext.sql(
        "select count(*) cn, provincename,cityname from log group by provincename,cityname")

      // Load JDBC connection settings from application.conf and write out.
      val load: Config = ConfigFactory.load()
      val props = new Properties()
      props.setProperty("user", load.getString("jdbc.user"))
      props.setProperty("password", load.getString("jdbc.password"))
      result.write.jdbc(load.getString("jdbc.url"), load.getString("jdbc.tableName"), props)
    } finally {
      // Always release the SparkContext, even if the job fails mid-way.
      sc.stop()
    }
  }
}
