package dmp.beans.sparksql

import java.util.Properties

import com.typesafe.config.ConfigFactory
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

import scala.tools.cmd.Property

/**
  * Author: CN.CDG
  * Date: 2019/2/14
  * Time: 14:58
  */
/**
  * Reads a parquet dataset, counts records per (provincename, cityname),
  * and writes the aggregate to a MySQL table via JDBC.
  *
  * Usage: one argument — the input parquet path.
  *
  * Connection settings are read from Typesafe Config (application.conf):
  * `jdbc.url`, `jdbc.tablename`, `jdbc.user`, `jdbc.password`,
  * and optionally `jdbc.driver` (explicit JDBC driver class).
  */
object LocalTestV3_Mysql {
  def main(args: Array[String]): Unit = {
    // Exactly one argument is required: the input parquet path.
    if (args.length != 1) {
      println(
        """
          |输入的参数不合法
          |inputPath
          |程序退出
        """.stripMargin)
      // Fix: was sys.exit() (status 0) — a rejected invocation must report
      // a non-zero exit code so scripts/schedulers can detect the failure.
      sys.exit(1)
    }
    val Array(inputPath) = args

    val conf = new SparkConf()
      // Fix: was "LocalTestV2" — keep the app name consistent with this object.
      .setAppName("LocalTestV3_Mysql")
      // NOTE(review): hard-coded local master; intended for local testing only —
      // remove (or pass via spark-submit) before cluster deployment.
      .setMaster("local[*]")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val spark = SparkSession
      .builder()
      .config(conf)
      .getOrCreate()

    try {
      val dfData: DataFrame = spark.read.parquet(inputPath)
      dfData.createOrReplaceTempView("temp")

      // Aggregate: record count per province/city pair.
      val dfResult: DataFrame = spark.sql(
        """
        |select provincename,cityname,count(*)as ct from temp group by provincename,cityname
        """.stripMargin)

      val configFactory = ConfigFactory.load()
      val props = new Properties()
      props.setProperty("user", configFactory.getString("jdbc.user"))
      props.setProperty("password", configFactory.getString("jdbc.password"))
      // Optional explicit driver class (e.g. com.mysql.jdbc.Driver); guarded by
      // hasPath so existing configs without the key keep working unchanged.
      if (configFactory.hasPath("jdbc.driver")) {
        props.setProperty("driver", configFactory.getString("jdbc.driver"))
      }

      // Default SaveMode is ErrorIfExists: the write fails if the target table
      // already exists — unchanged from the original behavior.
      dfResult.write.jdbc(
        configFactory.getString("jdbc.url"),
        configFactory.getString("jdbc.tablename"),
        props)
    } finally {
      // Fix: stop the session even when reading/writing throws, so the local
      // Spark context and its resources are always released.
      spark.stop()
    }
  }
}
