package cn.tiakon.dmp.report

import java.util.Properties

import cn.tiakon.dmp.untils.ContextUtils
import com.typesafe.config.{Config, ConfigFactory}
import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}

/**
  * Persists Spark SQL aggregation results to MySQL.
  *
  * Reads request logs from a parquet file (path taken from config key
  * `output.parquet.path`), counts records per (provincename, cityname)
  * pair, and appends the result to the `t_logs` table over JDBC using
  * the `db.url` / `db.user` / `db.password` config keys.
  *
  * @author Tiakon
  *         2018/3/28 17:59
  */
object Sql2JDBC {

  def main(args: Array[String]): Unit = {

    // Load configuration (parquet path, DB URL and credentials) from the classpath.
    val load: Config = ConfigFactory.load()

    val sc = ContextUtils.getSparkContext()

    // Ensure the SparkContext is always released, even when any stage of
    // the job throws — previously sc.stop() was only reached on success.
    try {
      val sqlc = new SQLContext(sc)

      // Read the parquet file and register it as a temporary table for SQL access.
      // (registerTempTable is the Spark 1.x API matching this SQLContext usage.)
      sqlc.read.parquet(load.getString("output.parquet.path")).registerTempTable("logs")

      // Count log records grouped by province and city.
      val counted: DataFrame = sqlc.sql("select count(*) cnt,provincename,cityname from logs group by provincename , cityname")

      val properties = new Properties()
      properties.setProperty("user", load.getString("db.user"))
      properties.setProperty("password", load.getString("db.password"))

      // Append (not overwrite): repeated runs accumulate rows in t_logs.
      counted.write.mode(SaveMode.Append).jdbc(load.getString("db.url"), "t_logs", properties)
    } finally {
      sc.stop()
    }
  }

}
