package com.max.spark.postgre

import com.max.spark.utils.{ConfUtil, LogUtil, PropsUtil, SparkUtil}
import org.apache.spark.sql.{DataFrame, DataFrameReader, SaveMode, SparkSession}

/**
 * Demo Spark job showing three ways to read a PostgreSQL table over JDBC
 * and how to write a query result back.
 *
 * Connection settings come from `conf/pg.properties` via [[ConfUtil]].
 */
object SparkGPApp {

  // JDBC connection settings loaded once from the external properties file.
  val confPath = "conf/pg.properties"
  val hostname = ConfUtil.getProperty(confPath, "pg.hostname")
  val port = ConfUtil.getProperty(confPath, "pg.port")
  val username = ConfUtil.getProperty(confPath, "pg.username")
  val password = ConfUtil.getProperty(confPath, "pg.password")
  val driver = ConfUtil.getProperty(confPath, "pg.driver")
  val db = ConfUtil.getProperty(confPath, "pg.db")
  val table = ConfUtil.getProperty(confPath, "pg.table")

  // java.util.Properties bundle (driver/user/password) for spark.read.jdbc(...).
  val properties = PropsUtil.getProps(driver, username, password)
  val url = s"jdbc:postgresql://$hostname:$port/$db"
  // Option map for the DataFrameReader "jdbc" format — used by readType02.
  val prop = PropsUtil.getPropsByMap("postgresql", driver, hostname, port, username, password, db, table)

  /**
   * Entry point: logs the JDBC URL, builds the SparkSession, runs the write
   * demo, and always stops the session — even when the job fails — so that
   * executor/cluster resources are released.
   */
  def main(args: Array[String]): Unit = {
    LogUtil.info(s"[url]: $url")
    val spark = SparkUtil.sparkSession()
    try {
      // readType03(spark)
      write2postgre(spark)
    } finally {
      // Without the finally, a failure above would leak the SparkSession.
      spark.stop()
    }
  }

  /**
   * Read style #1: `spark.read.jdbc` with a Properties object.
   * Reads the configured table directly, then reads a pushed-down
   * subquery (the aliased "(select ...) T" form JDBC sources require).
   *
   * @param spark active session used to issue the reads
   */
  def sparkReadGP01(spark: SparkSession): Unit = {
    val kafka = spark.read.jdbc(url, table, properties)
    kafka.show()
    spark.read.jdbc(url, s"(select * from $table where topic != '') T", properties).show()
  }

  /**
   * Read style #2: the generic "jdbc" format driven by the option map,
   * then querying the result through a temp view with Spark SQL.
   *
   * @param spark active session used to issue the read and the SQL query
   */
  def readType02(spark: SparkSession): Unit = {
    val kfkOffset = spark.read.format("jdbc").options(prop).load()
    // Register as a temp view so it can be queried with SQL.
    kfkOffset.createOrReplaceTempView("kafka")
    kfkOffset.show()
    // NOTE(review): `partition` is a reserved word in some Spark SQL
    // modes — confirm this parses on the target Spark version.
    spark.sql("select * from kafka where partition=1").show()
  }

  /**
   * Read style #3: the "jdbc" format with each option set explicitly on
   * the DataFrameReader.
   *
   * @param spark active session used to issue the read
   */
  def readType03(spark: SparkSession): Unit = {
    val read: DataFrameReader = spark.read.format("jdbc")
      .option("url", url)
      .option("driver", driver)
      .option("user", username)
      .option("password", password)
      .option("dbtable", table)

    val source2: DataFrame = read.load()
    source2.show()
  }

  /**
   * Write demo: reads the `kafka_offset` table, filters out rows with an
   * empty topic, and writes the result to the `res` table in PostgreSQL.
   *
   * NOTE(review): the source table name is hard-coded as "kafka_offset"
   * rather than using the configured `table` — confirm that is intended.
   *
   * @param spark active session used for the read, query and write
   */
  def write2postgre(spark: SparkSession): Unit = {
    val k = spark.read.jdbc(url, "kafka_offset", properties)
    k.createOrReplaceTempView("k")
    val res = spark.sql("select *  from k where topic != ''")
    res.show()
    // Option 1: save the query result to a PostgreSQL table.
    // Overwrite drops and recreates the target table (losing any indexes
    // or grants) unless the "truncate" option is enabled.
    res.write.mode(SaveMode.Overwrite).jdbc(url, "res", properties)

    // Option 2: save the result through the option map instead:
    //    result.write.format("jdbc").options(prop).mode(SaveMode.Append).save()
  }

}
