package com.guchenbo.spark.sql

import org.apache.spark.sql.SparkSession

/**
 * Parallel JDBC read with Spark: probes MIN/MAX of a partition column,
 * then performs a bounded, partitioned read from a Doris source.
 *
 * @author guchenbo
 * @date 2021/6/25
 */
object JdbcDorisParallel {

  /**
   * Entry point: builds the session, runs the parallel read, and always
   * stops the session so executor resources are released on failure too.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkUtils.sparkSessionTd("Spark Sql")
    try {
      read(spark)
    } finally {
      spark.stop() // release cluster resources even if read() throws
    }
  }

  /**
   * Reads the external table in parallel over JDBC.
   *
   * Strategy: first issue a single-row MIN/MAX aggregate on the partition
   * column, then use those values as `lowerBound`/`upperBound` for a
   * partitioned JDBC read (`numPartitions` concurrent range queries), and
   * finally run the downstream insert against a temp view of the result.
   *
   * @param spark active session used for both the JDBC reads and the SQL insert
   */
  def read(spark: SparkSession): Unit = {
    println("读取外表")

    // Base query. Note the space between * and FROM — `*FROM` is fragile
    // against strict SQL parsers.
    val sql =
      """
        |SELECT * FROM t_user_1200w_dup
        |""".stripMargin

    // Fresh JDBC reader with the shared connection settings (DataFrameReader
    // is mutable, so each read gets its own instance).
    // NOTE(review): credentials and host are hard-coded — move to
    // configuration / a secrets store before production use.
    def makeReader =
      spark.read
        .format("jdbc")
        .option("url", "jdbc:mysql://10.57.16.196:9030/test?useSSL=false")
        .option("user", "root")
        .option("password", "root")
        .option("driver", "com.mysql.jdbc.Driver")

    val partitionColumn = "gmt_create"

    // Probe the bounds of the partition column; the aggregate yields exactly one row.
    val maxSql =
      s"SELECT MAX(maxmin.$partitionColumn) as maxv, MIN(maxmin.$partitionColumn) as minv FROM( $sql ) maxmin"
    println(maxSql)

    val boundsRow = makeReader
      .option("query", maxSql)
      .load()
      .select("maxv", "minv")
      .head() // single-row aggregate, head() is safe here

    val upperB = String.valueOf(boundsRow(0)) // MAX(partitionColumn)
    val lowerB = String.valueOf(boundsRow(1)) // MIN(partitionColumn)
    println(upperB)
    println(lowerB)

    // Partitioned read: Spark splits [lowerBound, upperBound] on
    // partitionColumn into numPartitions ranges, one JDBC query each.
    val df = makeReader
      .option("dbtable", s"($sql) sub")
      .option("numPartitions", 5)
      .option("partitionColumn", partitionColumn)
      .option("upperBound", upperB)
      .option("lowerBound", lowerB)
      .load()

    // Expose the partitioned dataset to SQL and run the downstream insert.
    val insertSql =
      """
        |insert overwrite table turing_monitor.monitor_type_138_table_0 select * from (( select mobile as v1,address as v2 from ds_t3132 ) as field_0)
        |""".stripMargin
    df.createOrReplaceTempView("ds_t3132")

    spark.sql(insertSql).show()
  }
}
