package com.guchenbo.spark.sql

import com.guchenbo.spark.sql.SparkJdbcUtils.{SparkJdbcOption, SqlOpt}
import org.apache.commons.lang3.StringUtils
import org.apache.spark.sql.{DataFrameReader, SparkSession}

import scala.util.matching.Regex

/**
 * 监控insert overwrite 改造
 *
 * @author guchenbo
 * @date 2021/6/25
 */
object JdbcReadMonitor {

  def main(args: Array[String]): Unit = {
    // NOTE(review): master, metastore URI and scratch dir are hard-coded;
    // fine for a local monitoring experiment, should move to configuration
    // for anything shared.
    val spark = SparkSession.builder().appName("Spark Jdbc Read")
      .master("local[2]")
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://ark150:9083")
      .config("hive.exec.scratchdir", "/tmp/hive")
      .getOrCreate()
    try {
      read(spark)
    } finally {
      // Always release the session (and its SparkContext), even when read() throws.
      spark.stop()
    }
  }

  /**
   * Splits a statement of the shape
   * `with <view> as ( <select> ) insert overwrite table <target> ...`
   * into its three parts.
   *
   * @param originSql the full "with ... as (...) insert overwrite ..." statement
   * @return a [[SqlOpt]] carrying the CTE name, the CTE body (select SQL),
   *         and the trailing insert statement
   */
  def buildSqlOpt(originSql: String): SqlOpt = {
    // CTE name sits between the leading "with" and the first "as".
    // NOTE(review): assumes "as" does not occur inside the view name itself.
    val viewName = StringUtils.substringAfter(StringUtils.substringBefore(originSql, "as"), "with")
    val idx = StringUtils.indexOf(originSql, "insert overwrite table")
    val insSql = StringUtils.substring(originSql, idx)

    // The CTE body is everything between the outermost parentheses that precede
    // the insert part. Using the FIRST "(" and the LAST ")" keeps nested
    // parentheses (function calls, sub-selects) inside the body; the previous
    // substringBefore(")") truncated the body at the first closing paren.
    val cte = StringUtils.substring(originSql, 0, idx)
    val open = StringUtils.indexOf(cte, "(")
    val close = StringUtils.lastIndexOf(cte, ")")
    val selSql = StringUtils.substring(cte, open + 1, close)

    val sqlOpt = new SqlOpt
    sqlOpt.viewName = viewName.trim
    sqlOpt.viewSql = selSql.trim
    sqlOpt.insertSql = insSql.trim
    sqlOpt
  }

  /**
   * Probes the JDBC source for the MAX/MIN of the partition column over the
   * given select; the pair is used as upperBound/lowerBound for a parallel
   * JDBC read.
   *
   * (Name kept as-is — "Uppder" is a typo, but renaming would break callers.)
   *
   * @param dfr             a reader already configured with url/user/password/driver
   * @param partitionColumn column the parallel read will be split on
   * @param sql             the select whose value range is probed
   * @return (max, min) of the partition column, rendered as strings
   */
  def getUppderLower(dfr: DataFrameReader, partitionColumn: String, sql: String): (String, String) = {
    val maxMinSql =
      s"SELECT MAX(maxmin.$partitionColumn) as maxv, MIN(maxmin.$partitionColumn) as minv FROM( $sql ) maxmin"
    println(s"use maxmin sql = $maxMinSql")

    // The aggregation always yields exactly one row.
    val row = dfr.option("query", maxMinSql).load().select("maxv", "minv").head()
    val upperB = String.valueOf(row(0))
    val lowerB = String.valueOf(row(1))
    println(s"max = $upperB, min = $lowerB")
    (upperB, lowerB)
  }

  /**
   * Entry point of the monitoring read/write: configures the JDBC source and
   * runs the single-read path against the Hive-backed variant of the SQL.
   */
  def read(spark: SparkSession): Unit = {
    println("读取外表")

    // SECURITY(review): database credentials are hard-coded in source; they
    // should come from a credential store or job configuration instead.
    val sparkJdbcOption = new SparkJdbcOption
    sparkJdbcOption.url = "jdbc:mysql://10.57.16.13:3306/model_manager?useSSL=false"
    sparkJdbcOption.user = "model_paas_manager"
    sparkJdbcOption.password = "Liu_0123456789"
    sparkJdbcOption.driver = "com.mysql.jdbc.Driver"

    // Variant whose CTE reads the MySQL-backed tables; kept for the
    // (currently commented-out) parallel path below.
    val originSql =
      """
        |with ds_t3126 as ( SELECT a.*,b.name as nameb
        |      FROM model_manager.model_info a
        |               LEFT JOIN model_manager.model_form_template b ON a.form_template_uuid = b.uuid )insert overwrite table turing_monitor.monitor_type_134_table_0 select * from (( select name as v1,nameb as v2 from ds_t3126 ) as field_0)
        |""".stripMargin
    // Variant whose CTE reads the Hive external tables.
    val originSql2 =
      """
        |with ds_t3126 as ( SELECT a.*,b.name as nameb
        |      FROM turing.mysql_3126_model_manager_model_info a
        |               LEFT JOIN turing.mysql_3126_model_manager_model_form_template b ON a.form_template_uuid = b.uuid )insert overwrite table turing_monitor.monitor_type_134_table_0 select * from (( select name as v1,nameb as v2 from ds_t3126 ) as field_0)
        |""".stripMargin

    oneReadWrite(spark, sparkJdbcOption, originSql2)

    sparkJdbcOption.partitionColumn = "gmt_create"
//    parallelReadWrite(spark, sparkJdbcOption, originSql)
  }

  /** Builds a JDBC reader carrying the connection options from `opt`. */
  private def jdbcReader(spark: SparkSession, opt: SparkJdbcOption): DataFrameReader =
    spark.read
      .format("jdbc")
      .option("url", opt.url)
      .option("user", opt.user)
      .option("password", opt.password)
      .option("driver", opt.driver)

  /**
   * Parallel multi-partition read: probes MAX/MIN of the partition column,
   * reads the CTE body split into 5 JDBC partitions, registers it as a temp
   * view and executes the insert statement against it.
   *
   * @param spark           active session
   * @param sparkJdbcOption JDBC connection options; `partitionColumn` must be set
   * @param originSql       full "with ... insert overwrite ..." statement
   */
  def parallelReadWrite(spark: SparkSession, sparkJdbcOption: SparkJdbcOption, originSql: String): Unit = {
    val sqlOpt = buildSqlOpt(originSql)
    println(s"viewName = ${sqlOpt.viewName}\n viewSql = ${sqlOpt.viewSql}\n insertSql = ${sqlOpt.insertSql}")

    val partitionColumn = sparkJdbcOption.partitionColumn
    // Probe the column's range first: JDBC partitioned reads require explicit bounds.
    val bounds = getUppderLower(jdbcReader(spark, sparkJdbcOption), partitionColumn, sqlOpt.viewSql)
    val df = jdbcReader(spark, sparkJdbcOption)
      .option("dbtable", s"(${sqlOpt.viewSql}) sub")
      .option("numPartitions", 5)
      .option("partitionColumn", partitionColumn)
      .option("upperBound", bounds._1)
      .option("lowerBound", bounds._2)
      .load()
    df.createOrReplaceTempView(sqlOpt.viewName)

    sparkSql(spark, sqlOpt.insertSql)
  }

  /**
   * Single (non-partitioned) read: loads the CTE body over JDBC, registers it
   * as a temp view, then executes the original statement.
   *
   * @param spark           active session
   * @param sparkJdbcOption JDBC connection options
   * @param originSql       full "with ... insert overwrite ..." statement
   */
  def oneReadWrite(spark: SparkSession, sparkJdbcOption: SparkJdbcOption, originSql: String): Unit = {
    val sqlOpt = buildSqlOpt(originSql)
    println(s"viewName = ${sqlOpt.viewName}\n viewSql = ${sqlOpt.viewSql}\n insertSql = ${sqlOpt.insertSql}")

    // NOTE(review): numPartitions without partitionColumn/bounds does not
    // split the read — it only caps concurrency. Left as in the original.
    val df = jdbcReader(spark, sparkJdbcOption)
      .option("dbtable", s"(${sqlOpt.viewSql}) sub")
      .option("numPartitions", 5)
      .load()
    df.createOrReplaceTempView(sqlOpt.viewName)

    // NOTE(review): the FULL originSql is executed, whose CTE shadows the temp
    // view registered above, making the JDBC read effectively unused on this
    // path. Behavior intentionally preserved; the insert-only variant stays
    // commented out exactly as in the original.
//    sparkSql(spark, sqlOpt.insertSql)
    sparkSql(spark, originSql)
  }

  /** Executes `realSql` through Spark SQL and prints up to 10 result rows. */
  def sparkSql(spark: SparkSession, realSql: String): Unit = {
    println(s"开始执行realSql = $realSql")
    spark.sql(realSql).show(10)
    println(s"执行完成................")
  }

}
