package com.guchenbo.spark.sql

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

import scala.util.control.NonFatal

/**
 * Demo of writing a Hive table partitioned by `ds` and bucketed by `y_tag`,
 * plus a retryable SQL-execution helper for JDBC-backed tables.
 *
 * @author guchenbo
 * @date 2022/2/22
 */
object PartitionDemo {

  /**
   * Fragment of the Spark error message raised when a data source rejects a
   * user-specified schema; used to decide whether a failed query is worth
   * retrying after recreating the backing table.
   */
  val SCHEMA_ERROR = "does not allow user-specified schemas"

  def main(args: Array[String]): Unit = {
    // Local session talking to a remote Hive metastore.
    // NOTE(review): hard-coded metastore URI — fine for a demo, externalize for real use.
    val spark = SparkSession.builder().master("local[4]").appName("demo")
      .config("hive.metastore.uris", "thrift://ark150:9083")
      .enableHiveSupport().getOrCreate()

    try {
      val sql = "select * from turing_monitor.string_test_par "
      val df = spark.sql(sql)
      df.explain(true)
      // Materialize the query once so plan/execution issues surface here.
      df.collect()

      // Rewrite the table partitioned by `ds` and bucketed (2 buckets) on `y_tag`.
      df.write.partitionBy("ds")
        .bucketBy(2, "y_tag")
        .option("fileFormat", "textfile")
        .mode(SaveMode.Overwrite)
        .saveAsTable("turing_monitor.string_test_par_bu")

      // Sanity check: the source table is still queryable after the overwrite.
      spark.sql("select * from turing_monitor.string_test_par").collect()
    } finally {
      // Always release the session, even if the job above fails.
      spark.stop()
    }
  }

  /**
   * Executes `sql`, optionally (re)creating the JDBC-backed table first, and
   * retries once on a "user-specified schemas" error by dropping and
   * recreating the table.
   *
   * @param spark     active session
   * @param sql       query to execute
   * @param dropFirst when true, drop and recreate `turing_monitor.gp_test01`
   *                  before running the query
   * @param retry     when true, retry once (with `dropFirst = true`) if the
   *                  failure message contains [[SCHEMA_ERROR]]
   * @return the query result; from the retry attempt if the first run failed
   * @throws Exception the original failure when no retry applies
   */
  def executeSparkSql(spark: SparkSession, sql: String, dropFirst: Boolean, retry: Boolean):
  DataFrame = {
    try {
      if (dropFirst) {
        // Spark SQL executes one statement per call: issue DROP and CREATE
        // separately (a single semicolon-joined string would fail to parse).
        spark.sql("DROP TABLE IF EXISTS turing_monitor.gp_test01")
        spark.sql(
          """CREATE TABLE IF NOT EXISTS turing_monitor.gp_test01
            |    USING jdbc
            |    OPTIONS (
            |        url "jdbc:postgresql://10.57.36.55:5432/shangqi_db",
            |        dbtable "public.test01",
            |        user "gpadmin",
            |        password "gpadmin123",
            |        driver "org.postgresql.Driver",
            |        customSchema "a double,b STRING"
            |        )
            |    COMMENT 'spark to test01'
            |""".stripMargin)
      }
      // Drop any cached metadata so the query sees the current table state.
      spark.sql("REFRESH TABLE turing_monitor.gp_public_model_report_psi_test_df")
      spark.sql(sql)
    } catch {
      case NonFatal(e) =>
        val msg = e.getMessage
        if (retry && msg != null && msg.contains(SCHEMA_ERROR)) {
          println(s"sql error $msg, then retry once")
          // Recreate the table and run once more; retry = false prevents loops.
          // The retry's result is RETURNED — previously it was discarded and
          // the original exception re-thrown unconditionally.
          executeSparkSql(spark, sql, dropFirst = true, retry = false)
        } else {
          throw e
        }
    }
  }
}
