package com.guchenbo.spark.sql

import org.apache.spark.sql.SparkSession

/**
 * @author guchenbo
 * @date 2022/2/21
 */
/**
 * Ad-hoc driver that runs a Spark SQL query against Hive-managed tables
 * and prints the result, used for local debugging of table access.
 *
 * NOTE(review): metastore host (`ark150:9083`) and table names are
 * hard-coded — this is scratch/debug code, not production.
 */
object QueryDemo {
  def main(args: Array[String]): Unit = {
    // Local 2-thread session with Hive support so `spark.sql` can resolve
    // metastore tables. `val`: the session is never reassigned.
    val spark = SparkSession.builder().appName("postgreSQL")
      .master("local[2]")
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://ark150:9083")
      .config("hive.exec.scratchdir", "/tmp/hive")
      .getOrCreate()

    // --- Earlier JDBC/PostgreSQL experiment, kept for reference ---
    //    var reader = spark.read.format("jdbc")
    //      .option("url", "jdbc:postgresql://10.58.11.12:5432/pgtest0611")
    //      .option("user", "pgadmin")
    //      .option("password", "pgadmin123")
    //      .option("driver", "org.postgresql.Driver")
    //
    //    val table = args(0)
    //    var sql = s"with view_alias as (select * from $table) select * from (select * from ( select * from view_alias ) Table_0) dims"
    //
    //    reader.option("dbtable", table)
    //    val df = reader.load()
    //    df.show()
    //
    //    var tempView = table
    //    val idx = table.indexOf(".")
    //    if (idx > -1) {
    //      tempView = table.substring(idx + 1)
    //      sql = sql.replaceAll(table, tempView)
    //      println(s"sql replace $table to $tempView")
    //    }
    //    df.createOrReplaceTempView(tempView)
    //    val df2 = spark.sql(sql)
    //
    //    df2.columns.foreach(println(_))
    //    df2.collect()

    // Alternate queries previously cycled through via dead `var sql`
    // reassignments (only the last value was ever executed); kept here
    // as reference so they are easy to re-enable:
    //   select * from turing_monitor.mysql_962_holmes_2_11_0_holmes_model_log limit
    //   with view_alias as ( select `model_version`,`success`,`model_uuid`,`model_type` from (select * from turing_monitor.mysql_962_holmes_2_11_0_holmes_model_log WHERE id <7000 and model_uuid='2022041248961219') view_alias2 ) insert overwrite table turing_monitor.template_417_table_0 select * from (select * from ( select * from view_alias ) Table_0) dims
    //   select * from turing_monitor.score_test
    //   select * from turing_monitor.score_test_parquet
    //   select * from turing_monitor.mysql_962_holmes_2_11_0_holmes_model_log
    //   select * from turing_monitor.string_test_par_bu
    //   select * from turing.credit_card

    try {
      // The only query that was actually executed.
      val sql = "SELECT * FROM turing.test_case03 LIMIT 5"
      val df = spark.sql(sql)
      //    df.explain()
      df.show()

      // Debug markers printed between the (now commented-out) query runs;
      // preserved so console output is unchanged.
      println("11")
      println("11")
      println("11")
      println("11")
    } finally {
      // Fix: the session was never stopped — release the local Spark
      // context even if the query throws.
      spark.stop()
    }
  }
}
