package com.guchenbo.spark.sql

import com.guchenbo.spark.sql.CsvReader.path
import org.apache.spark.sql.SparkSession

/**
 * Demo snippet (suitable for pasting into spark-shell): drops and recreates a
 * Hive table backed by a DB2 JDBC source, then queries it.
 *
 * @author guchenbo
 * @date 2022/2/22
 */
object CreateTableDemo2 {

  def main(args: Array[String]): Unit = {
    // Local Spark session with Hive support; the metastore lives on ark150.
    // The commented-out configs are alternate environments kept for reference.
    val spark = SparkSession.builder().master("local").appName("CreateTableDemo")
      //      .config("hive.metastore.uris", "thrift://localhost:9083")
      .config("hive.metastore.uris", "thrift://ark150:9083")
      //      .config("spark.sql.warehouse.dir", "/Users/guchenbo/opt/data/hive/warehouse")
      //      .config("spark.sql.warehouse.dir", "hdfs:///user/hive/warehouse")
      .enableHiveSupport().getOrCreate()

    // Stop the session even if one of the statements below fails, so the
    // local SparkContext and metastore connection are always released.
    try {
      // Start from a clean slate: remove any previous run's table.
      val dropSql =
        """
          |
          |DROP TABLE IF EXISTS turing_monitor.db2_db2admin_testcn2
          |""".stripMargin
      spark.sql(dropSql)

      // JDBC endpoint of the source DB2 instance (host, port, database).
      val url = s"jdbc:db2://${"10.58.101.177"}:${"50000"}/${"lrmdb"}"

      // SECURITY NOTE(review): credentials are hard-coded in this demo SQL;
      // for anything beyond a throwaway test they belong in configuration or
      // a secret store, not in source control.
      val createSql =
        s"""
           |
           |CREATE TABLE IF NOT EXISTS turing_monitor.db2_db2admin_testcn2
           |    USING jdbc
           |    OPTIONS (
           |        url "$url",
           |        dbtable "DB2ADMIN.TESTCN",
           |        user "db2admin",
           |        password "Td@12345",
           |        driver "com.ibm.db2.jcc.DB2Driver"
           |        )
           |    COMMENT 'spark to test01'
           |
           |""".stripMargin
      spark.sql(createSql)

      // Smoke-test the mapping by selecting one column through the JDBC table.
      val querySql = "select `机构号` from turing_monitor.db2_db2admin_testcn2"
      spark.sql(querySql).show()
    } finally {
      spark.stop()
    }
  }

}
