package com.guchenbo.spark.sql

import org.apache.spark.sql.SparkSession

/**
 * Example: reading a MySQL table through Spark's JDBC data source
 * (single connection, no partitioning), with Hive support enabled
 * on the session.
 *
 * @author guchenbo
 * @date 2021/6/25
 */
object JdbcReadMysqlSingle {

  /**
   * Entry point: builds a local SparkSession with Hive support and runs the
   * JDBC read, stopping the session afterwards.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // val, not var: the session reference is never reassigned.
    val spark = SparkSession.builder()
      .appName("Spark Jdbc Read")
      .master("local[2]")
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://ark150:9083")
      .config("hive.exec.scratchdir", "/tmp/hive")
      .getOrCreate()
    try {
      read(spark)
    } finally {
      // Release the local executor/driver resources even if the read fails;
      // the original leaked the session on exit.
      spark.stop()
    }
  }

  /**
   * Reads the `model_manager.model_info` table over JDBC and prints its
   * contents to stdout via `show()`.
   *
   * @param spark active session used to build the JDBC DataFrameReader
   */
  def read(spark: SparkSession): Unit = {
    // SECURITY NOTE(review): URL, user and password are hard-coded; move them
    // to configuration or a secrets store before any production use.
    def makeReader = {
      spark.read
        .format("jdbc")
        .option("url", "jdbc:mysql://10.57.16.13:3306/model_manager?useSSL=false")
        .option("user", "model_paas_manager")
        .option("password", "Liu_0123456789")
        // NOTE(review): com.mysql.jdbc.Driver is the legacy Connector/J 5.x
        // class name; Connector/J 8.x expects com.mysql.cj.jdbc.Driver.
        // Left as-is to avoid changing the runtime dependency.
        .option("driver", "com.mysql.jdbc.Driver")
    }

    // "query" wraps the statement as a subquery on the JDBC side;
    // mutually exclusive with the "dbtable" option.
    val sql = "select * from model_manager.model_info"
    val dfr = makeReader.option("query", sql)
    dfr.load().show()
  }
}
