package com.guchenbo.spark.sql

import org.apache.spark.sql.SparkSession

/**
 * Experiment: reading a Hive table through the Spark JDBC data source
 * (via the HiveServer2 thrift endpoint) instead of the Hive catalog.
 *
 * @author guchenbo
 * @date 2021/6/25
 */
object JdbcReadHive {

  /**
   * Entry point: builds a local SparkSession with Hive support, runs the
   * JDBC read, and always stops the session afterwards.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // val, not var — the session reference is never reassigned.
    val spark = SparkSession.builder().appName("Spark Jdbc Read")
      .master("local[2]")
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://ark150:9083")
      .config("hive.exec.scratchdir", "/tmp/hive")
      .getOrCreate()
    try {
      read(spark)
    } finally {
      // Release the session's resources even if the read throws;
      // previously the session was never stopped.
      spark.stop()
    }
  }

  /**
   * Reads `turing.credit_card` over the HiveServer2 JDBC endpoint and
   * prints the result to stdout via `show()`.
   *
   * @param spark active session used to build the JDBC reader
   */
  def read(spark: SparkSession): Unit = {
    // Local factory for a reader pre-configured with connection options;
    // the query is attached by the caller below.
    def makeReader = {
      spark.read
        .format("jdbc")
        .option("url", "jdbc:hive2://ark150:10000")
        .option("user", "admin")
        .option("password", "admin")
        .option("driver", "org.apache.hive.jdbc.HiveDriver")
    }

    // val, not var — neither binding is reassigned.
    val sql = "select * from turing.credit_card"
    val dfr = makeReader.option("query", sql)
    dfr.load().show()
  }
}
