package com.guchenbo.spark.sql

import org.apache.spark.sql.SparkSession

/**
 * Sample single-session read: queries a Hive table through Spark SQL
 * and prints the result. (No JDBC parallel read is performed here.)
 *
 * @author guchenbo
 * @date 2021/6/25
 */
object SparkSqlSingle {

  /**
   * Entry point: builds a local SparkSession with Hive support, runs the
   * sample query, and stops the session when done.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // val, not var: the session reference is never reassigned.
    val spark = SparkSession.builder()
      .appName("Spark")
      .master("local[2]")
      .enableHiveSupport()
//      .config("hive.metastore.uris", "thrift://ark150:9083")
//      .config("hive.exec.scratchdir", "/tmp/hive")
      .getOrCreate()
    try {
      read(spark)
    } finally {
      // Always release the underlying SparkContext and its resources,
      // even when the query fails — the original leaked the session.
      spark.stop()
    }
  }

  /**
   * Runs a sample query against the Hive table
   * `turing_monitor.monitor_type_134_table_0` and prints the result
   * to stdout via `show()`.
   *
   * @param spark an active SparkSession with Hive support enabled
   */
  def read(spark: SparkSession): Unit = {
    // val, not var; SparkSession is already imported, so the
    // fully-qualified parameter type was redundant.
    val sql = "select * from turing_monitor.monitor_type_134_table_0"
    spark.sql(sql).show()
  }
}
