package com.zt.bigdata.spark.spark.sql

import java.io.File

import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession

// Row schema with an integer key and a string value (matches the kv1.txt layout
// loaded below). NOTE(review): not referenced anywhere in this file — it may be
// used via reflection/implicit Encoders elsewhere, or be dead code; verify.
case class Record2(key: Int, value: String)

object SparkSqlOnHiveExample extends Logging {

  /**
   * Demonstrates Spark SQL with Hive support: creates a Hive-backed table,
   * loads a local key/value file into it, and prints its contents.
   *
   * Uses an explicit `main` instead of `extends App`: the `App` trait relies on
   * `DelayedInit`, whose initialization-order behavior is deprecated and a
   * known source of subtle bugs in Spark drivers.
   */
  def main(args: Array[String]): Unit = {
    // warehouseLocation points to the default location for managed databases and tables
    val warehouseLocation = new File("spark-warehouse").getAbsolutePath

    val spark = SparkSession
      .builder()
      .appName("Spark Hive Example")
      .config("spark.sql.warehouse.dir", warehouseLocation)
      .enableHiveSupport()
      .getOrCreate()

    import spark.sql

    try {
      // BUG FIX: a hyphenated table name is not a legal bare identifier in
      // HiveQL — unquoted, `zt-test` fails to parse (read as `zt` minus `test`).
      // It must be backtick-quoted in every statement that references it.
      sql("CREATE TABLE IF NOT EXISTS `zt-test` (key INT, value STRING) USING hive")
      sql("LOAD DATA LOCAL INPATH 'spark-project/src/main/resources/kv1.txt' INTO TABLE `zt-test`")
      // Queries are expressed in HiveQL
      sql("SELECT * FROM `zt-test`").show()
    } finally {
      // Always release the SparkSession and its cluster resources,
      // even if one of the SQL statements above fails.
      spark.stop()
    }
  }
}
