package cn.itcast.spark.sql

import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.sql.types.{FloatType, IntegerType, StringType, StructField, StructType}

object HiveAccess {

  /**
   * Batch job: read tab-delimited student records from HDFS, keep rows with
   * age > 50, and overwrite the Hive table `spark03.student` with the result.
   *
   * Run with spark-submit; Hive support must be available on the cluster.
   */
  def main(args: Array[String]): Unit = {
    // 1. Build a SparkSession with Hive support and an explicit warehouse dir.
    //    NOTE(review): the original comment claimed the Metastore location was
    //    configured here, but only spark.sql.warehouse.dir is set — the
    //    metastore URI presumably comes from hive-site.xml on the classpath;
    //    confirm that is intended.
    val spark = SparkSession.builder()
      .appName("hive access1")
      .enableHiveSupport()
      .config("spark.sql.warehouse.dir", "/datas/hive")
      .getOrCreate()

    // Brings in the $"..." column interpolator used below.
    import spark.implicits._

    // 2. Read the data from HDFS as a DataFrame with an explicit schema
    //    (name, age, gpa) so the tab-delimited file is typed, not all-string.
    val schema = StructType(
      List(
        StructField("name", StringType),
        StructField("age", IntegerType),
        StructField("gpa", FloatType)
      )
    )

    val dataframe = spark.read
      .option("delimiter", "\t")
      .schema(schema)
      .csv("hdfs:///datas/studenttab10k")

    // Filter to students older than 50. Uses $"age" instead of the deprecated
    // symbol-literal syntax 'age (symbol literals are removed in Scala 3).
    val resultDF = dataframe.where($"age" > 50)

    // 3. Write the result, replacing any existing table contents.
    resultDF.write
      .mode(SaveMode.Overwrite)
      .saveAsTable("spark03.student")

  }
}
