package com.gy.spark.sparksql.dataframe

import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * Demonstrates Spark SQL + Hive integration:
 *  1. (Re)creates two tab-delimited Hive tables from local text files.
 *  2. Joins them on `name` via a SQL query into a DataFrame.
 *  3. Persists the join result back into a Hive table (`good_student_infos`).
 *
 * Note: a Hive-enabled `SparkSession` replaces the legacy `HiveContext`
 * (removed in Spark 2.x); `enableHiveSupport()` wires in the metastore.
 */
object CreateDFFromHive {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local")
      .appName(this.getClass.getSimpleName)
      //      .config("spark.sql.warehouse.dir", warehouseLocation)
      .enableHiveSupport()
      .getOrCreate()

    // Ensure the session is always stopped, even when a query below fails;
    // otherwise a failed sql() call would leak the SparkSession and its
    // backing SparkContext.
    try {
      spark.sql("create database if not exists spark")
      spark.sql("use spark")

      // Rebuild the two source tables from scratch so reruns are idempotent.
      spark.sql("drop table if exists student_infos")
      spark.sql("create table if not exists student_infos (name string,age int) row format  delimited fields terminated by '\t'")
      spark.sql("load data local inpath './spark/input/sql/student_infos.txt' into table student_infos")

      spark.sql("drop table if exists student_scores")
      spark.sql("create table if not exists student_scores (name string,score int) row format delimited fields terminated by '\t'")
      spark.sql("load data local inpath './spark/input/sql/student_scores.txt' into table student_scores")

      // Inner join (ANSI-89 comma syntax + WHERE) of infos and scores on name.
      val df = spark.sql("select si.name,si.age,ss.score from student_infos si,student_scores ss where si.name = ss.name")

      // Drop the target table up front so saveAsTable starts clean.
      spark.sql("drop table if exists good_student_infos")

      // Echo the source tables for manual inspection.
      spark.sql("select * from student_infos").show()
      spark.sql("select * from student_scores").show()

      df.printSchema()
      df.show(20)

      // Persist the join result into the Hive table good_student_infos.
      df.write.mode(SaveMode.Overwrite).saveAsTable("good_student_infos")
    } finally {
      spark.stop()
    }
  }
}