package com.demo.hive

import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * Demo job: reads two Hive tables (`ods.student_infos`, `ods.student_scores`),
 * joins them to select students with score >= 30, persists the result as the
 * Hive managed table `hive_recordszb`, then reads it back and prints each row.
 *
 * Requires a Hive metastore reachable from the Spark session
 * (enableHiveSupport) — NOTE(review): metastore connection settings are
 * expected to come from spark-submit / hive-site.xml; confirm in deployment.
 */
object ReadFromHiveData {

  def main(args: Array[String]): Unit = {

    // SparkSession with Hive support; `spark` (not "hiveContext") — the old
    // HiveContext API is superseded by SparkSession since Spark 2.x.
    val spark = SparkSession
      .builder()
      .appName("Spark Hive Example")
      .enableHiveSupport()
      .getOrCreate()

    // Sanity check: list tables visible in the current database.
    spark.sql("show tables").collect().foreach(println)

    // Students with a passing score (>= 30), joined by name.
    // Triple-quoted interpolated SQL replaces the fragile "+"-concatenated
    // string with trailing-space hacks.
    val goodStudentsDF = spark.sql(
      """SELECT si.name, si.age, ss.score
        |FROM ods.student_infos si
        |JOIN ods.student_scores ss ON si.name = ss.name
        |WHERE ss.score >= 30""".stripMargin)

    // NOTE(review): this temp view is registered but never queried below;
    // kept for backward compatibility with downstream notebooks that may
    // reference "temp1" — remove once confirmed unused.
    goodStudentsDF.createOrReplaceTempView("temp1")

    // Persist the result as a Hive managed table, replacing any previous data.
    goodStudentsDF
      .select("name", "age", "score")
      .write
      .mode(SaveMode.Overwrite)
      .saveAsTable("hive_recordszb")

    // Read the table back and print every row.
    // NOTE(review): collect() pulls all rows to the driver — fine for a demo,
    // unsafe for large tables.
    val goodStudentRows = spark.table("hive_recordszb").collect()
    for (goodStudentRow <- goodStudentRows) {
      println(s"*******************************$goodStudentRow")
    }

    // Release the session's resources (alias of stop()).
    spark.close()
  }

}




//package com.infinova.bigdata.utils
//
//
//
//
//import org.apache.spark.SparkConf
//import org.apache.spark.SparkContext
//
//import org.apache.spark.sql.SQLContext
//import org.apache.spark.sql.SparkSession
//import java.io.File
//import org.apache.spark.sql.SaveMode
//
///**
// * @author Administrator
// */
//object ReadFromHiveData {
//
//
//  def main(args: Array[String]): Unit = {
///*    val conf = new SparkConf()
//        .setAppName("HiveDataSource").setMaster("local[2]");
//    val sc = new SparkContext(conf);
//    val hiveContext = new SQLContext(sc);*/
//
//
//    // $example on:init_session$
//   // warehouseLocation points to the default location for managed databases and tables
//   val warehouseLocation = new File("spark-warehouse").getAbsolutePath
//   val conf = new SparkConf()
//    conf.setAppName("ReadFromHiveData").setMaster("local[2]")
//
//    val hiveContext = SparkSession
//  .builder()
//  .appName("Spark Hive Example")
///*  .config("hive.metastore.uris","thrift://hadoop01:9083")
//   .config("hive.metastore.warehouse.dir","hdfs://hadoop01:9000//opt/tools/hive/warehouse")
//   .config("spark.sql.warehouse.dir","hdfs://hadoop01:9000/opt/tools/hive/warehouse").config(conf)*/
//  .enableHiveSupport()
//  .getOrCreate()
//
//    hiveContext.sql("show tables").collect().foreach(println)
//    // For implicit conversions like converting RDDs to DataFrames
//    import hiveContext.implicits._
//    // $example off:init_session$
//
//    //hiveContext.sql("CREATE TABLE IF NOT EXISTS srczb (key INT, value STRING) USING hive")
//  /*  hiveContext.sql("DROP TABLE IF EXISTS student_infos");
//    hiveContext.sql("CREATE TABLE IF NOT EXISTS student_infos (name STRING, age INT)");
//    hiveContext.sql("LOAD DATA "
//        + " INPATH '/tmp/hive/root/student/student_infos.txt' "
//        + "INTO TABLE student_infos");
//
//    hiveContext.sql("DROP TABLE IF EXISTS student_scores");
//    hiveContext.sql("CREATE TABLE IF NOT EXISTS student_scores (name STRING, score INT)");
//    hiveContext.sql("LOAD DATA "
//        + " INPATH '/tmp/hive/root/student/student_scores.txt' "
//        + "INTO TABLE student_scores");*/
//
//
//   val goodStudentsDF =
//
//     hiveContext.sql("SELECT si.name, si.age, ss.score "
//        + "FROM ods.student_infos si  "
//        + "JOIN ods.student_scores ss ON si.name=ss.name  "
//        + "WHERE ss.score>=30")
//     //goodStudentsDF.collect().foreach(println)
//
//     goodStudentsDF.select("name","age","score").write.mode(SaveMode.Overwrite).saveAsTable("hive_recordszb")
//     // After insertion, the Hive managed table has data now .write.mode(SaveMode.Overwrite)
//     //hiveContext.sql("SELECT * FROM hive_records").show()
//
//
//    /* goodStudentsDF.show()
//          goodStudentsDF.write.mode(SaveMode.Overwrite).saveAsTable("good_student_infos01")
//    //goodStudentsDF.write.option("path", "/tmp/hive/root/").saveAsTable("good_student_infos01");
//
//
//
//
//
//    goodStudentsDF.show()
//
//   // goodStudentsDF("good_student_infos");  */
//
//    val goodStudentRows = hiveContext.table("hive_recordszb").collect()
//    for(goodStudentRow <- goodStudentRows) {
//      println("*******************************"+goodStudentRow);
//    }
//    hiveContext.close()
//  }
//
//}
//
