package chapter04

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * Demo of reading from and writing to Hive through Spark SQL.
 *
 * Requires a reachable Hive metastore (enableHiveSupport) containing the
 * table `db_hive1.house_all`; appends a small demo DataFrame to table
 * `xiaoshou` in the current database.
 */
object Test13_Hive {
  def main(args: Array[String]): Unit = {
    // Impersonate "root" for HDFS writes issued by the Hive integration.
    System.setProperty("HADOOP_USER_NAME", "root")
    // Silence Spark's verbose INFO logging; keep warnings and errors.
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("hiveTest")
      .enableHiveSupport()
      .getOrCreate()
    // Ensure the session (and its underlying SparkContext) is always
    // released, even if a query fails — the original leaked it.
    try {
      import spark.implicits._
      // List all databases registered in the Hive metastore.
      spark.sql("show databases").show
      // Read the contents of an existing Hive table.
      val df = spark
        .read
        .table("db_hive1.house_all")
      df.show(5)
      // Build a small DataFrame and append it to a Hive table.
      val sc = spark.sparkContext
      val df1 = sc.makeRDD(List((1, "黄蓉", 18, "逍遥游"),
          (2, "郭靖", 20, "降龙十八掌")))
        .toDF("id", "name", "age", "gongFu")
      df1.write
        .mode(SaveMode.Append)
        .saveAsTable("xiaoshou")
    } finally {
      spark.stop()
    }
  }
}
