package com.zhao.sparkonhive

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * Description: Demonstrates connecting Spark SQL to an external Hive metastore
 * and reading/writing Hive tables. <br/>
 * Copyright (c) 2020, Zhao <br/>
 * This program is protected by copyright laws. <br/>
 * Date: 2020/11/10 16:28
 *
 * @author Qiqi
 * @version 1.0
 */

object ConnectDemo {

  /**
   * Entry point: builds a local SparkSession with Hive support, switches to the
   * `mianshi` database, inserts one row into the `user` table via HiveQL, and
   * prints the table's contents.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setAppName(this.getClass.getName)
      .setMaster("local[*]")

    val spark: SparkSession = SparkSession.builder().config(conf)
      // External Hive support is disabled by default; it must be enabled explicitly.
      .enableHiveSupport()
      // Point the warehouse at HDFS; otherwise tables are created in a local directory.
      .config("spark.sql.warehouse.dir","hdfs://ns1:9000/user/hive/warehouse")
      .getOrCreate()

    // Ensure the session is always released, even if a statement below fails.
    try {
      spark.sql("use mianshi")

//    val df = spark.read.json("User")   // first column: name (string), second: salary (long)
//    spark.sql("drop table user")
//    df.printSchema()
//    df.write.mode(SaveMode.Append).saveAsTable("user")

      // saveAsTable: column names AND data types must match the existing table.
//    val df: DataFrame = List((1000l, "ace"), (1011l, "jaek")).toDF("salary", "name")
//    df.write.mode(SaveMode.Append).saveAsTable("user")
//
//    // insertInto
      // insertInto: column data types and ORDER must match; names are ignored.
//    val df1 = List((1000l, "zhao"), (1011l, "wang")).toDF("s", "n")
//    df1.write.insertInto("user")

      // Insert using a plain Hive INSERT statement.
      spark.sql("insert into table user values(10000,'li')")

      spark.sql("select * from user").show()
    } finally {
      spark.stop()
    }
  }
}
