package cn.doitedu.dfdemo

import java.util.Properties

import cn.doitedu.util.SparkUtil
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * Demo: read a CSV file into a DataFrame and write it into a
 * partitioned Hive table via Spark SQL.
 *
 * @author HANGGE
 * @since 2022-04-10
 */
object C10_DF_Write_HIve {

  /**
   * Entry point: loads a CSV file into a DataFrame and persists it
   * as an Overwrite-mode, gender-partitioned Hive table.
   *
   * @param args optional; args(0) overrides the default CSV input path
   */
  def main(args: Array[String]): Unit = {
    // Allow the input path to be supplied on the command line;
    // fall back to the original hard-coded sample file.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "file:///D://code/doit30_spark_sql/data/csv/Teacher2.csv"

    val session = SparkSession
      .builder()
      .master("local[*]")
      .appName(this.getClass.getSimpleName)
      // Enable Hive support.
      // Requires the spark-hive (and MySQL connector) dependencies on the
      // classpath, plus hive-site.xml so the Hive metastore service is reachable.
      .enableHiveSupport()
      .getOrCreate()

    try {
      // Read the CSV with a header row and let Spark infer column types.
      val df = session.read
        .option("header", true)
        .option("inferSchema", true)
        .csv(inputPath)

      // Print the inferred schema for inspection.
      df.printSchema()

      // df.write.saveAsTable("default.tb_spark_teacher")

      // Overwrite the target Hive table, partitioning by the "gender" column.
      df.write
        .mode(SaveMode.Overwrite)
        .partitionBy("gender")
        .saveAsTable("default.tb_spark_teacher2")
    } finally {
      // Always release the SparkSession so the application exits cleanly.
      session.stop()
    }
  }

}
