package cn.doitedu.dfdemo

import java.util.Properties

import cn.doitedu.util.SparkUtil
import org.apache.spark.sql.SaveMode

/**
 * @Date 22.4.10
 * @Created by HANGGE
 * @Description Demo: write a DataFrame loaded from CSV into a MySQL table via JDBC.
 */
object C09_DF_Write_Mysql {

  /**
   * Reads teacher records from a local CSV file and appends them to the
   * MySQL table `tb_teacher` via JDBC.
   *
   * NOTE: connection credentials are hard-coded for demo purposes only —
   * in real code load them from configuration or a secrets store.
   */
  def main(args: Array[String]): Unit = {
    val session = SparkUtil.getSession
    try {
      // header=true: first CSV row supplies column names.
      // inferSchema=true: Spark samples the data to derive column types.
      val df = session.read
        .option("header", true)
        .option("inferSchema", true)
        .csv("data/csv/Teacher2.csv")

      // Print the inferred schema so it can be checked against the target table.
      df.printSchema()
      /**
       * Expected schema:
       * root
       * |-- id: integer (nullable = true)
       * |-- name: string (nullable = true)
       * |-- age: integer (nullable = true)
       * |-- gender: string (nullable = true)
       * |-- city: string (nullable = true)
       * |-- sal: integer (nullable = true)
       * |-- job: string (nullable = true)
       * |-- email: string (nullable = true)
       *
       * Matching target table DDL:
       * create table tb_teacher(
       *   id int,
       *   name varchar(20),
       *   age int,
       *   gender varchar(20),
       *   city varchar(20),
       *   sal int,
       *   job varchar(20),
       *   email varchar(20)
       * )
       */

      // JDBC connection credentials (demo only).
      val properties = new Properties()
      properties.setProperty("user", "root")
      properties.setProperty("password", "root")

      /**
       * Save modes when writing a DataFrame to MySQL:
       *   Append        - append rows to the existing table
       *   Overwrite     - drop/recreate the table and overwrite its data
       *   ErrorIfExists - default; auto-creates the table, fails if it exists
       *   Ignore        - silently do nothing if the table already exists
       */
      df.write
        .mode(SaveMode.Append)
        .jdbc("jdbc:mysql://localhost:3306/doit30?useSSL=false&characterEncoding=utf8", "tb_teacher", properties)
    } finally {
      // Release the SparkSession so the JVM can exit cleanly
      // (the original never stopped it).
      session.stop()
    }
  }

}
