package chapter04

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{SaveMode, SparkSession}

import java.util.Properties

/**
 * Batch job: loads two half-year employee-salary CSV files, unions them,
 * and appends the combined rows into the MySQL table `student.salary`.
 *
 * Expects `input/Employee_salary_first_half.csv` and
 * `input/Employee_salary_second_half.csv` (both with a header row) to exist
 * relative to the working directory, and a reachable MySQL instance at
 * hadoop01:3306.
 */
object Test12_csvToMySQL {
  def main(args: Array[String]): Unit = {
    // Silence Spark's INFO chatter; keep warnings and errors.
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    val spark = SparkSession.builder()
      .appName("csvToMySQL")
      .master("local[*]")
      .getOrCreate()

    // Read the first-half CSV; header row provides column names,
    // inferSchema lets Spark derive column types from the data.
    val df1 = spark.read
      .format("csv")
      .option("inferSchema", "true")
      .option("header", "true")
      .load("input/Employee_salary_first_half.csv")
    df1.show(5)

    // Read the second-half CSV (same schema, shorthand .csv() reader).
    val df2 = spark.read
      .option("inferSchema", "true")
      .option("header", "true")
      .csv("input/Employee_salary_second_half.csv")
    df2.show(5)

    // Write the combined data into MySQL.
    // useSSL=false avoids the SSL warning on non-TLS dev instances.
    val url = "jdbc:mysql://hadoop01:3306/student?" +
      "useUnicode=true&characterEncoding=utf8&useSSL=false"
    val proWrite = new Properties()
    // NOTE(review): credentials are hard-coded for a local/dev setup;
    // move them to configuration before any shared deployment.
    proWrite.setProperty("user", "root")
    proWrite.setProperty("password", "123456")

    // unionByName matches columns by NAME rather than position, so the two
    // schema-inferred CSVs cannot be silently mis-aligned if their header
    // orders ever differ. Behavior is identical when the orders match.
    df1.unionByName(df2)
      .write
      .mode(SaveMode.Append) // append to existing `salary` rows, never truncate
      .jdbc(url, "salary", proWrite)

    spark.stop()
  }
}
