package chapter04

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{SaveMode, SparkSession}

import java.util.Properties

object Test11_MySQL {
  /**
   * Demonstrates reading from and writing to MySQL through Spark SQL's JDBC
   * data source, showing both the shorthand `jdbc(...)` API and the generic
   * `format("jdbc")` + options API for each direction.
   */
  def main(args: Array[String]): Unit = {
    System.setProperty("HADOOP_USER_NAME","root")
    // Silence Spark's INFO chatter; only warnings and above are printed.
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("mysql")
      .getOrCreate()
    try {
      import spark.implicits._
      val url = "jdbc:mysql://hadoop01:3306/student?" +
        "useUnicode=true&characterEncoding=utf8&useSSL=false"
      // NOTE(review): credentials are hard-coded for this demo; in real code
      // load them from configuration or a secrets store instead.
      val connProps = new Properties()
      connProps.put("user","root")
      connProps.put("password","123456")
      // Read, variant 1: shorthand jdbc(url, table, properties).
      val df = spark.read.jdbc(url, "user", connProps)
      df.show() // side-effecting call — keep the parentheses
      // Read, variant 2: equivalent generic format("jdbc") with options.
      val df1 = spark.read.format("jdbc")
        .option("url", url)
        .option("user", "root")
        .option("password", "123456")
        .option("dbtable", "user")
        .load()
      df1.show()
      // Write to MySQL: build a one-row Dataset[Stu] to append.
      val sc = spark.sparkContext
      val df2 = sc.makeRDD(List((12, "唐嫣", 40, "大数据1班")))
        .toDF("id", "name", "age", "grade")
        .as[Stu]
      // SaveMode.Append: append to the target table if it exists,
      // create it first if it does not.
      df2.write
        .mode(SaveMode.Append)
        .jdbc(url,"abc",connProps)
      // Same write expressed with the generic format("jdbc") API.
      df2.write
        .format("jdbc")
        .mode(SaveMode.Append)
        .option("url", url)
        .option("user", "root")
        .option("password", "123456")
        .option("dbtable", "user")
        .save()
    } finally {
      // Always release the SparkSession, even if a JDBC call above fails;
      // the original leaked it on any exception.
      spark.stop()
    }
  }
}
/** Row type for the student record appended to MySQL (id, name, age, class). */
case class Stu(id: Int, name: String, age: Int, grade: String)
