package org
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

import java.util.Properties
/** Demo job: reads a MySQL table through Spark JDBC, appends a small
  * in-memory dataset back to it, and loads a GBK-encoded CSV file.
  *
  * Requires a local MySQL instance (database `test`, table `spark`) and the
  * MySQL Connector/J jar on the classpath.
  */
object jdbc {

  /** One score record; all fields are kept as String to match the raw
    * comma-separated demo input (no numeric parsing is attempted).
    */
  final case class Score(name: String, number: String, score: String)

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    val sc = spark.sparkContext

    // JDBC connection settings.
    // NOTE(review): credentials are hard-coded — acceptable only for a local
    // demo; move to configuration or environment variables otherwise.
    val properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "123456")
    // "com.mysql.jdbc.Driver" is the legacy class name, deprecated since
    // Connector/J 8 — prefer "com.mysql.cj.jdbc.Driver" with modern drivers.
    properties.setProperty("driver", "com.mysql.jdbc.Driver")

    // Single source of truth for the JDBC URL (was duplicated for read/write).
    val url =
      "jdbc:mysql://localhost:3306/test?verifyServerCertificate=false&useSSL=false"

    // Read the existing "spark" table and expose it to Spark SQL.
    val mysqlScore = spark.read.jdbc(url, "spark", properties)
    mysqlScore.createTempView("spark")

    // Build a tiny in-memory dataset and append it to the same table.
    import spark.implicits._
    val data: RDD[String] = sc.makeRDD(Array("檀健次,1001,100", "曾婉之,1002,99"))
    val scoreDF = data
      .map(_.split(","))                                 // assumes exactly 3 fields per line
      .map(fields => Score(fields(0), fields(1), fields(2)))
      .toDF()
    scoreDF.write.mode("append").jdbc(url, "spark", properties)

    // Load a GBK-encoded CSV with a header row.
    val scoreCsv = spark.read
      .option("encoding", "GBK")
      .option("header", "true")
      .csv("src/main/resources/yun2.csv")
    scoreCsv.show(3)
    mysqlScore.show()

    // Stop the session (this also stops the underlying SparkContext).
    spark.stop()
  }
}
