package org.example

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.nlpcn.commons.lang.viterbi.function.Score

import java.util.Properties

object sparkData2_JDBC {

  /**
   * Reads the `spark` table from a local MySQL test database, appends two
   * hard-coded score rows to it via a DataFrame write, and then shows the
   * table contents.
   *
   * Connection details (host, credentials, driver class) are hard-coded
   * for a local test instance.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      // appName is required: SparkContext throws "An application name must
      // be set" when it is not supplied here or by the launcher.
      .appName("sparkData2_JDBC")
      .master("local[*]")
      .getOrCreate()
    val sc = spark.sparkContext

    // JDBC connection properties for the local MySQL test database.
    val properties: Properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "123456")
    // NOTE(review): com.mysql.jdbc.Driver is the legacy (pre-8.x) driver
    // class name; Connector/J 8+ uses com.mysql.cj.jdbc.Driver — confirm
    // which driver jar is on the classpath.
    properties.setProperty("driver", "com.mysql.jdbc.Driver")

    // Single shared URL for both read and write.
    // Fixed: "Localhost" -> "localhost" and "userSSl" -> "useSSL"; the
    // misspelled key was silently ignored by the driver, so SSL was never
    // actually disabled.
    val jdbcUrl = "jdbc:mysql://localhost:3306/test?" +
      "verifyServerCertificate=false&useSSL=false"

    // Lazily-defined read of the current `spark` table.
    val mysqlScore = spark.read.jdbc(jdbcUrl, "spark", properties)

    // New rows to append: "name,number,score" CSV strings.
    val data: RDD[String] = sc.makeRDD(Array("张三,1,90", "李四,2,78"))
    // 1. Split each CSV line into its columns.
    val dataRDD = data.map(_.split(","))
    // 2. Map each row onto the Score case class. This resolves to the Score
    //    defined in this object (higher precedence than the imported
    //    org.nlpcn ... viterbi.function.Score, which appears to be unused).
    val scoreRDD = dataRDD.map(x => Score(x(0), x(1), x(2)))
    // 3. Convert the RDD to a DataFrame (needs the session's implicits).
    import spark.implicits._
    val scoreDF = scoreRDD.toDF()
    // Append the new rows to the same table that was read above.
    scoreDF.write.mode("append").jdbc(jdbcUrl, "spark", properties)

    // NOTE(review): DataFrame reads are lazy, so this query executes here —
    // after the append — and will therefore include the newly written rows.
    mysqlScore.show()
    sc.stop()
  }

  /** One row of the `spark` score table: name, student number, score. */
  case class Score(name: String, number: String, score: String)

}
