package org.example

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.{SparkSession, types}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

import java.util.Properties

object jdbc {
  /**
   * Reads the `spark` table from a local MySQL database over JDBC, registers it
   * as a temp view, then appends a small hand-built dataset back to the same table.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    val sc = spark.sparkContext

    // JDBC connection properties for the MySQL database.
    val properties: Properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "123456")
    // FIX: the Spark JDBC property key is "driver", not "drive" — with the
    // misspelled key the driver class was silently ignored.
    // NOTE(review): com.mysql.jdbc.Driver is the legacy Connector/J 5.x class;
    // on Connector/J 8.x use com.mysql.cj.jdbc.Driver — confirm which jar is on
    // the classpath.
    properties.setProperty("driver", "com.mysql.jdbc.Driver")

    val mysqlScore = spark.read.jdbc("jdbc:mysql://localhost:3306/test?" +
      "verifyServerCertificate=false&useSSL=false", "spark", properties)
    mysqlScore.createTempView("spark")

    // Sample rows to append. NOTE: the first record uses full-width commas (，)
    // and the second mixes full-width and ASCII commas.
    val data: RDD[String] = sc.makeRDD(Array("张三，11，100", "李四，12,99"))
    // FIX: split on BOTH the ASCII "," and the full-width "，" separator.
    // The original split(",") left "张三，11，100" as a single field, so the
    // x(1)/x(2) accesses below threw ArrayIndexOutOfBoundsException at runtime.
    val dataRDD = data.map(_.split("[,，]"))
    // Map each split row into the Score case class (name, number, score).
    val scoreRDD = dataRDD.map(x => Score(x(0), x(1), x(2)))

    import spark.implicits._
    val dataDF = scoreRDD.toDF()
    // Append the rows to the same MySQL table the view was read from.
    dataDF.write.mode("append").jdbc("jdbc:mysql://localhost:3306/test?" +
      "verifyServerCertificate=false&useSSL=false", "spark", properties)

    // Stop the whole session: spark.stop() also stops the underlying SparkContext.
    spark.stop()
  }

  /** Row schema matching the MySQL `spark` table; all columns kept as strings. */
  case class Score(name: String, number: String, score: String)
}
