package org.example

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

import java.util.Properties

/**
 * Demo job: reads the `spark` table from a local MySQL instance, appends two
 * hard-coded score rows back into the same table, and dumps the new rows to a
 * single-part CSV. Intended for local experimentation (`local[*]` master).
 */
object data1_JDBC {
  def main(args: Array[String]): Unit = {
    // Local Spark session using all available cores.
    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    val sc = spark.sparkContext

    // MySQL connection properties.
    // NOTE(review): credentials are hard-coded — acceptable for a demo only;
    // move to config/environment for anything else.
    val properties: Properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "123456")
    // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x class;
    // Connector/J 8.x renamed it to "com.mysql.cj.jdbc.Driver" — confirm which
    // connector jar is actually on the classpath before changing this.
    properties.setProperty("driver", "com.mysql.jdbc.Driver")

    // Read the `spark` table into a DataFrame.
    val mysqlScore = spark.read.jdbc(
      "jdbc:mysql://localhost:3306/test?verifyServerCertificate=false&useSSL=false", "spark", properties)
//    mysqlScore.show()
//    mysqlScore.printSchema()
//    mysqlScore.select("name", "score").show(3)
//    // Add a derived column
//    mysqlScore.withColumn("kaoqing", mysqlScore("score") + 2).show()

    // Build two new rows as "name,number,score" strings and parse them.
    val newScore: RDD[String] = sc.parallelize(Array("李四,2,80", "王五,3,85"))
    val scoreRDD = newScore.map(_.split(","))
    val studentScore = scoreRDD.map(x => St_score(x(0), x(1), x(2)))

    // Convert the RDD of case classes to a DataFrame via the implicit encoder.
    import spark.implicits._
    val scoreDF = studentScore.toDF()

    // Append the new rows to the same MySQL table.
    scoreDF.write.mode("append").jdbc(
      "jdbc:mysql://localhost:3306/test?verifyServerCertificate=false&useSSL=false", "spark", properties)

    // Save a single-part CSV copy.
    // NOTE: csv() writes a DIRECTORY named score.csv containing part files, and
    // fails on re-run because the default save mode is ErrorIfExists.
    scoreDF.coalesce(1).write.csv("src/main/resources/score.csv")

    // Stopping the session also stops the underlying SparkContext;
    // calling only sc.stop() would leave the SparkSession marked active.
    spark.stop()
  }

  /** Row type for the score table; all columns are kept as strings to match the JDBC schema. */
  case class St_score(name: String, number: String, score: String)
}
