package org.example

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

import java.util.Properties

object pg {

  /** JDBC URL of the local MySQL `test` database (SSL disabled for local dev). */
  private val JdbcUrl =
    "jdbc:mysql://localhost:3306/test?verifyServerCertificate=false&useSSL=false"

  /**
   * Entry point: reads the `spark` table from MySQL, builds two new score rows
   * from an in-memory RDD, and appends them back into the same table.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("spark")
      .getOrCreate()

    try {
      val sc = spark.sparkContext

      // JDBC connection properties.
      // NOTE(review): credentials are hard-coded — fine for a local demo only;
      // move to config/env before any real use. `com.mysql.jdbc.Driver` is the
      // legacy Connector/J 5 class name; Connector/J 8+ expects
      // `com.mysql.cj.jdbc.Driver` — confirm which connector is on the classpath.
      val properties = new Properties()
      properties.setProperty("user", "root")
      properties.setProperty("password", "123456")
      properties.setProperty("driver", "com.mysql.jdbc.Driver")

      // Read the existing score table (used by the commented demo calls below).
      val mysqlScore = spark.read.jdbc(JdbcUrl, "spark", properties)
      // mysqlScore.show()
      // mysqlScore.printSchema()
      // mysqlScore.select("name", "score").show(3)
      // mysqlScore.withColumn("kaoqing", mysqlScore("score") + 2).show()

      // Two new rows as "name,number,score" CSV strings, parsed into St_score records.
      val newScore: RDD[String] = sc.parallelize(Array("lisi,2,80", "wangwu,3,85"))
      val studentScore = newScore.map(_.split(",")).map(f => St_score(f(0), f(1), f(2)))

      import spark.implicits._
      val scoreDF = studentScore.toDF()

      // Append (not overwrite) the new rows into the same MySQL table.
      scoreDF.write.mode("append").jdbc(JdbcUrl, "spark", properties)
    } finally {
      // Always shut down the session (this also stops the SparkContext),
      // even when the job fails — the original `sc.stop()` was skipped on error.
      spark.stop()
    }
  }

  /** Row schema for the score table: student name, student number, score (all strings). */
  case class St_score(sname: String, snumber: String, score: String)
}
