package org.example

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.nlpcn.commons.lang.viterbi.function.Score

import java.util.Properties


/**
 * Demo app: reads the `spark` table from a local MySQL `test` database over JDBC,
 * reads a GBK-encoded CSV of scores, and appends the CSV rows back into MySQL.
 *
 * Fixes vs. previous revision:
 *  - write URL was `jdbc:mysql//…` (missing `:`), which fails with "No suitable driver";
 *  - `verfyServerCertificate` / `userSSL` were typos of `verifyServerCertificate` /
 *    `useSSL` and were silently ignored by Connector/J;
 *  - read and write now share a single URL constant so they cannot drift apart.
 */
object sparkData2_JDBC {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("sparkData2_JDBC") // explicit name instead of Spark's generated default
      .master("local[*]")
      .getOrCreate()

    // JDBC credentials and driver for the local MySQL instance.
    // NOTE(review): `com.mysql.jdbc.Driver` is the Connector/J 5.x class; for
    // Connector/J 8+ use `com.mysql.cj.jdbc.Driver` — confirm the driver version on the classpath.
    val properties: Properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "123456")
    properties.setProperty("driver", "com.mysql.jdbc.Driver")

    // Single, correctly-spelled connection URL shared by read and write.
    val jdbcUrl =
      "jdbc:mysql://localhost:3306/test?verifyServerCertificate=false&useSSL=false"

    // Lazy JDBC read of the existing `spark` table (no action is triggered here).
    val mysqlScore = spark.read.jdbc(jdbcUrl, "spark", properties)

    //val data:RDD[String]=sc.makeRDD(Array("张三，1，90","李四,2,96"))
    // val dataRDD =data.map(_.split(","))
    // val scoreRDD=dataRDD.map(x=> Score(x(0),x(1),x(2)))
    //import  spark.implicits._
    // val scoreDF =scoreRDD.toDF()

    // CSV is GBK-encoded (Chinese Windows export) with a header row.
    val scoreCSV: DataFrame = spark.read
      .option("encoding", "GBK")
      .option("header", "true")
      .csv("E:\\yhm\\23data2.csv")
    scoreCSV.show(3)

    // `scoreCSV` is already a DataFrame — the previous `.toDF()` was an identity call.
    scoreCSV.write.mode("append").jdbc(jdbcUrl, "spark", properties)

    // Stops the SparkSession and its underlying SparkContext.
    spark.stop()
  }

  /** Row shape for a score record; all columns kept as strings to match the CSV. */
  final case class Score(name: String, number: String, score: String)
}
