package test


import org.apache.spark.sql.{DataFrame, SparkSession}

import java.util.Properties

object jj {

  /**
   * Entry point: reads a GBK-encoded CSV of student scores, appends it to the
   * MySQL table `spark` in the local `test` database, then reads the table
   * back over JDBC and prints it.
   *
   * Requires a local MySQL server on port 3306 and the CSV at
   * `src/main/resources/23data2.csv`.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("jj") // name the app so it is identifiable in the Spark UI
      .master("local[*]")
      .getOrCreate()

    // JDBC connection settings for the local MySQL `test` database.
    val properties: Properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "123456")
    // NOTE(review): `com.mysql.jdbc.Driver` is the legacy Connector/J 5.x class;
    // Connector/J 8.x renames it to `com.mysql.cj.jdbc.Driver` — confirm which
    // connector version is on the classpath.
    properties.setProperty("driver", "com.mysql.jdbc.Driver")

    // FIX: the SSL-disabling property is `useSSL`, not `userSSL`. The typo
    // meant SSL was never actually turned off and the driver kept logging
    // certificate warnings. Also hoisted: the URL was duplicated at both
    // call sites below.
    val jdbcUrl = "jdbc:mysql://localhost:3306/test?" +
      "verifyServerCertificate=false&useSSL=false"

    // Lazy JDBC read of the target table; evaluated only when .show() runs,
    // so it observes the state of the table *after* the append below.
    val mysqlScore = spark.read.jdbc(jdbcUrl, "spark", properties)

    // Read the semester scores from a GBK-encoded CSV with a header row.
    val scoreCSV = spark.read
      .option("encoding", "GBK")
      .option("header", "true")
      .csv("src/main/resources/23data2.csv")
    scoreCSV.show(3)

    // Normalize column names to match the MySQL table schema, then append.
    val scoreDF: DataFrame = scoreCSV.toDF("name", "number", "score")
    scoreDF.write.mode("append").jdbc(jdbcUrl, "spark", properties)

    // Show the table contents including the rows just appended (lazy read above).
    mysqlScore.show()

    // FIX: stop the whole SparkSession (this also stops the underlying
    // SparkContext); the original stopped only the context.
    spark.stop()
  }

  /**
   * Row type for a student score record ("name,number,score").
   * Kept for an alternative RDD-based write path that was previously
   * sketched in commented-out code (makeRDD -> split -> toDF -> jdbc append).
   */
  case class Score(name: String, number: String, score: String)

}
