package org
import org.apache.spark.sql.SparkSession
import org.apache.spark.rdd.RDD
import java.util.Properties
object sparkYun2_JDBC {

  /** Connection string for the local MySQL `test` database.
    * SSL is disabled explicitly to avoid certificate warnings on a dev box.
    */
  private val JdbcUrl =
    "jdbc:mysql://localhost:3306/test?verifyServerCertificate=false&useSSL=false"

  /** Reads the `spark` table from a local MySQL database over JDBC,
    * prints it, and registers it as the temporary view "spark".
    *
    * A commented-out example below shows how rows would be written back
    * to the same table via a case-class-backed DataFrame.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    // Kept for the commented-out RDD write example below.
    val sc = spark.sparkContext

    // JDBC connection properties for MySQL.
    // NOTE(review): credentials are hard-coded; move them to configuration
    // or environment variables before this leaves a demo context.
    val properties: Properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "123456")
    // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x class;
    // Connector/J 8+ uses "com.mysql.cj.jdbc.Driver" — confirm which jar is on
    // the classpath before changing it.
    properties.setProperty("driver", "com.mysql.jdbc.Driver")

    // Read the whole `spark` table into a DataFrame.
    val mysqlScore = spark.read.jdbc(JdbcUrl, "spark", properties)
    mysqlScore.show()
    // Fixed typo: the view was previously registered as "saprk".
    mysqlScore.createTempView("spark")

    // TODO: import regular grades with Navicat, read them in Spark and
    // compute the average score.

    // Write data back to MySQL (rows mapped onto the table's columns):
    //    val data: RDD[String] = sc.makeRDD(Array("张三，1001，100", "李四，1002，99"))
    //    // 1. Split each record into the MySQL column values.
    //    //    NOTE(review): the sample rows above use full-width commas (，)
    //    //    but are split on ASCII "," — fix the data or the delimiter
    //    //    before enabling this.
    //    val dataRDD = data.map(_.split(","))
    //    // 2. Map each split row onto the Score case class.
    //    val scoreRDD = dataRDD.map(x => Score(x(0), x(1), x(2)))
    //    // 3. Convert the RDD to a DataFrame and append it to the table.
    //    import spark.implicits._
    //    val dataDF = scoreRDD.toDF()
    //    dataDF.write.mode("append").jdbc(JdbcUrl, "spark", properties)
    // mysqlScore.show()
    // TODO: read the regular-grades CSV file and write it into the
    //       `spark` score table in MySQL.

    // Stop the whole session (also stops the underlying SparkContext),
    // instead of stopping only `sc`.
    spark.stop()
  }

  /** Row layout matching the MySQL `spark` table: student name,
    * student number, and score (all kept as strings, as stored).
    */
  case class Score(name: String, number: String, score: String)

}
