package org.example

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

import java.util.Properties

/**
 * Demo entry point: reads the "spark" table from a local MySQL database
 * via JDBC and prints it. Contains a commented-out example of writing an
 * RDD back to the same table.
 */
object ketang7 {
  def main(args: Array[String]): Unit = {
    // Local Spark session using all available cores.
    val spark = SparkSession.builder().master("local[*]").appName("spark").getOrCreate()
    val sc = spark.sparkContext

    // JDBC connection settings for the MySQL database.
    // NOTE(review): credentials are hard-coded; move them to configuration
    // for anything beyond a local demo.
    val properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "123456")
    // NOTE(review): with MySQL Connector/J 8.0+ the driver class is
    // "com.mysql.cj.jdbc.Driver" — confirm which connector version is on the classpath.
    properties.setProperty("driver", "com.mysql.jdbc.Driver")

    // Read the whole "spark" table from MySQL and print it to the console.
    val mysqlScore = spark.read.jdbc(
      "jdbc:mysql://localhost:3306/test?verifyServerCertificate=false&useSSL=false",
      "spark",
      properties
    )
    mysqlScore.show()

    // TODO: import the coursework scores with Navicat, then read them in Spark
    // and compute the average score.

    /* Example: write rows into the MySQL table (kept for reference).
    val data: RDD[String] = sc.makeRDD(Array("张三,1001,100", "李四,1002,99"))
    // 1. Split each line into the MySQL column values.
    val dataRDD = data.map(_.split(","))
    // 2. Map the fields onto the Score case class.
    val scoreRDD = dataRDD.map(x => Score(x(0), x(1), x(2)))
    // 3. Convert the RDD to a DataFrame.
    import spark.implicits._
    val dataDF = scoreRDD.toDF()
    dataDF.write.mode("append").jdbc(
      "jdbc:mysql://localhost:3306/test?verifyServerCertificate=false&useSSL=false",
      "spark",
      properties
    )
    mysqlScore.show() */

    // TODO: read the coursework-score CSV file and write it into the MySQL "spark" table.

    // Stop the whole SparkSession (this also stops the underlying SparkContext).
    // The original called only sc.stop(), which left the session open.
    spark.stop()
  }

  /** Row type for the MySQL "spark" table: student name, student number, score. */
  final case class Score(name: String, number: String, score: String)
}
