package com.a

import java.util.Properties

import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * @author Liu Guibin
 * @date 2020/8/21 17:01
 */
object readSource {
  // Silence Spark/Hadoop's verbose logging; only FATAL messages from "org" loggers pass.
  Logger.getLogger("org").setLevel(Level.FATAL)

  /**
   * Reads a local CSV of students, converts each row to a [[Stu]] (declared elsewhere
   * in this project), registers the resulting DataFrame as a temp view, runs a simple
   * SQL query over it, and appends the result to the MySQL table `student_age`.
   */
  def main(args: Array[String]): Unit = {

    val session: SparkSession = SparkSession.builder()
      .master("local")
      .appName("sql")
      .getOrCreate()
    import session.implicits._

    // Ensure the SparkSession is released even if the job fails partway through.
    try {
      // Each line is expected to be "id,name,score"; malformed rows will throw
      // NumberFormatException / ArrayIndexOutOfBoundsException at action time — TODO confirm input is clean.
      val dataRdd: RDD[String] = session.sparkContext
        .textFile("C:\\Users\\DFJX\\Desktop\\a.csv")
      val dataArrayRdd: RDD[Array[String]] = dataRdd.map(_.split(",")).repartition(1)
      val stuRdd: RDD[Stu] = dataArrayRdd.map(x => Stu(x(0).toInt, x(1), x(2).toDouble))

      val stuFrame: DataFrame = stuRdd.toDF()

      // createOrReplaceTempView avoids AnalysisException when the view name already exists
      // (createTempView fails on re-registration within the same session).
      stuFrame.createOrReplaceTempView("student")
      val resFrame: DataFrame = session
        .sql("select * from student  ")

      // Append query result to MySQL.
      // NOTE(review): credentials are hard-coded; move user/password to configuration or environment.
      val prop = new Properties
      prop.put("user", "root")
      prop.put("password", "123456")
      resFrame.write.mode("append")
        .jdbc("jdbc:mysql://localhost:3306/mydatabase?serverTimezone=UTC", "student_age", prop)
      println("数据完成！")
    } finally {
      session.stop() // release executors and local resources
    }
  }
}
