package cn.whuc.sparksql

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

import java.sql
import java.util.{Date, Properties}

/**
 * Demo: build a Dataset from a case class and append it to a MySQL table
 * over JDBC. The write itself is commented out so the demo can run without
 * a live database.
 */
object Demo_write_mysql {
  def main(args: Array[String]): Unit = {
    // Local SparkSession. A meaningful app name (the original was a blank
    // string) makes this job identifiable in the Spark UI and in logs.
    val ss: SparkSession = SparkSession
      .builder()
      .config(new SparkConf()
        .setMaster("local[*]")
        .setAppName("Demo_write_mysql"))
      .getOrCreate()

    import ss.implicits._

    // JDBC connection properties (string key/value pairs).
    // NOTE(review): credentials are hard-coded; move them to configuration
    // or environment variables before using this outside a demo.
    val p: Properties = new Properties()

    p.setProperty("user", "root")
    p.setProperty("password", "111111")

    // Example (disabled): create an RDD of Emp, convert it to a Dataset via
    // the case-class encoder, then append the rows into MySQL table `emp`.
//    val rdd: RDD[Emp] = ss.
//      sparkContext.makeRDD(List(Emp(9527, "史蒂芬周", "厨子", new sql.Date(new Date().getTime()), 8899)))
//
//    val ds: Dataset[Emp] = rdd.toDS()
//
//    ds.write.mode("append").jdbc("jdbc:mysql://localhost:3306/data1","emp",p)

    // Release Spark resources before exiting.
    ss.stop()
  }
}
case class Emp(empno:Int,ename:String,job:String,hiredate: java.sql.Date,sal:Float)