package sparkSQL.study

import java.util.Properties

import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}

object JdbcWrite {

  /** Study example: writes a small Dataset of [[Goods]] rows into a MySQL table
   *  using two equivalent approaches — the generic `format("jdbc")` writer and
   *  the `DataFrameWriter.jdbc` shortcut. Both use SaveMode.Append, so running
   *  this twice (or once, since both sections write) appends duplicate rows;
   *  that is intentional for the demo.
   */
  def main(args: Array[String]): Unit = {

    // Build a local-mode SparkSession for this demo.
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("jdbcwrite...")
    val sparkSession = SparkSession.builder().config(sparkConf).getOrCreate()
    import sparkSession.implicits._

    try {
      val datas = sparkSession.sparkContext.makeRDD(List(Goods(1, "zhangsan", 19), Goods(2, "lisi", 20)))
      val dsDatas = datas.toDS()

      // 1. Generic writer: format("jdbc") with explicit connection options.
      // NOTE(review): credentials are hard-coded for the demo only — externalize
      // them (config/env/secret store) in real code.
      dsDatas.write
          .format("jdbc")
          .option("url", "jdbc:mysql://127.0.0.1:3306/student_test")
          .option("driver", "com.mysql.jdbc.Driver")
          .option("user", "root")
          .option("password", "123456")
          .option("dbtable", "students1906")
          .mode(SaveMode.Append)
          .save()

      // 2. DataFrameWriter.jdbc shortcut: connection credentials go in a
      //    java.util.Properties bag instead of individual options.
      val properties = new Properties()
      properties.setProperty("user", "root")
      properties.setProperty("password", "123456")
      dsDatas.write.mode(SaveMode.Append).jdbc("jdbc:mysql://127.0.0.1:3306/student_test",
                                               "students1906", properties)
    } finally {
      // Release Spark resources even if one of the writes fails.
      sparkSession.stop()
    }

  }

  /** Row type written to the JDBC table; columns map to (id, name, age). */
  case class Goods (id: Int, name: String, age: Int)

}
