package day4

import Utils.SparkUtils

import java.sql.DriverManager

object Test1 {
  /** Inserts sample (name, birthday) rows into the MySQL `student` table.
    *
    * Uses `foreachPartition` so exactly one JDBC connection is opened per
    * partition — a plain `foreach` would open one connection per record,
    * which is far too many.
    */
  def main(args: Array[String]): Unit = {
    val sc = SparkUtils.getSparkContext(4, "ceshi2")
    val rdd = sc.makeRDD(
      List(("zs", "2000-10-01"), ("lisi", "2000-10-02"), ("wangwu", "2000-10-03"), ("zhaoliu", "2000-10-04")),
      2
    )

    rdd.foreachPartition(iter => {
      // NOTE(review): credentials are hard-coded; move them to configuration.
      val conn = DriverManager.getConnection("jdbc:mysql://hadoop10:3306/test1", "root", "123456")
      try {
        // Prepared statement is parsed once and reused for every row;
        // placeholders also guard against SQL injection.
        val ps = conn.prepareStatement("insert into student values(null,?,?)")
        try {
          // Batch the inserts: one executeBatch round trip per partition
          // instead of one executeUpdate round trip per record.
          iter.foreach { case (name, birthday) =>
            ps.setString(1, name)
            ps.setString(2, birthday)
            ps.addBatch()
          }
          ps.executeBatch()
        } finally ps.close() // always release the statement, even on failure
      } finally conn.close() // always release the connection, even on failure
    })

    sc.stop()
  }
}
