package com.doit.spark.day02

import com.doit.spark.day01.utils.SparkUtil
import com.mysql.jdbc.Driver
import org.apache.spark.rdd.RDD

import java.sql.DriverManager





/**
 * Demonstrates `mapPartitions`: writes each partition's users to MySQL
 * over a single per-partition JDBC connection.
 *
 * @since 2022/1/3 16:21
 * @author MDK
 * @version 2021.2.2
 */
object C04_Transformation03 {
  def main(args: Array[String]): Unit = {
    val sc = SparkUtil.getSc
    val rdd: RDD[User] = sc.parallelize(List[User]( User(1,"zss"), User(2,"lss"),User(3,"ww"),User(4,"zll"),User(5,"wdd")), 3)
    // Persist each partition's users to MySQL, then pass the elements through
    // unchanged so downstream transformations still see the data.
    val rdd2 = rdd.mapPartitions(iterator => {
      // BUGFIX: the original returned `iterator` after `foreach` had already
      // consumed it, so rdd2 was always empty. Materialize the partition once,
      // write it out, and return a fresh iterator over the same elements.
      val users = iterator.toList

      // One JDBC connection per partition (cheaper than one per element).
      Class.forName(new Driver().getClass.getName)
      val connection = DriverManager.getConnection("jdbc:mysql://linux01:3306/doit", "root", "root")
      val ps = connection.prepareStatement("insert into tb_stu value (?,?)")
      try {
        users.foreach(e => {
          ps.setInt(1, e.id)
          ps.setString(2, e.name)
          ps.execute()
        })
      } finally {
        // BUGFIX: the original leaked both the statement and the connection
        // on every partition; always release JDBC resources.
        ps.close()
        connection.close()
      }
      users.iterator
    })
    rdd2.foreach(println)
  }
}
