package com.doit.day02

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import java.sql.{Connection, DriverManager}

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: 学大数据 ,到多易教育
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description:
 */
/**
 * Reads comma-separated user records from `data/users` and inserts them
 * into the MySQL table `ums_user`, one JDBC connection per RDD partition.
 *
 * Each input line is expected to have at least 4 fields:
 * id,field1,field2,field3 (id must parse as Int).
 */
object Demo01ToMysql02 {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc = SparkContext.getOrCreate(conf)

    // Parse each line into a 4-tuple: (id, f1, f2, f3).
    val tps: RDD[(Int, String, String, String)] = sc.textFile("data/users")
      .map(line => {
        val arr = line.split(",")
        (arr(0).toInt, arr(1), arr(2), arr(3))
      })

    // Open one connection per partition (not per row) to amortize the
    // connection cost. Use addBatch/executeBatch instead of one network
    // round trip per row, and ALWAYS close the statement and connection
    // in finally blocks so a failed insert cannot leak JDBC resources.
    tps.foreachPartition(rows => {
      val conn: Connection = DriverManager.getConnection("jdbc:mysql://localhost:3306/doe47", "root", "root")
      try {
        // Precompile the SQL once per partition.
        val ps = conn.prepareStatement("insert into  ums_user values(?,?,?,?)")
        try {
          rows.foreach(row => {
            // Bind the parameters for this row and queue it in the batch.
            ps.setInt(1, row._1)
            ps.setString(2, row._2)
            ps.setString(3, row._3)
            ps.setString(4, row._4)
            ps.addBatch()
          })
          // Flush the whole partition in a single batch execution.
          ps.executeBatch()
        } finally {
          ps.close()
        }
      } finally {
        conn.close()
      }
    })

    sc.stop()
  }
}
