package com.doit.day02

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import java.sql.{Connection, DriverManager}

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: 学大数据 ,到多易教育
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description:
 */
object Demo01ToMysql {

  /**
   * Reads user records from "data/users" (CSV lines: id,field1,field2,field3)
   * and inserts them into the MySQL table `ums_user`.
   *
   * Fixes over the original version:
   *  - uses `foreachPartition` so one JDBC connection is opened per partition
   *    instead of one per record (a Connection is not serializable, so it must
   *    be created on the executor side — but not for every single element);
   *  - reuses a single PreparedStatement for all rows of a partition;
   *  - closes the statement and the connection in `finally` blocks so neither
   *    leaks, even when an insert fails.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc = SparkContext.getOrCreate(conf)

    // Parse each CSV line into a 4-tuple; the first column is a numeric id.
    val tps: RDD[(Int, String, String, String)] = sc.textFile("data/users")
      .map { line =>
        val arr = line.split(",")
        (arr(0).toInt, arr(1), arr(2), arr(3))
      }

    // Write the tuples to MySQL. Creating the connection at driver scope would
    // fail with "Task not serializable" (closure capture), so it is created
    // inside the partition function, once per partition.
    tps.foreachPartition { iter =>
      val conn: Connection = DriverManager.getConnection("jdbc:mysql://localhost:3306/doe47", "root", "root")
      try {
        // Pre-compiled once and reused for every row in this partition.
        val ps = conn.prepareStatement("insert into  ums_user values(?,?,?,?)")
        try {
          iter.foreach { tp4 =>
            ps.setInt(1, tp4._1)
            ps.setString(2, tp4._2)
            ps.setString(3, tp4._3)
            ps.setString(4, tp4._4)
            // Execute the insert for this row.
            ps.executeUpdate()
          }
        } finally {
          ps.close()
        }
      } finally {
        conn.close()
      }
    }

    sc.stop()
  }
}
