package com.atguigu.sparkcore.rdd.file.mysql

import com.atguigu.cm.constan.MysqlCons
import com.atguigu.sparkcore.util.{MySparkContextUtil, MysqlUtil}

/**
 * description ：将rdd写入mysql中
 * author ：剧情再美终是戏
 * mail : 13286520398@163.com
 * date ：Created in 2020/1/9 9:58
 * modified By ：
 * version: : 1.0
 */
object WMysql {

  /**
   * Entry point: builds a small test RDD of (id, name) pairs and writes it
   * into the MySQL `user` table using per-partition JDBC batch inserts.
   *
   * @param args forwarded to MySparkContextUtil.get to configure the SparkContext
   */
  def main(args: Array[String]): Unit = {

    // Create the SparkContext (configuration resolved from args by the project util).
    val sc = MySparkContextUtil.get(args)

    // Sample data: 15 (id, name) tuples spread over 2 partitions.
    val rdd = sc.parallelize(Array((11, "11"), (12, "12"), (13, "13"), (14, "14"), (15, "15"), (16, "16"), (17, "17"), (22, "17"), (33, "17"), (44, "17"), (55, "17"), (66, "17"), (77, "17"), (1, "17"), (2, "17")), 2)

    // One connection + one PreparedStatement per partition, so JDBC objects
    // are created on the executors and never serialized from the driver.
    val sql = "insert into user VALUE(?,?)"
    rdd.foreachPartition { it =>
      val conn = MysqlUtil.getConnection("/rdd")
      try {
        val ps = conn.prepareStatement(sql)
        try {
          // Flush the batch every `batchSize` rows to bound memory on the executor.
          // (Project constant keeps its original — misspelled — name.)
          val batchSize = MysqlCons.bathCommitSize
          var total = 0

          it.foreach { case (id, name) =>
            total += 1
            ps.setInt(1, id)
            ps.setString(2, name)
            ps.addBatch()
            if (total % batchSize == 0) { // full batch reached: submit it
              ps.executeBatch()
            }
          }
          if (total % batchSize != 0) { // submit the trailing partial batch
            ps.executeBatch()
          }
        } finally {
          // Always release the statement, even when an insert fails,
          // so a bad row cannot leak JDBC resources on the executor.
          ps.close()
        }
      } finally {
        conn.close()
      }
    }

    // Release the SparkContext.
    MySparkContextUtil.close(sc)
  }

}
