package com.fwmagic.spark.core.sink

import java.sql.{Connection, Date, DriverManager, PreparedStatement}

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.slf4j.{Logger, LoggerFactory}

import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal

/**
  * Demo job: generates sample Student rows and batch-inserts them into MySQL.
  *
  * Uses RDD.foreachPartition so that one JDBC connection and one batch insert
  * are used per partition, instead of one connection per record.
  */
object StudentSinkToMysql {

    private val logger: Logger = LoggerFactory.getLogger(StudentSinkToMysql.getClass)

    // NOTE(review): credentials are hard-coded for this local demo only;
    // move them to external configuration before any real deployment.
    private val JdbcUrl =
        "jdbc:mysql://localhost:3306/spark_test?useSSL=false&useUnicode=true&characterEncoding=utf8"
    private val JdbcUser = "root"
    private val JdbcPassword = "123456"

    def main(args: Array[String]): Unit = {
        val conf: SparkConf = new SparkConf()
                .setAppName(this.getClass.getSimpleName)
                // Local mode; "*" = one worker thread per available core.
                .setMaster("local[*]")

        val sc = new SparkContext(conf)
        try {
            // Build ten sample rows immutably instead of appending to a mutable buffer.
            val now: Long = System.currentTimeMillis()
            val students = (1 to 10).map { i =>
                Student(i, "name_" + i, "pwd_" + i, new Date(now), new Date(now))
            }

            // Distribute the sample data across 3 partitions.
            val rdd: RDD[Student] = sc.parallelize(students, 3)

            // Write each partition to MySQL with a single batched insert.
            rdd.foreachPartition(fun)
        } finally {
            // Always release the SparkContext, even if the job fails.
            sc.stop()
        }
    }

    /**
      * Inserts every Student of one partition using a single JDBC batch.
      *
      * insert_time/update_time come from the database clock via NOW(), so the
      * Date fields carried by Student are intentionally not bound here.
      *
      * @param it the records of one RDD partition
      */
    private def fun(it: Iterator[Student]): Unit = {
        val sql = "insert into t_user(`name`,`password`,`insert_time`,`update_time`) values (?,?,NOW(),NOW())"
        var con: Connection = null
        var ps: PreparedStatement = null
        try {
            con = DriverManager.getConnection(JdbcUrl, JdbcUser, JdbcPassword)
            ps = con.prepareStatement(sql)
            it.foreach { stu =>
                ps.setString(1, stu.name)
                ps.setString(2, stu.password)
                ps.addBatch()
            }
            ps.executeBatch()
        } catch {
            // NonFatal so OutOfMemoryError/InterruptedException still propagate;
            // log once through the logger instead of also printing the stack trace.
            case NonFatal(e) => logger.error(e.getMessage, e)
        } finally {
            // Close statement and connection independently so a failure closing
            // the statement cannot leak the connection.
            if (ps != null) try ps.close() catch { case NonFatal(e) => logger.warn("Failed to close statement", e) }
            if (con != null) try con.close() catch { case NonFatal(e) => logger.warn("Failed to close connection", e) }
        }
    }
}
