package utils

import java.util.Properties
import java.sql.{DriverManager, PreparedStatement, Connection}

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
/**
 * @author 杨铭
 *         2023/6/19,22:51
 */

object WriteRDDToMySQL {
  /**
   * Writes an array of (Int, Double) rows into MySQL by parallelizing the array
   * into an RDD and inserting each partition with a single JDBC batch.
   *
   * @param username MySQL user name
   * @param password MySQL password
   * @param url      JDBC connection URL, e.g. "jdbc:mysql://host:3306/db"
   * @param sql      parameterized two-column insert, e.g.
   *                 "INSERT INTO mytable (column1, column2) VALUES (?, ?)"
   * @param array    rows to insert; tuple order maps to (placeholder 1, placeholder 2)
   * @param sc       active SparkContext; NOTE: this method calls sc.stop() when done,
   *                 so the context is unusable afterwards
   */
  def connection(username: String, password: String, url: String, sql: String, array: Array[(Int, Double)], sc: SparkContext): Unit = {

    // Load the MySQL JDBC driver explicitly (a harmless no-op on JDBC 4+,
    // which discovers drivers automatically via ServiceLoader).
    Class.forName("com.mysql.cj.jdbc.Driver")

    // Insert one partition's rows in a single JDBC batch.
    // This closure runs on the executors, so it only captures serializable
    // values (url / username / password / sql), never the SparkContext.
    def insertBatch(iterator: Iterator[(Int, Double)]): Unit = {
      val conn = DriverManager.getConnection(url, username, password)
      try {
        val preparedStatement = conn.prepareStatement(sql)
        try {
          iterator.foreach { case (column1, column2) =>
            preparedStatement.setInt(1, column1)
            preparedStatement.setDouble(2, column2)
            preparedStatement.addBatch()
          }
          preparedStatement.executeBatch()
        } finally {
          preparedStatement.close()
        }
      } finally {
        // Close even when the batch fails, so executor connections don't leak.
        conn.close()
      }
    }

    // Distribute the rows and write each partition to MySQL in parallel.
    val rdd: RDD[(Int, Double)] = sc.parallelize(array)
    rdd.foreachPartition(insertBatch)
    sc.stop()
  }

}
