package com.shengzai.opt

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

import java.sql.{Connection, DriverManager, PreparedStatement}

/**
 * Demo: writing an RDD to MySQL with `foreachPartition`.
 *
 * `foreachPartition` exists so that expensive, non-serializable resources
 * (here: a JDBC connection and prepared statement) are created ONCE per
 * partition instead of once per record. To get the full benefit we also
 * batch the inserts per partition with addBatch/executeBatch rather than
 * issuing one round-trip per row.
 *
 * Expected input format (data/students.txt), one student per line:
 *   sid,name,age,sex,clazz
 */
object Demo3ForeachPartitions {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("foreach")
      .getOrCreate()

    val sc: SparkContext = spark.sparkContext

    val stuRDD: RDD[String] = sc.textFile("data/students.txt")

    stuRDD.foreachPartition(
      (iter: Iterator[String]) => {
        // Explicit driver registration. NOTE(review): "com.mysql.jdbc.Driver" is the
        // legacy Connector/J 5.x class; Connector/J 8.x renamed it to
        // "com.mysql.cj.jdbc.Driver" — confirm which driver jar is on the classpath.
        // (Modern JDBC drivers auto-register via ServiceLoader, so this line is
        // usually optional.)
        Class.forName("com.mysql.jdbc.Driver")

        // One connection + one prepared statement per PARTITION (not per record).
        val con: Connection = DriverManager.getConnection("jdbc:mysql://192.168.59.100:3306/bigdata22?useUnicode=true&characterEncoding=UTF-8&useSSL=false", "root", "123456")

        try {
          val statement: PreparedStatement = con.prepareStatement("insert into student2 values (?,?,?,?,?)")

          try {
            // Accumulate the whole partition into a single batch: one network
            // round-trip per partition instead of one per row.
            iter.foreach(
              line => {
                val split: Array[String] = line.split(",")
                // Fail fast with context on malformed rows instead of an
                // opaque ArrayIndexOutOfBounds/NumberFormatException.
                require(split.length >= 5, s"malformed student record: '$line'")
                val sid: String = split(0)
                val name: String = split(1)
                val age: Int = split(2).toInt
                val sex: String = split(3)
                val clazz: String = split(4)

                statement.setString(1, sid)
                statement.setString(2, name)
                statement.setInt(3, age)
                statement.setString(4, sex)
                statement.setString(5, clazz)

                statement.addBatch()
              }
            )
            statement.executeBatch()
          } finally {
            // Close the statement even if a row fails to parse or insert.
            statement.close()
          }
        } finally {
          // Always release the connection — the original leaked it on any failure.
          con.close()
        }
      }
    )

    // Release Spark resources cleanly.
    spark.stop()
  }

}
