package com.offcn.spark.p3

import java.sql.DriverManager

import com.offcn.spark.p3.ForeachPartitionOps.saveInfoMySQL2
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Author: BigData-LGW
 * @ClassName: SaveInfoMySQLByForeachPartition
 * @Date: 2020/12/8 12:18
 * @Description: Word-count demo that persists (word, count) results into MySQL,
 *               using foreachPartition so one JDBC connection serves a whole partition.
 * @Version: 1.0
 */
object SaveInfoMySQLByForeachPartition {

    /**
     * Entry point: builds a local word count over a small in-memory corpus and
     * persists the resulting (word, count) pairs into MySQL.
     */
    def main(args: Array[String]): Unit = {
        val conf = new SparkConf()
            .setAppName("_05ActionOps")
            .setMaster("local[*]")
        val sc = new SparkContext(conf)
        val array = sc.parallelize(Array(
            "hello you",
            "hello me",
            "hello you",
            "hello you",
            "hello me",
            "hello you"
        ), 2)
        val pairs = array.flatMap(_.split("\\s+")).map((_, 1))
        val ret = pairs.aggregateByKey(0)(_ + _, _ + _)
        saveInfoMySQLByForeachPartition(ret)
        sc.stop()
    }

    /**
     * Writes each (word, count) pair into the `wordcounts` table, opening one
     * JDBC connection per partition (runs on the executors, so everything used
     * here must be serializable-free local state created inside the closure).
     *
     * Fixes over the naive version:
     *  - connection and statement are closed in `finally`, so a failed insert
     *    no longer leaks JDBC resources;
     *  - rows are sent with addBatch/executeBatch instead of one round-trip
     *    per record, which is the whole point of using foreachPartition;
     *  - empty partitions skip connection setup entirely.
     *
     * @param rdd word-count pairs to persist
     */
    def saveInfoMySQLByForeachPartition(rdd: RDD[(String, Int)]): Unit = {
        rdd.foreachPartition { partition =>
            // Don't open a connection for a partition with nothing to write.
            if (partition.nonEmpty) {
                // Explicit driver load kept for pre-JDBC-4 driver jars.
                Class.forName("com.mysql.jdbc.Driver")
                val url = "jdbc:mysql://localhost:3306/wordcount"
                val connection = DriverManager.getConnection(url, "mark", "sorry")
                try {
                    val sql =
                        """
                          |insert into wordcounts(word,`count`) value (?,?)
                          |""".stripMargin
                    val ps = connection.prepareStatement(sql)
                    try {
                        partition.foreach { case (word, count) =>
                            ps.setString(1, word)
                            ps.setInt(2, count)
                            ps.addBatch()
                        }
                        // Single round-trip for the whole partition.
                        ps.executeBatch()
                    } finally {
                        ps.close()
                    }
                } finally {
                    connection.close()
                }
            }
        }
    }
}
