package com.learn.lb.spark.streaming

import java.sql.DriverManager

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Demonstrates persisting streaming data into a MySQL database via foreachRDD.
 *
 * @author laibo
 * @since 2019/9/9 20:44
 *
 */
object ForeachRddDemo {

  /**
   * Entry point: reads whitespace-separated words from a socket stream and
   * inserts each word into the MySQL table `word_count`, one JDBC connection
   * per RDD partition.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Explicitly load the MySQL JDBC driver (needed by older driver versions).
    Class.forName("com.mysql.jdbc.Driver")
    val conf = new SparkConf().setMaster("local[2]").setAppName("NetworkWordCount")
    // Micro-batch interval: receive data every 5 seconds.
    val ssc = new StreamingContext(conf, Seconds(5))
    // Returns a ReceiverInputDStream of text lines read from the socket.
    val lines = ssc.socketTextStream("master", 9999)
    lines.flatMap(_.split(" ")).foreachRDD { rdd =>
      // Create the connection inside foreachPartition: JDBC connections are
      // not serializable, so they must be opened on the executor, and one
      // connection per partition amortizes the connection cost over records.
      rdd.foreachPartition { partitionOfRecords =>
        val connection = DriverManager.getConnection(
          "jdbc:mysql://master:3306/spark_learn?useSSL=false&serverTimezone=UTC", "root", "123456")
        try {
          // Parameterized statement: prevents SQL injection from raw socket
          // input (the original interpolated the record into the SQL string,
          // which also broke on words containing a single quote) and lets the
          // driver reuse the compiled statement for every record.
          val statement = connection.prepareStatement(
            "insert into word_count(word, count) values(?, ?)")
          try {
            partitionOfRecords.foreach { record =>
              statement.setString(1, record)
              statement.setInt(2, 1)
              statement.addBatch()
            }
            // One round-trip per partition instead of one per record.
            statement.executeBatch()
          } finally {
            statement.close()
          }
        } finally {
          // Always release the connection, even if an insert fails mid-partition
          // (the original leaked the connection on any SQL error).
          connection.close()
        }
      }
    }
    ssc.start()
    ssc.awaitTermination()
  }
}
