package com.shujia.streaming

import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Durations, StreamingContext}

import java.sql.{Connection, DriverManager, PreparedStatement, ResultSet}
import scala.collection.mutable

/**
 * Spark Streaming demo: filter a socket text stream against a MySQL blacklist.
 *
 * Input lines are comma-separated call records whose first field is the phone
 * number (mdn). Records whose mdn appears in the `blacklist` table are KEPT
 * (this demo extracts blacklisted traffic, it does not drop it).
 *
 * Two approaches are shown:
 *   1. filter-per-record  — anti-pattern: one JDBC round trip per record.
 *   2. transform-per-batch — the blacklist is reloaded once per batch on the
 *      driver and broadcast to the executors.
 */
object Demo05Bukong {

  /**
   * Loads the current blacklist (mdn column) from MySQL.
   *
   * Runs on the driver. Returns an immutable snapshot so the result can be
   * broadcast safely; a fresh load each call means rows deleted from the
   * table are actually dropped from the filter (the original accumulated
   * entries forever). All JDBC resources are closed even on failure.
   */
  private def loadBlacklist(): Set[String] = {
    val conn: Connection = DriverManager.getConnection("jdbc:mysql://master:3306/student", "root", "123456")
    try {
      val pSt: PreparedStatement = conn.prepareStatement("select mdn from blacklist")
      try {
        val rs: ResultSet = pSt.executeQuery()
        try {
          val mdns = mutable.Set[String]()
          while (rs.next()) {
            mdns.add(rs.getString("mdn"))
          }
          mdns.toSet
        } finally rs.close()
      } finally pSt.close()
    } finally conn.close()
  }

  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName("Demo05Bukong")
      .master("local[2]")
      .getOrCreate()

    // 5-second micro-batches over the existing SparkContext.
    val ssc: StreamingContext = new StreamingContext(spark.sparkContext, Durations.seconds(5))

    val dianxinDS: DStream[String] = ssc.socketTextStream("master", 7777)

    // --- Approach 1 (anti-pattern, shown for comparison only) ---------------
    // A plain filter opens a JDBC connection for EVERY record — far too slow.
    // NOTE: this pipeline has no output operation registered (print is
    // commented out), so Spark never executes it.
    dianxinDS
      .filter(line => {
        // One full DB round trip per record: the reason this approach is bad.
        val blacklist: Set[String] = loadBlacklist()
        val mdn: String = line.split(",")(0)
        blacklist.contains(mdn)
      })
      .map(line => {
        val splits: Array[String] = line.split(",")
        (splits(0), splits(2), splits(3), splits(5), splits(6))
      })
    //      .print(10000)

    // --- Approach 2: reload + broadcast once per batch ----------------------
    // The previous batch's broadcast is unpersisted before creating a new one
    // so a long-running stream does not leak driver/executor memory.
    var prevBroadcast: Option[Broadcast[Set[String]]] = None

    dianxinDS
      // transform runs its function on the driver once per batch.
      .transform(rdd => {
        prevBroadcast.foreach(_.unpersist())
        val blacklistBro: Broadcast[Set[String]] = spark.sparkContext.broadcast(loadBlacklist())
        prevBroadcast = Some(blacklistBro)

        rdd.filter(line => {
          // Keep only records whose mdn is blacklisted.
          val mdn: String = line.split(",")(0)
          blacklistBro.value.contains(mdn)
        }).map(line => {
          val splits: Array[String] = line.split(",")
          (splits(0), splits(2), splits(3), splits(5), splits(6))
        })
      }).print(10000)

    ssc.start()
    ssc.awaitTermination()
    ssc.stop()
  }

}
