package com.air.antispider.stream.dataprocess

import com.air.antispider.stream.common.util.jedis.{JedisConnectionUtil, PropertiesUtil}
import com.air.antispider.stream.dataprocess.constants.businessProcess.FilterURL.filterRule
import com.air.antispider.stream.dataprocess.constants.businessProcess.{AnalyzeRuleDB, BusinessProcess, FilterURL}
import kafka.serializer.StringDecoder
import org.apache.commons.lang3.StringUtils
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}
import redis.clients.jedis.{Jedis, JedisCluster}

import scala.collection.mutable.ArrayBuffer

/**
  * 数据预处理的主程序
  */
/**
  * Main entry point of the data pre-processing streaming job.
  *
  * Consumes raw nginx access messages from Kafka, keeps the URL filter rules
  * (loaded from MySQL via [[AnalyzeRuleDB]]) in a Spark broadcast variable that
  * is rebuilt whenever the Redis key "FilterChangeFlag" reads "true", runs the
  * link-count statistics over each batch, and drops messages matched by the
  * filter rules before further processing.
  */
object DataProcessApp {
  def main(args: Array[String]): Unit = {
    // Build the Spark execution environment.
    val conf: SparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("DataProcessApp")

    val sc: SparkContext = new SparkContext(conf)
    // Load the initial filter rules from MySQL.
    val filterRuleList: ArrayBuffer[String] = AnalyzeRuleDB.queryFilterRule()
    // Reassigned from the foreachRDD closure (which runs on the driver);
    // @volatile guards visibility should the driver touch it from another thread.
    @volatile
    var filterRuleBroadcast: Broadcast[ArrayBuffer[String]] = sc.broadcast(filterRuleList)


    val ssc: StreamingContext = new StreamingContext(sc, Seconds(2))
    // Kafka cluster address.
    val brokerList: String = PropertiesUtil.getStringByKey("default.brokers", "kafkaConfig.properties")
    // Source topic name.
    val topic: String = PropertiesUtil.getStringByKey("source.nginx.topic", "kafkaConfig.properties")
    // Kafka connection parameters for the direct stream API.
    val kafkaParams = Map[String, String](
      "metadata.broker.list" -> brokerList
    )
    // Topic set to subscribe to.
    val topics: Set[String] = Set[String](topic)

    // Consume from Kafka using the direct API. StringDecoder uses Kafka's own
    // serialization, which is more efficient than Java default serialization.
    val sourceStream: InputDStream[(String, String)] = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, topics)
    // Keep only the message payload, dropping the Kafka key.
    val sourceMessage: DStream[String] = sourceStream.map(_._2)
    // Redis cluster connection, used on the driver to poll the rule-change flag.
    val jedis: JedisCluster = JedisConnectionUtil.getJedisCluster()


    sourceMessage.foreachRDD(messageRDD => {
      // NOTE: this closure executes on the driver once per batch, so it is
      // safe to use the JedisCluster connection and rebuild the broadcast here.
      var filterChangeFlag: String = jedis.get("FilterChangeFlag")
      if (StringUtils.isBlank(filterChangeFlag)) {
        // First run (or key missing): initialize the flag so rules get loaded.
        filterChangeFlag = "true"
        jedis.set("FilterChangeFlag", filterChangeFlag)
      }
      if (filterChangeFlag.toBoolean) {
        // The rules changed — rebuild the broadcast variable:
        // 1. Discard the stale broadcast.
        filterRuleBroadcast.unpersist()
        // 2. Re-query the latest rules from MySQL.
        val latestRules: ArrayBuffer[String] = AnalyzeRuleDB.queryFilterRule()
        // 3. Publish them as a fresh broadcast.
        filterRuleBroadcast = sc.broadcast(latestRules)
        // 4. Reset the change flag in Redis.
        filterChangeFlag = "false"
        jedis.set("FilterChangeFlag", filterChangeFlag)
      }

      // Link (traffic) statistics over the raw batch.
      BusinessProcess.linkCount(messageRDD)
      // Apply the URL filter rules to the batch.
      val filterRDD: RDD[String] = messageRDD.filter(message => FilterURL.filterRule(message, filterRuleBroadcast.value))

      // BUG FIX: the original printed the unfiltered messageRDD and never ran
      // an action on filterRDD — since RDD.filter is lazy, the filtering never
      // actually executed. Act on the filtered RDD instead.
      filterRDD.foreach(println)
    })
    // Start the streaming job.
    ssc.start()
    // Block the driver until the job is terminated.
    ssc.awaitTermination()
  }
}