package com.air.antispider.stream.dataprocess

import com.air.antispider.stream.commom.bean._
import com.air.antispider.stream.commom.util.jedis.{JedisConnectionUtil, PropertiesUtil}
import com.air.antispider.stream.dataprocess.businessprocess._
import com.air.antispider.stream.dataprocess.constants.TravelTypeEnum.TravelTypeEnum
import kafka.serializer.StringDecoder
import org.apache.commons.lang3.StringUtils
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import redis.clients.jedis.JedisCluster

import scala.collection.mutable.ArrayBuffer

/**
  * 数据预处理的入口程序
  */
/**
  * Entry point of the streaming data pre-processing job.
  *
  * Consumes raw nginx access-log lines from Kafka, refreshes rule data
  * (URL filter rules, URL classification rules, request-parsing rules and
  * the IP black list) from the database whenever the matching Redis flag
  * is set, then filters, anonymizes, splits, labels and parses each record
  * before forwarding the structured result to downstream Kafka topics.
  */
object DataProcessStreamingApp {

  /**
    * Reads a broadcast-refresh flag from Redis and reports whether the
    * corresponding rule data must be reloaded for this batch.
    *
    * On the very first run the flag is missing/blank; it is then
    * initialized to "true" so the rule data is loaded at least once.
    *
    * NOTE: the caller resets the flag to "false" only AFTER a successful
    * refresh, so a failed database query leaves the flag set and the
    * refresh is retried on the next batch.
    *
    * @param jedis   Redis cluster connection
    * @param flagKey Redis key holding the "true"/"false" refresh flag
    * @return true when the broadcast data should be re-queried
    */
  private def refreshRequested(jedis: JedisCluster, flagKey: String): Boolean = {
    val flag: String = jedis.get(flagKey)
    if (StringUtils.isBlank(flag)) {
      // First run: seed the flag so the initial load happens below.
      jedis.set(flagKey, "true")
      true
    } else {
      flag.toBoolean
    }
  }

  def main(args: Array[String]): Unit = {
    // Let the current batch finish before the application shuts down.
    System.setProperty("spark.streaming.stopGracefullyOnShutdown", "true")

    val conf: SparkConf = new SparkConf()
      .setMaster("local[6]")
      .setAppName("DataProcessStreamingApp")
      // Enable the JVM metrics source so the job can be monitored.
      .set("spark.metrics.conf.executor.source.jvm.class", "org.apache.spark.metrics.source.JvmSource")
    val ssc: StreamingContext = new StreamingContext(conf, Seconds(3))
    val sc: SparkContext = ssc.sparkContext
    sc.setLogLevel("WARN")

    // Rule data queried from the database and shared with executors via
    // broadcast variables. Declared as @volatile vars because the driver
    // re-broadcasts them whenever the matching Redis flag is set.
    @volatile var filterBroadcast: Broadcast[ArrayBuffer[String]] =
      sc.broadcast(QueryDBUtil.queryFilterData())
    @volatile var classifyRuleBroadcast: Broadcast[Map[String, ArrayBuffer[String]]] =
      sc.broadcast(QueryDBUtil.queryClassifyRuleData())
    // queryAnalyzeRule(0) = query-request parsing rules,
    // queryAnalyzeRule(1) = booking-request parsing rules.
    @volatile var queryAnalyzeBroadcast: Broadcast[List[AnalyzeRule]] =
      sc.broadcast(QueryDBUtil.queryAnalyzeRule(0))
    @volatile var bookAnalyzeBroadcast: Broadcast[List[AnalyzeRule]] =
      sc.broadcast(QueryDBUtil.queryAnalyzeRule(1))
    @volatile var blackListBroadcast: Broadcast[ArrayBuffer[String]] =
      sc.broadcast(QueryDBUtil.queryBlackListData())

    // Kafka consumer configuration: broker list and source topic come from
    // the properties file.
    val brokerList: String = PropertiesUtil.getStringByKey("default.brokers", "kafkaConfig.properties")
    val topic: String = PropertiesUtil.getStringByKey("source.nginx.topic", "kafkaConfig.properties")
    val kafkaParams: Map[String, String] = Map("metadata.broker.list" -> brokerList)
    val topics: Set[String] = Set(topic)
    val inputStream: InputDStream[(String, String)] =
      KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, topics)

    // Process each micro-batch; only the Kafka message value is needed.
    inputStream.map(_._2).foreachRDD(messageRDD => {

      // Before processing the batch, check every refresh flag in Redis and
      // re-broadcast any rule set whose flag is "true". This runs on the
      // driver, so reassigning the broadcast vars is safe.
      val jedis: JedisCluster = JedisConnectionUtil.getJedisCluster

      if (refreshRequested(jedis, "FilterFlag")) {
        val newFilterList: ArrayBuffer[String] = QueryDBUtil.queryFilterData()
        filterBroadcast.unpersist()
        filterBroadcast = sc.broadcast(newFilterList)
        jedis.set("FilterFlag", "false")
      }

      if (refreshRequested(jedis, "ClassifyRuleFlag")) {
        val newClassifyRuleMap: Map[String, ArrayBuffer[String]] = QueryDBUtil.queryClassifyRuleData()
        classifyRuleBroadcast.unpersist()
        classifyRuleBroadcast = sc.broadcast(newClassifyRuleMap)
        jedis.set("ClassifyRuleFlag", "false")
      }

      // A single flag covers both the query- and booking-parsing rules.
      if (refreshRequested(jedis, "AnalyzeBroadcastFlag")) {
        val newQueryAnalyzeRules: List[AnalyzeRule] = QueryDBUtil.queryAnalyzeRule(0)
        val newBookAnalyzeRules: List[AnalyzeRule] = QueryDBUtil.queryAnalyzeRule(1)
        queryAnalyzeBroadcast.unpersist()
        bookAnalyzeBroadcast.unpersist()
        queryAnalyzeBroadcast = sc.broadcast(newQueryAnalyzeRules)
        bookAnalyzeBroadcast = sc.broadcast(newBookAnalyzeRules)
        jedis.set("AnalyzeBroadcastFlag", "false")
      }

      if (refreshRequested(jedis, "BlackListFlag")) {
        val newBlackListData: ArrayBuffer[String] = QueryDBUtil.queryBlackListData()
        blackListBroadcast.unpersist()
        blackListBroadcast = sc.broadcast(newBlackListData)
        jedis.set("BlackListFlag", "false")
      }

      // Per-server link statistics for this batch.
      val serversCountMap: collection.Map[String, Int] = LinkCount.linkCount(messageRDD)

      // Drop requests for static resources (js, css, ...) according to the
      // broadcast filter rules.
      val filterRDD: RDD[String] = messageRDD.filter(message =>
        FilterURL.filter(message, filterBroadcast.value)
      )

      // Transform every surviving log line into a structured record.
      val processedDataRDD: RDD[ProcessedData] = filterRDD.map(message => {
        // Data anonymization: encrypt phone numbers, then ID-card numbers.
        val encryptedPhoneMessage: String = EncryptedMessage.encryptedPhone(message)
        val encryptedIDMessage: String = EncryptedMessage.encryptedID(encryptedPhoneMessage)

        // Split the log line into its individual fields.
        val (request, requestMethod, contentType, requestBody, httpReferrer,
        remoteAddr, httpUserAgent, timeIso8601, serverAddr, cookiesStr,
        cookieValue_JSESSIONID, cookieValue_USERID) = DataSplit.dataSplit(encryptedIDMessage)

        // Label the request by matching its URL against the classification
        // rules (domestic/international, query/booking).
        val requestType: RequestType = GenerateLabel.generateLabel(request, classifyRuleBroadcast.value)

        // Label the request as one-way or round-trip.
        val travelType: TravelTypeEnum = TravelTypeLabel.traveTypeLabel(httpReferrer)

        // Parse the request body; different labels map to different parsing
        // rules loaded from the database.
        val queryRequestData: Option[QueryRequestData] = AnalyzeRequest.analyzeQueryRequest(requestType, requestMethod, contentType, request, requestBody, travelType, queryAnalyzeBroadcast.value)
        val bookRequestData: Option[BookRequestData] = AnalyzeBookRequest.analyzeBookRequest(requestType, requestMethod, contentType, request, requestBody, travelType, bookAnalyzeBroadcast.value)

        // Flag the record when its source IP is on the high-frequency /
        // black list.
        val highFrqIPGroup: Boolean = IpOperation.isFreIP(remoteAddr, blackListBroadcast.value)

        // Assemble everything into the structured output record.
        ConstructorData.getProcessedData(requestMethod, request, remoteAddr, httpUserAgent,
          timeIso8601, serverAddr, highFrqIPGroup, requestType, travelType, cookieValue_JSESSIONID,
          cookieValue_USERID, queryRequestData, bookRequestData, httpReferrer)
      })

      // Route the structured records to Kafka by request type.
      SendData.sendQuery(processedDataRDD)
      SendData.sendBook(processedDataRDD)

      // Report batch metrics for Spark job monitoring.
      SparkStreamingMonitor.streamMonitor(sc, processedDataRDD, serversCountMap)
    })

    // Start the streaming job and block until termination.
    ssc.start()
    ssc.awaitTermination()
  }
}
