package hz



import scala.collection.mutable.ArrayBuffer
import kafka.serializer.StringDecoder
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.{Duration, Seconds, StreamingContext}
import org.apache.spark.streaming.kafka.KafkaUtils
import org.elasticsearch.spark._


/**
  * Created by hz on 2016/11/14.
  */

object Kafkauu {
  /**
    * Entry point: consumes raw log lines from Kafka topic "test", applies every
    * alert rule loaded from the DB, and writes matching (aggregated) records to
    * Elasticsearch index "baojing/guizefenlei".
    */
  def main(args:Array[String]) {
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    val sparkConf = new SparkConf().setMaster("spark://hz:7077").setAppName("test")
    sparkConf.set("es.nodes", "hz")
    sparkConf.set("es.port", "9200")
    sparkConf.set("es.index.auto.create", "true")

    val scc = new StreamingContext(sparkConf, Duration(5000)) // 5s micro-batches
    val topics = Set("test") // Kafka topic(s) to consume
    val kafkaParam = Map(
        "metadata.broker.list" -> "hz:9092" // Kafka broker list
      )
    val stream: InputDStream[(String, String)] = createStream(scc, kafkaParam, topics)
    val lei = stream.map(_._2) // keep only the message value
    lei.persist() // reused once per rule below, so cache it
    val rule = DBUtil.getlist() // load the alert rules
    if (rule.size() == 0) {
      println("====zheshikongde ")
      lei.print()
    } else {
      for (i <- 0 until rule.size()) {
        println("==================")
        lei.print()
        val rule1 = rule.get(i)
        println(rule1.getId)
        // per-rule sliding window; NOTE(review): getTimerange is passed as raw
        // seconds here — an earlier version multiplied by 60, confirm the unit
        val h = lei.window(Seconds(rule1.getTimerange), Seconds(5))
        val daxiao = zhixing(h, rule1)
        daxiao.foreachRDD(pricesRDD => {
          // count() triggers a full job — evaluate it once, not three times
          val cnt = pricesRDD.count()
          println(cnt + "这是count")
          println("==============zheshies")
          if (cnt > 0) // RDD has data
          {
            pricesRDD.saveToEs("baojing/guizefenlei") // persist to Elasticsearch
          }
        })
      }
    }

    scc.start() // actually start the streaming job
    scc.awaitTermination() // block until terminated
  }



  /**
    * Applies one rule to the raw log stream: condition filtering followed by
    * group/threshold aggregation.
    *
    * @param lei  windowed stream of raw log lines
    * @param rule the rule to evaluate
    * @return stream of field maps that passed both the conditions and the
    *         aggregation threshold
    */
  def zhixing(lei:DStream[String],rule:RuleBean):DStream[Map[String,String]]={
    println("这是第"+rule.getId+"")

    val filtered = tiaojian(lei, rule) // condition filtering

    // BUG FIX: the result of paixu1 was previously discarded and the
    // un-aggregated `filtered` stream returned, so the rule's grouping /
    // threshold (rule.getNumber) never affected what was saved downstream.
    paixu1(filtered, rule) // aggregation + threshold
  }


  /**
    * Condition matching: parses each raw line into a field map, then applies
    * every condition of the rule as a filter.
    *
    * @param rizhi raw log lines
    * @param rule1 rule whose conditions are applied (field, operator, value)
    * @return stream of field maps satisfying all conditions
    */
  def tiaojian(rizhi:DStream[String],rule1:RuleBean):DStream[Map[String,String]]={
    var rizhi1 = rizhi.map(_.split(" ")) // split on spaces
      .map(x => denghao(x)) // re-merge tokens split inside a key=value pair
      .map(line => jiexi(line, rule1.getId)) // parse into a field map

    val tiaojian = rule1.getTiaojian() // all conditions of this rule
    for (i <- 0 until tiaojian.size()) {
      val ziduan = tiaojian.get(i).getZiduan // field name
      val zhi = tiaojian.get(i).getZhi // comparison value
      // Build one predicate per condition; </> etc. compare lexicographically,
      // since both sides are strings.
      val pred: String => Boolean = tiaojian.get(i).getCaozuofu match {
        case "=" => v => v == zhi
        case "<" => v => v < zhi
        case ">" => v => v > zhi
        case "<=" => v => v <= zhi
        case ">=" => v => v >= zhi
        case "!=" => v => v != zhi
        case "Contains" => v => v.contains(zhi)
        case "StartWith" => v => v.startsWith(zhi)
        case "EndWith" => v => v.endsWith(zhi)
        case other =>
          // Previously an unknown operator crashed with a MatchError at
          // DAG-build time; warn and skip the condition instead.
          println("unknown operator, condition skipped: " + other)
          _ => true
      }
      // NOTE(review): x(ziduan) still throws if the field is absent from a
      // log line — original behavior, kept; confirm fields are guaranteed.
      rizhi1 = rizhi1.filter(x => pred(x(ziduan)))
    }
    rizhi1
  }


  /**
    * Re-merges tokens that were split on spaces inside a key=value pair.
    *
    * Scans the tokens from last to first: tokens without '=' are accumulated
    * (in original order, no separator) and glued onto the next '='-bearing
    * token to their left. Leading tokens with no '='-token before them are
    * dropped. The result is in reverse token order.
    *
    * @param list space-split tokens
    * @return merged "key=value" tokens
    */
  def denghao(list:Array[String]):Array[String]={
    val merged = ArrayBuffer[String]()
    var pending = ""
    for (idx <- list.indices.reverse) {
      val token = list(idx)
      if (token.contains("=")) {
        merged += token + pending
        pending = ""
      } else {
        pending = token + pending
      }
    }
    merged.toArray
  }


  /**
    * Group/threshold aggregation for one rule.
    *
    * Records are grouped by the concatenation of the rule's "same" fields;
    * within each group the number of distinct "diff" field combinations is
    * counted, and only groups reaching rule.getNumber are kept (flattened
    * back into individual records).
    *
    * @param list condition-filtered field maps
    * @param rule rule providing the grouping fields and threshold
    * @return records belonging to groups that met the threshold
    */
  def paixu1(list:DStream[Map[String,String]],rule:RuleBean):DStream[Map[String,String]]={
    println("==进入排序==")
    val cached = list
    cached.cache() // consumed twice by the grouping pipeline

    // Grouping key: concatenation of the comma-separated "same" fields.
    def sameKey(fields: Map[String, String]): String =
      rule.getSamepol.split(",").map(fields).mkString

    // Distinctness key: concatenation of the comma-separated "diff" fields.
    def diffKey(fields: Map[String, String]): String =
      rule.getDiffpol.split(",").map(fields).mkString

    val threshold = rule.getNumber

    // Group records sharing the same "same"-fields key.
    val grouped: DStream[Iterable[Map[String, String]]] =
      cached.map(record => (sameKey(record), record)).groupByKey().map(_._2)

    // Keep groups with at least `threshold` distinct "diff" combinations,
    // then flatten back to individual records.
    grouped
      .map(group => (group, group.map(diffKey).toArray.distinct.size))
      .filter(_._2 >= threshold)
      .flatMap(_._1)
  }





  /**
    * Creates a direct (receiver-less) Kafka input stream of (key, value) string pairs.
    *
    * @param scc           the Spark Streaming context
    * @param kafkaParam    Kafka configuration (e.g. metadata.broker.list)
    * @param topics        set of topics to consume
    * @return an InputDStream[(String, String)] backed by Kafka
    */
  def createStream(scc: StreamingContext, kafkaParam: Map[String, String], topics: Set[String]) = {
    KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](scc, kafkaParam, topics)
  }

  /**
    * Parses "key=value" tokens into a field map and tags it with the rule id.
    *
    * @param map    tokens, each expected in key=value form
    * @param guize1 id of the matching rule, stored under the "guize" key
    * @return field map (quotes stripped from values) plus the rule-id entry
    */
  def jiexi(map:Array[String],guize1:Int):Map[String,String]={
    var map1: Map[String, String] = Map()
    for (token <- map) {
      // Split on the FIRST '=' only, so values containing '=' stay intact
      // (the old split("=")(1) truncated "k=a=b" to "a"). limit=2 also
      // avoids the ArrayIndexOutOfBoundsException the old code threw on a
      // token with no '=' — such malformed tokens are now skipped.
      val parts = token.split("=", 2)
      if (parts.length == 2) {
        map1 += (parts(0) -> parts(1).replaceAll("\"", "")) // strip quotes
      }
    }
    map1 += ("guize" -> guize1.toString)
    map1
  }
}





