package com.gitee.filter

import com.alibaba.fastjson.JSON
import com.gitee.etl.bean.WideLog
import com.gitee.filter.bean.BeanFilter
import com.gitee.utils.GlobalConfigUtil
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment,_}
import org.apache.flink.streaming.api.windowing.time.Time

/**
  * Detects IPs that appear at least 10 times within a 5-second window:
  *   1. Read the wide-log stream from Kafka
  *   2. Map each record to (ip, 1)
  *   3. Aggregate counts per IP over a windowed stream
  *   4. Filter for IPs whose window count reached the threshold (>= 10)
  *   5. Send the flagged IPs to MySQL
  * @param envs the Flink streaming execution environment shared by all filters
  */
class IPSum(envs: StreamExecutionEnvironment) extends BeanFilter(envs) {
  /**
    * Crawler-recognition rule: flag any IP observed at least 10 times within
    * a 5-second window and persist the flagged IPs (with a detection
    * timestamp) to MySQL.
    */
  override def recognize(): Unit = {

    // 1. Read the wide-log JSON stream from Kafka.
    val wideLogJson: DataStream[String] = getKafkaDataStream(GlobalConfigUtil.`output.topic.dwd_log`)

    // 2. Parse each record and emit (ip, 1) for counting.
    val ipOne: DataStream[(String, Int)] = wideLogJson.map { logJson =>
      val wideLog: WideLog = JSON.parseObject[WideLog](logJson, classOf[WideLog])
      (wideLog.ip, 1)
    }

    // 3. Count occurrences per IP over a 5-second window.
    //    keyBy(_._1) replaces the deprecated positional keyBy(0).
    val ipSum: DataStream[(String, Int)] = ipOne
      .keyBy(_._1)
      .timeWindow(Time.seconds(5))
      .sum(1)

    // 4. Keep only IPs whose count reached the threshold (>= 10) and stamp
    //    each result with the time it was detected.
    //    FIX: the timestamp must be taken per record inside the map. The
    //    original captured System.currentTimeMillis() once at job-graph
    //    construction time and serialized it into the operator, so every
    //    detection — even hours later — carried the job start time.
    val result: DataStream[(String, Long)] = ipSum
      .filter(_._2 >= 10)
      .map(x => (x._1, System.currentTimeMillis()))

    result.print() // NOTE(review): debug sink; consider removing in production

    // 5. Persist the flagged IPs to MySQL.
    result.addSink(sendMysql())
  }
}
