package com.yanggu.networkflow_analysis

import cn.hutool.core.date.DateUtil
import org.apache.flink.api.common.eventtime.{SerializableTimestampAssigner, WatermarkStrategy}
import org.apache.flink.api.common.functions.AggregateFunction
import org.apache.flink.api.common.state.MapStateDescriptor
import org.apache.flink.streaming.api.functions.KeyedProcessFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.function.ProcessWindowFunction
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector

import java.time.Duration
import java.util.Date
import java.util.concurrent.TimeUnit
import scala.collection.mutable.ListBuffer

/**
 * Every 5 seconds, compute the visit count of each url over a sliding
 * event-time window, then rank and print the Top-N urls per window.
 * NOTE(review): the original comment claimed a one-hour lookback, but the
 * window configured below is 10 minutes — the code is taken as authoritative.
 */
object NetworkFlow {

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // Regex matching urls that are NOT static resources (css/js/jpg/png/ico);
    // only referenced by the (currently commented-out) filter below.
    val pattern = "^((?!\\.(css|js|jpg|png|ico)$).)*$".r
    val dataStream = env
      //.readTextFile(getClass.getResource("/apache.log").getPath)
      .socketTextStream("localhost", 9000)
      .flatMap(fun = (line, out: Collector[ApacheLogEvent]) => {
        val arr = line.split(" ")
        // Disabled filter: would keep only GET requests whose url is not a static resource.
        //if (arr(5) == "GET" && pattern.findFirstIn(arr(6)).nonEmpty) {
        // NOTE(review): standard Apache access logs carry the month as a name
        // ("dd/MMM/yyyy:HH:mm:ss"); confirm the socket feed really uses "dd/MM/yyyy".
        out.collect(ApacheLogEvent(arr(0), arr(1), DateUtil.parse(arr(3), "dd/MM/yyyy:HH:mm:ss").getTime, arr(5), arr(6)))
        //}
      })
      // Assign event-time timestamps and a watermark strategy.
      .assignTimestampsAndWatermarks(WatermarkStrategy
        // Tolerate events arriving up to 1 second out of order.
        .forBoundedOutOfOrderness(Duration.ofSeconds(1L))
        .withTimestampAssigner(new SerializableTimestampAssigner[ApacheLogEvent]() {
          override def extractTimestamp(element: ApacheLogEvent, recordTimestamp: Long): Long = element.eventTime
        }).withIdleness(Duration.ofSeconds(10L)))

    val apacheLogEventOutputTag = new OutputTag[ApacheLogEvent]("ApacheLogEventOutputTag")
    val agg = dataStream
      // Partition the stream by url.
      .keyBy(_.url)
      // Sliding event-time window: size 10 minutes, slide 5 seconds.
      .window(SlidingEventTimeWindows.of(Time.minutes(10L), Time.seconds(5L)))
      // Keep each window open for late data for one extra minute
      // (so a window finally closes at windowEnd + 1s watermark delay + 1min).
      .allowedLateness(Time.minutes(1L))
      // Events arriving after their window has finally closed go to a side output.
      .sideOutputLateData(apacheLogEventOutputTag)
      // Incremental count per window; the window function wraps it into UrlViewCount.
      .aggregate(new PageCountAgg, new PageViewCountWindowResult)

    // Print the raw parsed stream.
    dataStream.print("data")

    // Print the per-window aggregates.
    agg.print("agg")

    agg
      // Group the aggregates by their window end timestamp.
      .keyBy(_.windowEnd)
      .process(new TopNHotPages)
      // Print the Top-N result per window.
      .print("result")

    // Print the side output of events too late even for allowedLateness.
    agg.getSideOutput(apacheLogEventOutputTag).print("late")

    // Launch the job.
    env.execute("NetworkFlow Job")
  }

}

/**
 * One parsed line of the Apache access log, as read from the socket source.
 *
 * @param ip        client ip address
 * @param userId    user id
 * @param eventTime event timestamp in epoch milliseconds
 * @param method    http request method (e.g. GET)
 * @param url       requested url
 */
case class ApacheLogEvent(ip: String, userId: String, eventTime: Long, method: String, url: String)

/**
 * Aggregated output of one window: how often a url was visited.
 *
 * @param url       the visited url (the window key)
 * @param windowEnd end timestamp of the window, epoch milliseconds
 * @param count     number of visits within the window
 */
case class UrlViewCount(url: String, windowEnd: Long, count: Long)

/**
 * Incremental counting aggregate: keeps a single running Long per key/window,
 * so no raw ApacheLogEvent records need to be buffered in window state.
 *
 * Type parameters of AggregateFunction: input ApacheLogEvent,
 * accumulator Long, output Long.
 */
class PageCountAgg extends AggregateFunction[ApacheLogEvent, Long, Long] {

  /** Every window starts counting from zero. */
  override def createAccumulator(): Long = 0L

  /** Each incoming log event bumps the running count by one. */
  override def add(event: ApacheLogEvent, acc: Long): Long = acc + 1L

  /** The accumulator itself is the final per-window result. */
  override def getResult(acc: Long): Long = acc

  /** Two partial counts merge by plain addition (used for session merges). */
  override def merge(left: Long, right: Long): Long = left + right

}

/**
 * Wraps the pre-aggregated count of one window into a UrlViewCount record,
 * attaching the window's end timestamp alongside the url key.
 *
 * Type parameters of ProcessWindowFunction: input Long (from PageCountAgg),
 * output UrlViewCount, key String (the url), window TimeWindow.
 */
class PageViewCountWindowResult extends ProcessWindowFunction[Long, UrlViewCount, String, TimeWindow] {

  override def process(key: String, context: Context, elements: Iterable[Long], out: Collector[UrlViewCount]): Unit = {
    // The incremental aggregate delivers exactly one element per window.
    val count = elements.head
    val windowEnd = context.window.getEnd
    out.collect(UrlViewCount(key, windowEnd, count))
  }

}

/**
 * Per window-end key, ranks the Top 5 most visited urls and emits a formatted
 * report once the watermark passes windowEnd + 1s. State is dropped after the
 * upstream allowedLateness (1 minute) has also expired.
 *
 * Keyed by Long (windowEnd); input UrlViewCount; output String (the report).
 */
class TopNHotPages extends KeyedProcessFunction[Long, UrlViewCount, String] {

  // Ranking timer fires 1s after windowEnd — mirrors the 1s out-of-orderness
  // bound of the watermark strategy upstream.
  private val outputDelayMs = Time.seconds(1L).toMilliseconds
  // Mirrors the upstream window's allowedLateness of 1 minute.
  private val latenessMs = Time.minutes(1L).toMilliseconds
  // Must stay in sync with the upstream SlidingEventTimeWindows size (10 min);
  // used only to compute the window start shown in the report header.
  private val windowSizeMs = Time.minutes(10L).toMilliseconds

  // url -> latest UrlViewCount for this windowEnd key; upserted on every
  // (possibly late) aggregate so re-fired windows overwrite stale counts.
  private lazy val mapState = getRuntimeContext.getMapState(
    new MapStateDescriptor[String, UrlViewCount]("mapState", classOf[String], classOf[UrlViewCount]))

  override def processElement(value: UrlViewCount,
                              ctx: KeyedProcessFunction[Long, UrlViewCount, String]#Context,
                              out: Collector[String]): Unit = {

    // Upsert the per-url count for this window.
    mapState.put(value.url, value)

    val timerService = ctx.timerService()
    val windowEnd = ctx.getCurrentKey

    // Event-time timers only fire when the watermark advances past their
    // timestamp; re-registering the same timestamp is a no-op, so late
    // updates within allowedLateness simply re-arm the same timers.

    // Ranking timer: emit the Top-N once the watermark reaches windowEnd + 1s.
    timerService.registerEventTimeTimer(windowEnd + outputDelayMs)

    // Cleanup timer: after windowEnd + 1s + 1min the window can never fire
    // again, so its state may be cleared.
    timerService.registerEventTimeTimer(windowEnd + outputDelayMs + latenessMs)
  }

  override def onTimer(timestamp: Long,
                       ctx: KeyedProcessFunction[Long, UrlViewCount, String]#OnTimerContext,
                       out: Collector[String]): Unit = {

    val windowEnd = ctx.getCurrentKey

    // BUG FIX: the original compared against `... + +Time.minutes(1L).toMilliseconds`
    // (a stray unary plus — harmless at runtime, but corrected for clarity).
    if (timestamp == windowEnd + outputDelayMs + latenessMs) {
      // allowedLateness expired: the window is fully closed, drop its state.
      mapState.clear()
    } else {
      // Ranking timer: snapshot all per-url counts accumulated for this window.
      val counts = ListBuffer[UrlViewCount]()
      val iter = mapState.values().iterator()
      while (iter.hasNext) {
        counts += iter.next()
      }

      // Do NOT clear state here: late events within allowedLateness may still
      // update this window and re-trigger the ranking.

      // Sort by count descending, then url ascending, and keep the top 5.
      val topN = counts.sortBy(data => (-data.count, data.url)).take(5)

      val builder = new StringBuilder()
      builder.append(s"窗口开始时间: ${DateUtil.formatDateTime(new Date(windowEnd - windowSizeMs))}, " +
        s"窗口结束时间: ${DateUtil.formatDateTime(new Date(windowEnd))}").append("\r\n")

      for ((data, index) <- topN.zipWithIndex) {
        builder.append(s"排名: ${index + 1}, 页面url: ${data.url}, 访问次数: ${data.count}").append("\r\n")
      }

      out.collect(builder.toString())

      // NOTE(review): sleeping on the task thread only throttles console output
      // for demo readability; it blocks processing and checkpointing — remove
      // for any production use.
      TimeUnit.SECONDS.sleep(1L)
    }
  }

}
