package com.sunzm.flink.networkflow

import java.time.Duration
import java.util

import com.sunzm.flink.networkflow.bean.{ApacheLogEvent, UrlViewCount}
import org.apache.commons.lang3.StringUtils
import org.apache.commons.lang3.time.{DateUtils, FastDateFormat}
import org.apache.flink.api.common.eventtime.{SerializableTimestampAssigner, WatermarkStrategy}
import org.apache.flink.api.common.functions.{AggregateFunction, FlatMapFunction}
import org.apache.flink.api.common.state.{ListState, ListStateDescriptor}
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.KeyedProcessFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.function.WindowFunction
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector
import org.slf4j.{Logger, LoggerFactory}

import scala.collection.mutable.ArrayBuffer

/**
 * Pure DataStream API job: every 5 seconds, compute the top-3 most visited
 * pages over the last 10 minutes (sliding event-time window + keyed TopN).
 */
object StreamHotPagesJob {
  private val logger: Logger = LoggerFactory.getLogger(this.getClass.getName.stripSuffix("$"))
  private val isLocal = true

  def main(args: Array[String]): Unit = {

    // 1. Create the execution environment: a local environment with web UI
    //    for debugging, the regular environment otherwise.
    val env: StreamExecutionEnvironment = if (isLocal) {
      StreamExecutionEnvironment.createLocalEnvironmentWithWebUI()
    } else {
      StreamExecutionEnvironment.getExecutionEnvironment
    }

    // Parallelism 1 makes the printed output easy to follow;
    // do NOT use 1 in a real production deployment.
    env.setParallelism(1)

    // Read the input data.
    //val dataStream: DataStream[String] = env.socketTextStream("82.156.210.70", 9999)
    val dataStream: DataStream[String] = env.readTextFile("data/flink/case-demo/apache.log")

    // Watermark strategy: tolerate up to 2 seconds of out-of-orderness and
    // extract the event time carried by each ApacheLogEvent.
    val watermarkStrategy = WatermarkStrategy
      .forBoundedOutOfOrderness[ApacheLogEvent](Duration.ofSeconds(2))
      .withTimestampAssigner(new SerializableTimestampAssigner[ApacheLogEvent] {
        override def extractTimestamp(element: ApacheLogEvent, recordTimestamp: Long): Long = {
          element.eventTime
        }
      })

    // Parse raw Apache log lines into ApacheLogEvent, keeping only GET requests.
    val mapedDataStream: DataStream[ApacheLogEvent] = dataStream.flatMap(new FlatMapFunction[String, ApacheLogEvent] {
      override def flatMap(line: String, out: Collector[ApacheLogEvent]): Unit = {
        if (StringUtils.isNotBlank(line)) {
          val fields = StringUtils.splitByWholeSeparatorPreserveAllTokens(line, " ")
          if (fields.length >= 7) {
            val ip = fields(0)
            // userId is never used by this job, so a fixed placeholder is enough.
            //val userId = fields(1).toLong
            val userId = 20210701001L
            val eventTimeStr = fields(3)
            val method = fields(5)
            val url = fields(6)

            // Timestamp format in the log, e.g. 17/05/2015:10:05:03
            val eventTime = DateUtils.parseDate(eventTimeStr, "dd/MM/yyyy:HH:mm:ss").getTime

            if (StringUtils.equals("GET", method.toUpperCase.trim)) {
              // Only emit records whose HTTP method is GET.
              out.collect(ApacheLogEvent(ip, userId, eventTime, method, url))
            }
          }
        }
      }
    })
      // Attach the watermark strategy / event-time extraction.
      .assignTimestampsAndWatermarks(watermarkStrategy)

    val lateOutputTag: OutputTag[ApacheLogEvent] = new OutputTag[ApacheLogEvent]("lateData")

    // Count hits per URL in a 10-minute window sliding every 5 seconds.
    val windowAggStream: DataStream[UrlViewCount] = mapedDataStream
      // Key by page URL, since we count hits per URL.
      .keyBy(_.url)
      .window(SlidingEventTimeWindows.of(Time.minutes(10), Time.seconds(5)))
      // Keep windows open for one extra minute: the window fires when the
      // watermark passes its end, but records arriving within the grace
      // period re-trigger it instead of being dropped.
      .allowedLateness(Time.minutes(1))
      // Records arriving after the grace period go to the side output.
      .sideOutputLateData(lateOutputTag)
      // Incremental aggregation: PageCountAgg counts record-by-record,
      // PageCountResult wraps the final count with the window metadata.
      .aggregate(new PageCountAgg, new PageCountResult)

    //windowAggStream.print()

    // Top-3 URLs per window: key by window end time so different windows
    // do not get mixed together, then rank inside a process function.
    val topNDS: DataStream[String] = windowAggStream
      .keyBy(_.windowEnd)
      .process(new TopNHotUrls)

    topNDS.print("主流数据")

    // BUG FIX: the late-data side output is produced by the window operator,
    // so it must be read from windowAggStream. Reading it from topNDS (the
    // downstream process() result, as before) always yields an empty stream.
    windowAggStream.getSideOutput(lateOutputTag).print("延迟数据")

    // Launch the job.
    env.execute(this.getClass.getSimpleName.stripSuffix("$"))
  }

  /**
   * 实现求分组TopN的 KeyedProcessFunction
   */
  /**
   * KeyedProcessFunction that, for each window-end key, buffers all
   * UrlViewCount records of that window in list state and, once the
   * watermark passes the window end, sorts them by view count and
   * emits a formatted top-3 report.
   */
  private class TopNHotUrls extends KeyedProcessFunction[String, UrlViewCount, String] {
    // All UrlViewCount records seen so far for the current window-end key.
    private var urlViewCountListState: ListState[UrlViewCount] = _

    override def open(parameters: Configuration): Unit = {
      // Initialize the state handle once per operator instance.
      urlViewCountListState = getRuntimeContext.getListState[UrlViewCount](
        new ListStateDescriptor[UrlViewCount]("urlViewCountListState", classOf[UrlViewCount])
      )
    }

    override def processElement(value: UrlViewCount,
                                ctx: KeyedProcessFunction[String, UrlViewCount, String]#Context,
                                out: Collector[String]): Unit = {
      // Buffer the record. Sorting on every element would be wasteful, so
      // ranking is deferred to an event-time timer at windowEnd + 1 (timers
      // with identical timestamps are de-duplicated, so it fires once per
      // window firing).
      urlViewCountListState.add(value)

      val windowEndStr = value.windowEnd
      val windowEnd: Long = DateUtils.parseDate(windowEndStr, "yyyy-MM-dd HH:mm:ss").getTime
      ctx.timerService().registerEventTimeTimer(windowEnd + 1L)
    }

    override def onTimer(timestamp: Long,
                         ctx: KeyedProcessFunction[String, UrlViewCount, String]#OnTimerContext,
                         out: Collector[String]): Unit = {

      // Drain the state into a local buffer so it can be sorted.
      val buffer = ArrayBuffer.empty[UrlViewCount]
      val urlViewCounts: util.Iterator[UrlViewCount] = urlViewCountListState.get().iterator()
      while (urlViewCounts.hasNext) {
        buffer.append(urlViewCounts.next())
      }

      // Defensive guard: only rank and emit when there is data. Normally the
      // timer fires only after at least one element was buffered, but this
      // avoids a NoSuchElementException on `head` in any edge case.
      if (buffer.nonEmpty) {
        // Sort descending by view count and keep the top 3.
        val top3ViewCount: ArrayBuffer[UrlViewCount] =
          buffer.sortWith(_.viewCount > _.viewCount).take(3)

        // All records belong to the same window, so any of them carries the
        // window start/end we want to display.
        val head: UrlViewCount = top3ViewCount.head

        val outputStr =
          s"""
             |=============================================================
             |
             |窗口开始时间:${head.windowStart}, 窗口结束时间: ${head.windowEnd}, 前3名数据详情如下:
             |
             |${top3ViewCount.mkString(System.lineSeparator())}

             |===================================================

             |""".stripMargin

        out.collect(outputStr)
      }

      // Clear the state so a late re-firing of the window starts fresh.
      urlViewCountListState.clear()
    }
  }


  /**
   * 自定义预聚合函数类，每来一个数据就count加1
   */
  /**
   * Incremental pre-aggregation for the sliding window: the accumulator is
   * a plain Int counter that is bumped by one for every incoming log event.
   */
  private class PageCountAgg extends AggregateFunction[ApacheLogEvent, Int, Int] {

    /** A fresh accumulator: no events counted yet. */
    override def createAccumulator(): Int = 0

    /** Every event contributes exactly one hit, regardless of its content. */
    override def add(value: ApacheLogEvent, accumulator: Int): Int = accumulator + 1

    /** The accumulator itself is the final per-window count. */
    override def getResult(accumulator: Int): Int = accumulator

    /** Partial counts from merged windows combine by simple addition. */
    override def merge(a: Int, b: Int): Int = a + b
  }


  /**
   * 窗口函数
   */
  /**
   * Window function applied on top of the pre-aggregated count: attaches the
   * URL (the key) and the formatted window start/end to produce a UrlViewCount.
   */
  private class PageCountResult extends WindowFunction[Int, UrlViewCount, String, TimeWindow] {

    // Hoisted out of apply(): FastDateFormat is thread-safe and Serializable
    // (it extends java.text.Format), so one shared instance per operator is
    // cheaper than recreating it on every window invocation.
    private val fastDateFormat = FastDateFormat.getInstance("yyyy-MM-dd HH:mm:ss")

    override def apply(key: String, window: TimeWindow,
                       input: Iterable[Int],
                       out: Collector[UrlViewCount]): Unit = {

      // Because PageCountAgg pre-aggregates incrementally, `input` holds
      // exactly one element: the total count for this (url, window) pair.
      val count = input.head

      // Format window boundaries for human-readable downstream keys/output.
      val windowStartStr = fastDateFormat.format(window.getStart)
      val windowEndStr = fastDateFormat.format(window.getEnd)

      out.collect(UrlViewCount(windowStartStr, windowEndStr, key, count))
    }
  }

}
