package org.project.top

import java.sql.Timestamp
import java.text.SimpleDateFormat

import org.FlinkStreamApp
import org.apache.flink.api.common.functions.AggregateFunction
import org.apache.flink.api.common.state.ListStateDescriptor
import org.apache.flink.api.scala.typeutils.Types
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.KeyedProcessFunction
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment, _}
import org.apache.flink.streaming.api.scala.function.ProcessWindowFunction
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector
import org.project.bean.{ApacheLogEvent, UrlViewCount}

import scala.collection.mutable.ListBuffer

/**
 * Description : Top-N statistics of website URL visits — counts the most
 *               visited URLs over a sliding event-time window.
 * Author      : 剧情再美终是戏
 * Mail        : 13286520398@163.com
 * Date        : Created in 2020/2/25 13:46
 * Modified by :
 * Version     : 1.0
 */
object UrlTop extends FlinkStreamApp {
  /**
   * Builds the streaming job: parse an Apache access log, count URL hits in a
   * 1-minute window sliding every 5 seconds, and print the top-3 URLs per window.
   *
   * @param environment the Flink stream execution environment to configure
   */
  override def doSomeThing(environment: StreamExecutionEnvironment) = {
    // Windows must be driven by the timestamps embedded in the log records.
    environment.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    // 创建数据源 — one Apache access-log record per line.
    val source = environment.readTextFile("F:\\Word\\idea\\bigdata\\Flink0830s\\src\\main\\resources\\apachetest.log")

    // Hoisted out of the map lambda so the formatter is built once per subtask
    // instead of once per record. SimpleDateFormat is Serializable and each
    // parallel subtask deserializes its own copy, so no instance is shared
    // across threads.
    val format = new SimpleDateFormat("dd/MM/yyyy:HH:mm:ss")

    // 操作 1 分钟内 访问网站 url top3 计算
    source
      .map(line => {
        val splits = line.split("\\s")
        // 把时间戳ETL成毫秒 — parse the log timestamp into epoch milliseconds.
        // NOTE(review): assumes splits(3) is exactly "dd/MM/yyyy:HH:mm:ss" with
        // no surrounding brackets — confirm against the actual log file.
        ApacheLogEvent(splits(0), splits(2), format.parse(splits(3)).getTime, splits(5), splits(6))
      })
      // Assumes the file is ordered by time; otherwise use a bounded-lateness watermark.
      .assignAscendingTimestamps(_.eventTime)
      .keyBy(_.url)
      .timeWindow(Time.seconds(60), Time.seconds(5))
      // Incremental per-URL count, then wrap it with the window-end timestamp.
      .aggregate(new MyCountAgg, new MyProcessResult)
      .keyBy(_.windowEnd)
      .process(new MyProcessTopN(3))
      .print

  }

  /**
   * Emits the top-n URLs for each finished window.
   *
   * Input is keyed by `windowEnd`, so every [[UrlViewCount]] belonging to one
   * window lands on the same key. Each element registers an event-time timer at
   * `windowEnd + 1` (re-registering the same timestamp is a no-op); when the
   * watermark passes it, all counts for that window have arrived and the
   * ranking is emitted as a formatted string.
   *
   * @param n how many top URLs to output per window
   */
  class MyProcessTopN(n: Int) extends KeyedProcessFunction[Long, UrlViewCount, String] {
    // Per-key (i.e. per-window) buffer of URL counts, held until the timer fires.
    lazy val items = getRuntimeContext.getListState(
      new ListStateDescriptor[UrlViewCount]("items", Types.of[UrlViewCount])
    )

    override def processElement(value: UrlViewCount, ctx: KeyedProcessFunction[Long, UrlViewCount, String]#Context, out: Collector[String]) = {
      items.add(value)
      // One timer per window: fires just after the window's results are complete.
      ctx.timerService().registerEventTimeTimer(value.windowEnd + 1)
    }

    override def onTimer(timestamp: Long, ctx: KeyedProcessFunction[Long, UrlViewCount, String]#OnTimerContext, out: Collector[String]) = {
      // JavaConverters (explicit .asScala) instead of the deprecated implicit
      // JavaConversions.
      import scala.collection.JavaConverters._
      val list = ListBuffer[UrlViewCount]()
      for (it <- items.get().asScala) {
        list += it
      }
      // The window is done — release the state. Without this every windowEnd
      // key keeps its ListState forever and the job's state grows unboundedly.
      items.clear()

      // 排序 — descending by count, keep the top n.
      val sortedItems = list.sortBy(-_.count).take(n)

      // 将 topn 结果写出
      // (The demo Thread.sleep(1000) was removed: blocking inside onTimer
      // stalls the whole task thread and delays checkpoints.)
      val result = new StringBuilder
      result.append("====================================\n")
      result.append("时间: ").append(new Timestamp(timestamp - 1)).append("\n")
      for (i <- sortedItems.indices) {
        val currentItem = sortedItems(i)
        result.append("No")
          .append(i + 1)
          .append(":")
          .append("  URL=")
          .append(currentItem.url)
          .append("  浏览量=")
          .append(currentItem.count)
          .append("\n")
      }
      result.append("====================================\n\n")
      out.collect(result.toString())
    }
  }

  /**
   * Attaches window metadata to the pre-aggregated count: for each window it
   * receives the single Long produced by [[MyCountAgg]] and emits a
   * [[UrlViewCount]] carrying the URL (the key) and the window's end timestamp.
   */
  class MyProcessResult extends ProcessWindowFunction[Long, UrlViewCount, String, TimeWindow] {
    override def process(key: String, context: Context, elements: Iterable[Long], out: Collector[UrlViewCount]) = {
      // The upstream AggregateFunction yields exactly one value per window.
      val count = elements.head
      val windowEnd = context.window.getEnd
      out.collect(UrlViewCount(key, windowEnd, count))
    }
  }

  /**
   * Incremental hit counter: every [[ApacheLogEvent]] adds one to a Long
   * accumulator; the window result is the final count.
   */
  class MyCountAgg extends AggregateFunction[ApacheLogEvent, Long, Long] {
    // Start each window's count at zero.
    override def createAccumulator(): Long = 0L

    // The event's content is irrelevant — only its occurrence is counted.
    override def add(value: ApacheLogEvent, accumulator: Long): Long = accumulator + 1L

    override def getResult(accumulator: Long): Long = accumulator

    // Session/merging windows combine partial counts by addition.
    override def merge(a: Long, b: Long): Long = a + b
  }

}
