package com.xcc.flink.hot

import java.lang
import java.text.SimpleDateFormat
import java.util.Date

import org.apache.flink.api.common.functions.AggregateFunction
import org.apache.flink.api.common.state.{ListState, ListStateDescriptor}
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.KeyedProcessFunction
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.function.ProcessWindowFunction
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector

import scala.collection.mutable.ListBuffer


// One parsed Apache access-log record. Sample raw line:
//83.149.9.216 - - 17/05/2015:10:05:03 +0000 GET /presentations/logstash-monitorama-2013/images/kibana-search.png
// eventTime is epoch milliseconds parsed from the log's "dd/MM/yyyy:HH:mm:ss" field.
case class ApacheLogEvent(ip: String, userId: String, eventTime: Long, method: String, url: String)

case class LogViewCount(url: String, windowEnd: Long, count: Long)

//Real-time traffic statistics: top-N most-visited URLs per 15-minute window, sliding every 5 minutes.
object Demo02_RealtimeNetTopN {

  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    // Hoisted out of the per-record lambdas: the original code built a new SimpleDateFormat
    // and recompiled the regex for EVERY record. Both are Serializable, so capturing them
    // in the operator closures is safe and they are created once per task instead.
    val timeFormat = new SimpleDateFormat("dd/MM/yyyy:HH:mm:ss")
    val staticResourcePattern = "^((?!\\.(ico|css|js)$).)*$".r

    // Read the log file, parse each line into an ApacheLogEvent, and assign watermarks.
    val logDStream: DataStream[ApacheLogEvent] = env.readTextFile(getClass.getResource("/apache.log").getPath)
      .map {
        item => {
          val arr: Array[String] = item.split(" ")
          val date: Date = timeFormat.parse(arr(3).trim)
          ApacheLogEvent(arr(0).trim, arr(1).trim, date.getTime, arr(5).trim, arr(6).trim)
        }
      }
      .filter( // drop static resources (.ico/.css/.js) — only page URLs count as traffic
        data => (staticResourcePattern findFirstIn data.url).nonEmpty)
      .assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor[ApacheLogEvent](Time.seconds(1)) { // watermark: 1s bounded out-of-orderness
        override def extractTimestamp(element: ApacheLogEvent) = element.eventTime
      })

    // Sliding window per URL, pre-aggregate counts, then rank per window end.
    logDStream.keyBy(_.url)
      .timeWindow(Time.minutes(15), Time.minutes(5))
      .allowedLateness(Time.seconds(5)) // accept late records up to 5s after the watermark passes windowEnd
      .aggregate(new MyLogCountAgg, new MyLogViewAgg)
      .keyBy(_.windowEnd)
      .process(new MyLogProcess(5))
      .print() // without a sink the ranked output was computed but never emitted anywhere

    env.execute()
  }

}

// Incremental per-window counter: AggregateFunction[IN = ApacheLogEvent, ACC = Long, OUT = Long].
// Each event adds one to the accumulator; the result is the plain count.
class MyLogCountAgg extends AggregateFunction[ApacheLogEvent, Long, Long] {

  override def createAccumulator(): Long = 0L

  override def add(value: ApacheLogEvent, accumulator: Long): Long = {
    val next = accumulator + 1
    next
  }

  override def getResult(accumulator: Long): Long = accumulator

  override def merge(a: Long, b: Long): Long = {
    val combined = a + b
    combined
  }
}

// Window wrapper: ProcessWindowFunction[IN = Long, OUT = LogViewCount, KEY = String, W = TimeWindow].
// Receives the single pre-aggregated count produced by MyLogCountAgg and tags it
// with the URL key and the window's end timestamp.
class MyLogViewAgg extends ProcessWindowFunction[Long, LogViewCount, String, TimeWindow] {

  override def process(key: String, context: Context, elements: Iterable[Long], out: Collector[LogViewCount]): Unit = {
    // elements holds exactly one value: the accumulator result for this (key, window).
    val count = elements.head
    out.collect(LogViewCount(key, context.window.getEnd, count))
  }

}

// KeyedProcessFunction[K = windowEnd, I = LogViewCount, O = String].
// Buffers every URL's count for one windowEnd in list state, and once the
// watermark passes the window end (timer at windowEnd + 1), sorts the counts,
// emits the top-N as a formatted string, and clears the state.
case class MyLogProcess(topN: Int) extends KeyedProcessFunction[Long, LogViewCount, String] {

  // Per-key (per windowEnd) buffer of all URL counts seen for that window.
  var listState: ListState[LogViewCount] = _

  override def open(parameters: Configuration): Unit = {
    listState = getRuntimeContext.getListState[LogViewCount](new ListStateDescriptor[LogViewCount]("list-state", classOf[LogViewCount]))
  }

  override def processElement(value: LogViewCount, ctx: KeyedProcessFunction[Long, LogViewCount, String]#Context, out: Collector[String]): Unit = {
    listState.add(value)
    // Fire 1 ms after the window end, i.e. once all panes for this windowEnd have arrived.
    // Registering the same timer repeatedly is harmless: event-time timers are deduplicated per timestamp.
    ctx.timerService().registerEventTimeTimer(value.windowEnd + 1)
  }

  override def onTimer(timestamp: Long, ctx: KeyedProcessFunction[Long, LogViewCount, String]#OnTimerContext, out: Collector[String]): Unit = {
    // Drain the buffered counts into a local collection, then release the state
    // (the original left the state uncleared and emitted nothing — an unbounded leak).
    val allCounts = ListBuffer[LogViewCount]()
    val iter = listState.get().iterator()
    while (iter.hasNext) {
      allCounts += iter.next()
    }
    listState.clear()

    // Rank descending by hit count and keep the top N.
    val topCounts = allCounts.sortBy(-_.count).take(topN)

    val sb = new StringBuilder
    sb.append("window end: ").append(new Date(timestamp - 1)).append("\n")
    topCounts.zipWithIndex.foreach { case (view, rank) =>
      sb.append("No.").append(rank + 1)
        .append(" url=").append(view.url)
        .append(" count=").append(view.count)
        .append("\n")
    }
    out.collect(sb.toString())
  }
}


