package com.mjf

import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.{KeyedProcessFunction, ProcessFunction}
import org.apache.flink.streaming.api.scala._
import org.apache.flink.util.Collector

/**
 * Demo job: reads "key,epochSeconds" lines from a socket, assigns ascending
 * event-time timestamps (seconds -> millis), and runs a keyed process function
 * that registers per-element event-time timers.
 */
object TestTimer {
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Single task slot keeps console output ordered for the demo.
    env.setParallelism(1)
    // Event-time semantics (pre-Flink-1.12 style; deprecated in newer versions).
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    // Raw text source, one "key,timestamp" record per line.
    val rawLines: DataStream[String] = env.socketTextStream("hadoop103", 9999)

    val timedOut: DataStream[(String, Long)] = rawLines
      .map(line => {
        val fields = line.split(",")
        (fields(0), fields(1).toLong)
      })
      // Timestamps arrive in seconds; Flink expects milliseconds.
      .assignAscendingTimestamps(_._2 * 1000L)
      .keyBy(_._1)
      .process(new TestProcess())

    timedOut.print()

    env.execute("test")
  }
}

/**
 * Keyed process function over (key, epochSeconds) tuples.
 *
 * Every element is forwarded unchanged, and an event-time timer is registered
 * 3 seconds after the element's event timestamp (seconds scaled to millis).
 * When the timer fires, a "<key>超时" marker tuple is emitted with the timer's
 * timestamp.
 */
class TestProcess() extends KeyedProcessFunction[String, (String, Long), (String, Long)] {
  override def processElement(value: (String, Long), ctx: KeyedProcessFunction[String, (String, Long), (String, Long)]#Context, out: Collector[(String, Long)]): Unit = {
    // Pass the element straight through.
    out.collect(value)
    // Fire 3s (event time) after this element's timestamp.
    val fireAt = value._2 * 1000L + 3000L
    ctx.timerService().registerEventTimeTimer(fireAt)
    // Processing-time variant, kept for experimentation:
    // ctx.timerService().registerProcessingTimeTimer(value._2 * 1000L + 3000L)
  }

  override def onTimer(timestamp: Long, ctx: KeyedProcessFunction[String, (String, Long), (String, Long)]#OnTimerContext, out: Collector[(String, Long)]): Unit = {
    // Emit the current key with a timeout marker ("超时" = "timed out").
    val marker = ctx.getCurrentKey + "超时"
    out.collect((marker, timestamp))
  }
}