package cn.azzhu.day04

import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.KeyedProcessFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.util.Collector

/**
 * @author azzhu
 * @create 2020-09-21 17:15:08
 */
/**
 * Demonstrates PROCESSING-time timers in a [[KeyedProcessFunction]].
 *
 * Reads whitespace-separated "key seconds" pairs from a socket, keys the
 * stream by the first field, and registers a timer 10 seconds of machine
 * time after each element arrives.
 */
object ProcessingTimeTimer {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    val stream = env.socketTextStream("hadoop105", 9999)
      .map(line => {
        // Expected input: "<key> <epoch-seconds>"; second field is converted
        // to epoch milliseconds. Malformed lines will fail the job (demo code).
        val arr = line.split(" ")
        (arr(0), arr(1).toLong * 1000)
      })
      .keyBy(_._1)
      .process(new TimerKeyedFunction)

    stream.print()

    // Job name fixed to match the object: this example is about
    // processing-time timers, not event-time timers.
    env.execute("ProcessingTimeTimer")
  }

  /**
   * Registers a processing-time timer 10 seconds after each element is seen
   * and emits a message when the timer fires.
   */
  class TimerKeyedFunction extends KeyedProcessFunction[String, (String, Long), String] {
    // Called once per incoming element.
    override def processElement(value: (String, Long), ctx: KeyedProcessFunction[String, (String, Long), String]#Context, out: Collector[String]): Unit = {
      // BUG FIX: the original called registerEventTimeTimer with a
      // processing-time timestamp; with no watermarks assigned, that event-time
      // timer would never fire. Register a processing-time timer, as the
      // original comment ("current machine time + 10s") intended.
      ctx.timerService().registerProcessingTimeTimer(ctx.timerService().currentProcessingTime() + 10 * 1000L)
    }

    // Invoked by the runtime when a previously registered timer fires.
    override def onTimer(timestamp: Long, ctx: KeyedProcessFunction[String, (String, Long), String]#OnTimerContext, out: Collector[String]): Unit = {
      out.collect("定时器触发了！ 定时器执行的时间戳是： " + timestamp)
    }
  }
}
