package com.atguigu.stream.watermark


import java.sql.Timestamp

import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.function.ProcessWindowFunction
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector

object EventTimeExample {

  /** Demo job: reads "word epochSeconds" pairs from a socket, assigns event-time
    * timestamps and watermarks with a 5-second bounded out-of-orderness, then
    * counts elements per key in 10-second tumbling event-time windows.
    */
  def main(args: Array[String]): Unit = {

    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    // Switch the job to event-time semantics (processing time is the default).
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    // Watermarks are emitted every 200 ms by default; widen the interval to 1 s.
    env.getConfig.setAutoWatermarkInterval(1000)

    // Raw text lines of the form "<key> <epochSeconds>".
    val source = env.socketTextStream("hadoop102", 9999, '\n')

    val windowCounts: DataStream[String] = source
      .map { line =>
        val fields = line.split(" ")
        // Flink event timestamps must be in milliseconds; input is in seconds.
        (fields(0), fields(1).toLong * 1000L)
      }
      // Timestamps/watermarks must be assigned BEFORE keyBy so every parallel
      // source subtask produces watermarks.
      .assignTimestampsAndWatermarks(
        // Tolerate events arriving up to 5 seconds late (out of order).
        new BoundedOutOfOrdernessTimestampExtractor[(String, Long)](Time.seconds(5)) {
          // The second tuple field carries the event time (ms).
          override def extractTimestamp(element: (String, Long)): Long = element._2
        }
      )
      .keyBy(_._1)
      .timeWindow(Time.seconds(10))
      .process(new WindowResult)

    windowCounts.print()
    env.execute()
  }

  /** Formats each closed window as "<start>-<end>窗口中有<n>个元素". */
  class WindowResult extends ProcessWindowFunction[(String, Long), String, String, TimeWindow] {
    override def process(key: String, context: Context, elements: Iterable[(String, Long)], out: Collector[String]): Unit = {
      val windowStart = new Timestamp(context.window.getStart)
      val windowEnd = new Timestamp(context.window.getEnd)
      out.collect(windowStart + "-" + windowEnd + "窗口中有" + elements.size + "个元素")
    }
  }

}