package join

import ods.SourceKafka
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.util.Collector

object BasedTimeDoubleStream {

  /** Event-time interval join of two Kafka-backed streams.
    *
    * Records on both topics are assumed to be CSV strings of the form
    * `key,payload,timestampMillis` — field 0 is the join key, field 1 the
    * payload, field 2 the event time in epoch milliseconds (TODO confirm
    * against the producers). Elements sharing a key whose event times lie
    * within +/- 1 second of each other are joined and emitted as
    * `leftPayload-rightPayload`.
    */
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    val stream1: DataStream[String] = env.addSource(SourceKafka.getSource("stream1"))
    val stream2: DataStream[String] = env.addSource(SourceKafka.getSource("stream2"))

    // assignAscendingTimestamps assumes per-partition monotonically increasing
    // event times — out-of-order input would need a bounded-lateness watermark
    // strategy instead (NOTE(review): confirm source ordering guarantee).
    val keyed1: KeyedStream[String, String] =
      stream1.assignAscendingTimestamps(_.split(",")(2).toLong).keyBy(_.split(",")(0))
    val keyed2: KeyedStream[String, String] =
      stream2.assignAscendingTimestamps(_.split(",")(2).toLong).keyBy(_.split(",")(0))

    val joined: DataStream[String] = keyed1.intervalJoin(keyed2)
      .between(Time.seconds(-1), Time.seconds(1))
      .process(new ProcessJoinFunction[String, String, String] {
        override def processElement(
            left: String,
            right: String,
            ctx: ProcessJoinFunction[String, String, String]#Context,
            out: Collector[String]): Unit = {
          // FIX: emit the joined record downstream via the Collector.
          // The original println-ed it instead, so `joined` carried no
          // elements and joined.print() below produced no output.
          out.collect(left.split(",")(1) + "-" + right.split(",")(1))
        }
      })

    joined.print()
    env.execute("BasedTimeDoubleStream")
  }
}
