package Test

import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.scala._
import org.apache.flink.connector.kafka.source.KafkaSource
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.scala.function.ProcessAllWindowFunction
import org.apache.flink.streaming.api.windowing.assigners.SlidingProcessingTimeWindows
import org.apache.flink.streaming.api.windowing.assigners.WindowAssigner.WindowAssignerContext
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.table.runtime.operators.window.assigners.SlidingWindowAssigner
import org.apache.flink.util.Collector

/**
 * Gong Yingjie:
 * Every 1 minute, compute the 5-minute window's top 3 most free-flowing checkpoints.
 * "Most free-flowing" = within the same window, highest traffic volume AND highest
 * average speed.
 * Results should be written to the MySQL table g_resource.MonitorTop3 with columns:
 *   start_time  window start time
 *   end_time    window end time
 *   monitorId   checkpoint id
 *   cars        traffic volume (vehicle count)
 *   avgSpeed    average speed
 */

object rw3 {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // A single parallel instance keeps the non-keyed (windowAll) pipeline simple.
    env.setParallelism(1)

    // Kafka source: consume the "test" topic from the earliest offset,
    // deserializing record values as plain strings.
    val kafkaSource = KafkaSource
      .builder[String]()
      .setBootstrapServers("192.168.23.62:9092")
      .setTopics("test")
      .setStartingOffsets(OffsetsInitializer.earliest())
      .setValueOnlyDeserializer(new SimpleStringSchema())
      .build()

    // Processing-time windowing is used below, so no watermarks are required.
    val stream = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "test")

    // Sliding window: 5-minute span, advancing every 1 minute.
    stream
      .windowAll(SlidingProcessingTimeWindows.of(Time.minutes(5), Time.minutes(1)))
      .process(new ProcessAllWindowFunction[String, String, TimeWindow] {
        // NOTE(review): this is still a stub — the top-3 aggregation and the
        // MySQL sink described in the header comment are not implemented yet,
        // so the pipeline currently consumes records and emits nothing.
        override def process(context: Context, elements: Iterable[String], out: Collector[String]): Unit = {

        }
      })

    env.execute()
  }
}
