package project.p1_car

import org.apache.flink.api.common.functions.AggregateFunction
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.scala.function.WindowFunction
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.kafka.clients.consumer.ConsumerConfig

import java.util.{Date, Properties}
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector


object D2_windown_avg_speed {

  /**
   * Estimates congestion at each traffic monitor checkpoint.
   *
   * Approach: consume raw traffic logs from Kafka, then compute the average
   * vehicle speed per checkpoint over an event-time sliding window
   * (10-second window, sliding every 5 seconds) and print the result.
   */

  val TOPIC: String = "flink_traffic_log"

  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    import org.apache.flink.streaming.api.scala._

    // Drive windows by the timestamps carried in the records (event time),
    // not by wall-clock processing time.
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    val prop = new Properties()
    prop.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "node1:9092")
    prop.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "g1")

    val kafkaConsumer = new FlinkKafkaConsumer[String](TOPIC, new SimpleStringSchema(), prop)
    kafkaConsumer.setStartFromLatest()

    val ds: DataStream[String] = env.addSource(kafkaConsumer)
    // FIX: the original code called ds.assignAscendingTimestamps(...) here and
    // discarded the result. DataStream transformations return a NEW stream, so
    // that call only registered a dangling, sink-less operator in the job
    // graph; the pipeline below assigns timestamps/watermarks properly with a
    // bounded-out-of-orderness extractor, so the dead call has been removed.

    val res: DataStream[(Long, Long, String, Double, Int)] = ds.map(data => {
      // Raw log line is comma-separated:
      // action_time, monitor_id, camera_id, car, speed, road_id, area_id
      val arrs: Array[String] = data.split(',')
      TrafficInfo(arrs(0).toLong, arrs(1), arrs(2), arrs(3), arrs(4).toDouble, arrs(5), arrs(6))
    }).assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor[TrafficInfo](Time.seconds(5)) {
      // Event time comes from the record's action_time field;
      // tolerate up to 5 seconds of out-of-orderness.
      override def extractTimestamp(element: TrafficInfo): Long = element.action_time
    }).keyBy(_.monitor_id)
      .timeWindow(Time.seconds(10), Time.seconds(5)) // 10s sliding window, 5s slide
      .aggregate(
        // Incremental pre-aggregation: fold each record into an
        // accumulator of (speed sum, record count) per key/window.
        new AggregateFunction[TrafficInfo, (Double, Int), (Double, Int)] {

          override def createAccumulator(): (Double, Int) = (0.0, 0)

          override def add(value: TrafficInfo, accumulator: (Double, Int)): (Double, Int) =
            (accumulator._1 + value.speed, accumulator._2 + 1)

          override def getResult(accumulator: (Double, Int)): (Double, Int) = accumulator

          override def merge(a: (Double, Int), b: (Double, Int)): (Double, Int) =
            (a._1 + b._1, a._2 + b._2)

        },
        // Window function: converts the single (sum, count) accumulator into
        // (windowStart, windowEnd, monitorId, avgSpeed, vehicleCount).
        new WindowFunction[(Double, Int), (Long, Long, String, Double, Int), String, TimeWindow] {

          override def apply(key: String, window: TimeWindow, input: Iterable[(Double, Int)], out: Collector[(Long, Long, String, Double, Int)]): Unit = {

            // The upstream AggregateFunction emits exactly one element per window.
            val (speedSum, count) = input.iterator.next()

            // Round the average speed to two decimal places for display.
            val avgSpeed: Double = (speedSum / count).formatted("%.2f").toDouble

            out.collect((window.getStart, window.getEnd, key, avgSpeed, count))
          }
        }
      )

    // Render the window boundaries as java.util.Date for readable console output.
    res.map(data => {
      (new Date(data._1), new Date(data._2), data._3, data._4, data._5)
    }).print()

    env.execute("re")
  }

}
