package com.hrt.cityTraffic.distribution

import java.util.Properties

import com.hrt.cityTraffic.utils.TrafficInfo
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.flink.util.Collector

import scala.collection.mutable

/**
 *  实时车辆分布情况(小的数据量使用set进行数据的去重)
 *  实时车辆分布情况，是指在一段时间内（比如：10分钟）整个城市中每个区分布多少辆车。这里要注意车辆的去重，
 *  因为在10分钟内一定会有很多的车，经过不同的卡口。这些车牌相同的车，我们只统计一次。其实就是根据车牌号去重。
 */
object AreaDistributionAnalysis {

  /**
   * Entry point: consumes raw traffic records from Kafka, keys them by area,
   * and every 10 seconds emits the number of DISTINCT cars (deduplicated by
   * license plate) seen in each area during that window.
   */
  def main(args: Array[String]): Unit = {
    val streamEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    import org.apache.flink.streaming.api.scala._
    streamEnv.setParallelism(1)

    // Kafka consumer configuration.
    val props = new Properties()
    props.setProperty("bootstrap.servers", "192.168.56.13:9092,192.168.56.14:9092,192.168.56.15:9092")
    props.setProperty("group.id", "hrt_001")
    // Start from the earliest offset when no committed offset exists.
    props.setProperty("auto.offset.reset", "earliest")
    props.setProperty("enable.auto.commit", "false")

    // Kafka source: each record is a comma-separated traffic event.
    // Field layout assumed: actionTime, monitorId, cameraId, car-plate (arr(3) -> car),
    // speed, areaId, roadId — TODO confirm against TrafficInfo's constructor.
    val stream: DataStream[TrafficInfo] = streamEnv.addSource(
      new FlinkKafkaConsumer[String]("t_traffic", new SimpleStringSchema(), props)
    )
      .map(line => {
        val arr = line.split(",") // was `var`; never reassigned
        new TrafficInfo(arr(0).toLong, arr(1), arr(2), arr(3), arr(4).toDouble, arr(5), arr(6))
      })

    stream.keyBy(_.areaId)
      .timeWindow(Time.seconds(10))
      .apply(
        (k: String, window: TimeWindow, input: Iterable[TrafficInfo], out: Collector[String]) => {
          // Deduplicate by license plate: a car passing several checkpoints
          // within the window is counted only once.
          val distinctCars = input.iterator.map(_.car).toSet
          // NOTE: fixed typo in the original message ("其实时间" -> "起始时间").
          out.collect(s"区域${k},在窗口起始时间${window.getStart},到窗口结束时间${window.getEnd} ,一共有${distinctCars.size} 辆车")
        }
      )
      // BUG FIX: the result stream previously had no sink attached, so the
      // computed counts were discarded. Print them to stdout.
      .print()

    streamEnv.execute()
  }
}
