package com.gin.monitor.agg.monitor;

import com.gin.monitor.agg.connector.kafka.FlinkKafkaUtils;
import com.gin.monitor.agg.utils.JobEnvUtils;
import com.gin.monitor.agg.vo.AvgSpeedInfo;
import com.gin.monitor.agg.vo.TrafficInfo;
import org.apache.flink.api.common.eventtime.*;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;


/**
 * 实时车辆超速监控: 计算平均速度
 * cd /opt/software/kafka_2.11-2.0.0/bin
 * bin/kafka-server-start.sh -daemon config/server.properties
 * ./kafka-console-consumer.sh --bootstrap-server node01:9092 --topic t_traffic_monitor --group group01 --property print.key=true --property print.value=true --property key.separator=,
 * 测试生产
 * ./kafka-console-producer.sh --broker-list node01:9092 --topic t_traffic_monitor
 *
 * 测试时注意: 使用当前时间戳
 * 1620637883000,0002,75312,京L22188,60.1,27,31
 * 1620637884000,0002,75312,京L22188,70.1,27,31
 * 1620637885000,0002,75312,京L22188,80.1,27,31
 *
 * @author gin
 * @date 2021/4/30
 */
public class TopNMonitorAnalysis {

    public static void main(String[] args) {
        StreamExecutionEnvironment env = JobEnvUtils.initEnv(args);

        //streamKafka 海量的数据流，不可以存入广播状态流中
        //streamMysql 从Mysql数据库中读取的卡口限速信息，特点：数据量少，更新不频繁

        //默认最近开始读取
        //DataStreamSource<TrafficInfo> streamKafka = env.addSource(FlinkKafkaUtils.getKafkaConsumer());
        //从第一行开始读取. 测试用!
        SingleOutputStreamOperator<TrafficInfo> streamKafka = env
                .addSource(FlinkKafkaUtils.getKafkaConsumer().setStartFromEarliest())
                //指定时间戳字段, 及允许延时时间
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<TrafficInfo>forBoundedOutOfOrderness(Duration.ofSeconds(20))
                        .withTimestampAssigner((event, timestamp) -> event.actionTime));

        //streamKafka.print();

        streamKafka
                .keyBy(new KeySelector<TrafficInfo, String>() {
                    @Override
                    public String getKey(TrafficInfo trafficInfo) throws Exception {
                        return trafficInfo.getMonitorId();
                    }
                })
                .timeWindow(Time.seconds(30), Time.seconds(10))
        .aggregate(new AggregateFunction<TrafficInfo, Tuple2<Double, Long>, Tuple2<Double, Long>>() {
            @Override
            public Tuple2<Double, Long> createAccumulator() {
                //设计累加器: 二元组(车速之和2, 车辆数量)
                return new Tuple2<>(0.0D, 0L);
            }

            @Override
            public Tuple2<Double, Long> add(TrafficInfo trafficInfo, Tuple2<Double, Long> add) {
                Tuple2<Double, Long> res = new Tuple2<>();
                res.setField(add.f0 + trafficInfo.speed, 0);
                res.setField(add.f1 + 1, 1);
                return res;
            }

            @Override
            public Tuple2<Double, Long> getResult(Tuple2<Double, Long> result) {
                return new Tuple2<>(result.f0, result.f1);
            }

            @Override
            public Tuple2<Double, Long> merge(Tuple2<Double, Long> acc1, Tuple2<Double, Long> acc2) {
                return new Tuple2<>(acc1.f0 + acc2.f0, acc1.f1 + acc2.f1);
            }
        }, new WindowFunction<Tuple2<Double, Long>, AvgSpeedInfo, String, TimeWindow>() {
            @Override
            public void apply(String k, TimeWindow window, Iterable<Tuple2<Double, Long>> input, Collector<AvgSpeedInfo> out) throws Exception {
                Tuple2<Double, Long> next = input.iterator().next();
                double avg = next.f0 / next.f1;
                out.collect(new AvgSpeedInfo(window.getStart(), window.getEnd(), k, avg, next.f1.intValue()));
            }
        })
        //窗口2 ，负责对窗口1输出的数据进行排序取TopN
        .timeWindowAll(Time.seconds(10))
        .apply(new AllWindowFunction<AvgSpeedInfo, String, TimeWindow>() {
            @Override
            public void apply(TimeWindow window, Iterable<AvgSpeedInfo> input, Collector<String> out) throws Exception {
                //map集合，保存每个卡口的车辆数量,有可能在窗口1多次触发的时候（AllowedLateness），同一个卡口会有多条数据，留下车辆数量最多的
                //当前处理数据迟到，采用的是 Watermark，不需要map去重
                ArrayList<AvgSpeedInfo> avg = new ArrayList<>();
                Iterator<AvgSpeedInfo> iterator = input.iterator();
                while (iterator.hasNext()) {
                    avg.add(iterator.next());
                }
                Collections.sort(avg);
                //降序排序取Top1
                out.collect(String.valueOf(avg.get(0)));
            }
        })
        .print();

        try {
            env.execute();
        } catch (Exception e) {
            e.printStackTrace();
        }

    }

}
