package com.zlm.app;

import com.zlm.bean.ApacheLogEvent;
import com.zlm.bean.PageViewCount;
import com.zlm.util.MyKafkaUtils;
import org.apache.commons.compress.utils.Lists;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Map;

/**
 * Author: Harbour
 * Date: 2021-05-14 9:00
 * Desc:
 */
/**
 * Flink streaming job: computes the Top-N hottest pages (by GET-request count per URL)
 * over a 10-minute sliding window that advances every 5 seconds, reading Apache access
 * logs from the Kafka topic {@code net-flow}.
 *
 * <p>Pipeline: Kafka source -> parse to {@link ApacheLogEvent} (with bounded-out-of-orderness
 * watermarks, 1 min) -> filter GET -> keyBy url -> sliding window aggregate (incremental
 * count + window metadata) -> keyBy windowEnd -> event-time-timer-driven Top-N ranking.
 */
public class NetworkFlowApp {
    public static void main(String[] args) throws Exception {
        // step 1. Set up the execution environment; the job reasons about historical
        // log timestamps, so it must run on event time.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // step 2. Read the raw access-log lines from Kafka.
        DataStream<String> dataStream = MyKafkaUtils.getKafkaInputStream(env, "net-flow", new SimpleStringSchema());

        // step 3. Parse each space-separated log line into an ApacheLogEvent.
        // Field layout (0-indexed): 0=ip, 1=userId, 3=timestamp "dd/MM/yyyy:HH:mm:ss", 5=method, 6=url.
        DataStream<ApacheLogEvent> apacheLogEventDataStream = dataStream.map(line -> {
            String[] fields = line.split(" ");
            return new ApacheLogEvent(
                    fields[0],
                    fields[1],
                    // e.g. 17/05/2015:10:05:35
                    new SimpleDateFormat("dd/MM/yyyy:HH:mm:ss").parse(fields[3]).getTime(),
                    fields[5],
                    fields[6]
            );
        }).assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<ApacheLogEvent>(Time.minutes(1)) {
            @Override
            public long extractTimestamp(ApacheLogEvent element) {
                return element.getTimestamp();
            }
        });

        // step 4. Count GET requests per URL in a 10-minute window sliding every 5 seconds.
        // Data later than the watermark but within 1 minute updates the window result;
        // anything later still is routed to the "late" side output instead of being dropped.
        DataStream<PageViewCount> pageViewCountDataStream = apacheLogEventDataStream.filter(e -> "GET".equalsIgnoreCase(e.getMethod()))
                .keyBy(ApacheLogEvent::getUrl)
                .timeWindow(Time.minutes(10), Time.seconds(5))
                .allowedLateness(Time.minutes(1))  // tolerate data up to 1 minute late
                .sideOutputLateData(new OutputTag<ApacheLogEvent>("late"){})
                .aggregate(new MyApacheLogEventIncrementAggregationFunctions(),
                        new MyApacheLogEventFullWindowFunctions());

        // step 5. Group all per-URL counts of the same window together and rank the top 5.
        pageViewCountDataStream.keyBy(PageViewCount::getWindowEnd)
                .process(new MyPageViewCountTopNProcessFunction(5))
                .print();

        env.execute("Network Flow analysis");
    }

    /**
     * Incremental (per-record) aggregation: counts events in the window.
     * Accumulator and result are both the running count.
     */
    private static class MyApacheLogEventIncrementAggregationFunctions implements AggregateFunction<ApacheLogEvent, Long, Long> {

        @Override
        public Long createAccumulator() {
            return 0L;
        }

        @Override
        public Long add(ApacheLogEvent value, Long accumulator) {
            return accumulator + 1;
        }

        @Override
        public Long getResult(Long accumulator) {
            return accumulator;
        }

        @Override
        public Long merge(Long a, Long b) {
            return a + b;
        }
    }

    /**
     * Wraps the pre-aggregated count with its key (url) and window-end timestamp
     * so downstream operators can regroup results per window.
     */
    private static class MyApacheLogEventFullWindowFunctions implements WindowFunction<Long, PageViewCount, String, TimeWindow> {
        @Override
        public void apply(String url, TimeWindow window, Iterable<Long> input, Collector<PageViewCount> out) throws Exception {
            // The incremental AggregateFunction emits exactly one count per window.
            long count = input.iterator().next();
            out.collect(new PageViewCount(url, window.getEnd(), count));
        }
    }

    /**
     * Collects all PageViewCount records sharing the same windowEnd key, then — driven by
     * event-time timers — emits the Top-{@code size} URLs by count as a formatted string.
     *
     * <p>State is kept in a MapState keyed by url so a late update for a url overwrites
     * its previous count instead of duplicating it. The state is cleared one minute
     * (the allowed lateness) after the window end.
     */
    private static class MyPageViewCountTopNProcessFunction extends KeyedProcessFunction<Long, PageViewCount, String> {

        private final int size;
        private MapState<String, Long> pageViewCountMapState;

        public MyPageViewCountTopNProcessFunction(int size) {
            this.size = size;
        }

        @Override
        public void open(Configuration parameters) throws Exception {
            // Rich-function hook: obtain keyed state from the runtime context.
            // Map key = url, value = count; the map semantics deduplicate late updates.
            pageViewCountMapState = getRuntimeContext().getMapState(new MapStateDescriptor<>("net-flow", String.class, Long.class));
        }

        @Override
        public void processElement(PageViewCount value, Context ctx, Collector<String> out) throws Exception {
            pageViewCountMapState.put(value.getUrl(), value.getCount());

            // FIX: these must be EVENT-time timers. windowEnd is an event-time value and the
            // job runs on event time; processing-time timers set to historical event-time
            // instants would fire at the wrong moments and the cleanup comparison in
            // onTimer() would never match.
            // Cleanup timer: clear state once the allowed lateness (1 min) has passed.
            ctx.timerService().registerEventTimeTimer(value.getWindowEnd() + 60 * 1000L);

            // Output timer: fires just after the window closes to emit the ranking.
            ctx.timerService().registerEventTimeTimer(value.getWindowEnd() + 1);
        }

        @Override
        public void onTimer(long timestamp, OnTimerContext ctx, Collector<String> out) throws Exception {

            // If this is the cleanup timer (windowEnd + 60 s, i.e. allowed lateness expired),
            // drop the state for this window and emit nothing.
            if (timestamp == ctx.getCurrentKey() + 60 * 1000L) {
                pageViewCountMapState.clear();
                return;
            }

            // Snapshot the state, sort by count descending, and format the top `size` entries.
            ArrayList<Map.Entry<String, Long>> pageViewCounts = Lists.newArrayList(pageViewCountMapState.entries().iterator());
            pageViewCounts.sort((e1, e2) -> -1 * Long.compare(e1.getValue(), e2.getValue()));

            StringBuilder result = new StringBuilder();
            result.append("==============================").append("\n");
            result.append(ctx.timestamp()).append("\n");

            for (int i = 0; i < Math.min(size, pageViewCounts.size()); i++) {
                result.append(pageViewCounts.get(i)).append("\n");
            }

            // NOTE: the original Thread.sleep(1000L) here was removed — sleeping inside
            // onTimer blocks the operator thread, stalling watermarks and checkpoints.

            out.collect(result.toString());
        }
    }
}
