package analysis;


import beans.ApacheLogEvent;
import beans.PageViewCount;
import org.apache.commons.compress.utils.Lists;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Date;
import java.util.Map;
import java.util.regex.Pattern;

/**
 * @author zkq
 * @date 2022/10/2 18:17
 */
//Improved version: the earlier variant emitted a ranking every 5 seconds, but with lateness of up to
//1 minute its list-based state accumulated duplicate entries per URL, producing wrong results.
//This version keeps per-URL counts in MapState and cleans state once the window can no longer fire.
public class HotPages_Optimize {
    public static void main(String[] args) throws Exception {
        // 1. Set up the streaming environment (parallelism 1 keeps console output ordered for the demo).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Read raw Apache access-log lines from a socket.
        DataStreamSource<String> inputStream = env.socketTextStream("hadoop102", 7777);

        // Hoisted out of the lambdas: the original built a new SimpleDateFormat and recompiled the
        // regex for EVERY record. Both SimpleDateFormat and Pattern are Serializable, so Flink ships
        // a copy to each task instance via closure serialization — no sharing across threads.
        SimpleDateFormat logTimeFormat = new SimpleDateFormat("dd/MM/yy:HH:mm:ss");
        Pattern staticResourcePattern = Pattern.compile("^((?!\\.(css|js|png|ico)$).)*$");

        // 3. Parse each line into an ApacheLogEvent and assign event-time timestamps
        //    with a 1-second bounded-out-of-orderness watermark.
        SingleOutputStreamOperator<ApacheLogEvent> logStream = inputStream
                .map(data -> {
                    String[] splits = data.split(" ");
                    // splits[3] is the log timestamp, e.g. "17/05/2015:10:05:03"
                    long ts = logTimeFormat.parse(splits[3]).getTime();
                    return new ApacheLogEvent(splits[0], splits[1], ts, splits[5], splits[6]);
                })
                .assignTimestampsAndWatermarks(WatermarkStrategy.<ApacheLogEvent>forBoundedOutOfOrderness(Duration.ofSeconds(1))
                        .withTimestampAssigner(new SerializableTimestampAssigner<ApacheLogEvent>() {
                            @Override
                            public long extractTimestamp(ApacheLogEvent element, long recordTimestamp) {
                                return element.getTimestamp();
                            }
                        })
                );
        logStream.print("data");

        // Side-output tag for records later than watermark + allowedLateness.
        // NOTE(review): the late stream is tagged but never consumed downstream — wire up
        // aggregate.getSideOutput(outputTag) if late data should be inspected.
        OutputTag<ApacheLogEvent> outputTag = new OutputTag<ApacheLogEvent>("late") {
        };

        // 4. Filter to GET requests on non-static resources, then count page views per URL
        //    in 10-minute windows sliding every 5 seconds. allowedLateness keeps windows
        //    open for 1 extra minute so late events refresh the counts.
        SingleOutputStreamOperator<PageViewCount> aggregate = logStream
                .filter(data -> "GET".equals(data.getMethod()))
                .filter(data -> staticResourcePattern.matcher(data.getUrl()).matches())
                .keyBy(data -> data.getUrl())
                .window(SlidingEventTimeWindows.of(Time.minutes(10), Time.seconds(5)))
                .allowedLateness(Time.minutes(1))
                .sideOutputLateData(outputTag)
                .aggregate(new PageCountAgg(), new PageCountResult());

        aggregate.print("agg");

        // 5. Re-key by window end, gather all page counts of the same window, and emit the TopN.
        SingleOutputStreamOperator<String> process = aggregate
                .keyBy(data -> data.getWindowEnd())
                .process(new TopNHotPage(5));

        process.print();

        env.execute("hot page");

    }
    /**
     * Incremental window aggregate: counts log events per key/window.
     * The accumulator is a plain running {@code Long} count, merged by addition.
     */
    public static class PageCountAgg implements AggregateFunction<ApacheLogEvent, Long, Long> {

        @Override
        public Long createAccumulator() {
            // Start every window at zero.
            return 0L;
        }

        @Override
        public Long add(ApacheLogEvent event, Long runningCount) {
            // Each incoming event contributes exactly one view.
            return runningCount + 1L;
        }

        @Override
        public Long getResult(Long runningCount) {
            return runningCount;
        }

        @Override
        public Long merge(Long left, Long right) {
            // Merging two partial windows just sums their counts.
            return Long.sum(left, right);
        }
    }
    /**
     * Wraps the pre-aggregated count from {@link PageCountAgg} together with its
     * key (the page URL) and the window end timestamp into a {@link PageViewCount}.
     */
    public static class PageCountResult extends ProcessWindowFunction<Long, PageViewCount, String, TimeWindow> {
        @Override
        public void process(String url, Context context, Iterable<Long> elements, Collector<PageViewCount> out) throws Exception {
            // The upstream incremental aggregate emits exactly one value per window,
            // so the iterable's first element is the full count.
            out.collect(new PageViewCount(url, context.window().getEnd(), elements.iterator().next()));
        }
    }
    /**
     * Collects the per-window page counts (keyed by window end) and emits a TopN
     * ranking once the watermark passes the window end.
     *
     * <p>Because {@code allowedLateness} lets the upstream window re-fire with updated
     * counts, per-URL counts are stored in {@link MapState} so a later firing OVERWRITES
     * the earlier count for the same URL. A list would append instead, producing wrong
     * output such as "No1: /presentations 3, No2: /presentations 2" for the same page.
     */
    public static class TopNHotPage extends KeyedProcessFunction<Long, PageViewCount, String> {
        // Must match the pipeline's allowedLateness (1 minute): after windowEnd + this,
        // the window can never fire again, so state can be dropped.
        private static final long STATE_CLEANUP_DELAY_MS = 60 * 1000L;

        private final Integer topN;
        // URL -> latest count for the window identified by the current key (window end).
        private MapState<String, Long> mapState;

        public TopNHotPage(Integer topN) {
            this.topN = topN;
        }

        @Override
        public void open(Configuration parameters) throws Exception {
            mapState = getRuntimeContext().getMapState(
                    new MapStateDescriptor<>("page-count-map", String.class, Long.class));
        }

        @Override
        public void processElement(PageViewCount value, Context ctx, Collector<String> out) throws Exception {
            // Late firings for the same URL replace the previous count — no duplicates.
            mapState.put(value.getUrl(), value.getCount());
            // Fires just after the window end to emit the ranking.
            ctx.timerService().registerEventTimeTimer(value.getWindowEnd() + 1);
            // Fires once the allowed lateness has elapsed so the state can be cleared.
            ctx.timerService().registerEventTimeTimer(value.getWindowEnd() + STATE_CLEANUP_DELAY_MS);
        }

        @Override
        public void onTimer(long timestamp, OnTimerContext ctx, Collector<String> out) throws Exception {
            // Cleanup timer: the window can no longer fire — free the state and stop.
            if (timestamp == ctx.getCurrentKey() + STATE_CLEANUP_DELAY_MS) {
                mapState.clear();
                return;
            }

            ArrayList<Map.Entry<String, Long>> entries = Lists.newArrayList(mapState.entries().iterator());
            // Descending by count. Long.compare fixes the original int-subtraction
            // comparator (o2.intValue() - o1.intValue()), which overflows for large counts.
            entries.sort((a, b) -> Long.compare(b.getValue(), a.getValue()));

            StringBuilder result = new StringBuilder();
            result.append("==================================\n");
            result.append("窗口结束时间：").append(new Timestamp(timestamp - 1L)).append("\n");

            for (int i = 0; i < Math.min(entries.size(), topN); i++) {
                Map.Entry<String, Long> pageViewCount = entries.get(i);
                result.append("No").append(i + 1).append(":")
                        .append(" 页面URL=")
                        .append(pageViewCount.getKey())
                        .append(" 浏览量=")
                        .append(pageViewCount.getValue())
                        .append("\n");

            }
            result.append("====================================\n\n");
            out.collect(result.toString());
            // Demo-only throttle so console output stays readable. NOTE(review): this blocks
            // the task thread (and checkpointing) — remove for any non-demo deployment.
            Thread.sleep(1000);
        }
    }
}
