package org.example.hoturls;


import org.apache.commons.compress.utils.Lists;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.example.hotcommodity.HotItems;
import org.example.hotcommodity.ItemViewCount;
import org.example.hotcommodity.UserBehavior;

import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Properties;
public class HotUrls {
    public static void main(String[] args) throws Exception {
        // Create the streaming environment; parallelism 1 keeps the demo output ordered.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        /*
         * NOTE(review): these Kafka consumer properties are never used below — the Kafka source
         * was replaced by the in-memory collection. Kept for reference: to re-consume a topic
         * from the beginning, use a brand-new group.id together with auto.offset.reset=earliest.
         * (When the FlinkKafkaConsumer constructor is given a deserialization schema, the
         * key/value deserializer properties are redundant.)
         */
        Properties ps = new Properties();
        ps.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.26.120:9092"); // broker address
        ps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "consumer_group"); // consumer group
        ps.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); // key deserializer
        ps.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); // value deserializer
        ps.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); // offset reset strategy

        // In-memory demo data; the third field is the event time in epoch seconds.
        DataStream<Log> dataSource = env.fromCollection(Arrays.asList(
                new Log("192.168.0.1", 1L, 1558430841L, "get", "admin/courses"),
                new Log("192.168.0.2", 3L, 1558430842L, "put", "admin/classrooms"),
                new Log("192.168.0.3", 2L, 1558430843L, "post", "admin/resources")
        ));
        // Event-time processing requires assigning timestamps/watermarks first.
        // Timestamps in the demo data are strictly ascending, so the ascending extractor fits.
        DataStream<Log> dataStream = dataSource
                .assignTimestampsAndWatermarks(
                        new AscendingTimestampExtractor<Log>() {
                            @Override
                            public long extractAscendingTimestamp(Log log) {
                                return log.getEventTime() * 1000L; // seconds -> milliseconds
                            }
                        });

        // Count GET requests per URL in a 10-minute window sliding every 5 seconds.
        DataStream<PageViewCount> aggStream = dataStream
                .filter(item -> "get".equals(item.getMethod()))
                .keyBy(Log::getUrl)
                .timeWindow(Time.minutes(10), Time.seconds(5))
                .aggregate(new PageCountAgg(), new WindowItemCountResult());

        // Regroup the per-URL counts by window end and emit the top 5 per window.
        // Bug fix: resultDs was a raw DataStream; process(...) yields a DataStream<String>.
        DataStream<String> resultDs = aggStream
                .keyBy("windowEnd")
                .process(new TopNItems(5));

        // Print and run.
        resultDs.print("每隔5秒最近10分钟前五的热门页面");
        env.execute("hot pages analysis");
    }
    /**
     * Counts records per key/window: every incoming {@link Log} adds one.
     * The accumulator and the window result are both the plain {@code Long} count.
     */
    public static class PageCountAgg implements AggregateFunction<Log, Long, Long> {

        /** A fresh window starts counting from zero. */
        @Override
        public Long createAccumulator() {
            return 0L;
        }

        /** One more record seen: bump the running count. */
        @Override
        public Long add(Log log, Long accumulator) {
            return accumulator + 1L;
        }

        /** The count itself is the window's result. */
        @Override
        public Long getResult(Long accumulator) {
            return accumulator;
        }

        /** Combine two partial counts (only needed for merging windows; unused here). */
        @Override
        public Long merge(Long a, Long b) {
            return Long.sum(a, b);
        }
    }

    /**
     * Wraps each window's pre-aggregated count into a {@link PageViewCount}.
     *
     * Bug fix: the upstream stream is keyed with {@code keyBy(Log::getUrl)}, so the key type
     * is the URL {@code String}, not {@code Long}; the previous
     * {@code WindowFunction<Long, PageViewCount, Long, TimeWindow>} signature could not
     * type-check against {@code aggregate(new PageCountAgg(), new WindowItemCountResult())}.
     */
    public static class WindowItemCountResult implements WindowFunction<Long, PageViewCount, String, TimeWindow> {

        // The iterable holds exactly the single pre-aggregated count produced by PageCountAgg.
        @Override
        public void apply(String url, TimeWindow window, Iterable<Long> iterable, Collector<PageViewCount> collector) throws Exception {
            // Emit one PageViewCount per (url, window): url, window end, count.
            collector.collect(new PageViewCount(url, window.getEnd(), iterable.iterator().next()));
        }
    }

    //参数1：keyBy返回值类型  参数2：输入类型  参数3：输出类型
    /**
     * Collects the {@link PageViewCount} records of one window (keyed by {@code windowEnd} via
     * the string-field {@code keyBy}, hence the {@code Tuple} key type) and, when the event-time
     * timer fires at the window end, emits the top-N URLs by count as one formatted string.
     *
     * Type params: key = Tuple (from keyBy("windowEnd")), input = PageViewCount, output = String.
     */
    public static class TopNItems extends KeyedProcessFunction<Tuple, PageViewCount, String> {
        private final Integer topSize; // how many entries to emit per window
        private ListState<PageViewCount> listState; // buffers every count seen for the current window

        public TopNItems(Integer topSize) {
            this.topSize = topSize;
        }

        @Override
        public void open(Configuration parameters) throws Exception {
            // Keyed state: one list per windowEnd key.
            listState = getRuntimeContext().getListState(
                    new ListStateDescriptor<PageViewCount>("item-view-count-list", PageViewCount.class));
        }

        // Called once per incoming record.
        @Override
        public void processElement(PageViewCount value, Context context, Collector<String> collector) throws Exception {
            // Buffer the record and register a timer at the window end. Registering the same
            // timestamp repeatedly is idempotent, so exactly one timer fires per window.
            listState.add(value);
            context.timerService().registerEventTimeTimer(value.getWindowEnd());
        }

        // Fired once per window when the watermark passes the window end.
        @Override
        public void onTimer(long timestamp, OnTimerContext ctx, Collector<String> out) throws Exception {
            // Copy the state into an ArrayList so it can be sorted.
            ArrayList<PageViewCount> pageViewCounts = Lists.newArrayList(listState.get().iterator());

            // Bug fix: the previous hand-written comparator compared boxed Longs with '==',
            // which is a reference comparison and only "works" for values inside the Long
            // cache (-128..127). Sort by count, descending, using Long's natural order.
            pageViewCounts.sort(Comparator.comparing(PageViewCount::getCount).reversed());

            // Build the human-readable report for this window.
            StringBuilder resultBuilder = new StringBuilder();
            resultBuilder.append("===================\n");
            resultBuilder.append("窗口结束时间：").append(new Timestamp(timestamp)).append("\n");

            // Emit at most topSize entries (the window may hold fewer URLs).
            for (int i = 0; i < Math.min(topSize, pageViewCounts.size()); i++) {
                PageViewCount currentPageViewCount = pageViewCounts.get(i);
                resultBuilder.append("Number").append(i + 1).append(":")
                        .append("URl：").append(currentPageViewCount.getUrl())
                        .append("浏览量：").append(currentPageViewCount.getCount())
                        .append("\n");
            }

            resultBuilder.append("===================\n\n");

            // Throttle output for readability. Demo only: this blocks the task thread and
            // would stall checkpointing/throughput in a real job.
            Thread.sleep(1000L);
            out.collect(resultBuilder.toString());
            listState.clear(); // window handled: release the buffered counts
        }

        @Override
        public void close() throws Exception {
            listState.clear(); // release state on shutdown
        }
    }
}
