package com.study.ProcessFunction;

import com.study.entity.ItemViewCount;
import com.study.entity.UserBehavior;
import com.study.utils.JsonHelper;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple1;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
import org.apache.flink.util.Collector;

import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Properties;

/**
 * Flink streaming Top-N job.
 *
 * <p>Reads user-behavior events (JSON) from a Kafka topic, keeps only "pv"
 * (page-view) records, counts views per item over sliding 5-minute event-time
 * windows (1-minute slide), and prints the top 3 most-viewed items for each
 * window once all results for that window have arrived.
 *
 * @author sjw
 * @className StreamTopN
 * @date 2020/12/3 16:45
 */
public class StreamTopN {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment executionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
        executionEnvironment.setParallelism(1);
        // Windows and the TopN timers below rely on event time taken from each record.
        executionEnvironment.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.10.200:9092");
        props.put("zookeeper.connect", "192.168.10.200:2181");
        props.put("group.id", "metric-group");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");   // key deserializer
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); // value deserializer
        props.put("auto.offset.reset", "latest");

        DataStreamSource<String> dataStreamSource = executionEnvironment.addSource(new FlinkKafkaConsumer010<String>(
                "logEvent", new SimpleStringSchema(), props));

        // Parse each JSON record exactly once, then assign event-time timestamps
        // from the already-parsed object (the original code deserialized every
        // record twice: once in the timestamp extractor, once in the mapper).
        DataStream<UserBehavior> userBehaviorDataStream = dataStreamSource
                .map(new MapFunction<String, UserBehavior>() {
                    @Override
                    public UserBehavior map(String value) throws Exception {
                        return JsonHelper.jsonToObject(value, UserBehavior.class);
                    }
                })
                .assignTimestampsAndWatermarks(new AscendingTimestampExtractor<UserBehavior>() {
                    @Override
                    public long extractAscendingTimestamp(UserBehavior element) {
                        return element.timestamp;
                    }
                });

        // Keep only page-view events; other behaviors are irrelevant to this job.
        DataStream<UserBehavior> pvUserBehaviorDataStream = userBehaviorDataStream.filter(new FilterFunction<UserBehavior>() {
            @Override
            public boolean filter(UserBehavior value) throws Exception {
                return "pv".equals(value.behavior);
            }
        });

        // Per-item click count over a 5-minute window sliding every minute.
        // The AggregateFunction keeps only a running Long per key/window; the
        // WindowFunction attaches the item id and the window end timestamp.
        DataStream<ItemViewCount> itemViewCountDataStream = pvUserBehaviorDataStream
                .keyBy("itemId")
                .timeWindow(Time.minutes(5), Time.minutes(1))
                .aggregate(new AggregateFunction<UserBehavior, Long, Long>() {
                    @Override
                    public Long createAccumulator() {
                        return 0L;
                    }

                    @Override
                    public Long add(UserBehavior value, Long accumulator) {
                        return accumulator + 1;
                    }

                    @Override
                    public Long getResult(Long accumulator) {
                        return accumulator;
                    }

                    @Override
                    public Long merge(Long a, Long b) {
                        return a + b;
                    }
                }, new WindowFunction<Long, ItemViewCount, Tuple, TimeWindow>() {
                    @Override
                    public void apply(Tuple tuple, TimeWindow window, Iterable<Long> input, Collector<ItemViewCount> out) throws Exception {
                        Long itemId = ((Tuple1<Long>) tuple).f0;
                        // The pre-aggregation guarantees exactly one element here.
                        Long count = input.iterator().next();
                        out.collect(ItemViewCount.of(itemId, window.getEnd(), count));
                    }
                });

        // Re-key by window end so one TopN instance sees every item of a window.
        itemViewCountDataStream
                .keyBy("windowEnd")
                .process(new TopN(3)).print();

        executionEnvironment.execute("topN job");
    }

    /**
     * Collects all per-item counts belonging to the same window (key = windowEnd),
     * then — once the event-time timer at windowEnd + 1 fires, i.e. all counts for
     * that window have been received — sorts them and emits the top {@code topSize}
     * items as a formatted string.
     */
    private static class TopN extends KeyedProcessFunction<Tuple, ItemViewCount, String> {

        /** Number of top items to emit per window. */
        private final int topSize;

        public TopN(int topSize) {
            this.topSize = topSize;
        }

        // Buffers the item counts of the current window until the timer fires.
        private ListState<ItemViewCount> itemState;

        @Override
        public void open(Configuration parameters) throws Exception {
            super.open(parameters);
            // Register the keyed list state used to buffer counts per window.
            ListStateDescriptor<ItemViewCount> itemsStateDesc = new ListStateDescriptor<>(
                    "itemState-state",
                    ItemViewCount.class);
            itemState = getRuntimeContext().getListState(itemsStateDesc);
        }

        @Override
        public void processElement(ItemViewCount value, Context ctx, Collector<String> out) throws Exception {
            // Buffer every count of this window in state.
            itemState.add(value);
            // Register an event-time timer at windowEnd + 1; when it fires, the
            // watermark has passed windowEnd, so all counts for this window are in.
            // (Re-registering the same timestamp is a no-op — Flink dedupes timers.)
            ctx.timerService().registerEventTimeTimer(value.windowEnd + 1);
        }

        @Override
        public void onTimer(
                long timestamp, OnTimerContext ctx, Collector<String> out) throws Exception {
            // Snapshot all buffered counts for this window.
            List<ItemViewCount> allItems = new ArrayList<>();
            for (ItemViewCount item : itemState.get()) {
                allItems.add(item);
            }
            // Clear the state early to release the space.
            itemState.clear();
            // Sort by view count, descending. Long.compare avoids the overflow /
            // truncation bug of casting (o2.viewCount - o1.viewCount) to int.
            allItems.sort(new Comparator<ItemViewCount>() {
                @Override
                public int compare(ItemViewCount o1, ItemViewCount o2) {
                    return Long.compare(o2.viewCount, o1.viewCount);
                }
            });
            // Format the ranking as a printable string.
            StringBuilder result = new StringBuilder();
            result.append("====================================\n");
            // timestamp == windowEnd + 1, so subtract 1 to show the window end.
            result.append("时间: ").append(new Timestamp(timestamp - 1)).append("\n");
            int len = Math.min(allItems.size(), topSize);
            for (int i = 0; i < len; i++) {
                ItemViewCount currentItem = allItems.get(i);
                // Expected shape: No1:  商品ID=12224  浏览量=2413
                // Rank is i + 1 so the top item prints as "No1", not "No0".
                result.append("No").append(i + 1).append(":")
                        .append("  商品ID=").append(currentItem.itemId)
                        .append("  浏览量=").append(currentItem.viewCount)
                        .append("\n");
            }
            result.append("====================================\n\n");

            out.collect(result.toString());
        }
    }
}
