import bean.ItemCountByWindow;
import bean.UserBehavior;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.shaded.guava18.com.google.common.collect.Lists;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Comparator;

/*
* 我们将实现一个“实时热门商品”的需求，可以将“实时热门商品”翻译成程序员更好理解的需求：
* 每隔5分钟输出最近一小时内点击量最多的前N个商品。将这个需求进行分解我们大概要做这么几件事情：
•	抽取出业务时间戳，告诉Flink框架基于业务时间做窗口
•	过滤出点击行为数据
•	按一小时的窗口大小，每5分钟统计一次，做滑动窗口聚合（Sliding Window）
•	按每个窗口聚合，输出每个窗口中点击量前N名的商品
*/
public class ItemTopNByWindow {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // All windows and timers below are driven by event time extracted from the records.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        env.setParallelism(1);
        // NOTE(review): hard-coded absolute Windows path — consider passing it via args.
        DataStream<String> inputStream = env.readTextFile("C:\\JAVAProject\\UserTag\\HotItemsAnalysis\\src\\main\\resources\\UserBehavior.csv");
        // Parse CSV lines, keep only click ("pv") events, and assign ascending
        // event-time timestamps (source timestamps are seconds -> epoch millis).
        DataStream<UserBehavior> dataStream = inputStream.map(new myMapFun()).filter(obj -> "pv".equals(obj.getBehavior()))
                .assignTimestampsAndWatermarks(new AscendingTimestampExtractor<UserBehavior>() {
                    @Override
                    public long extractAscendingTimestamp(UserBehavior element) {
                        return element.getTimestamp() * 1000L;
                    }
                });
        // Per-item click counts over a 1-hour window sliding every 5 minutes
        // (incremental aggregate + window metadata wrapper).
        DataStream<ItemCountByWindow> aggStream = dataStream
                .keyBy("itemId")
                .timeWindow(Time.hours(1), Time.minutes(5))
                .aggregate(new aggreFun(), new windFun());
//        aggStream.print();
        // Re-key by window end so each window's counts land on one key, then rank top 2.
        DataStream<String> resultStream =
                aggStream.keyBy("windowEnd").process(new myKeyProFun(2));//.print();

        // NOTE(review): mySinkFun is not defined anywhere in this file — confirm it
        // exists elsewhere in the project; otherwise this line does not compile.
        resultStream.addSink(new mySinkFun());
        env.execute();
    }


    /**
     * Parses one CSV line of the form
     * {@code userId,itemId,categoryId,behavior,timestamp} into a UserBehavior.
     */
    public static class myMapFun implements MapFunction<String, UserBehavior> {
        @Override
        public UserBehavior map(String value) throws Exception {
            String[] fields = value.split(",");
            // Long.parseLong replaces the deprecated Long(String) boxing constructor;
            // it throws NumberFormatException on malformed input just like before.
            return new UserBehavior(
                    Long.parseLong(fields[0]),
                    Long.parseLong(fields[1]),
                    Long.parseLong(fields[2]),
                    fields[3],
                    Long.parseLong(fields[4]));
        }
    }


    /**
     * Incremental count aggregator: the accumulator is simply the number of
     * UserBehavior events seen so far in the current window.
     */
    public static class aggreFun implements AggregateFunction<UserBehavior, Long, Long> {

        @Override
        public Long createAccumulator() {
            // Every window starts counting from zero.
            return 0L;
        }

        @Override
        public Long add(UserBehavior value, Long accumulator) {
            // Each incoming event contributes exactly one click.
            return accumulator + 1L;
        }

        @Override
        public Long getResult(Long accumulator) {
            // The running count is already the final window result.
            return accumulator;
        }

        @Override
        public Long merge(Long a, Long b) {
            // Counts from merged window panes simply add up.
            return Long.sum(a, b);
        }
    }

    /**
     * Wraps the pre-aggregated click count together with its item id (the key)
     * and the window end timestamp into an ItemCountByWindow record.
     */
    public static class windFun implements WindowFunction<Long, ItemCountByWindow, Tuple, TimeWindow> {

        @Override
        public void apply(Tuple tuple, TimeWindow window, Iterable<Long> input, Collector<ItemCountByWindow> out) throws Exception {
            // The upstream pre-aggregator emits exactly one count per key per window.
            Long clickCount = input.iterator().next();
            Long itemId = tuple.getField(0);
            out.collect(new ItemCountByWindow(itemId, window.getEnd(), clickCount));
        }
    }

    /**
     * Collects all per-item counts that share the same windowEnd key, then — on
     * an event-time timer that fires once all results for that window have
     * arrived — emits a formatted ranking of the top-N items by click count.
     */
    public static class myKeyProFun extends KeyedProcessFunction<Tuple, ItemCountByWindow, String> {
        /** How many top items to report per window. */
        private final int rank;
        /** Per-windowEnd buffer of item counts, held until the timer fires. */
        private ListState<ItemCountByWindow> itemList;

        public myKeyProFun(int rank) {
            this.rank = rank;
        }

        @Override
        public void open(Configuration parameters) throws Exception {
            itemList = getRuntimeContext().getListState(new ListStateDescriptor<ItemCountByWindow>("item", ItemCountByWindow.class));
        }

        @Override
        public void processElement(ItemCountByWindow value, Context ctx, Collector<String> out) throws Exception {
            itemList.add(value);
            // One timer per windowEnd (duplicate registrations coalesce); the
            // +100ms margin lets every result of this window arrive before ranking.
            ctx.timerService().registerEventTimeTimer(value.getWindowEnd() + 100);
        }

        @Override
        public void onTimer(long timestamp, OnTimerContext ctx, Collector<String> out) throws Exception {
            ArrayList<ItemCountByWindow> items = Lists.newArrayList(itemList.get());
            // FIX: release the buffered state once this window has been ranked;
            // without this the keyed state grows without bound, one list per window.
            itemList.clear();
            items.sort(new comp());

            StringBuilder strBuilder = new StringBuilder();
            for (int i = 0; i < Math.min(rank, items.size()); i++) {
                ItemCountByWindow item = items.get(i);
                strBuilder.append("窗口结束时间为:" + item.getWindowEnd() + "中的第" + (i + 1) + "名是" + item.getItemId() + " 点击量为:" + item.getCount() + "\n");
            }
            out.collect(strBuilder.toString());
        }

    }

    /** Orders ItemCountByWindow entries by click count, highest first. */
    public static class comp implements Comparator<ItemCountByWindow> {
        @Override
        public int compare(ItemCountByWindow o1, ItemCountByWindow o2) {
            // FIX: the previous "o2.intValue() - o1.intValue()" both truncates the
            // long counts and can overflow int subtraction, yielding a wrong order.
            // Long.compare is the safe descending comparison.
            return Long.compare(o2.getCount(), o1.getCount());
        }
    }

    /**
     * Kafka serialization schema for the result strings.
     *
     * NOTE(review): this is an unimplemented stub — it always returns {@code null},
     * which would make the Kafka producer fail (NPE) if it were ever wired into a
     * FlinkKafkaProducer. It is unused in this file's pipeline; either implement it
     * (build a ProducerRecord with a target topic and UTF-8 bytes) or remove it.
     */
    public static class schame implements KafkaSerializationSchema<String> {
        @Override
        public ProducerRecord<byte[], byte[]> serialize(String element, @Nullable Long timestamp) {
            return null;
        }
    }
}
