package com.alison.module.hotitem;

import com.alison.module.po.ItemViewCount;
import com.alison.module.po.UserBehavior;
import org.apache.commons.compress.utils.Lists;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.runtime.operators.util.AssignerWithPeriodicWatermarksAdapter;
import org.apache.flink.util.Collector;

import java.sql.Timestamp;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

/**
 * @Author alison
 * @Date 2024/4/13 9:50
 * @Version 1.0
 * @Description
 */
public class E2_HotItemsWithKafka {

    /**
     * Entry point: consumes user-behavior CSV records from Kafka, counts page
     * views per item over sliding event-time windows, and prints the top-N
     * hottest items for each window.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // 1. Create the execution environment; parallelism 1 keeps the demo
        //    output deterministic and ordered.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Consume raw CSV lines from the Kafka topic "hotitems".
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "localhost:9092");
        properties.setProperty("group.id", "consumer");
        // secondary consumer settings
        properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.setProperty("auto.offset.reset", "latest");
        DataStream<String> inputStream = env.addSource(new FlinkKafkaConsumer<>("hotitems", new SimpleStringSchema(), properties));

        // 3. Parse each CSV line into a UserBehavior POJO and assign event-time
        //    timestamps with a bounded-out-of-orderness watermark (200 ms slack).
        //    Long.parseLong/Integer.parseInt replace the deprecated boxing
        //    constructors new Long(...)/new Integer(...) (removed in Java 16).
        DataStream<UserBehavior> userBehaviorDataStream = inputStream.map(line -> {
            String[] fields = line.split(",");
            return new UserBehavior(Long.parseLong(fields[0]), Long.parseLong(fields[1]),
                    Integer.parseInt(fields[2]), fields[3], Long.parseLong(fields[4]));
        }).assignTimestampsAndWatermarks(new AssignerWithPeriodicWatermarksAdapter.Strategy<>(
                new BoundedOutOfOrdernessTimestampExtractor<UserBehavior>(Time.of(200, TimeUnit.MILLISECONDS)) {
                    @Override
                    public long extractTimestamp(UserBehavior element) {
                        // source timestamps are in seconds; Flink expects milliseconds
                        return element.getTimestamp() * 1000L;
                    }
                }
        ));

        // 4. Keep only "pv" (page-view) events, key by item id, and count each
        //    item inside a sliding window (1 hour long, sliding every 5 minutes).
        DataStream<ItemViewCount> windowAggStream = userBehaviorDataStream
                .filter(userbehavior -> "pv".equals(userbehavior.getBehavior()))
                .keyBy(UserBehavior::getItemId)
                .window(SlidingEventTimeWindows.of(Time.hours(1), Time.minutes(5)))
                .aggregate(new ItemCountAgg(), new WindowItemCountResult());

        // 5. Regroup the per-item counts by window end and emit a formatted
        //    top-5 ranking per window.
        DataStream<String> resultStream = windowAggStream
                .keyBy(ItemViewCount::getWindowEnd)
                .process(new TopNHotItems(5));

        resultStream.print();
        env.execute("hot items analysis");
    }

    // 实现自定义增量聚合函数
    /**
     * Incremental per-key counter: each incoming {@code UserBehavior} event
     * adds one to the accumulator, and the window result is that running total.
     */
    private static class ItemCountAgg implements AggregateFunction<UserBehavior, Long, Long> {

        @Override
        public Long createAccumulator() {
            // every window starts counting from zero
            return 0L;
        }

        @Override
        public Long add(UserBehavior value, Long accumulator) {
            // the event payload is irrelevant; only its presence is counted
            return accumulator + 1L;
        }

        @Override
        public Long getResult(Long accumulator) {
            // the accumulator already holds the final count
            return accumulator;
        }

        @Override
        public Long merge(Long a, Long b) {
            // partial counts from merged window panes simply add up
            return Long.sum(a, b);
        }
    }

    // 自定义全窗口函数
    /**
     * Window function that wraps the pre-aggregated count into an
     * {@link ItemViewCount}, enriching it with the window bounds both as epoch
     * millis and as human-readable strings.
     */
    private static class WindowItemCountResult implements WindowFunction<Long, ItemViewCount, Long, TimeWindow> {

        // DateTimeFormatter is immutable and thread-safe: cache it once instead
        // of re-parsing the pattern on every window firing.
        private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

        @Override
        public void apply(Long itemId, TimeWindow window, Iterable<Long> input, Collector<ItemViewCount> out) throws Exception {
            // the upstream AggregateFunction emits exactly one value per window
            Long count = input.iterator().next();
            String startStr = FORMATTER.format(LocalDateTime.ofInstant(Instant.ofEpochMilli(window.getStart()), ZoneId.systemDefault()));
            String endStr = FORMATTER.format(LocalDateTime.ofInstant(Instant.ofEpochMilli(window.getEnd()), ZoneId.systemDefault()));
            out.collect(new ItemViewCount(itemId, window.getStart(), startStr, window.getEnd(), endStr, count));
        }
    }

    // 实现自定义KeyedProcessFunction
    /**
     * Collects every {@link ItemViewCount} belonging to the same window (the
     * stream is keyed by window end) and, once the window is complete, emits a
     * formatted top-N ranking of the hottest items.
     */
    private static class TopNHotItems extends KeyedProcessFunction<Long, ItemViewCount, String> {

        // how many items to include in the ranking
        private final int topSize;
        // per-key (i.e. per-window) buffer of all ItemViewCount results
        private ListState<ItemViewCount> itemViewCountListState;

        public TopNHotItems(int topSize) {
            this.topSize = topSize;
        }

        @Override
        public void open(Configuration parameters) throws Exception {
            itemViewCountListState = getRuntimeContext().getListState(new ListStateDescriptor<>("item-view-count-list", ItemViewCount.class));
        }

        @Override
        public void processElement(ItemViewCount value, Context ctx, Collector<String> out) throws Exception {
            // Buffer the result and schedule a timer just past the window end;
            // by the time it fires, every result for this window has arrived.
            itemViewCountListState.add(value);
            ctx.timerService().registerEventTimeTimer(value.getWindowEnd() + 1);
        }

        @Override
        public void onTimer(long timestamp, KeyedProcessFunction<Long, ItemViewCount, String>.OnTimerContext ctx, Collector<String> out) throws Exception {
            // Timer fired: all counts for this window are in state. Snapshot
            // them into a plain list (no commons-compress utility needed).
            ArrayList<ItemViewCount> itemViewCounts = new ArrayList<>();
            for (ItemViewCount ivc : itemViewCountListState.get()) {
                itemViewCounts.add(ivc);
            }
            // FIX: clear the state once consumed; otherwise each window's
            // buffer is retained forever and the keyed state leaks.
            itemViewCountListState.clear();

            // sort descending by count (hotter items first)
            itemViewCounts.sort((a, b) -> Long.compare(b.getCount(), a.getCount()));

            StringBuilder resultBuilder = new StringBuilder();
            resultBuilder.append("============================").append(System.lineSeparator());
            resultBuilder.append("窗口结束时间：").append(new Timestamp(timestamp - 1)).append(System.lineSeparator());
            // take the first topSize entries (or fewer if the window had fewer items)
            for (int i = 0; i < Math.min(topSize, itemViewCounts.size()); i++) {
                ItemViewCount currentItemViewCount = itemViewCounts.get(i);
                resultBuilder.append("NO ").append(i + 1).append(":")
                        .append(" 商品ID = ").append(currentItemViewCount.getItemId())
                        .append(" 热门度 = ").append(currentItemViewCount.getCount())
                        .append(System.lineSeparator());
            }
            resultBuilder.append("===============================").append(System.lineSeparator());
            // demo-only throttle so the console output stays readable
            Thread.sleep(1000L);
            out.collect(resultBuilder.toString());
        }
    }
    /*

    输出

============================
窗口结束时间：2017-11-26 10:10:00.0
NO 1: 商品ID = 2338453 热门度 = 30
NO 2: 商品ID = 812879 热门度 = 18
NO 3: 商品ID = 2563440 热门度 = 14
NO 4: 商品ID = 138964 热门度 = 12
NO 5: 商品ID = 3244134 热门度 = 12
===============================

============================
窗口结束时间：2017-11-26 10:15:00.0
NO 1: 商品ID = 2338453 热门度 = 33
NO 2: 商品ID = 812879 热门度 = 18
NO 3: 商品ID = 3244134 热门度 = 13
NO 4: 商品ID = 2563440 热门度 = 13
NO 5: 商品ID = 2364679 热门度 = 13
===============================

============================
窗口结束时间：2017-11-26 10:20:00.0
NO 1: 商品ID = 2338453 热门度 = 32
NO 2: 商品ID = 812879 热门度 = 18
NO 3: 商品ID = 3244134 热门度 = 15
NO 4: 商品ID = 4649427 热门度 = 13
NO 5: 商品ID = 2364679 热门度 = 12
===============================
     */
}
