package cn._51doit.live.jobs;

import cn._51doit.live.pojo.DataBean;
import cn._51doit.live.pojo.ItemEventCount;
import cn._51doit.live.udfs.HotGoodsAggFunction;
import cn._51doit.live.udfs.HotGoodsTopNFunction;
import cn._51doit.live.udfs.HotGoodsWindowFunction;
import cn._51doit.live.udfs.JsonToBeanFunction;
import cn._51doit.live.utils.FlinkUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

import java.time.Duration;
import java.util.HashMap;

/**
 * Computes the top-N hot products per category and event type over a
 * 10-minute sliding event-time window that advances every minute.
 */
public class HotGoodsTopN {

    /**
     * Entry point. Reads a properties file (path given as {@code args[0]}),
     * consumes JSON events from Kafka, and computes per-category /
     * per-event-type hot-product rankings over a sliding event-time window
     * (10-minute window, 1-minute slide).
     *
     * <p>Pipeline: Kafka source → JSON-to-bean parse → watermarking →
     * filter product events → keyBy(category, product, event) → sliding
     * window incremental aggregation → keyBy(category, event, window) →
     * per-window top-N ranking → sink (currently print).
     *
     * @param args {@code args[0]} must be the path of the job's properties file
     * @throws Exception propagated from Flink job submission/execution
     */
    public static void main(String[] args) throws Exception {

        // Fail fast with a clear message instead of an opaque
        // ArrayIndexOutOfBoundsException when the config path is missing.
        if (args.length < 1) {
            throw new IllegalArgumentException(
                    "Usage: HotGoodsTopN <path-to-properties-file>");
        }
        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);

        DataStream<String> lines = FlinkUtils.createKafkaStream(parameterTool, SimpleStringSchema.class);

        // Parse raw JSON strings into DataBean records.
        SingleOutputStreamOperator<DataBean> dataBeanStream = lines.process(new JsonToBeanFunction());

        // Assign event-time timestamps and watermarks so windows are driven by
        // event time. Out-of-orderness bound is zero, i.e. strictly ordered
        // input is assumed; late events will be dropped by the windows.
        SingleOutputStreamOperator<DataBean> beanStreamWithWaterMark = dataBeanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<DataBean>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                        .withTimestampAssigner(new SerializableTimestampAssigner<DataBean>() {
                            @Override
                            public long extractTimestamp(DataBean element, long recordTimestamp) {
                                return element.getTimestamp();
                            }
                        }));

        // Keep only product-related events (eventId like "productView", "productAddCart", ...).
        SingleOutputStreamOperator<DataBean> filtered =
                beanStreamWithWaterMark.filter(bean -> bean.getEventId().startsWith("product"));

        // First keyBy: key = Tuple3<categoryId, productId, eventId> so that the
        // window aggregation counts each (category, product, event) combination.
        // An anonymous KeySelector (not a lambda) is used on purpose: Flink's
        // type extraction cannot recover the Tuple3 generic parameters from a
        // lambda, which would cause an InvalidTypesException at job graph time.
        KeyedStream<DataBean, Tuple3<String, String, String>> keyedStream =
                filtered.keyBy(new KeySelector<DataBean, Tuple3<String, String, String>>() {
                    @Override
                    public Tuple3<String, String, String> getKey(DataBean value) throws Exception {
                        HashMap<String, Object> properties = value.getProperties();
                        Object categoryId = properties.get("category_id");
                        Object productId = properties.get("product_id");
                        // Fail with a diagnosable message instead of a bare NPE
                        // when a product event is missing the expected fields.
                        if (categoryId == null || productId == null) {
                            throw new IllegalStateException(
                                    "product event missing category_id/product_id, eventId=" + value.getEventId());
                        }
                        return Tuple3.of(categoryId.toString(), productId.toString(), value.getEventId());
                    }
                });

        // Sliding event-time window: 10-minute length, 1-minute slide.
        WindowedStream<DataBean, Tuple3<String, String, String>, TimeWindow> window =
                keyedStream.window(SlidingEventTimeWindows.of(Time.minutes(10), Time.minutes(1)));

        // Incremental aggregation inside each window: HotGoodsAggFunction keeps
        // a running count; HotGoodsWindowFunction attaches window metadata on fire.
        SingleOutputStreamOperator<ItemEventCount> reduced =
                window.aggregate(new HotGoodsAggFunction(), new HotGoodsWindowFunction());

        // Second keyBy: group counts of DIFFERENT products that share the same
        // category, event type, and window, so they can be collected and ranked
        // together in the downstream process function (timer/state based sort).
        KeyedStream<ItemEventCount, Tuple4<String, String, Long, Long>> keyedStream2 =
                reduced.keyBy(new KeySelector<ItemEventCount, Tuple4<String, String, Long, Long>>() {
                    @Override
                    public Tuple4<String, String, Long, Long> getKey(ItemEventCount item) throws Exception {
                        return Tuple4.of(item.categoryId, item.eventId, item.windowStart, item.windowEnd);
                    }
                });

        // Rank products within each (category, event, window) group.
        SingleOutputStreamOperator<ItemEventCount> res = keyedStream2.process(new HotGoodsTopNFunction());

        // TODO: replace print() with a MySQL/Redis sink for production use.
        res.print();

        // Name the job so it is identifiable in the Flink web UI / REST API.
        FlinkUtils.env.execute("HotGoodsTopN");
    }
}
