package cn._51doit.live.jobs;

import cn._51doit.live.pojo.DataBean;
import cn._51doit.live.pojo.ItemEventCount;
import cn._51doit.live.udf.HotGoodTopNAggFunction;
import cn._51doit.live.udf.HotGoodTopNSortFunction;
import cn._51doit.live.udf.HotGoodTopNWindowFunction;
import cn._51doit.live.udf.JsonToBeanFunction;
import cn._51doit.live.utils.FlinkUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

import java.time.Duration;

/**
 * Flink streaming job computing "hot goods" Top-N style aggregates.
 *
 * Pipeline: Kafka (JSON) -> DataBean -> event-time watermarks -> filter
 * "product*" events -> keyBy (categoryId, productId, eventId) -> sliding
 * event-time window (10 min, sliding every 1 min) incremental aggregation
 * -> re-keyBy (categoryId, eventId, windowStart, windowEnd) -> sort/Top-N
 * in a KeyedProcessFunction -> print.
 */
public class HotGoodTopN {

    /**
     * Job entry point.
     *
     * @param args args[0] must be the path to a properties file consumed by
     *             {@link ParameterTool#fromPropertiesFile(String)} (Kafka
     *             connection settings etc., read by FlinkUtils).
     * @throws Exception if the properties file cannot be read or job execution fails
     */
    public static void main(String[] args) throws Exception {

        // Fail fast with a clear message instead of an opaque
        // ArrayIndexOutOfBoundsException when the config path is missing.
        if (args.length < 1) {
            throw new IllegalArgumentException(
                    "Usage: HotGoodTopN <path-to-properties-file>");
        }

        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);

        DataStream<String> kafkaStream = FlinkUtils.createKafkaStream(parameterTool, SimpleStringSchema.class);

        // Parse each JSON record into a DataBean (malformed records are handled
        // inside JsonToBeanFunction).
        SingleOutputStreamOperator<DataBean> beanStream = kafkaStream.process(new JsonToBeanFunction());

        // Extract the event time from each bean and generate watermarks.
        // Out-of-orderness bound is 0 seconds, i.e. strictly ordered input is
        // assumed; late records are dropped by the windows downstream.
        SingleOutputStreamOperator<DataBean> beanStreamWithWaterMark = beanStream.assignTimestampsAndWatermarks(WatermarkStrategy.<DataBean>forBoundedOutOfOrderness(Duration.ofSeconds(0)).withTimestampAssigner(new SerializableTimestampAssigner<DataBean>() {
            @Override
            public long extractTimestamp(DataBean element, long recordTimestamp) {
                return element.getTimestamp();
            }
        }));

        // Keep only product-related events (eventId prefixed with "product").
        SingleOutputStreamOperator<DataBean> filtered = beanStreamWithWaterMark.filter(bean -> bean.getEventId().startsWith("product"));

        // keyBy (categoryId, productId, eventId) so each product/event pair is
        // counted independently per category.
        KeyedStream<DataBean, Tuple3<String, String, String>> keyedStream = filtered.keyBy(new KeySelector<DataBean, Tuple3<String, String, String>>() {
            @Override
            public Tuple3<String, String, String> getKey(DataBean bean) throws Exception {
                String eventId = bean.getEventId();
                // NOTE(review): assumes every "product*" event carries non-null
                // "category_id" and "product_id" properties — a missing key
                // would throw an NPE here; confirm against the producer schema.
                String categoryId = bean.getProperties().get("category_id").toString();
                String productId = bean.getProperties().get("product_id").toString();
                return Tuple3.of(categoryId, productId, eventId);
            }
        });

        // Sliding event-time windows: 10-minute window, advancing every minute.
        WindowedStream<DataBean, Tuple3<String, String, String>, TimeWindow> windowedStream = keyedStream.window(SlidingEventTimeWindows.of(Time.minutes(10), Time.minutes(1)));

        // Incremental aggregation inside the window (count per key), then the
        // window function attaches window start/end metadata to each result.
        SingleOutputStreamOperator<ItemEventCount> aggInWindowStream = windowedStream.aggregate(new HotGoodTopNAggFunction(), new HotGoodTopNWindowFunction());

        // Re-key by (categoryId, eventId, windowStart, windowEnd) so all
        // product counts belonging to the same window land on the same subtask
        // for ranking.
        KeyedStream<ItemEventCount, Tuple4<String, String, Long, Long>> keyedStream2 = aggInWindowStream.keyBy(new KeySelector<ItemEventCount, Tuple4<String, String, Long, Long>>() {

            @Override
            public Tuple4<String, String, Long, Long> getKey(ItemEventCount item) throws Exception {
                String categoryId = item.categoryId;
                String eventId = item.eventId;
                long windowStart = item.windowStart;
                long windowEnd = item.windowEnd;

                return Tuple4.of(categoryId, eventId, windowStart, windowEnd);
            }
        });

        // Buffer the per-window counts and sort them to produce the Top-N
        // ranking for each (category, event, window) group.
        SingleOutputStreamOperator<ItemEventCount> res = keyedStream2.process(new HotGoodTopNSortFunction());

        res.print();

        FlinkUtils.env.execute();
    }
}
