package com.atguigu.app.dws;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.Func.AsyncDimFunction;
import com.atguigu.Func.MyWindowFunction;
import com.atguigu.bean.VideoChapterBean;
import com.atguigu.util.ClickHouseUtil_hjy;
import com.atguigu.util.MyKafkaUtil_hjy;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.TimestampAssignerSupplier;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.concurrent.TimeUnit;

/**
 * Streaming job for the DWS layer: per-chapter video viewing statistics.
 *
 * <p>Pipeline: read video play logs from Kafka topic {@code dwd_traffic_video_log},
 * assign event-time watermarks, deduplicate viewers per uid using keyed state with a
 * 1-day TTL (so each user counts as one unique viewer per day), enrich each record
 * with chapter info from the DIM layer via async I/O with side-cache, aggregate per
 * chapter in 10-second event-time tumbling windows, and sink the result to ClickHouse.
 *
 * @author hjy
 * @create 2023/3/27 9:02
 */
public class Dws_videoChapterVideoWindow {
    public static void main(String[] args) throws Exception {
        //todo 1 Set up the streaming execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Checkpointing is disabled for local development; re-enable for production.
//        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60000L);
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://47.102.112.46:8020/gmall-flink/check/videoChapter");
//        env.setStateBackend(new HashMapStateBackend());
//        System.setProperty("HADOOP_USER_NAME","atguigu");
        //todo 2 Consume raw video log data from Kafka (dwd_traffic_video_log)
        String topic = "dwd_traffic_video_log";
        String groupId = "video_chapter";
        DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil_hjy.getKafkaConsumer(topic, groupId));

        //todo 3 Parse each record into a JSONObject and assign watermarks
        // (5s bounded out-of-orderness, event time taken from the "ts" field).
        SingleOutputStreamOperator<JSONObject> jsonObjWithWM = kafkaDS.map(JSONObject::parseObject)
                .assignTimestampsAndWatermarks(WatermarkStrategy.<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(5))
                        .withTimestampAssigner(TimestampAssignerSupplier.of(new SerializableTimestampAssigner<JSONObject>() {
                            @Override
                            public long extractTimestamp(JSONObject element, long recordTimestamp) {
                                // NOTE(review): assumes every record carries a non-null "ts";
                                // a missing field would NPE here — confirm upstream guarantees.
                                return element.getLong("ts");
                            }
                        })));

        //todo 4 Key by uid so per-user state can deduplicate unique viewers
        KeyedStream<JSONObject, String> keyedByUidDS = jsonObjWithWM.keyBy(value -> value.getJSONObject("common").getString("uid"));

        //todo 5 Convert to the VideoChapterBean, emitting uvCt=1 only for the
        // first record of a uid within the state TTL window (1 day).
        SingleOutputStreamOperator<VideoChapterBean> javaBeanDS = keyedByUidDS.map(new RichMapFunction<JSONObject, VideoChapterBean>() {
            // Marker state: non-null means this uid has already been counted today.
            private ValueState<String> existState;

            @Override
            public void open(Configuration parameters) throws Exception {
                // 1-day TTL refreshed on read and write, so the UV count resets daily.
                StateTtlConfig ttlCfg = StateTtlConfig.newBuilder(Time.days(1))
                        .setUpdateType(StateTtlConfig.UpdateType.OnReadAndWrite)
                        .build();
                ValueStateDescriptor<String> valueStateDescriptor = new ValueStateDescriptor<>("exist", String.class);
                valueStateDescriptor.enableTimeToLive(ttlCfg);
                existState = getRuntimeContext().getState(valueStateDescriptor);
            }

            @Override
            public VideoChapterBean map(JSONObject value) throws Exception {
                String exist = existState.value();
                long uv = 0L;
                JSONObject appVideo = value.getJSONObject("appVideo");
                if (exist == null) {
                    // First record for this uid within the TTL window: count one unique viewer.
                    existState.update("1");
                    uv = 1L;
                }
                return VideoChapterBean.builder()
                        // BUGFIX: was hardcoded to 1L, which counted every record as a
                        // unique viewer and discarded the dedup result computed above.
                        .uvCt(uv)
                        .videoCt(1L)
                        .video_id(appVideo.getString("video_id"))
                        .durSum(appVideo.getLong("play_sec"))
                        .build();
            }
        });
//        javaBeanDS.print("javaBeanDS>>>>>>>");
        //todo 6 Enrich with the DIM chapter table via async I/O (with side-cache),
        // looking up DIM_CHAPTER_INFO by video_id.
        SingleOutputStreamOperator<VideoChapterBean> videoChapterBeanWithChapterInfo = AsyncDataStream.unorderedWait(javaBeanDS, new AsyncDimFunction<VideoChapterBean>("DIM_CHAPTER_INFO") {
            @Override
            public void getDimInfo(VideoChapterBean input, JSONObject dimInfo) {
                input.setChapter_id(dimInfo.getString("ID"));
                input.setChapter_name(dimInfo.getString("CHAPTER_NAME"));
            }

            @Override
            public String getId(VideoChapterBean input) {
                return input.getVideo_id();
            }

            @Override
            public String getFilterId() {
                return "video_id";
            }
        }, 5, TimeUnit.MINUTES);

        //todo 7 Key by chapter id for per-chapter aggregation
        KeyedStream<VideoChapterBean, String> keyedStream = videoChapterBeanWithChapterInfo.keyBy(VideoChapterBean::getChapter_id);

        //todo 8 10-second event-time tumbling window; incrementally reduce the
        // counters, then stamp window start/end via MyWindowFunction.
        SingleOutputStreamOperator<VideoChapterBean> reduceDS = keyedStream.window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)))
                .reduce(new ReduceFunction<VideoChapterBean>() {
                    @Override
                    public VideoChapterBean reduce(VideoChapterBean value1, VideoChapterBean value2) throws Exception {
                        value1.setUvCt(value1.getUvCt() + value2.getUvCt());
                        value1.setDurSum(value1.getDurSum() + value2.getDurSum());
                        value1.setVideoCt(value1.getVideoCt() + value2.getVideoCt());
                        // NOTE(review): average duration per unique viewer; if uvCt is 0
                        // for a window this divides by zero — confirm intended semantics.
                        value1.setDurAvg((value1.getDurSum() * 1D) / value1.getUvCt());
                        return value1;
                    }
                }, new WindowFunction<VideoChapterBean, VideoChapterBean, String, TimeWindow>() {
                    @Override
                    public void apply(String s, TimeWindow window, Iterable<VideoChapterBean> input, Collector<VideoChapterBean> out) throws Exception {
                        // The reduce step leaves exactly one aggregated bean per window.
                        VideoChapterBean next = input.iterator().next();
                        out.collect(MyWindowFunction.getWindow(next, window));
                    }
                });

        //todo 9 Sink the windowed aggregates to ClickHouse
        reduceDS.print("reduceDS>>>>>>>>>>>");
        reduceDS.addSink(ClickHouseUtil_hjy.getClickHouseSink("insert into dws_video_chapter_video_window values(?,?,?,?,?,?,?,?,?)"));

        //todo 10 Launch the job
        env.execute();
    }
}
