package com.atguigu.edu.realtime.app.dwd.db;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;

import com.atguigu.edu.realtime.bean.DwdLearnPlayBean;
import com.atguigu.edu.realtime.util.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.EventTimeSessionWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

/**
 * Learning domain: play transaction fact table at session granularity.
 *
 * <p>Reads raw video-play logs from Kafka, converts each record to a
 * {@link DwdLearnPlayBean}, merges records belonging to the same play session
 * with an event-time session window (3&nbsp;s gap), and writes the aggregated
 * result back to Kafka as JSON.
 */
public class DwdPlayPageView {
    public static void main(String[] args) throws Exception {
        //TODO 1. Basic environment setup
        //1.1 Stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 Parallelism
        env.setParallelism(4);

        //TODO 2. Checkpointing and restart strategy
        env.enableCheckpointing(5000L);
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(1, Time.seconds(1L)));

        //TODO 3. Read from the Kafka topic
        //3.1 Topic and consumer group
        String topic = "dwd_traffic_video_log";
        String groupId = "dwd_play_page_view_group";
        //3.2 Consumer
        KafkaSource<String> kafkaSource = MyKafkaUtil.getKafkaSource(topic, groupId);
        //3.3 Wrap as a stream (watermarks are assigned later, after conversion)
        DataStreamSource<String> kafkaDs = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "kafka_source");

        //TODO 4. Parse JSON and convert to the entity bean in one step.
        // The original pipeline used map(JSON::parseObject) followed by an
        // un-guarded getJSONObject(...) chain: a single malformed record (bad
        // JSON, or a log without "common"/"appVideo") threw an uncaught
        // exception and failed the whole job. Such records are now dropped.
        SingleOutputStreamOperator<DwdLearnPlayBean> beanDs = kafkaDs.process(new ProcessFunction<String, DwdLearnPlayBean>() {
            @Override
            public void processElement(String jsonStr, ProcessFunction<String, DwdLearnPlayBean>.Context ctx, Collector<DwdLearnPlayBean> out) {
                try {
                    JSONObject value = JSON.parseObject(jsonStr);
                    JSONObject common = value.getJSONObject("common");
                    JSONObject appVideo = value.getJSONObject("appVideo");
                    if (common == null || appVideo == null) {
                        // Not a play log — skip it.
                        return;
                    }
                    out.collect(DwdLearnPlayBean.builder()
                            .sessionId(common.getString("sid"))
                            .userId(common.getString("uid"))
                            .videoId(appVideo.getString("video_id"))
                            .playSec(appVideo.getInteger("play_sec"))
                            .positionSec(appVideo.getInteger("position_sec"))
                            .ts(value.getLong("ts"))
                            .build());
                } catch (Exception e) {
                    // Dirty data — drop the record rather than fail the job.
                }
            }
        });

        //TODO 5. Assign event-time timestamps and watermarks.
        // NOTE(review): the original comment claimed the source timestamp is in
        // seconds while Flink expects milliseconds, yet no *1000 conversion is
        // applied here — confirm the unit of "ts" against the log producer.
        SingleOutputStreamOperator<DwdLearnPlayBean> withWaterMarkDs = beanDs.assignTimestampsAndWatermarks(
                WatermarkStrategy.<DwdLearnPlayBean>forMonotonousTimestamps()
                        .withTimestampAssigner(new SerializableTimestampAssigner<DwdLearnPlayBean>() {
                            @Override
                            public long extractTimestamp(DwdLearnPlayBean element, long recordTimestamp) {
                                return element.getTs();
                            }
                        })
        );

        //TODO 6. Key by session id
        KeyedStream<DwdLearnPlayBean, String> keyByDs = withWaterMarkDs.keyBy(DwdLearnPlayBean::getSessionId);

        //TODO 7. Event-time session window (3 s gap) + incremental aggregation.
        // The windowing Time class is fully qualified because it clashes with
        // the restart-strategy Time imported above.
        WindowedStream<DwdLearnPlayBean, String, TimeWindow> windowDs =
                keyByDs.window(EventTimeSessionWindows.withGap(org.apache.flink.streaming.api.windowing.time.Time.seconds(3)));
        SingleOutputStreamOperator<DwdLearnPlayBean> reduceDs = windowDs.reduce(new ReduceFunction<DwdLearnPlayBean>() {
            @Override
            public DwdLearnPlayBean reduce(DwdLearnPlayBean value1, DwdLearnPlayBean value2) {
                // Play time is additive across records of one session; the
                // position is the furthest point reached in the video.
                value1.setPlaySec(value1.getPlaySec() + value2.getPlaySec());
                if (value1.getPositionSec() < value2.getPositionSec()) {
                    value1.setPositionSec(value2.getPositionSec());
                }
                return value1;
            }
        }, new WindowFunction<DwdLearnPlayBean, DwdLearnPlayBean, String, TimeWindow>() {
            @Override
            public void apply(String s, TimeWindow window, Iterable<DwdLearnPlayBean> input, Collector<DwdLearnPlayBean> out) {
                // With an incremental reduce, the iterable holds exactly one
                // pre-aggregated element per window; forward it unchanged.
                for (DwdLearnPlayBean element : input) {
                    out.collect(element);
                }
            }
        });

        //TODO 8. Serialize the aggregated result to JSON
        SingleOutputStreamOperator<String> mapDS = reduceDs.map(JSON::toJSONString);
        mapDS.print();  // debug output — consider removing in production

        //TODO 9. Write the aggregated result to Kafka
        mapDS.sinkTo(MyKafkaUtil.getKafkaSink("dwd_play_page_view"));

        env.execute();
    }
}
