package com.atguigu.edu.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.common.Constant;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicyWithNodeGroup;
import org.apache.kafka.clients.producer.ProducerRecord;


import javax.annotation.Nullable;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

import static org.apache.flink.streaming.api.environment.CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION;


/**
 * Video playback fact table (DWD layer).
 *
 * <p>Upstream log generation rule: the client emits one playback log every 30 s
 * while a video is playing, plus one final log when the video is closed. The
 * span from opening a video to closing it counts as ONE play session. Compared
 * to the e-commerce project, this online-education project adds these playback
 * logs. This job's purpose: deduplicate per (uid, video_id) session and
 * accumulate the total play time.
 *
 * <p>Sample input record:
 * <pre>
 * {
 *  "appVideo":{
 *      "play_sec":30,
 *      "position_sec":30,
 *      "video_id":"2735"
 *  },
 *  "common":{
 *      "ar":"29",
 *      "ba":"iPhone",
 *      "ch":"Appstore",
 *      "is_new":"0",
 *      "md":"iPhone 8",
 *      "mid":"mid_395",
 *      "os":"iOS 13.3.1",
 *      "sc":"1",
 *      "sid":"16095a05-2593-466a-8156-fd71dc0e5309",
 *      "uid":"45",
 *      "vc":"v2.1.134"
 *  },
 *  "ts":1671553070355
 * }
 * </pre>
 */
public class Dwd_TrafficLogAppVideoDetail {

    public static final String ckAndGroupIdAndJobName = "Dwd_TrafficLogAppVideoDetail";
    public static final String source_topic = Constant.TOPIC_DWD_TRAFFIC_APPVIDEO;
    public static final Integer port = 3004;
    // TODO: could later be supplied as a program argument instead of hard-coding.
    public static final Integer parallelism = 2;

    // Session-inactivity gap: upstream produces a log at least every 30 s while
    // playing, so if no record arrives for a key within this window the play
    // session is considered closed and the accumulated result is emitted.
    private static final long INACTIVITY_GAP_MS = 30 * 1000L;

    public static void main(String[] args) throws Exception {
        /*
            1. Set the Hadoop user explicitly; otherwise the Windows OS user is
               used by default and HDFS access fails with a permission error.
         */
        System.setProperty("HADOOP_USER_NAME", "atguigu");

        /*
            2. Create the stream execution environment with checkpointing.
         */
        Configuration configuration = new Configuration();
        configuration.setString("pipeline.name", ckAndGroupIdAndJobName);
        configuration.setInteger("rest.port", port);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(configuration);
        env.setParallelism(parallelism);
        env.enableCheckpointing(3000);
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop162:8020/edu/" + ckAndGroupIdAndJobName);
        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(500);
        env.getCheckpointConfig().setExternalizedCheckpointCleanup(RETAIN_ON_CANCELLATION);

        /*
            3. Read the dwd_traffic_appVideo records from Kafka.
               env.fromSource(source, watermarkStrategy, sourceName)
         */
        DataStreamSource<String> appVideoStrFromKafkaDs = env.fromSource(
                KafkaSource.<String>builder()
                        .setBootstrapServers(Constant.KAFKA_BROKERS)
                        .setTopics(source_topic)
                        .setGroupId(ckAndGroupIdAndJobName)
                        .setStartingOffsets(OffsetsInitializer.latest())
                        // Custom deserializer: unlike SimpleStringSchema it
                        // tolerates null payloads (tombstones) instead of NPE-ing.
                        .setValueOnlyDeserializer(new DeserializationSchema<String>() {
                            @Override
                            public String deserialize(byte[] message) throws IOException {
                                if (message != null) {
                                    return new String(message, StandardCharsets.UTF_8);
                                }
                                // Null payloads are filtered out downstream.
                                return null;
                            }

                            @Override
                            public boolean isEndOfStream(String s) {
                                return false;
                            }

                            @Override
                            public TypeInformation<String> getProducedType() {
                                return TypeInformation.of(new TypeHint<String>() {});
                            }
                        })
                        // Only read committed records so the upstream job's
                        // exactly-once transactions are respected.
                        .setProperty("isolation.level", "read_committed")
                        .build()

                , WatermarkStrategy.noWatermarks()

                , "kafka-source"
        );

        /*
            4. Parse each JSON string into a JSONObject for downstream access.
               Malformed or null records are dropped rather than failing the job.
         */
        SingleOutputStreamOperator<JSONObject> appVideoFromKafkaDs = appVideoStrFromKafkaDs
                .map(new MapFunction<String, JSONObject>() {
                    @Override
                    public JSONObject map(String jsonStr) throws Exception {
                        try {
                            return JSON.parseObject(jsonStr);
                        } catch (Exception e) {
                            // Bad JSON: emit null, removed by the filter below.
                            return null;
                        }
                    }
                })
                // Guard: without this, a single null/unparseable record would
                // NPE inside the key selector and crash the whole job.
                .filter(jsonObject -> jsonObject != null);

        // Analysis: logs arrive every 30 s while playing plus one on close, and
        // one play session spans open-to-close. Records sharing (uid, video_id)
        // therefore belong to the same session: keyBy, then keyed state plus a
        // processing-time timer to detect session end.

        /*
            5. Key by (uid, video_id).
            Keying on two fields requires a single key value; option A would be
            a concatenated "uid_videoId" string, option B (used here) a Tuple2.
         */
        KeyedStream<JSONObject, Tuple2<String, String>> keyedDS = appVideoFromKafkaDs.keyBy(new KeySelector<JSONObject, Tuple2<String, String>>() {
            @Override
            public Tuple2<String, String> getKey(JSONObject jsonObject) throws Exception {
                String uid = jsonObject.getJSONObject("common").getString("uid");
                String videoId = jsonObject.getJSONObject("appVideo").getString("video_id");
                return Tuple2.of(uid, videoId);
            }
        });

        /*
            6. Keyed state + timer: accumulate play time per session. Each new
               record pushes the timer forward by INACTIVITY_GAP_MS, so the
               timer only fires once no record has arrived for 30 s — i.e. the
               session has ended — at which point one summary record is emitted.
         */
        SingleOutputStreamOperator<JSONObject> processDS = keyedDS.process(new KeyedProcessFunction<Tuple2<String, String>, JSONObject, JSONObject>() {

            // Timestamp of the currently registered processing-time timer.
            // NOTE(review): state name "lastDateState" predates this field's
            // rename; kept as-is for checkpoint/savepoint compatibility.
            private ValueState<Long> lastProcessingTimeTimerState;
            // Accumulated play seconds for the current session of this key.
            private ValueState<Long> accPlaySecState;

            @Override
            public void open(Configuration parameters) throws Exception {
                lastProcessingTimeTimerState = getRuntimeContext().getState(new ValueStateDescriptor<Long>("lastDateState", Long.class));
                accPlaySecState = getRuntimeContext().getState(new ValueStateDescriptor<Long>("playsecAccState", Long.class));
            }

            @Override
            public void processElement(JSONObject jsonObject, Context context, Collector<JSONObject> collector) throws Exception {
                Long accPlaySec = accPlaySecState.value();
                // Play seconds reported by this record (0 if the field is absent).
                long playSec = jsonObject.getJSONObject("appVideo").getLongValue("play_sec");

                if (accPlaySec == null) {
                    // First record of a new session for this key.
                    accPlaySecState.update(playSec);
                } else {
                    // Session continues: cancel the pending timer before
                    // re-registering it further in the future.
                    Long lastTimer = lastProcessingTimeTimerState.value();
                    if (lastTimer != null) {
                        context.timerService().deleteProcessingTimeTimer(lastTimer);
                    }
                    accPlaySecState.update(accPlaySec + playSec);
                }

                // (Re)arm the inactivity timer: fires only if no further record
                // for this key arrives within the gap.
                long newTimerProcessing = context.timerService().currentProcessingTime() + INACTIVITY_GAP_MS;
                context.timerService().registerProcessingTimeTimer(newTimerProcessing);
                lastProcessingTimeTimerState.update(newTimerProcessing);
            }

            @Override
            public void onTimer(long timestamp, OnTimerContext ctx, Collector<JSONObject> out) throws Exception {
                Long accPlaySec = accPlaySecState.value();
                if (accPlaySec != null) {
                    // Session ended: emit one summary record for this key.
                    String user_id = ctx.getCurrentKey().f0;
                    String video_id = ctx.getCurrentKey().f1;
                    JSONObject jsonObject = new JSONObject();
                    jsonObject.put("user_id", user_id);
                    jsonObject.put("video_id", video_id);
                    jsonObject.put("accPlaySec", accPlaySec);
                    jsonObject.put("ts", System.currentTimeMillis());
                    out.collect(jsonObject);

                    // Clear session state; the next record for this key starts
                    // a fresh session.
                    accPlaySecState.clear();
                    lastProcessingTimeTimerState.clear();
                }
            }

        });

        /*
            7. Write the results back to Kafka with exactly-once semantics.
         */
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", Constant.KAFKA_BROKERS);
        // transaction.max.timeout.ms is a broker-side cap (default 15 min);
        // the producer's transaction.timeout.ms must not exceed it. Flink's
        // producer default is 60 min, so it must be lowered explicitly here.
        properties.setProperty("transaction.timeout.ms", 15 * 60 * 1000 + "");
        // Debug sink; remove or lower verbosity in production.
        processDS.print(">>>>>>>>>>>>>");
        processDS.map(jsonObject -> jsonObject.toJSONString())
                .addSink(
                        new FlinkKafkaProducer<String>(
                            // Fallback topic only; the serializer below routes
                            // every record to the real target topic.
                            "default",
                            new KafkaSerializationSchema<String>() {
                                @Override
                                public ProducerRecord<byte[], byte[]> serialize(String element, @Nullable Long aLong) {
                                    return new ProducerRecord<>(Constant.TOPIC_DWD_TRAFFIC_APPVIDEO_DETAIL, element.getBytes(StandardCharsets.UTF_8));
                                }
                            }
                            , properties

                            ,FlinkKafkaProducer.Semantic.EXACTLY_ONCE
                )
                );

        /*
            8. Launch the job. Propagate failures instead of swallowing them
               with printStackTrace, so the launcher/YARN sees a non-zero exit.
         */
        env.execute();

    }

}
