package com.atguigu.gmall.realtime.app.dwm;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONAware;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.utils.KafkaUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.functions.PatternProcessFunction;
import org.apache.flink.cep.functions.TimedOutPartialMatchHandler;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.List;
import java.util.Map;

/**
 * 跳出明细统计
 * <p>
 * flink 1.12 版本开始，默认时间语义为：事件时间语义
 * <p>
 * 程序执行流程同：com.atguigu.gmall.realtime.app.dwm.UniqueVisitorApp
 *
 * @author lvbingbing
 * @date 2022-03-27 10:37
 * @see org.apache.flink.streaming.api.environment.StreamExecutionEnvironment#DEFAULT_TIME_CHARACTERISTIC
 */
public class UserJumpDetailApp {
    public static void main(String[] args) throws Exception {
        // 1. Basic environment setup
        // 1.1 Stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // 1.2 Parallelism
        env.setParallelism(4);
        // 2. Checkpoint configuration
        // 2.1 Enable checkpointing (every 5 s, exactly-once semantics)
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        // 2.2 Checkpoint timeout
        CheckpointConfig checkpointConfig = env.getCheckpointConfig();
        checkpointConfig.setCheckpointTimeout(60000L);
        // 2.3 Externalized checkpoints: retain the checkpoint when the job is cancelled
        checkpointConfig.enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // 2.4 Restart strategy: at most 3 attempts, 6 s delay between attempts
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 6000L));
        // 2.5 State backend: checkpoints stored on HDFS
        env.setStateBackend(new FsStateBackend("hdfs://hadoop102:8020/ck/gmall"));
        // 2.6 User for HDFS access
        System.setProperty("HADOOP_USER_NAME", "atguigu");
        // 3. Read page-log data from Kafka
        DataStreamSource<String> kafkaDs = env.addSource(KafkaUtils.getKafkaSource("dwd_page_log", "user_jump_detail_app_group"));
        // 4. Convert each record from a JSON string to a JSONObject
        SingleOutputStreamOperator<JSONObject> jsonObjDs = kafkaDs.map(JSON::parseObject);
        // 5. Core jump-detail logic
        // 5.1 Assign event-time timestamps from the "ts" field.
        //     Flink 1.12+ uses event time as the default time characteristic.
        //     NOTE(review): forMonotonousTimestamps assumes per-partition ordered timestamps;
        //     with idle Kafka partitions the watermark can stall — consider withIdleness. TODO confirm.
        SingleOutputStreamOperator<JSONObject> jsonObjWithWatermarkDs = jsonObjDs.assignTimestampsAndWatermarks(
                WatermarkStrategy.<JSONObject>forMonotonousTimestamps()
                        .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                            @Override
                            public long extractTimestamp(JSONObject jsonObject, long recordTimestamp) {
                                // Assumes every record carries a non-null "ts" — a missing field
                                // would throw an NPE on unboxing. TODO confirm upstream guarantees.
                                return jsonObject.getLong("ts");
                            }
                        }));
        // 5.2 Key the stream by device id (mid) so CEP matches per device
        KeyedStream<JSONObject, String> keyedDs = jsonObjWithWatermarkDs.keyBy(jsonObj -> jsonObj.getJSONObject("common").getString("mid"));
        // 5.3 Define the match pattern:
        //     firstEvent  -> a session-entry page (no last_page_id),
        //     secondEvent -> any subsequent page view,
        //     within 30 minutes. A timed-out partial match (entry page with no
        //     follow-up) is a "jump" (bounce).
        Pattern<JSONObject, JSONObject> pattern = Pattern.<JSONObject>begin("firstEvent")
                .where(new SimpleCondition<JSONObject>() {
                    @Override
                    public boolean filter(JSONObject jsonObject) throws Exception {
                        // An empty last_page_id marks the first page of a visit
                        String lastPageId = jsonObject.getJSONObject("page").getString("last_page_id");
                        return StringUtils.isEmpty(lastPageId);
                    }
                })
                .next("secondEvent")
                .where(new SimpleCondition<JSONObject>() {
                    @Override
                    public boolean filter(JSONObject value) throws Exception {
                        // Any following record means another page was visited
                        return true;
                    }
                })
                // Timeout window for the pattern
                .within(Time.minutes(30));
        // 5.4 Apply the pattern to the keyed stream
        PatternStream<JSONObject> patternStream = CEP.pattern(keyedDs, pattern);
        // 5.5 Side-output tag for timed-out (= jump) events
        OutputTag<JSONObject> timeoutTag = new OutputTag<JSONObject>("timeoutTag") {
        };
        // 5.6 Process matched sequences; timed-out partial matches go to the side output
        SingleOutputStreamOperator<JSONObject> outputStreamOperator = patternStream.process(new MyPatternTimeOutProcessFunction(timeoutTag));
        // 5.7 Extract the jump records from the side output
        DataStream<JSONObject> timeoutDs = outputStreamOperator.getSideOutput(timeoutTag);
        // 6. Debug print of the jump details
        timeoutDs.print("跳出明细");
        // 7. Write the jump details back to the Kafka DWM layer
        timeoutDs.map(JSONAware::toJSONString)
                .addSink(KafkaUtils.getKafkaSinkAtLeastOnce("dwm_user_jump_detail"));
        // 8. Trigger job execution
        env.execute();
    }

    /**
     * CEP pattern handler: complete matches ("first page then another page")
     * are navigation events and are discarded; timed-out partial matches
     * (an entry page with no follow-up within the window) are the jump
     * (bounce) events and are emitted to the side output.
     */
    public static class MyPatternTimeOutProcessFunction extends PatternProcessFunction<JSONObject, JSONObject> implements TimedOutPartialMatchHandler<JSONObject> {

        // Tag identifying the side output that carries jump events.
        // Final: the tag is fixed at construction time and the function is serialized to the cluster.
        private final OutputTag<JSONObject> outputTag;

        public MyPatternTimeOutProcessFunction(OutputTag<JSONObject> outputTag) {
            this.outputTag = outputTag;
        }

        @Override
        public void processMatch(Map<String, List<JSONObject>> match, Context ctx, Collector<JSONObject> out) throws Exception {
            // A full match is a normal navigation event — not a jump, nothing to emit
        }

        @Override
        public void processTimedOutMatch(Map<String, List<JSONObject>> match, Context ctx) throws Exception {
            // A timed-out partial match is exactly the jump event we want.
            // The partial match always contains the matched begin event, but guard
            // defensively against an absent/empty entry.
            List<JSONObject> jsonObjectList = match.get("firstEvent");
            if (jsonObjectList != null && !jsonObjectList.isEmpty()) {
                ctx.output(outputTag, jsonObjectList.get(0));
            }
        }
    }
}
