package com.atguigu.gamll.realtime.app.dwm;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gamll.realtime.app.dwd.BaseApp;
import com.atguigu.gamll.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternFlatSelectFunction;
import org.apache.flink.cep.PatternFlatTimeoutFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.List;
import java.util.Map;

// User jump (bounce) detail job: detects sessions where a user lands on a page
// (no last_page_id) and does NOT visit another page within the timeout window.
// Such "timed-out first pages" are emitted to the dwm_user_jump_detail topic.
public class UserJumpDetailApp extends BaseApp {
    public static void main(String[] args) throws Exception {
        // TODO 1. Prepare the basic environment
        // TODO 2. Checkpoint-related settings
        // (Both are handled inside BaseApp#baseEntry, which then calls biz.)
        UserJumpDetailApp userJumpDetailApp = new UserJumpDetailApp();
        userJumpDetailApp.baseEntry();
    }

    @Override
    public void biz(StreamExecutionEnvironment env) {
        // TODO 3. Read data from Kafka
        // 3.1 Declare the topic to consume and the consumer group
        String topic = "dwd_page_log";
        String groupId = "user_jump_detail_group";
        // 3.2 Build the consumer
        FlinkKafkaConsumer<String> kafkaSource = MyKafkaUtil.getKafkaSource(topic, groupId);
        // 3.3 Consume the data
        DataStreamSource<String> kafkaStrDS = env.addSource(kafkaSource);

        // TODO 4. Convert each record from a JSON string to a JSONObject
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaStrDS.map(JSON::parseObject);
//        jsonObjDS.print(">>>");

        // TODO 5. Assign watermarks and extract the event time from the "ts" field.
        // forMonotonousTimestamps assumes timestamps never go backwards per partition.
        // NOTE(review): assumes every record carries a non-null "ts" — a missing field
        // would NPE on unboxing; confirm upstream guarantees this.
        SingleOutputStreamOperator<JSONObject> jsonObjWithWatermarkDS = jsonObjDS.assignTimestampsAndWatermarks(WatermarkStrategy.<JSONObject>forMonotonousTimestamps()
                .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                    @Override
                    public long extractTimestamp(JSONObject jsonObject, long l) {
                        return jsonObject.getLong("ts");
                    }
                })
        );
        // TODO 6. Key the stream by device id (mid) so CEP matches per device
        KeyedStream<JSONObject, String> keyedDS = jsonObjWithWatermarkDS.keyBy(jsonObject -> jsonObject.getJSONObject("common").getString("mid"));
        // TODO 7. Filter the data with FlinkCEP
        // 7.1 Define the pattern:
        //   "first"  — a session-entry page (no last_page_id),
        //   "second" — strictly the next event, any page visit,
        //   within 10 s. If "second" never arrives, "first" is a jump (bounce).
        Pattern<JSONObject, JSONObject> pattern = Pattern.<JSONObject>begin("first")
                .where(
                        new SimpleCondition<JSONObject>() {
                            @Override
                            public boolean filter(JSONObject jsonObject) throws Exception {
                                // Entry page: last_page_id is absent or empty
                                String lastPageId = jsonObject.getJSONObject("page").getString("last_page_id");
                                return lastPageId == null || lastPageId.length() == 0;
                            }
                        }
                ).next("second").where(
                        new SimpleCondition<JSONObject>() {
                            @Override
                            public boolean filter(JSONObject jsonObject) throws Exception {
                                // Any follow-up page visit disqualifies the jump
                                String pageId = jsonObject.getJSONObject("page").getString("page_id");
                                return pageId != null && pageId.length() > 0;
                            }
                        }
                ).within(Time.seconds(10));
        // 7.2 Apply the pattern to the keyed stream
        PatternStream<JSONObject> patternDS = CEP.pattern(keyedDS, pattern);
        // 7.3 Extract results. Timed-out partial matches (the jumps we want) go to
        // a side output; fully matched sequences are discarded.
        OutputTag<String> timeOutputTag = new OutputTag<String>("timeOutputTag") {
        };
        SingleOutputStreamOperator<String> filterDS = patternDS.flatSelect(
                timeOutputTag,
                new PatternFlatTimeoutFunction<JSONObject, String>() {
                    @Override
                    public void timeout(Map<String, List<JSONObject>> pattern, long timeoutTimestamp, Collector<String> out) throws Exception {
                        // Partial matches that timed out: the "first" page had no
                        // follow-up within the window, i.e. the user jumped (bounced).
                        List<JSONObject> jsonObjectList = pattern.get("first");
                        for (JSONObject jsonObject : jsonObjectList) {
                            // Note: out.collect here routes the record to the side
                            // output declared by the first argument, not downstream.
                            out.collect(jsonObject.toJSONString());
                        }
                    }
                },
                new PatternFlatSelectFunction<JSONObject, String>() {
                    @Override
                    public void flatSelect(Map<String, List<JSONObject>> map, Collector<String> collector) throws Exception {
                        // Complete matches mean the user navigated onward — not a jump.
                        // We only want jump (timeout) data, so emit nothing here.
                    }
                }
        );

        DataStream<String> timeOutputDS = filterDS.getSideOutput(timeOutputTag);
        timeOutputDS.print(">>>");

        // TODO 8. Write the filtered (jump) records to the Kafka sink topic
        timeOutputDS.addSink(MyKafkaUtil.getKafkaSink("dwm_user_jump_detail"));
    }
}
