package com.atguigu.gmall.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.util.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternFlatSelectFunction;
import org.apache.flink.cep.PatternFlatTimeoutFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.time.Duration;
import java.util.List;
import java.util.Map;

/**
 * DWD traffic "jump-out" (bounce) detection job.
 *
 * <p>Reads page-log events from Kafka, and uses Flink CEP to identify bounced visits:
 * a session-start page (last_page_id is null/empty) counts as a jump-out either when
 * it is immediately followed by another session-start page (the first visit bounced),
 * or when no follow-up page arrives within the timeout window (CEP timeout side output).
 * Both kinds of jump-out events are unioned and written back to a Kafka topic.
 *
 * <p>Pipeline:
 * 1  environment setup
 * 2  Kafka source (dwd_traffic_page_log)
 * 3  parse records to JSONObject, assign event-time watermarks on "ts"
 * 4  key the stream by common.mid (device id)
 * 5  define the CEP pattern: two consecutive session-start events
 * 6  apply the pattern to the keyed stream
 * 7  extract matched events; timed-out events go to a side output
 * 8  union matched + timed-out jump events
 * 9  write to the jump-out Kafka topic
 */
public class DwdTrafficJumpOutApp {

    public static void main(String[] args) throws Exception {

        // Pin the local web UI / REST port so several jobs can run side by side.
        Configuration configuration = new Configuration();
        configuration.setString(RestOptions.BIND_PORT, "5577");

        // 1  Environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(configuration);
        env.setParallelism(4);

        String topic = "dwd_traffic_page_log";
        String groupId = "dwd_traffic_jump_out";

        // 2  Kafka source
        DataStreamSource<String> kafkaStream = env.addSource(MyKafkaUtil.getKafkaConsumer(topic, groupId));

        // 3  Parse each record into a JSONObject.
        SingleOutputStreamOperator<JSONObject> jsonObjStream =
                kafkaStream.map(jsonString -> JSON.parseObject(jsonString));

        // Event-time watermarks from the "ts" field, tolerating 5s of out-of-orderness.
        // withIdleness keeps watermarks advancing when a partition goes quiet.
        // NOTE(review): getLong("ts") will NPE if a record lacks "ts" — assumes upstream
        // always populates it; confirm against the page-log producer.
        SingleOutputStreamOperator<JSONObject> jsonObjWithWaterMarkStream =
                jsonObjStream.assignTimestampsAndWatermarks(WatermarkStrategy
                        .<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(5))
                        .withIdleness(Duration.ofSeconds(10))
                        .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                            @Override
                            public long extractTimestamp(JSONObject jsonObject, long recordTimestamp) {
                                return jsonObject.getLong("ts");
                            }
                        }));

        // 4  Key by device id so the pattern is evaluated per visitor.
        KeyedStream<JSONObject, String> midKeyedStream =
                jsonObjWithWaterMarkStream.keyBy(jsonObj -> jsonObj.getJSONObject("common").getString("mid"));

        // 5  Pattern definition.
        //    A session-start page is one with no last_page_id.
        //    - "first" next "second": two session starts in a row -> the first visit bounced.
        //    - "first" with no follow-up inside the window -> timeout, also a bounce.
        //    BUG FIX: the window was Time.milliseconds(5), which contradicts the intended
        //    10-second session timeout and would time out nearly every event; use 10 seconds.
        Pattern<JSONObject, JSONObject> pattern = Pattern.<JSONObject>begin("first")  // first session start
                .where(new SimpleCondition<JSONObject>() {
                    @Override
                    public boolean filter(JSONObject jsonObject) throws Exception {
                        String lastPageId = jsonObject.getJSONObject("page").getString("last_page_id");
                        return lastPageId == null || lastPageId.length() == 0;
                    }
                })
                .next("second")  // second session start, strictly the next event for this mid
                .where(new SimpleCondition<JSONObject>() {
                    @Override
                    public boolean filter(JSONObject jsonObject) throws Exception {
                        String lastPageId = jsonObject.getJSONObject("page").getString("last_page_id");
                        return lastPageId == null || lastPageId.length() == 0;
                    }
                })
                .within(Time.seconds(10));

        // 6  Apply the pattern to the keyed stream.
        PatternStream<JSONObject> patternStream = CEP.pattern(midKeyedStream, pattern);

        // 7  Extract events. Matches emit the FIRST session-start (it bounced);
        //    timeouts emit the lone session-start via the side output.
        OutputTag<JSONObject> timeoutTag = new OutputTag<JSONObject>("timeout") {};
        SingleOutputStreamOperator<JSONObject> twiceJumpStream = patternStream.flatSelect(timeoutTag,
                new PatternFlatTimeoutFunction<JSONObject, JSONObject>() {
                    @Override  // timed-out partial match: the single "first" event is the bounce
                    public void timeout(Map<String, List<JSONObject>> pattern, long timeoutTimestamp,
                                        Collector<JSONObject> out) throws Exception {
                        out.collect(pattern.get("first").get(0));
                    }
                },
                new PatternFlatSelectFunction<JSONObject, JSONObject>() {
                    @Override  // full match: the first of the two session starts is the bounce
                    public void flatSelect(Map<String, List<JSONObject>> pattern,
                                           Collector<JSONObject> out) throws Exception {
                        out.collect(pattern.get("first").get(0));
                    }
                });

        // Timeout side output.
        DataStream<JSONObject> timeoutStream = twiceJumpStream.getSideOutput(timeoutTag);
        timeoutStream.print("timeout:::");

        // 8  Union both kinds of jump-out events.
        DataStream<JSONObject> unionStream = twiceJumpStream.union(timeoutStream);

        // 9  Sink to Kafka.
        // NOTE(review): topic "dwd_traffic_jumpout" is spelled without the underscore used by
        // groupId "dwd_traffic_jump_out" — kept as-is since downstream consumers may depend
        // on it; confirm the intended topic name.
        unionStream.print();
        unionStream.map(jsonObj -> jsonObj.toJSONString())
                .addSink(MyKafkaUtil.getKafkaProducer("dwd_traffic_jumpout"));

        env.execute();
    }
}
