package com.atguigu.edu.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.util.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternFlatSelectFunction;
import org.apache.flink.cep.PatternFlatTimeoutFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.List;
import java.util.Map;

public class DwdTrafficUserJumpOutApp { // Detects "jump-out" users: sessions that view exactly one page and leave.

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // 1. Consume the DWD page-log topic from Kafka.
        String sourceTopic = "dwd_traffic_page_log";          // source topic
        String groupId = "dwd_traffic_user_jump_out_app";      // consumer group
        FlinkKafkaConsumer<String> kafkaConsumer = MyKafkaUtil.getKafkaConsumer(sourceTopic, groupId);

        // addSource already returns DataStreamSource<String>; the original cast was redundant.
        DataStreamSource<String> kafkaDstream = env.addSource(kafkaConsumer);

        // 2. Parse each record into a JSONObject.
        //    NOTE(review): JSON.parseObject throws on malformed input and would fail the job;
        //    confirm upstream guarantees well-formed JSON, otherwise use flatMap with try/catch.
        SingleOutputStreamOperator<JSONObject> jsonObjDstream = kafkaDstream.map(JSON::parseObject);

        // 3. Event-time semantics: timestamps come from the "ts" field of each record.
        //    forMonotonousTimestamps assumes events arrive in order (no out-of-orderness).
        SingleOutputStreamOperator<JSONObject> jsonObjWithWatermarkDstream = jsonObjDstream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<JSONObject>forMonotonousTimestamps()
                        .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                            @Override
                            public long extractTimestamp(JSONObject jsonObject, long recordTimestamp) {
                                // "ts" carries the event time; NPE here if the field is absent — assumed always present.
                                return jsonObject.getLong("ts");
                            }
                        }));

        // 4. Key by device id (mid) so CEP evaluates each device's page views as one ordered sequence.
        KeyedStream<JSONObject, String> midKeyedDstream =
                jsonObjWithWatermarkDstream.keyBy(jsonObj -> jsonObj.getJSONObject("common").getString("mid"));

        // 5. CEP pattern describing a jump-out:
        //    "first"  — a session-start page view (last_page_id is null/empty);
        //    "second" — the IMMEDIATELY following event is also a session start, meaning the first
        //               session ended after a single page (a jump-out);
        //    timeout  — no follow-up event within the window, which is also a jump-out.
        Pattern<JSONObject, JSONObject> jumpOutPattern = Pattern.<JSONObject>begin("first").where(
                new SimpleCondition<JSONObject>() {
                    @Override
                    public boolean filter(JSONObject jsonObject) {
                        // Session start: no last_page_id means the user just entered.
                        String lastPageId = jsonObject.getJSONObject("page").getString("last_page_id");
                        return lastPageId == null || lastPageId.length() == 0;
                    }
                }
        ).next("second").where( // strict contiguity: the very next event for this mid
                new SimpleCondition<JSONObject>() {
                    @Override
                    public boolean filter(JSONObject jsonObject) {
                        // Another session start immediately after the first one.
                        String lastPageId = jsonObject.getJSONObject("page").getString("last_page_id");
                        return lastPageId == null || lastPageId.length() == 0;
                    }
                }
        ).within(Time.milliseconds(10000)); // session timeout window: 10 seconds

        // 6. Apply the pattern to the keyed stream.
        PatternStream<JSONObject> patternStream = CEP.pattern(midKeyedDstream, jumpOutPattern);
        OutputTag<JSONObject> timeoutTag = new OutputTag<JSONObject>("timeoutTag") {}; // side output for timed-out matches

        // 7. Extract both outcomes; in either case the "first" event is the jump-out record.
        SingleOutputStreamOperator<JSONObject> twiceSessionStream = patternStream.flatSelect(
                timeoutTag,
                new PatternFlatTimeoutFunction<JSONObject, JSONObject>() {
                    @Override // map key is the pattern stage name ("first" or "second")
                    public void timeout(Map<String, List<JSONObject>> pattern, long timeoutTimestamp, Collector<JSONObject> out) throws Exception {
                        // No second event arrived within the window: the lone first page is a jump-out.
                        List<JSONObject> firstList = pattern.get("first");
                        out.collect(firstList.get(0));
                    }
                },
                new PatternFlatSelectFunction<JSONObject, JSONObject>() {
                    @Override
                    public void flatSelect(Map<String, List<JSONObject>> pattern, Collector<JSONObject> out) throws Exception {
                        // Two consecutive session starts: the first session was a single-page visit.
                        List<JSONObject> firstList = pattern.get("first");
                        out.collect(firstList.get(0));
                    }
                }
        );

        // 8. Pull the timed-out jump-outs from the side output.
        DataStream<JSONObject> timeoutStream = twiceSessionStream.getSideOutput(timeoutTag);
//        timeoutStream.print("timeout:::::");
//        twiceSessionStream.print("twice:::::");

        // 9. Union matched and timed-out jump-outs into a single stream.
        DataStream<JSONObject> jumpOutStream = twiceSessionStream.union(timeoutStream);

        // 10. Serialize back to JSON strings and write to the sink topic.
        jumpOutStream.map(JSON::toJSONString).addSink(MyKafkaUtil.getKafkaProducer("dwd_traffic_user_jump_out"));
        env.execute();
    }
}