package com.atguigu.app.dwm;

import com.alibaba.fastjson.JSONAware;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.util.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternSelectFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.PatternTimeoutFunction;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.OutputTag;

import java.time.Duration;

/**
 * 跳出率
 *  什么是跳出
 *      跳出就是用户成功访问了网站的一个页面后就退出，不再继续访问网站的其它页面。而
 *      跳出率就是用跳出次数除以访问次数。
 *      关注跳出率，可以看出引流过来的访客是否能很快的被吸引，渠道引流过来的用户之间
 *      的质量对比，对于应用优化前后跳出率的对比也能看出优化改进的成果。
 **/
//数据流：web/app -> Nginx -> SpringBoot -> Kafka(ods) -> FlinkApp -> Kafka(dwd) -> FlinkApp -> Kafka(dwm)
//程  序：mockLog -> Nginx -> Logger.sh  -> Kafka(ZK)  -> BaseLogApp -> kafka -> UserJumpDetailApp -> Kafka
public class UserJumpDetailApp
{
    /**
     * Entry point: builds and submits the Flink job that detects "jump" (bounce)
     * visits from the DWD page log and writes them to the DWM Kafka topic.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception
    {
        //TODO 1. Build the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // In production, keep the parallelism equal to the Kafka partition count.
        env.setParallelism(1);

        //1.1 Checkpointing & state backend (enable for production deployments)
        //env.setStateBackend(new FsStateBackend("hdfs://hadoop102:8020/gmall-flink-210325/ck"));
        //env.enableCheckpointing(5000L);
        //env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        //env.getCheckpointConfig().setCheckpointTimeout(10000L);
        //env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        //env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000);

        //env.setRestartStrategy(RestartStrategies.fixedDelayRestart());

        //TODO 2. Read the DWD page-log topic into a stream
        String sourceTopic = "dwd_page_log";
        String sinkTopic = "dwm_user_jump";
        // FIX: the previous group id "base_log_app_210325" was copy-pasted from BaseLogApp;
        // each application must consume with its own group id so that its committed
        // offsets are tracked independently of other jobs.
        String groupId = "user_jump_detail_app_210325";
        DataStream<String> jsonStream = env.addSource(MyKafkaUtil.getKafkaConsumer(sourceTopic, groupId));

        //TODO 3. Parse each line into a JSONObject and generate event-time watermarks
        SingleOutputStreamOperator<JSONObject> jsonObjDS = jsonStream
                .map(JSONObject::parseObject)
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy
                                // Tolerate up to 2s of out-of-order events
                                .<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                                // Event time comes from the record's "ts" field (epoch millis)
                                .withTimestampAssigner(
                                        (SerializableTimestampAssigner<JSONObject>) (element, recordTimestamp) -> element.getLong("ts")));

        //TODO 4. CEP step 1: define the pattern sequence
        // A "jump" shows up as two strictly consecutive session-entry pages
        // (last_page_id absent) from the same device: the first entry page was
        // never followed by an in-session page view.
        Pattern<JSONObject, JSONObject> pattern = Pattern
                .<JSONObject>begin("start")
                .where(new SimpleCondition<JSONObject>()
                {
                    @Override
                    public boolean filter(JSONObject value) throws Exception
                    {
                        // Entry page: no referring page id.
                        // NOTE(review): assumes every record carries a "page" object;
                        // a missing "page" would NPE and fail the job — confirm the
                        // upstream dwd_page_log contract guarantees it.
                        String lastPageId = value.getJSONObject("page").getString("last_page_id");
                        return lastPageId == null || lastPageId.isEmpty();
                    }
                })
                // Looping pattern: the entry-page condition must match twice ...
                .times(2)
                // ... with strict contiguity (equivalent to begin().next() with the
                // same condition repeated; without consecutive() it behaves as followedBy)
                .consecutive()
                // Match window: both events must occur within 10s; a lone entry page
                // that sees no second event within 10s becomes a timeout (also a jump)
                .within(Time.seconds(10));

        //TODO 5. CEP step 2: apply the pattern to the stream, keyed by device id (mid)
        PatternStream<JSONObject> patternStream = CEP.pattern(jsonObjDS.keyBy(json -> json.getJSONObject("common").getString("mid")), pattern);

        //TODO 6. CEP step 3: extract both matched and timed-out events
        // Both cases emit the first "start" event — the page that was bounced from.
        OutputTag<JSONObject> timeoutTag = new OutputTag<JSONObject>("timeout"){};
        SingleOutputStreamOperator<JSONObject> selectStream = patternStream.select(timeoutTag,
                (PatternTimeoutFunction<JSONObject, JSONObject>) (timedOutMatch, timeoutTimestamp) -> timedOutMatch.get("start").get(0),
                (PatternSelectFunction<JSONObject, JSONObject>) fullMatch -> fullMatch.get("start").get(0)
        );
        DataStream<JSONObject> timeoutStream = selectStream.getSideOutput(timeoutTag);

        //TODO 7. Union the two kinds of jump events (matched main stream + timeout side output)
        DataStream<JSONObject> unionStream = selectStream.union(timeoutStream);

        //TODO 8. Write the result to the DWM Kafka topic
        unionStream.print();
        unionStream.map(JSONAware::toJSONString).addSink(MyKafkaUtil.getKafkaProducer(sinkTopic));

        //TODO 9. Submit the job
        env.execute("UserJumpDetailApp");
    }
}
