package com.bw.dwd;




import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.bw.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternSelectFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.PatternTimeoutFunction;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.OutputTag;

import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.Properties;

public class XT2UserJumpApp {

    /**
     * Flink streaming job that detects "user jump" (bounce) events.
     *
     * <p>Pipeline: read DWD page-log records from Kafka, parse to JSON with
     * event-time watermarks, key by device id (mid), then use CEP to find
     * sessions where a user opened a page with no last_page_id and either
     * immediately started another fresh session or timed out within 10s —
     * both cases count as a jump. Jump records are written back to Kafka.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Producer configuration for the downstream DWM topic.
        // (The source side is configured inside MyKafkaUtil.)
        Properties producerProps = new Properties();
        producerProps.setProperty("bootstrap.servers", "hadoop102:9092");
        producerProps.setProperty("group.id", "group1");

        // Source: DWD page log topic, consumer group "test1113".
        DataStream<String> pageLogSource =
                env.addSource(MyKafkaUtil.getKafkaConsumer("dwd_page_log_yk2", "test1113"));
        pageLogSource.print("<<<");

        // Parse raw strings into JSON objects and assign event-time
        // watermarks from the "ts" field (no out-of-orderness tolerance).
        // NOTE(review): records without a "ts" field would NPE here — confirm upstream guarantees it.
        SingleOutputStreamOperator<JSONObject> jsonStream = pageLogSource
                .map(new MapFunction<String, JSONObject>() {
                    @Override
                    public JSONObject map(String raw) throws Exception {
                        return JSON.parseObject(raw);
                    }
                })
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                        .withTimestampAssigner((record, previousTs) -> record.getLong("ts")));

        // Partition the stream per device so the CEP pattern runs per user.
        KeyedStream<JSONObject, String> keyedByMid =
                jsonStream.keyBy(record -> record.getJSONObject("common").getString("mid"));

        // Pattern: two consecutive session-start pages (last_page_id == null)
        // within 10 seconds. A strict match means the first page was a bounce;
        // a timeout on the first step also means a bounce.
        Pattern<JSONObject, JSONObject> jumpPattern = Pattern.<JSONObject>begin("one")
                .where(new SimpleCondition<JSONObject>() {
                    @Override
                    public boolean filter(JSONObject record) throws Exception {
                        return record.getJSONObject("page").getString("last_page_id") == null;
                    }
                })
                .next("two")
                .where(new SimpleCondition<JSONObject>() {
                    @Override
                    public boolean filter(JSONObject record) throws Exception {
                        return record.getJSONObject("page").getString("last_page_id") == null;
                    }
                })
                .within(Time.seconds(10));

        PatternStream<JSONObject> patternStream = CEP.pattern(keyedByMid, jumpPattern);

        // Timed-out partial matches (only "one" seen) go to this side output.
        OutputTag<JSONObject> timeoutTag = new OutputTag<JSONObject>("timeout") {};

        // In both the timeout and the full-match case, the jump record is the
        // first page of the pattern ("one").
        SingleOutputStreamOperator<JSONObject> matchedStream = patternStream.select(
                timeoutTag,
                new PatternTimeoutFunction<JSONObject, JSONObject>() {
                    @Override
                    public JSONObject timeout(Map<String, List<JSONObject>> pattern, long timeoutTs) throws Exception {
                        return pattern.get("one").get(0);
                    }
                },
                new PatternSelectFunction<JSONObject, JSONObject>() {
                    @Override
                    public JSONObject select(Map<String, List<JSONObject>> pattern) throws Exception {
                        return pattern.get("one").get(0);
                    }
                });

        // Jumps = fully matched bounces + timed-out bounces.
        DataStream<JSONObject> jumpStream = matchedStream.union(matchedStream.getSideOutput(timeoutTag));
        jumpStream.print();

        // Sink: serialize each jump record back to a string and publish it.
        FlinkKafkaProducer<String> jumpSink =
                new FlinkKafkaProducer<>("dwm_user_jump_yk2", new SimpleStringSchema(), producerProps);
        jumpStream.map(JSONObject::toString).addSink(jumpSink);

        env.execute();
    }
}
