package yuekao7.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternSelectFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.PatternTimeoutFunction;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import yuekao7.entity.Page;
import yuekao7.util.KafkaUtil;

import java.text.SimpleDateFormat;
import java.time.Duration;
import java.util.List;
import java.util.Map;

/**
 * DWD-layer streaming job.
 *
 * <p>Consumes the raw log topic {@code topic-log} from Kafka and:
 * <ol>
 *   <li>validates/corrects the new-user flag ({@code common.is_new}) per device id,</li>
 *   <li>splits the stream into six side outputs (page / start / actions /
 *       displays / err / appVideo) for the per-domain fact topics,</li>
 *   <li>builds the page fact stream ({@link Page}) with event-time watermarks,</li>
 *   <li>derives the unique-visitor fact stream (one record per mid per day), and</li>
 *   <li>uses CEP to detect bounce sessions (sessions containing a single page).</li>
 * </ol>
 *
 * <p>Kafka sinks and checkpointing are commented out for local debugging.
 */
public class NewLastData {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // 1) Checkpoint every 5s with an FsStateBackend; disabled for local runs.
//        env.enableCheckpointing(5000);
//        env.setStateBackend(new FsStateBackend("file:///E:\\Checkpoint"));
        DataStreamSource<String> streamSource = env.addSource(KafkaUtil.kafkaSources("topic-log"));

        // 2) New/old-user validation, keyed by device id (mid).
        //    If a record claims is_new == "1" but this mid has been seen before,
        //    downgrade the flag to "0".
        //    BUG FIX: the previous code mutated a throw-away JSON.parseObject(s)
        //    result and then emitted the ORIGINAL string, so the correction was
        //    lost — the record must be re-serialized after mutation. It also
        //    called is_new.equals("1") (NPE if the field is missing) and wrote
        //    the int 0 where the field otherwise holds the string "1"/"0".
        SingleOutputStreamOperator<String> process = streamSource
                .keyBy(x -> JSON.parseObject(x).getJSONObject("common").getString("mid"))
                .process(new KeyedProcessFunction<String, String, String>() {
                    // Non-null once this mid has been seen at least once.
                    ValueState<String> state;

                    @Override
                    public void open(Configuration parameters) {
                        state = getRuntimeContext().getState(new ValueStateDescriptor<>("state", Types.STRING));
                    }

                    @Override
                    public void processElement(String s, KeyedProcessFunction<String, String, String>.Context context, Collector<String> collector) throws Exception {
                        JSONObject jsonObject = JSON.parseObject(s);
                        String is_new = jsonObject.getJSONObject("common").getString("is_new");
                        // "1".equals(is_new) is null-safe, unlike is_new.equals("1").
                        if ("1".equals(is_new) && state.value() != null) {
                            // Already-seen mid flagged as new: correct the flag and
                            // emit the CORRECTED record (state is already non-null).
                            jsonObject.getJSONObject("common").put("is_new", "0");
                            collector.collect(jsonObject.toJSONString());
                            return;
                        }
                        state.update("老用户"); // mark this mid as seen
                        collector.collect(s);
                    }
                });
//        process.print();

        // 3) Split topic-log into six side outputs: five transaction fact
        //    streams plus one playback pre-processing stream, to be written
        //    to Kafka (sinks commented out below).
        // Traffic domain: page topic
        OutputTag<String> page = new OutputTag<String>("page") {
        };
        // Traffic domain: start topic
        OutputTag<String> start = new OutputTag<String>("start") {
        };
        // Traffic domain: actions topic
        OutputTag<String> actions = new OutputTag<String>("actions") {
        };
        // Traffic domain: displays topic
        OutputTag<String> displays = new OutputTag<String>("displays") {
        };
        // Traffic domain: error topic
        OutputTag<String> err = new OutputTag<String>("err") {
        };
        // Traffic domain: playback pre-processing topic
        OutputTag<String> appVideo = new OutputTag<String>("appVideo") {
        };

        SingleOutputStreamOperator<String> process1 = streamSource.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String s, ProcessFunction<String, String>.Context context, Collector<String> collector) throws Exception {
                // A record may carry several sections at once, so each key is
                // tested independently (not if/else) and may go to several tags.
                JSONObject jsonObject = JSON.parseObject(s);
                if (jsonObject.containsKey("page")) {
                    context.output(page, s);
                }
                if (jsonObject.containsKey("start")) {
                    context.output(start, s);
                }
                if (jsonObject.containsKey("actions")) {
                    context.output(actions, s);
                }
                if (jsonObject.containsKey("displays")) {
                    context.output(displays, s);
                }
                if (jsonObject.containsKey("err")) {
                    context.output(err, s);
                }
                if (jsonObject.containsKey("appVideo")) {
                    context.output(appVideo, s);
                }
            }
        });
//        process1.getSideOutput(page).print("page:");
//        process1.getSideOutput(start).print("start:");
//        process1.getSideOutput(actions).print("actions:");
//        process1.getSideOutput(displays).print("displays:");
//        process1.getSideOutput(err).print("err:");
//        process1.getSideOutput(appVideo).print("appVideo:");
//        process1.getSideOutput(page).addSink(KafkaUtil.kafkaSink("page-log"));
//        process1.getSideOutput(start).addSink(KafkaUtil.kafkaSink("start-log"));
//        process1.getSideOutput(actions).addSink(KafkaUtil.kafkaSink("actions-log"));
//        process1.getSideOutput(displays).addSink(KafkaUtil.kafkaSink("displays-log"));
//        process1.getSideOutput(err).addSink(KafkaUtil.kafkaSink("err-log"));
//        process1.getSideOutput(appVideo).addSink(KafkaUtil.kafkaSink("appVideo-log"));

        // Page fact stream: project the raw JSON into Page beans and assign
        // event-time watermarks from the record's ts field (no out-of-orderness
        // tolerance — parallelism is 1 and the source is ordered).
        SingleOutputStreamOperator<Page> pageData = process1.getSideOutput(page).map(new MapFunction<String, Page>() {
            @Override
            public Page map(String s) throws Exception {
                JSONObject jsonObject = JSON.parseObject(s);
                String ar = jsonObject.getJSONObject("common").getString("ar");
                String is_new = jsonObject.getJSONObject("common").getString("is_new");
                String mid = jsonObject.getJSONObject("common").getString("mid");
                String os = jsonObject.getJSONObject("common").getString("os");
                String during_time = jsonObject.getJSONObject("page").getString("during_time");
                String last_page_id = jsonObject.getJSONObject("page").getString("last_page_id");
                String page_id = jsonObject.getJSONObject("page").getString("page_id");
                Long ts = jsonObject.getLong("ts");
                return new Page(ar, is_new, mid, os, during_time, last_page_id, page_id, ts);
            }
        }).assignTimestampsAndWatermarks(WatermarkStrategy
                .<Page>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                .withTimestampAssigner((event, timestamp) -> event.getTs()));

//        pageData.print();

        // 4) Unique-visitor fact stream: a UV record is a session-start page
        //    (last_page_id == null), de-duplicated per mid per DAY.
        //    BUG FIX: the previous "yyyy-MM-dd HH:mm:ss" pattern only
        //    de-duplicated within the same second, emitting one "unique"
        //    visitor per second of activity; day-level UV needs "yyyy-MM-dd".
        //    The SimpleDateFormat is also hoisted into open() instead of being
        //    allocated per element (one instance per subtask, so its lack of
        //    thread-safety is not an issue).
        SingleOutputStreamOperator<Page> stateData = pageData.keyBy(x -> x.getMid()).process(new KeyedProcessFunction<String, Page, Page>() {
            // Last calendar day (yyyy-MM-dd) on which this mid was counted.
            ValueState<String> state;
            SimpleDateFormat sdf;

            @Override
            public void open(Configuration parameters) {
                state = getRuntimeContext().getState(new ValueStateDescriptor<>("state", Types.STRING));
                sdf = new SimpleDateFormat("yyyy-MM-dd");
            }

            @Override
            public void processElement(Page page, KeyedProcessFunction<String, Page, Page>.Context context, Collector<Page> collector) throws Exception {
                String day = sdf.format(page.getTs());
                if (page.getLast_page_id() == null && !day.equals(state.value())) {
                    collector.collect(page);
                    state.update(day);
                }
            }
        });

//        stateData.print();

        // 5) Bounce detection with CEP: a bounce is a session with exactly one
        //    page view. Pattern = two consecutive session-start pages
        //    (last_page_id == null) within 10 minutes:
        //    - matched pair  -> the FIRST page is a bounce (main output);
        //    - timeout       -> a start page with no follow-up within the
        //                       window is also a bounce (timeout side output).
        KeyedStream<Page, String> keyby = pageData.keyBy(x -> x.getMid());

        Pattern<Page, Page> within = Pattern.<Page>begin("one").where(
                new SimpleCondition<Page>() {
                    @Override
                    public boolean filter(Page event) {
                        return event.getLast_page_id() == null;
                    }
                }
        ).next("two").where(
                new SimpleCondition<Page>() {
                    @Override
                    public boolean filter(Page subEvent) {
                        return subEvent.getLast_page_id() == null;
                    }
                }
        ).within(Time.minutes(10));

        PatternStream<Page> patternStream = CEP.pattern(keyby, within);

        // Side output carrying bounces detected via pattern timeout.
        OutputTag<Page> outputTag = new OutputTag<Page>("outputTag") {
        };

        SingleOutputStreamOperator<Page> select = patternStream.select(outputTag, new PatternTimeoutFunction<Page, Page>() {
            @Override
            public Page timeout(Map<String, List<Page>> map, long l) throws Exception {
                // No second session start arrived within the window: bounce.
                return map.get("one").get(0);
            }
        }, new PatternSelectFunction<Page, Page>() {
            @Override
            public Page select(Map<String, List<Page>> map) throws Exception {
                // Back-to-back session starts: the first one was a single-page
                // session, i.e. a bounce.
                return map.get("one").get(0);
            }
        });

//        select.getSideOutput(outputTag).print();
//        select.print();

        env.execute("NewLastData");
    }
}
