package com.atguigu.edu.realtime220815.app.dwd.log;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime220815.util.DateFormatUtil;
import com.atguigu.edu.realtime220815.util.KafkaUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * @Classname DwdTrafficBaseLogSplit
 * @Description DWD traffic job: reads raw logs from Kafka topic_log, repairs the
 *              new-visitor flag (is_new) per device, and splits the stream into
 *              page, start, appVideo and error logs, each written to its own Kafka topic.
 * @Date 2023/2/15 19:03
 * @Created by lzx
 */
public class DwdTrafficBaseLogSplit {
    public static void main(String[] args) throws Exception {
        /*
         * Pipeline overview:
         * 1. Create the streaming execution environment
         * 2. Checkpoint settings (currently disabled; enable for production)
         * 3. Read raw events from Kafka topic "topic_log"
         * 4. Filter + convert String -> JSONObject, dropping malformed records
         * 5. Repair the new/returning-visitor flag (is_new) with keyed state
         * 6. Split the stream into page / start / appVideo / error logs
         */
        // 1. Streaming execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        // 2. Checkpoint settings — intentionally commented out for local runs;
        //    re-enable when deploying so state survives failures.
        /*env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.setStateBackend(new HashMapStateBackend());
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30),Time.seconds(3)));
        env.getCheckpointConfig().setCheckpointTimeout(60000);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000);
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop101:8020/eduRealTime/ck");
        System.setProperty("HADOOP_USER_NAME","atguigu");*/
        // 3. Source: raw log records from Kafka
        DataStreamSource<String> topicLogDS = env.addSource(KafkaUtils.getFlinkKafkaConsumer("topic_log", "dwd_traffic_base_log_split_group"));

        // 4. Parse String -> JSONObject. Empty or malformed records are dropped
        //    (with the offending payload logged) instead of failing the job.
        SingleOutputStreamOperator<JSONObject> process = topicLogDS.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String value, ProcessFunction<String, JSONObject>.Context ctx, Collector<JSONObject> out) throws Exception {
                try {
                    if (StringUtils.isNotEmpty(value)) {
                        out.collect(JSON.parseObject(value));
                    }
                } catch (Exception e) {
                    // Dirty data: keep the raw payload visible for debugging
                    // rather than swallowing the parse failure without context.
                    System.err.println("Dropping malformed log record: " + value);
                    e.printStackTrace();
                }
            }
        });
        //process.print(">>>");

        // 5. New/returning-visitor repair, keyed by device id (mid).
        //    NOTE(review): assumes every record carries common.mid — a record
        //    missing "common" would NPE in the key selector; confirm upstream schema.
        SingleOutputStreamOperator<JSONObject> operator = process.keyBy(value -> value.getJSONObject("common").getString("mid"))
                .process(new KeyedProcessFunction<String, JSONObject, JSONObject>() {
                    // Last visit date (as produced by DateFormatUtil.toDate) seen for this device.
                    private ValueState<String> lastDateState = null;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        lastDateState = getRuntimeContext().getState(new ValueStateDescriptor<String>("last_date_state", String.class));
                    }

                    @Override
                    public void processElement(JSONObject value, KeyedProcessFunction<String, JSONObject, JSONObject>.Context ctx, Collector<JSONObject> out) throws Exception {
                        JSONObject common = value.getJSONObject("common");
                        String isNew = common.getString("is_new");
                        Long ts = value.getLong("ts");
                        if ("1".equals(isNew)) {
                            // Client claims "new": trust it only on the device's first
                            // recorded day; on any later day downgrade to returning.
                            String lastDate = lastDateState.value();
                            if (!StringUtils.isEmpty(lastDate)) {
                                String date = DateFormatUtil.toDate(ts);
                                if (!date.equals(lastDate)) {
                                    common.put("is_new", "0");
                                }
                            } else {
                                // First time this device is seen: remember its first-visit date.
                                lastDateState.update(DateFormatUtil.toDate(ts));
                            }
                        } else {
                            // Client claims "returning" but state is empty (e.g. the job
                            // started after the device's first visit): backfill state with
                            // yesterday so the flag can never flip back to "new".
                            String lastDate = lastDateState.value();
                            if (StringUtils.isEmpty(lastDate)) {
                                lastDateState.update(DateFormatUtil.toDate(ts - 24 * 60 * 60 * 1000L));
                            }
                        }
                        out.collect(value);
                    }
                });

        /*
         * 6. Split the stream. A record is classified by which field it carries:
         *    - "start"    present -> app-start log   (side output)
         *    - "appVideo" present -> video-play log  (side output)
         *    - otherwise          -> page-view log   (main output)
         *    Any record may additionally carry an "err" field; such records are
         *    first copied to the error side output (err intact), then the err
         *    field is stripped before normal classification.
         */
        OutputTag<String> startTag = new OutputTag<String>("startTag") {};
        OutputTag<String> appVideoTag = new OutputTag<String>("appVideoTag") {};
        OutputTag<String> errTag = new OutputTag<String>("errTag") {};
        SingleOutputStreamOperator<String> streamOperator = operator.process(new ProcessFunction<JSONObject, String>() {
            @Override
            public void processElement(JSONObject value, ProcessFunction<JSONObject, String>.Context ctx, Collector<String> out) throws Exception {
                JSONObject start = value.getJSONObject("start");
                JSONObject appVideo = value.getJSONObject("appVideo");
                JSONObject err = value.getJSONObject("err");
                // Errors can accompany any log type: emit a full copy to the
                // error stream, then drop the err field from this record.
                if (err != null) {
                    ctx.output(errTag, value.toJSONString());
                    value.remove("err");
                }
                if (appVideo != null) {
                    ctx.output(appVideoTag, value.toJSONString());
                } else if (start != null) {
                    ctx.output(startTag, value.toJSONString());
                } else {
                    // Neither start nor appVideo -> page-view log on the main stream.
                    out.collect(value.toJSONString());
                }
            }
        });
        // Sinks: one Kafka topic per log type.
        streamOperator.addSink(KafkaUtils.getFlinkKafkaProducer("dwd_traffic_page_log"));
        streamOperator.getSideOutput(errTag).addSink(KafkaUtils.getFlinkKafkaProducer("dwd_traffic_err_log"));
        streamOperator.getSideOutput(appVideoTag).addSink(KafkaUtils.getFlinkKafkaProducer("dwd_traffic_appVideo_log"));
        streamOperator.getSideOutput(startTag).addSink(KafkaUtils.getFlinkKafkaProducer("dwd_traffic_start_log"));

        env.execute("dwd_traffic_base_log_split");
    }
}
