package com.atguigu.gmallrealtime.app.dwd.log;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmallrealtime.common.Constant;
import com.atguigu.gmallrealtime.util.DateFormatUtil;
import com.atguigu.gmallrealtime.util.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SideOutputDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * @author yhm
 * @create 2023-09-24 9:30
 */
/**
 * DWD traffic layer: base log splitting job.
 *
 * <p>Pipeline:
 * <ol>
 *   <li>Read raw app logs from the ODS Kafka topic.</li>
 *   <li>ETL: records that are not valid JSON, or that lack {@code common.mid},
 *       go to a dirty side output (sunk to {@code topic_dirty}).</li>
 *   <li>Repair the {@code is_new} visitor flag per device ({@code mid}) using keyed state.</li>
 *   <li>Split each record into page / err / start / display / action streams and
 *       sink each to its own DWD Kafka topic.</li>
 * </ol>
 */
public class DwdTrafficBaseLogSplit {
    public static void main(String[] args) throws Exception {
        // TODO 1 Create the Flink execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // TODO 2 Checkpointing and state backend (disabled for local development)
//        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
//
//        //2.2 Checkpoint timeout
//        env.getCheckpointConfig().setCheckpointTimeout(60000L);
//        //2.3 Retain checkpoints after the job is cancelled
//        env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//        //2.4 Minimum pause between two checkpoints
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
//        //2.5 Restart strategy
//        // env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3,3000L));
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30), Time.seconds(3)));
//
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
//
//        System.setProperty("HADOOP_USER_NAME","atguigu");

        // TODO 3 Read the raw log topic from Kafka
        String topicName = Constant.TOPIC_ODS_LOG;
        String groupId = "dwd_traffic_base_log_split";
        DataStreamSource<String> dataStreamSource = env.fromSource(MyKafkaUtil.getKafkaSource(topicName, groupId), WatermarkStrategy.noWatermarks(), "ods_log");

        // TODO 4 ETL / cleansing
        // Dirty records are collected via a side output; the OutputTag needs an
        // anonymous subclass (or explicit TypeInformation) to keep the generic type.
        OutputTag<String> dirtyTag = new OutputTag<String>("dirty", TypeInformation.of(String.class)) {
        };
        SingleOutputStreamOperator<JSONObject> jsonObjStream = dataStreamSource.process(new ProcessFunction<String, JSONObject>() {

            @Override
            public void processElement(String value, Context ctx, Collector<JSONObject> out) throws Exception {
                try {
                    JSONObject jsonObject = JSON.parseObject(value);
                    // Null-check "common": a record that is valid JSON but has no
                    // "common" object would otherwise throw an uncaught NPE here
                    // (JSONException does not cover it) and fail the whole job.
                    JSONObject common = jsonObject.getJSONObject("common");
                    String mid = common == null ? null : common.getString("mid");
                    if (mid != null) {
                        out.collect(jsonObject);
                    } else {
                        // Valid JSON but no mid: it cannot be keyed downstream,
                        // so route it to the dirty stream instead of dropping it silently.
                        System.out.println("当前为脏数据:" + value);
                        ctx.output(dirtyTag, value);
                    }
                } catch (JSONException jsonException) {
                    System.out.println("当前为脏数据:" + value);
                    ctx.output(dirtyTag, value);
                }
            }
        });

//        jsonObjStream.print("ETL>>");
        SideOutputDataStream<String> dirtyStream = jsonObjStream.getSideOutput(dirtyTag);
//        dirtyStream.print("dirty");

        String dirtyTopic = "topic_dirty";
        dirtyStream.sinkTo(MyKafkaUtil.getKafkaSink(dirtyTopic));

        // TODO 5 Repair the is_new (new/returning visitor) flag
        // Key the log stream by device id (mid) so each device gets its own state.
        KeyedStream<JSONObject, String> keyedStream = jsonObjStream.keyBy(new KeySelector<JSONObject, String>() {
            @Override
            public String getKey(JSONObject value) throws Exception {
                // ETL above guarantees "common" and "mid" are present here.
                JSONObject common = value.getJSONObject("common");
                return common.getString("mid");
            }
        });
        SingleOutputStreamOperator<JSONObject> fixIsNewStream = keyedStream.process(new KeyedProcessFunction<String, JSONObject, JSONObject>() {

            // Per-device first visit date (yyyy-MM-dd); null means never seen before.
            ValueState<String> firstDtState = null;

            @Override
            public void open(Configuration parameters) throws Exception {
                firstDtState = getRuntimeContext().getState(new ValueStateDescriptor<String>("first_dt", String.class));
            }

            @Override
            public void processElement(JSONObject value, Context ctx, Collector<JSONObject> out) throws Exception {
                // 1. Inspect the client-reported is_new flag ("1" = new, "0" = returning)
                JSONObject common = value.getJSONObject("common");
                String isNew = common.getString("is_new");
                String curDt = DateFormatUtil.toDate(value.getLong("ts"));
                String firstDt = firstDtState.value();
                if ("1".equals(isNew)) {
                    if (firstDt == null) {
                        // 2. Claims new and no state: genuinely new visitor,
                        //    remember today as the first-visit date.
                        firstDtState.update(curDt);
                    } else if (!firstDt.equals(curDt)) {
                        // 3. Claims new but first seen on an earlier date:
                        //    the flag is wrong (e.g. cache cleared), fix it to "0".
                        common.put("is_new", "0");
                    } else {
                        // Same-day repeat visit: flag is still correct, nothing to do.
                    }
                } else if ("0".equals(isNew)) {
                    if (firstDt == null) {
                        // 4. Claims returning but no state (job started after the
                        //    device's first visit): backfill the state with
                        //    yesterday so the device is never treated as new again.
                        firstDtState.update(DateFormatUtil.toDate(value.getLong("ts") - 24L * 60 * 60 * 1000));
                    } else {
                        // 5. Claims returning and state exists: the normal case.
                    }
                } else {
                    // Unexpected is_new value: pass the record through unchanged.
                }
                out.collect(value);
            }
        });

//        fixIsNewStream.print("fix");

        // TODO 6 Split the stream
        // Page logs go to the main stream; err / start / display / action records
        // are routed to side outputs.
        OutputTag<String> errTag = new OutputTag<String>("err",TypeInformation.of(String.class)){};
        OutputTag<String> startTag = new OutputTag<String>("start",TypeInformation.of(String.class)){};
        OutputTag<String> displayTag = new OutputTag<String>("display",TypeInformation.of(String.class)){};
        OutputTag<String> actionTag = new OutputTag<String>("action",TypeInformation.of(String.class)){};
        SingleOutputStreamOperator<String> pageStream = fixIsNewStream.process(new ProcessFunction<JSONObject, String>() {
            @Override
            public void processElement(JSONObject value, Context ctx, Collector<String> out) throws Exception {
                // 1. Emit the error payload (if any) and strip it from the record.
                JSONObject err = value.getJSONObject("err");
                if (err != null) {
                    ctx.output(errTag, err.toJSONString());
                    value.remove("err");
                }

                // 2. Page log vs. start log: the presence of "page" decides.
                JSONObject pageInfo = value.getJSONObject("page");
                if (pageInfo != null) {
                    // Page log: enrich the nested arrays with the shared context.
                    JSONObject common = value.getJSONObject("common");

                    String ts = value.getString("ts");
                    // Fan out the display (exposure) entries, each enriched with
                    // common / page / ts so they are self-contained downstream.
                    JSONArray displays = value.getJSONArray("displays");
                    if (displays != null && displays.size() > 0) {
                        for (int i = 0; i < displays.size(); i++) {
                            JSONObject display = displays.getJSONObject(i);
                            display.put("common", common);
                            display.put("page", pageInfo);
                            display.put("ts", ts);
                            ctx.output(displayTag, display.toJSONString());
                        }
                    }
                    value.remove("displays");
                    // Fan out the action entries (each action carries its own ts).
                    JSONArray actions = value.getJSONArray("actions");
                    if (actions != null && actions.size() > 0) {
                        for (int i = 0; i < actions.size(); i++) {
                            JSONObject action = actions.getJSONObject(i);
                            action.put("common", common);
                            action.put("page", pageInfo);
                            ctx.output(actionTag, action.toJSONString());
                        }
                    }
                    value.remove("actions");
                    // What remains (common + page + ts) is the pure page record.
                    out.collect(value.toJSONString());

                } else {
                    // Start (app launch) log.
                    ctx.output(startTag, value.toJSONString());
                }

            }
        });

        SideOutputDataStream<String> errStream = pageStream.getSideOutput(errTag);
        SideOutputDataStream<String> actionStream = pageStream.getSideOutput(actionTag);
        SideOutputDataStream<String> displayStream = pageStream.getSideOutput(displayTag);
        SideOutputDataStream<String> startStream = pageStream.getSideOutput(startTag);

//        errStream.print("err>>>");
//        actionStream.print("action>>>");
//        displayStream.print("display>>>");
//        startStream.print("start>>>");
//        pageStream.print("page>>>");


        // TODO 7 Sink every stream to its DWD Kafka topic
        pageStream.sinkTo(MyKafkaUtil.getKafkaSink(Constant.TOPIC_DWD_TRAFFIC_PAGE));
        errStream.sinkTo(MyKafkaUtil.getKafkaSink(Constant.TOPIC_DWD_TRAFFIC_ERR));
        actionStream.sinkTo(MyKafkaUtil.getKafkaSink(Constant.TOPIC_DWD_TRAFFIC_ACTION));
        displayStream.sinkTo(MyKafkaUtil.getKafkaSink(Constant.TOPIC_DWD_TRAFFIC_DISPLAY));
        startStream.sinkTo(MyKafkaUtil.getKafkaSink(Constant.TOPIC_DWD_TRAFFIC_START));

        // TODO 8 Launch the job
        env.execute();
    }
}
