package com.atguigu.app.dwd.log;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.util.DateFormatUtil;
import com.atguigu.util.KafkaUtil;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.StateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.time.OffsetDateTime;

/**
 * @author yhm
 * @create 2022-11-19 10:20
 */
public class BaseLogApp {
    /**
     * DWD traffic layer entry point.
     * <p>
     * Reads raw behavior logs from the Kafka topic {@code topic_log}, routes
     * malformed records to a "dirty" side output, repairs the new-visitor flag
     * ({@code is_new}) with per-device keyed state, splits each record into
     * page / start / action / display / error streams, and writes every stream
     * to its own DWD Kafka topic.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        // 1. Create the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 2. Configure the environment.
        // Parallelism 1 for local testing; in production this would normally
        // match the partition count of the source topic.
        env.setParallelism(1);

        /*
        // Production checkpointing / restart-strategy / state-backend settings,
        // kept disabled for local runs:
        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
        );
        env.setRestartStrategy(RestartStrategies.failureRateRestart(
                10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)
        ));
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");

        // Run as the "atguigu" user to gain permission to write checkpoints to HDFS.
        System.setProperty("HADOOP_USER_NAME", "atguigu");
         */

        // TODO 1 Read the raw log data from the Kafka topic "topic_log".
        String topicName = "topic_log";
        String groupId = "base_log_app";
        DataStreamSource<String> kafkaLogStream = env.addSource(KafkaUtil.getFlinkKafkaConsumer(topicName, groupId));

        // TODO 2 Clean and filter the data.
        // Dirty records (non-JSON, or JSON missing the required "common" object)
        // must be preserved on a side output rather than silently dropped, so a
        // ProcessFunction is used instead of a plain filter.
        OutputTag<String> dirtyOutputTag = new OutputTag<>("dirty", TypeInformation.of(String.class));

        SingleOutputStreamOperator<JSONObject> processStream = kafkaLogStream.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String value, Context ctx, Collector<JSONObject> out) throws Exception {
                try {
                    JSONObject jsonObject = JSON.parseObject(value);
                    if (jsonObject.getJSONObject("common") != null) {
                        // Well-formed record -> main stream.
                        out.collect(jsonObject);
                    } else {
                        // Parsable JSON, but missing required metadata -> dirty.
                        ctx.output(dirtyOutputTag, value);
                    }
                } catch (Exception e) {
                    // Not valid JSON at all -> dirty.
                    ctx.output(dirtyOutputTag, value);
                }
            }
        });

        // The main stream continues below; the side output holds the dirty data.
        DataStream<String> dirtyOutputStream = processStream.getSideOutput(dirtyOutputTag);
        dirtyOutputStream.print("dirty>>");


        // TODO 3 Repair the new/old-visitor flag ("is_new").
        // Key by device id (mid) so each device keeps its own first-visit-date state.
        KeyedStream<JSONObject, String> keyedStream = processStream.keyBy(new KeySelector<JSONObject, String>() {
            @Override
            public String getKey(JSONObject value) throws Exception {
                // "common" is guaranteed non-null by the cleaning step above.
                return value.getJSONObject("common").getString("mid");
            }
        });

        SingleOutputStreamOperator<JSONObject> fixIsNewStream = keyedStream.map(new RichMapFunction<JSONObject, JSONObject>() {
            // Date (yyyy-MM-dd) of the first visit seen for this device.
            ValueState<String> firstLoginDt = null;

            @Override
            public void open(Configuration parameters) throws Exception {
                super.open(parameters);
                firstLoginDt = getRuntimeContext().getState(new ValueStateDescriptor<String>("first_login_dt", String.class));
            }

            @Override
            public JSONObject map(JSONObject value) throws Exception {
                String isNew = value.getJSONObject("common").getString("is_new");
                // If the state is null (first record for this device):
                //   1) is_new=1 -> remember this record's date as the first visit
                //   2) is_new=0 -> old visitor predating the state; store a
                //      sentinel date far in the past (1970-01-01)
                if (firstLoginDt.value() == null) {
                    if ("1".equals(isNew)) {
                        Long ts = value.getLong("ts");
                        String dt = DateFormatUtil.toDate(ts);
                        firstLoginDt.update(dt);
                    } else if ("0".equals(isNew)) {
                        firstLoginDt.update("1970-01-01");
                    }
                } else {
                    // State present:
                    //   1) is_new=1 && date differs from state -> stale flag, repair to 0
                    //   2) is_new=1 && date equals state        -> correct, leave as-is
                    //   3) is_new=0 && date equals state        -> contradiction, report it
                    //   4) is_new=0 && date differs             -> correct, leave as-is
                    String stateDt = firstLoginDt.value();
                    Long ts = value.getLong("ts");
                    String dt = DateFormatUtil.toDate(ts);
                    if ("1".equals(isNew) && !stateDt.equals(dt)) {
                        value.getJSONObject("common").put("is_new", "0");
                    } else if ("0".equals(isNew) && stateDt.equals(dt)) {
                        System.out.println("error:>>>>>>> 状态和is_new无法对应");
                    }
                }
                return value;
            }
        });

        // TODO 4 Split into 5 output streams:
        // page (main output), start, action, display, error (side outputs).
        OutputTag<String> errorOutputTag = new OutputTag<>("error", TypeInformation.of(String.class));
        OutputTag<String> displayOutputTag = new OutputTag<>("display", TypeInformation.of(String.class));
        OutputTag<String> actionOutputTag = new OutputTag<>("action", TypeInformation.of(String.class));
        OutputTag<String> startOutputTag = new OutputTag<>("start", TypeInformation.of(String.class));

        SingleOutputStreamOperator<String> pageStream = fixIsNewStream.process(new ProcessFunction<JSONObject, String>() {
            @Override
            public void processElement(JSONObject value, Context ctx, Collector<String> out) throws Exception {
                // A record is either a page log (has "page") or a start log (has "start").
                JSONObject page = value.getJSONObject("page");
                JSONObject start = value.getJSONObject("start");
                if (page != null) {
                    // Page log: may carry error, display, and action data.
                    // Shared context to copy into each split record.
                    JSONObject common = value.getJSONObject("common");
                    Long ts = value.getLong("ts");


                    // Separate error data; "err" is a top-level field of the record.
                    JSONObject err = value.getJSONObject("err");
                    if (err != null) {
                        // NOTE(review): only the "err" sub-object is emitted, without the
                        // common/ts context — confirm downstream consumers do not need
                        // the full record on the error stream.
                        ctx.output(errorOutputTag, err.toString());
                    }

                    // Separate display (exposure) data: one record per display entry,
                    // enriched with the shared common/ts/page context.
                    JSONArray displays = value.getJSONArray("displays");
                    if (displays != null && displays.size() != 0) {
                        for (int i = 0; i < displays.size(); i++) {
                            JSONObject display = displays.getJSONObject(i);
                            JSONObject jsonObject = new JSONObject();
                            jsonObject.put("common", common);
                            jsonObject.put("ts", ts);
                            jsonObject.put("page", page);
                            jsonObject.put("display", display);
                            ctx.output(displayOutputTag, jsonObject.toString());
                        }
                    }

                    // Separate action data: same per-entry enrichment as displays.
                    JSONArray actions = value.getJSONArray("actions");
                    if (actions != null && actions.size() != 0) {
                        for (int i = 0; i < actions.size(); i++) {
                            JSONObject action = actions.getJSONObject(i);
                            JSONObject jsonObject = new JSONObject();
                            jsonObject.put("common", common);
                            jsonObject.put("ts", ts);
                            jsonObject.put("page", page);
                            jsonObject.put("action", action);
                            ctx.output(actionOutputTag, jsonObject.toString());
                        }
                    }

                    // Emit the page record itself, stripped of the fields already split off.
                    value.remove("err");
                    value.remove("displays");
                    value.remove("actions");
                    out.collect(value.toString());

                } else if (start != null) {
                    // Start log: may also carry a top-level "err" object.
                    // BUGFIX: "err" lives at the top level of the record (as in the
                    // page branch, and as value.remove("err") below assumes). It was
                    // previously read from the "start" sub-object, so start-log errors
                    // were never routed to the error stream.
                    JSONObject err = value.getJSONObject("err");
                    if (err != null) {
                        ctx.output(errorOutputTag, err.toString());
                    }
                    // Emit the start record, stripped of the error data.
                    value.remove("err");
                    ctx.output(startOutputTag, value.toString());
                }
                // Records with neither "page" nor "start" are intentionally dropped here.
            }
        });

        DataStream<String> errorStream = pageStream.getSideOutput(errorOutputTag);
        DataStream<String> displayStream = pageStream.getSideOutput(displayOutputTag);
        DataStream<String> actionStream = pageStream.getSideOutput(actionOutputTag);
        DataStream<String> startStream = pageStream.getSideOutput(startOutputTag);

        errorStream.print("error>>>>");
        displayStream.print("display>>>");
        actionStream.print("action>>>");
        startStream.print("start>>>");
        pageStream.print("page >>>");

        // TODO 5 Write each stream to its corresponding DWD Kafka topic.
        String page_topic = "dwd_traffic_page_log";
        String start_topic = "dwd_traffic_start_log";
        String display_topic = "dwd_traffic_display_log";
        String action_topic = "dwd_traffic_action_log";
        String error_topic = "dwd_traffic_error_log";

        errorStream.addSink(KafkaUtil.getFlinkKafkaProducer(error_topic));
        actionStream.addSink(KafkaUtil.getFlinkKafkaProducer(action_topic));
        displayStream.addSink(KafkaUtil.getFlinkKafkaProducer(display_topic));
        startStream.addSink(KafkaUtil.getFlinkKafkaProducer(start_topic));
        pageStream.addSink(KafkaUtil.getFlinkKafkaProducer(page_topic));

        // TODO 6 Execute the job (named after the consumer group for traceability).
        env.execute(groupId);
    }
}
