package com.atguigu.edu.app.dwd.log;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import util.DateFormatUtil;
import util.MyKafkaUtil;

/**
 * 日志数据分流：获得流量域相关的事实表
 */
/**
 * Log splitting job for the traffic domain.
 *
 * <p>Reads raw log records from the Kafka topic {@code ODS_BASE_LOG}, repairs the
 * new/returning-visitor flag with keyed Flink state, then fans the stream out into
 * per-category Kafka topics via side outputs: error, start, video (playback),
 * display (exposure), action, and page logs.
 */
public class DwdTrafficBaseLogSplit {

    /** Milliseconds in one day; used to back-date visit state for returning visitors. */
    private static final long ONE_DAY_MS = 24 * 60 * 60 * 1000L;

    public static void main(String[] args) throws Exception {
        //TODO 1. Environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        //TODO 2. Checkpoint settings (disabled for local development)
//        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30), Time.seconds(3)));
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://39.101.79.248:8020//edu_realtime/checkpoint");
//        System.setProperty("HADOOP_USER_NAME", "atguigu");
        //TODO 3. Read raw log data from Kafka
        String topic = "ODS_BASE_LOG";
        String groupId = "dwd_traffic_log_split_group";
        FlinkKafkaConsumer<String> kafkaConsumer = MyKafkaUtil.getKafkaConsumer(topic, groupId);
        DataStreamSource<String> kafkaStrDS = env.addSource(kafkaConsumer);
        //TODO 4. Filter dirty data and convert jsonStr -> jsonObj in a single pass.
        // (Previously the stream was parsed twice: once in a filter whose result was
        // discarded, and again in a map. A flatMap parses each record exactly once.)
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaStrDS.flatMap(
                new FlatMapFunction<String, JSONObject>() {
                    @Override
                    public void flatMap(String jsonStr, Collector<JSONObject> out) {
                        try {
                            out.collect(JSON.parseObject(jsonStr));
                        } catch (Exception ignored) {
                            // Malformed JSON is dirty data; drop it silently (same as
                            // the original filter-based behavior).
                        }
                    }
                }
        );
        //TODO 5. Repair the new/returning-visitor flag with Flink keyed state
        //5.1 Key by device id (mid) so each device has its own visit-date state
        KeyedStream<JSONObject, String> keyedDS = jsonObjDS.keyBy(data -> data.getJSONObject("common").getString("mid"));
        //5.2 Repair the flag
        SingleOutputStreamOperator<JSONObject> fixedDS = keyedDS.map(new RichMapFunction<JSONObject, JSONObject>() {
            // Keyed state holding the first-visit date (yyyy-MM-dd) for this device.
            private ValueState<String> lastVisitDateState;

            // Acquire the state handle once per task in the lifecycle open() hook.
            @Override
            public void open(Configuration parameters) throws Exception {
                ValueStateDescriptor<String> valueStateDescriptor = new ValueStateDescriptor<>("lastVisitDateState", String.class);
                lastVisitDateState = getRuntimeContext().getState(valueStateDescriptor);
            }

            /**
             * For every record, cross-check the reported is_new flag against state:
             * - is_new = "1" but the device visited on an EARLIER day -> downgrade to "0".
             * - is_new = "1" on the very first visit -> record today's date in state.
             * - is_new = "1" with same-day state -> keep "1" (BUGFIX: the previous
             *   code downgraded any subsequent same-day event of a genuinely new
             *   visitor, mislabeling them as returning).
             * - is_new = "0" with empty state -> seed state with yesterday so the
             *   device is never later mistaken for a new visitor.
             */
            @Override
            public JSONObject map(JSONObject jsonObj) throws Exception {
                String isNew = jsonObj.getJSONObject("common").getString("is_new");
                String lastVisitDate = lastVisitDateState.value();
                Long ts = jsonObj.getLong("ts");
                String curVisitDate = DateFormatUtil.toDate(ts);
                if ("1".equals(isNew)) {
                    if (StringUtils.isEmpty(lastVisitDate)) {
                        // True first visit: remember today's date.
                        lastVisitDateState.update(curVisitDate);
                    } else if (!lastVisitDate.equals(curVisitDate)) {
                        // Visited on a previous day: the flag is wrong, downgrade it.
                        jsonObj.getJSONObject("common").put("is_new", "0");
                    }
                    // else: same-day repeat event of a new visitor — leave is_new = "1".
                } else {
                    // Returning visitor with no state (e.g. state lost / job restarted):
                    // backfill yesterday as a plausible prior visit date.
                    if (StringUtils.isEmpty(lastVisitDate)) {
                        String yestDay = DateFormatUtil.toDate(ts - ONE_DAY_MS);
                        lastVisitDateState.update(yestDay);
                    }
                }
                return jsonObj;
            }
        });

        //TODO 6. Split the stream with side outputs
        //6.1 Side-output tags (the anonymous-subclass braces are required so the
        //    generic type parameter is retained at runtime).
        OutputTag<String> errTag = new OutputTag<String>("errTag") {
        };
        OutputTag<String> startTag = new OutputTag<String>("startTag") {
        };
        OutputTag<String> displayTag = new OutputTag<String>("displayTag") {
        };
        OutputTag<String> actionTag = new OutputTag<String>("actionTag") {
        };
        OutputTag<String> videoTag = new OutputTag<String>("videoTag") {
        };
        //6.2 Route each record: err -> side output (then stripped), start/video ->
        //    side outputs, otherwise page log to the main stream with displays and
        //    actions exploded into their own side outputs.
        SingleOutputStreamOperator<String> pageDS = fixedDS.process(new ProcessFunction<JSONObject, String>() {
            @Override
            public void processElement(JSONObject jsonObj, ProcessFunction<JSONObject, String>.Context ctx, Collector<String> out) throws Exception {
                // Error info: emit the full record to the error side output, then
                // remove the "err" field and continue routing the remainder.
                JSONObject errJsonObj = jsonObj.getJSONObject("err");
                if (errJsonObj != null) {
                    ctx.output(errTag, jsonObj.toJSONString());
                    jsonObj.remove("err");
                }

                // Start log: goes to the start side output and nowhere else.
                JSONObject startObj = jsonObj.getJSONObject("start");
                if (startObj != null) {
                    ctx.output(startTag, jsonObj.toJSONString());
                } else {
                    // Video playback log: goes to the video side output.
                    JSONObject videoJson = jsonObj.getJSONObject("appVideo");
                    if (videoJson != null) {
                        ctx.output(videoTag, jsonObj.toJSONString());
                    } else {
                        // Page log: shared context reused for exploded child records.
                        JSONObject commonJsonObj = jsonObj.getJSONObject("common");
                        JSONObject pageJsonObj = jsonObj.getJSONObject("page");
                        Long ts = jsonObj.getLong("ts");

                        // Explode the displays array: one record per exposure.
                        JSONArray displayArr = jsonObj.getJSONArray("displays");
                        if (displayArr != null && displayArr.size() > 0) {
                            for (int i = 0; i < displayArr.size(); i++) {
                                JSONObject displayJsonObj = displayArr.getJSONObject(i);
                                JSONObject displayNewObj = new JSONObject();
                                displayNewObj.put("common", commonJsonObj);
                                displayNewObj.put("page", pageJsonObj);
                                displayNewObj.put("display", displayJsonObj);
                                displayNewObj.put("ts", ts);
                                ctx.output(displayTag, displayNewObj.toJSONString());
                            }
                        }

                        // Explode the actions array: one record per action.
                        JSONArray actionArr = jsonObj.getJSONArray("actions");
                        if (actionArr != null && actionArr.size() > 0) {
                            for (int i = 0; i < actionArr.size(); i++) {
                                JSONObject actionJsonObj = actionArr.getJSONObject(i);
                                JSONObject actionNewObj = new JSONObject();
                                actionNewObj.put("common", commonJsonObj);
                                actionNewObj.put("page", pageJsonObj);
                                actionNewObj.put("action", actionJsonObj);
                                actionNewObj.put("ts", ts);
                                ctx.output(actionTag, actionNewObj.toJSONString());
                            }
                        }

                        // Strip the already-exploded arrays and emit the page log
                        // on the main stream.
                        jsonObj.remove("displays");
                        jsonObj.remove("actions");
                        out.collect(jsonObj.toJSONString());
                    }
                }
            }
        });

        //TODO 7. Write each stream to its own Kafka topic
        DataStream<String> errDS = pageDS.getSideOutput(errTag);
        DataStream<String> videoDS = pageDS.getSideOutput(videoTag);
        DataStream<String> startDS = pageDS.getSideOutput(startTag);
        DataStream<String> displayDS = pageDS.getSideOutput(displayTag);
        DataStream<String> actionDS = pageDS.getSideOutput(actionTag);

        pageDS.addSink(MyKafkaUtil.getKafkaProducer("dwd_traffic_page_log"));
        startDS.addSink(MyKafkaUtil.getKafkaProducer("dwd_traffic_start_log"));
        displayDS.addSink(MyKafkaUtil.getKafkaProducer("dwd_traffic_display_log"));
        // BUGFIX: topic was misspelled "dwd_trafffic_action_log" (triple f),
        // inconsistent with every other dwd_traffic_* topic. NOTE(review): any
        // consumer already reading the misspelled topic must be migrated.
        actionDS.addSink(MyKafkaUtil.getKafkaProducer("dwd_traffic_action_log"));
        videoDS.addSink(MyKafkaUtil.getKafkaProducer("dwd_traffic_video_log"));
        errDS.addSink(MyKafkaUtil.getKafkaProducer("dwd_traffic_err_log"));
        pageDS.print(">>>>");
        env.execute();
    }
}
