package com.atguigu.edu.realtime.app.dwd.log;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.util.DateFormatUtil;
import com.atguigu.edu.realtime.util.MykafkaUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * @ClassName DwdTrafficBaseLogSplit
 * @Description TODO 流量域：未经加工事实表处理(日志分流)
 * @Author AFei
 * @Date 2022/9/4 21:56
 * @Version 1.0
 * 需要启动的进程
 *       flume、zk、kafka、DwdTrafficBaseLogSplit
 */
public class DwdTrafficBaseLogSplit {
    public static void main(String[] args) throws Exception {
        // Basic environment setup.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Checkpoint configuration (omitted).
        // Consume the raw log topic from Kafka.
        String topic = "topic_log";
        String groupId = "dwd_traffic_log_split_group";
        FlinkKafkaConsumer<String> kafkaConsumer = MykafkaUtil.getKafkaConsumer(topic, groupId);
        DataStreamSource<String> kafkaStrDS = env.addSource(kafkaConsumer);
        //kafkaStrDS.print(">>>>>");
        // TODO 4. Convert records to JSON and perform simple ETL: route dirty
        //         (non-JSON) data to a side output, which is sinked to Kafka.
        // 4.1 Declare the side-output tag for dirty data.
        OutputTag<String> dirtyTag = new OutputTag<String>("dirty_tag"){};
        // 4.2 Type conversion and ETL.
        SingleOutputStreamOperator<JSONObject> etlDS = kafkaStrDS.process(
                new ProcessFunction<String, JSONObject>() {
                    @Override
                    public void processElement(String jsonStr, ProcessFunction<String, JSONObject>.Context ctx, Collector<JSONObject> out) throws Exception {
                        try {
                            JSONObject jsonObj = JSON.parseObject(jsonStr);
                            out.collect(jsonObj);
                        } catch (Exception e) {
                            // Parsing failed: jsonStr is not valid JSON, treat it
                            // as dirty data and emit it to the side output.
                            ctx.output(dirtyTag, jsonStr);
                        }

                    }
                }
        );
        // 4.3 Sink the dirty-data side output to Kafka.
        //etlDS.print(">>>>>");
        DataStream<String> dirtyDS = etlDS.getSideOutput(dirtyTag);
        //dirtyDS.print("####");
        dirtyDS.addSink(MykafkaUtil.getKafkaProducer("dirty_data"));
        // TODO 5. Use keyed state to repair the new/returning-visitor flag (is_new).
        // 5.1 Key the stream by device id (mid).
        KeyedStream<JSONObject, String> keyedDS = etlDS.keyBy(jsonObj -> jsonObj.getJSONObject("common").getString("mid"));
        // 5.2 Repair the flag.
        SingleOutputStreamOperator<JSONObject> fixedDS = keyedDS.map(
                new RichMapFunction<JSONObject, JSONObject>() {
                    // NOTE: must not initialize at declaration time — the runtime
                    // context is only available once open() is called.
                    // Holds the last visit date (yyyy-MM-dd) per mid.
                    private ValueState<String> lastValueDataState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        lastValueDataState = getRuntimeContext().getState(new ValueStateDescriptor<String>("lastValueDataState", String.class));
                    }

                    @Override
                    public JSONObject map(JSONObject jsonObj) throws Exception {
                        // Current new-visitor flag reported by the client.
                        String isNew = jsonObj.getJSONObject("common").getString("is_new");
                        // Last visit date recorded in state (null on first sight).
                        String lastVisitDate = lastValueDataState.value();
                        // Visit date of the current event.
                        Long ts = jsonObj.getLong("ts");
                        String curVisitDate = DateFormatUtil.toDate(ts);

                        if ("1".equals(isNew)) {
                            if (StringUtils.isEmpty(lastVisitDate)) {
                                // Genuinely first visit: remember today's date.
                                lastValueDataState.update(curVisitDate);
                            } else {
                                // Seen on an earlier date: the client-side flag is
                                // wrong, downgrade to returning visitor.
                                if (!lastVisitDate.equals(curVisitDate)) {
                                    isNew = "0";
                                    jsonObj.getJSONObject("common").put("is_new", isNew);
                                }
                            }
                        } else {
                            // is_new = 0 means the device has visited before; if state
                            // is empty (e.g. state was lost), backfill yesterday so the
                            // flag stays consistent on subsequent events.
                            if (StringUtils.isEmpty(lastVisitDate)) {
                                String yesterDay = DateFormatUtil.toDate(ts - 3600 * 24 * 1000);
                                lastValueDataState.update(yesterDay);
                            }
                        }
                        return jsonObj;
                    }
                }
        );
        //fixedDS.print(">>>>");
        // TODO 6. Split the log stream into per-type streams via side outputs.
        // 6.1 Declare the side-output tags.
        OutputTag<String> errTag = new OutputTag<String>("errTag") {};
        OutputTag<String> startTag = new OutputTag<String>("startTag") {};
        OutputTag<String> displayTag = new OutputTag<String>("displayTag") {};
        OutputTag<String> actionTag = new OutputTag<String>("actionTag") {};
        OutputTag<String> appVideoTag = new OutputTag<String>("appVideoTag") {};
        // 6.2 Split: main output carries page logs; everything else goes to tags.
        SingleOutputStreamOperator<String> pageDS = fixedDS.process(
                new ProcessFunction<JSONObject, String>() {
                    @Override
                    public void processElement(JSONObject jsonObject, ProcessFunction<JSONObject, String>.Context ctx, Collector<String> out) throws Exception {
                        // Error logs: emit the full record to the err side output,
                        // then strip the "err" field before further routing.
                        JSONObject errJsonObj = jsonObject.getJSONObject("err");
                        if (errJsonObj != null) {
                            ctx.output(errTag, jsonObject.toJSONString());
                            jsonObject.remove("err");
                        }
                        // Video playback logs.
                        JSONObject appVideoJsonObj = jsonObject.getJSONObject("appVideo");
                        if (appVideoJsonObj != null) {
                            // Route to the appVideo side output.
                            ctx.output(appVideoTag, jsonObject.toJSONString());
                        } else {
                            // Startup logs go to their own side output.
                            JSONObject startJsonObj = jsonObject.getJSONObject("start");
                            if (startJsonObj != null) {
                                ctx.output(startTag, jsonObject.toJSONString());
                            } else {
                                // Page logs: shared context reused for the nested
                                // display/action records below.
                                JSONObject commonJsonObj = jsonObject.getJSONObject("common");
                                JSONObject pageJsonObj = jsonObject.getJSONObject("page");
                                Long ts = jsonObject.getLong("ts");
                                // Display (impression) logs.
                                JSONArray displaysArr = jsonObject.getJSONArray("displays");
                                if (displaysArr != null && displaysArr.size() > 0) {
                                    // The page carries impressions: flatten each one
                                    // into its own record with the page context.
                                    for (int i = 0; i < displaysArr.size(); i++) {
                                        JSONObject displayJsonObj = displaysArr.getJSONObject(i);
                                        JSONObject displayNewObj = new JSONObject();
                                        displayNewObj.put("common", commonJsonObj);
                                        displayNewObj.put("page", pageJsonObj);
                                        displayNewObj.put("display", displayJsonObj);
                                        displayNewObj.put("ts", ts);
                                        // Route to the display side output.
                                        ctx.output(displayTag, displayNewObj.toJSONString());
                                    }
                                }
                                // Action logs.
                                JSONArray actionsArr = jsonObject.getJSONArray("actions");
                                if (actionsArr != null && actionsArr.size() > 0) {
                                    // The page carries user actions: flatten each one
                                    // into its own record with the page context.
                                    for (int i = 0; i < actionsArr.size(); i++) {
                                        JSONObject actionJsonObj = actionsArr.getJSONObject(i);
                                        JSONObject actionNewObj = new JSONObject();
                                        actionNewObj.put("common", commonJsonObj);
                                        actionNewObj.put("page", pageJsonObj);
                                        // BUGFIX: original wrote the action into itself
                                        // (actionJsonObj.put("action", actionJsonObj)),
                                        // so the emitted record never contained the
                                        // action payload. Attach it to the envelope.
                                        actionNewObj.put("action", actionJsonObj);
                                        // NOTE(review): unlike display records, no
                                        // page-level ts is attached here — presumably
                                        // each action carries its own ts; confirm
                                        // against the log schema.
                                        // Route to the action side output.
                                        ctx.output(actionTag, actionNewObj.toJSONString());
                                    }
                                }
                                // Strip the flattened arrays, then emit the page log
                                // itself on the main output.
                                jsonObject.remove("displays");
                                jsonObject.remove("actions");
                                out.collect(jsonObject.toJSONString());
                            }
                        }
                    }
                }
        );
        // TODO 7. Write each stream to its Kafka topic.
        DataStream<String> errDS = pageDS.getSideOutput(errTag);
        DataStream<String> startDS = pageDS.getSideOutput(startTag);
        DataStream<String> displayDS = pageDS.getSideOutput(displayTag);
        DataStream<String> actionDS = pageDS.getSideOutput(actionTag);
        DataStream<String> appVideoDS = pageDS.getSideOutput(appVideoTag);

        pageDS.print(">>>>");
        startDS.print("###");
        displayDS.print("~~~");
        actionDS.print("$$$");
        errDS.print("&&&");
        appVideoDS.print("<><><><><");

        pageDS.addSink(MykafkaUtil.getKafkaProducer("dwd_traffic_page_log"));
        startDS.addSink(MykafkaUtil.getKafkaProducer("dwd_traffic_start_log"));
        displayDS.addSink(MykafkaUtil.getKafkaProducer("dwd_traffic_display_log"));
        actionDS.addSink(MykafkaUtil.getKafkaProducer("dwd_traffic_action_log"));
        errDS.addSink(MykafkaUtil.getKafkaProducer("dwd_traffic_err_log"));
        appVideoDS.addSink(MykafkaUtil.getKafkaProducer("dwd_traffic_appVideo_log"));


        env.execute();

    }
}
