package com.zshstart.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.zshstart.utils.MyKafkaUtil;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * Data flow:  web/app -> nginx -> SpringBoot -> Kafka(ODS) -> FlinkApp   -> Kafka(DWD)
 * Programs:   mock    -> nginx -> Logger     -> Kafka(ZK)  -> BaseLogApp -> Kafka(ZK)
 * <p>
 * Reads Kafka topic {@code ods_base_log} and fans it out to the Kafka topics
 * {@code dwd_page_log}, {@code dwd_start_log} and {@code dwd_display_log},
 * while also deciding whether each record was produced by a genuinely new user.
 * <p>
 * New-user detection:
 * 1) The {@code is_new} field inside the "common" object marks the user
 *    (0 = returning user, 1 = new user).
 * 2) Problem: is_new comes from front-end instrumentation, so uninstalling and
 *    reinstalling the app can make a returning user report is_new=1 — the flag
 *    is unreliable on its own.
 * 3) Fix: with keyed state, an is_new=1 record whose device id already has
 *    state recorded is rewritten to is_new=0 (returning user).
 * <p>
 * How start / display / page logs are split:
 * 1) Start data and page data are mutually exclusive: a record is either a
 *    start log or a page log.
 * 2) A single page record may carry several display (exposure) records as a
 *    JSON array.
 * 3) Page data is therefore the main stream; start and display data go to
 *    side outputs.
 *
 * @author zshstart
 * @create 2021-09-23 10:49
 */
public class BaseLogApp {

    // Side-output tags. Anonymous subclasses preserve the generic type
    // information that Flink's OutputTag needs at runtime (type erasure).
    private static final OutputTag<String> DIRTY_TAG = new OutputTag<String>("Dirty") {
    };
    private static final OutputTag<String> START_TAG = new OutputTag<String>("start") {
    };
    private static final OutputTag<String> DISPLAY_TAG = new OutputTag<String>("displays") {
    };

    public static void main(String[] args) throws Exception {
        //TODO 1. Execution environment. In production, set the parallelism
        // equal to the partition count of the source Kafka topic.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

//        env.setStateBackend(new FsStateBackend("hdfs://hadoop102:8020/flink-cdc-210426/ck"));
//        env.enableCheckpointing(5000L);
//        env.getCheckpointConfig().setCheckpointTimeout(1000L);
//        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);

        //TODO 2. Create a stream from the Kafka topic ods_base_log.
        String topic = "ods_base_log";
        String groupId = "base_log_app_2021";
        DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil.getKafkaConsumer(topic, groupId));

        //TODO 3. Parse each line to JSON; malformed records are routed to the
        // dirty-data side output instead of failing the job.
        SingleOutputStreamOperator<JSONObject> jsonObjectDataStream = parseToJson(kafkaDS);
        jsonObjectDataStream.getSideOutput(DIRTY_TAG).print("Dirty");

        //TODO 4. Key by device id (common.mid) so per-device state is possible.
        KeyedStream<JSONObject, String> keyedStream =
                jsonObjectDataStream.keyBy(json -> json.getJSONObject("common").getString("mid"));

        //TODO 5. Validate the new/returning-user flag with keyed state.
        SingleOutputStreamOperator<JSONObject> jsonObjectWithNewFlagDS = keyedStream.map(new NewUserFlagFixer());

        //TODO 6. Split into page (main stream), start and display (side outputs).
        SingleOutputStreamOperator<String> pageDS = splitLog(jsonObjectWithNewFlagDS);

        //TODO 7. Write each stream to its DWD Kafka topic.
        DataStream<String> startDS = pageDS.getSideOutput(START_TAG);
        DataStream<String> displayDS = pageDS.getSideOutput(DISPLAY_TAG);

        pageDS.print("Page >>>>>>>");
        startDS.print("Start >>>>>>");
        displayDS.print("Display >>>>>>");

        pageDS.addSink(MyKafkaUtil.getKafkaProducer("dwd_page_log"));
        startDS.addSink(MyKafkaUtil.getKafkaProducer("dwd_start_log"));
        displayDS.addSink(MyKafkaUtil.getKafkaProducer("dwd_display_log"));

        //TODO 8. Launch the job.
        env.execute("BaseLogApp");
    }

    /**
     * Parses each raw string to a {@link JSONObject}; records that fail to
     * parse are emitted unchanged to {@link #DIRTY_TAG}.
     */
    private static SingleOutputStreamOperator<JSONObject> parseToJson(DataStream<String> source) {
        return source.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String value, Context context, Collector<JSONObject> out) {
                try {
                    // The source may contain dirty data, hence the guard.
                    out.collect(JSON.parseObject(value));
                } catch (Exception e) {
                    // Keep dirty data on a side output for later inspection.
                    context.output(DIRTY_TAG, value);
                }
            }
        });
    }

    /**
     * Routes start logs to {@link #START_TAG}, page logs to the main stream,
     * and explodes each page's "displays" JSON array to {@link #DISPLAY_TAG}
     * (one record per display, enriched with the page_id it was shown on).
     */
    private static SingleOutputStreamOperator<String> splitLog(DataStream<JSONObject> stream) {
        return stream.process(new ProcessFunction<JSONObject, String>() {
            @Override
            public void processElement(JSONObject value, Context context, Collector<String> out) {
                if (value.getString("start") != null) {
                    // Start log -> start side output.
                    context.output(START_TAG, value.toJSONString());
                } else {
                    // Page log -> main stream.
                    out.collect(value.toJSONString());

                    // Explode the displays array into individual records.
                    JSONArray displays = value.getJSONArray("displays");
                    if (displays != null && !displays.isEmpty()) {
                        String pageId = value.getJSONObject("page").getString("page_id");
                        for (int i = 0; i < displays.size(); i++) {
                            JSONObject display = displays.getJSONObject(i);
                            display.put("page_id", pageId);
                            context.output(DISPLAY_TAG, display.toJSONString());
                        }
                    }
                }
            }
        });
    }

    /**
     * Rewrites is_new=1 to is_new=0 when keyed state shows this device id has
     * been seen before; otherwise records the first sighting in state.
     * Records with is_new=0 are trusted as-is.
     */
    private static class NewUserFlagFixer extends RichMapFunction<JSONObject, JSONObject> {
        private ValueState<String> valueState;

        @Override
        public void open(Configuration parameters) {
            valueState = getRuntimeContext().getState(
                    new ValueStateDescriptor<>("value-state", String.class));
        }

        @Override
        public JSONObject map(JSONObject value) throws Exception {
            String isNew = value.getJSONObject("common").getString("is_new");
            if ("1".equals(isNew)) {
                if (valueState.value() != null) {
                    // Device already seen: the front-end flag is wrong; fix it.
                    value.getJSONObject("common").put("is_new", "0");
                } else {
                    // First sighting: remember it. Only non-null-ness matters,
                    // the stored value itself is never read.
                    valueState.update("0");
                }
            }
            return value;
        }
    }
}
