package com.atguigu.gmall.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

//Data flow:  web/app -> Nginx -> SpringBoot -> Kafka(ods) -> FlinkApp -> Kafka(dwd)
//Processes:  mocklog -> Nginx -> Logger.sh -> Kafka(zk) -> BaseLogApp -> Kafka

public class BaseLogApp {

    public static void main(String[] args) throws Exception {
        //1. Create the execution environment, set parallelism, and (in production)
        //   enable checkpointing with an HDFS-backed state backend.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //Parallelism matches the partition count of the source Kafka topic.
        env.setParallelism(1);
        //1.1 State backend (disabled for local runs)
//        env.setStateBackend(new FsStateBackend("hdfs://hadoop102:8020/gmall/dwd_log/ck"));
//        //1.2 Enable checkpointing
//        env.enableCheckpointing(10000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60000L);

        //Identity used when writing checkpoint files to HDFS.
        System.setProperty("HADOOP_USER_NAME", "atguigu");

        //2. Consume the raw log topic ods_base_log from Kafka.
        String topic = "ods_base_log";
        String groupId = "ods_dwd_base_log_app";
        FlinkKafkaConsumer<String> kafkaSource = MyKafkaUtil.getKafkaConsumer(topic, groupId);
        DataStreamSource<String> kafkaDS = env.addSource(kafkaSource);

        //3. Parse each line into a JSONObject; records that fail to parse are
        //   routed to a "Dirty" side output instead of killing the job.
        OutputTag<String> output = new OutputTag<String>("Dirty") {
        };
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String value, Context ctx, Collector<JSONObject> out) throws Exception {
                try {
                    JSONObject jsonObject = JSON.parseObject(value);
                    out.collect(jsonObject);
                } catch (Exception e) {
                    //On parse failure, emit the raw line to the dirty-data side output.
                    ctx.output(output, value);
                }
            }
        });
        //Print dirty data; in production this could be persisted to a database.
        jsonObjDS.getSideOutput(output).print("Dirty>>>>>>>>>>>>>>>>>>>>>>>>>>>>>.");

        //4. New/returning-visitor validation via keyed state.
        //   Key by device id (mid); if a record claims is_new=1 but the mid has
        //   been seen before, rewrite the flag to 0.
        SingleOutputStreamOperator<JSONObject> jsonObjWithNewFlagDS = jsonObjDS
                .keyBy(jsonObj -> {
                    //Guard: a record may parse as JSON yet lack the "common"
                    //envelope; an unguarded getString would NPE and fail the job,
                    //and Flink rejects null keys. Such records share the "" key.
                    JSONObject common = jsonObj.getJSONObject("common");
                    String mid = common == null ? null : common.getString("mid");
                    return mid == null ? "" : mid;
                })
                .map(
                        new RichMapFunction<JSONObject, JSONObject>() {
                            //Per-mid marker: non-null once the device has been seen.
                            private ValueState<String> valueState;

                            @Override
                            public void open(Configuration parameters) throws Exception {
                                valueState = getRuntimeContext().getState(new ValueStateDescriptor<String>("value" +
                                        "-state",
                                        String.class));
                            }

                            @Override
                            public JSONObject map(JSONObject value) throws Exception {
                                //Extract the "is_new" flag; null-safe against a
                                //missing "common" envelope.
                                JSONObject common = value.getJSONObject("common");
                                String isNew = common == null ? null : common.getString("is_new");

                                //Only records claiming to be from a new visitor need checking.
                                if ("1".equals(isNew)) {
                                    String state = valueState.value();
                                    if (state != null) {
                                        //Device already seen: correct the flag.
                                        common.put("is_new", "0");
                                    } else {
                                        //Genuinely first visit: remember this mid.
                                        valueState.update("1");
                                    }
                                }
                                return value;
                            }
                        });

        //5. Split the stream: page logs -> main stream; start logs and display
        //   (exposure) logs -> side outputs.
        OutputTag<String> startTag = new OutputTag<String>("start") {
        };
        OutputTag<String> displayTag = new OutputTag<String>("display") {
        };

        SingleOutputStreamOperator<String> pageDS = jsonObjWithNewFlagDS.process(
                new ProcessFunction<JSONObject,
                        String>() {
                    @Override
                    public void processElement(JSONObject value, Context ctx, Collector<String> out) throws Exception {
                        //A non-empty "start" field marks an app-launch log.
                        String start = value.getString("start");
                        if (start != null && start.length() > 0) {
                            //Launch log -> start side output.
                            ctx.output(startTag, value.toJSONString());
                        } else {
                            //Page log -> main stream.
                            out.collect(value.toJSONString());
                            //Fan out embedded exposure records, if any.
                            JSONArray displays = value.getJSONArray("displays");

                            if (displays != null && displays.size() > 0) {

                                //Guard: tolerate a missing "page" object rather
                                //than failing the job with an NPE.
                                JSONObject page = value.getJSONObject("page");
                                String pageId = page == null ? null : page.getString("page_id");

                                for (int i = 0; i < displays.size(); i++) {
                                    JSONObject display = displays.getJSONObject(i);
                                    //Enrich each exposure with its page id.
                                    display.put("page_id", pageId);
                                    //Exposure record -> display side output.
                                    ctx.output(displayTag, display.toJSONString());

                                }
                            }
                        }
                    }
                });

        //6. Extract the side outputs.
        DataStream<String> startDS = pageDS.getSideOutput(startTag);
        DataStream<String> displayDS = pageDS.getSideOutput(displayTag);

        //7. Print all three streams for debugging.
        startDS.print("Start>>>>>>>>>>>>>>>>>>>>>>>>>>>>");
        pageDS.print("Page>>>>>>>>>>>>>>>>>>>>>>>>>>>>>");
        displayDS.print("Display>>>>>>>>>>>>>>>>>>>>>>>>>>>>>");

        //8. Sink each stream to its DWD Kafka topic.
        startDS.addSink(MyKafkaUtil.getKafkaProducer("dwd_start_log"));
        pageDS.addSink(MyKafkaUtil.getKafkaProducer("dwd_page_log"));
        displayDS.addSink(MyKafkaUtil.getKafkaProducer("dwd_display_log"));


        //9. Launch the job.
        env.execute();
    }
}
