package com.bw.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.bw.utils.MyKafkaUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.AggregatingState;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

public class BaseLogApp {
    // Pipeline: mock ---> nginx ---> SpringBoot ---> kafka(ods_base_log) ---> Flink ---> kafka(dwd_*)
    //
    // Reads raw log JSON from the ODS topic, drops/diverts malformed records,
    // repairs the new-user flag with keyed state, then splits the stream into
    // start-log, display-log and page-log DWD topics.

    private static final String TOPIC_START = "dwd_start_log";
    private static final String TOPIC_DISPLAY = "dwd_display_log";
    private static final String TOPIC_PAGE = "dwd_page_log";

    public static void main(String[] args) throws Exception {
        // 1. Create the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        //        Enable checkpointing (requires the HDFS directory to exist):
//        env.setStateBackend(new FsStateBackend("hdfs://hadoop102:9820/gmall-flink/ck"));
//         Checkpoint every 5 seconds
//        env.enableCheckpointing(5000L);
//        Exactly-once semantics
//        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
//        Checkpoint timeout
//        env.getCheckpointConfig().setCheckpointTimeout(100000L);
//        Maximum number of concurrent checkpoints
//        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
//        Minimum pause between the end of one checkpoint and the start of the next
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        // 2. Consume the ods_base_log Kafka topic.
        String topic = "ods_base_log";
        String groupId = "base_log_app_242105";
        // The consumer is wrapped in MyKafkaUtil so it can be reused across jobs.
        DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil.getKafkaConsumer(topic, groupId));

        // 3. Parse each line into a JSONObject; route unparseable (dirty) records to a side output.
        OutputTag<String> dirty = new OutputTag<String>("dirty") {
        };// The anonymous subclass {} keeps the generic type and prevents type erasure.
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String value, Context context, Collector<JSONObject> collector) throws Exception {
                try {
                    JSONObject data = JSON.parseObject(value);
                    collector.collect(data);
                } catch (Exception e) {
                    e.printStackTrace();
                    // Not valid JSON: divert to the dirty side output instead of failing the job.
                    context.output(dirty, value);
                }
            }
        });
        jsonObjDS.getSideOutput(dirty).print("dirty>>>");

        // 4. New/old-user validation using keyed state (one state slot per mid).
        SingleOutputStreamOperator<JSONObject> jsonObjWithNewFlagDS = jsonObjDS.keyBy(new KeySelector<JSONObject, String>() {
            @Override
            public String getKey(JSONObject jsonObject) throws Exception {
                return jsonObject.getJSONObject("common").getString("mid");
            }
        }).map(new RichMapFunction<JSONObject, JSONObject>() {
            // Holds a marker once this mid has been seen claiming is_new = "1".
            private ValueState<String> valueState;

            @Override
            public void open(Configuration parameters) throws Exception {
                // Initialize keyed state.
                valueState = getRuntimeContext().getState(new ValueStateDescriptor<String>("value_state", String.class));
            }

            @Override
            public JSONObject map(JSONObject jsonObject) throws Exception {
                // Extract the is_new flag.
                String isNew = jsonObject.getJSONObject("common").getString("is_new");
                if ("1".equals(isNew)) {
                    String value = valueState.value();
                    if (value != null) {
                        // State exists but is_new = 1: this mid was seen before,
                        // so it is actually an old user. Write "0" as a String to
                        // keep the field type consistent with the original "1".
                        jsonObject.getJSONObject("common").put("is_new", "0");
                    } else {
                        // First time we see this mid: remember it.
                        valueState.update("1");
                    }
                }
                return jsonObject;
            }
        });

        // 5. Split: page logs on the main stream, start logs and display logs on side outputs.
        OutputTag<String> startTag = new OutputTag<String>("start") {
        };
        OutputTag<String> displayTag = new OutputTag<String>("display") {
        };
        SingleOutputStreamOperator<String> sinkDS = jsonObjWithNewFlagDS.process(new ProcessFunction<JSONObject, String>() {
            @Override
            public void processElement(JSONObject jsonObject, Context context, Collector<String> collector) throws Exception {
                String start = jsonObject.getString("start");
                if (start != null && start.length() > 0) {
                    // Start logs go to the start side output.
                    context.output(startTag, jsonObject.toJSONString());
                } else {
                    // Every display log is a page log, but not every page log has displays.
                    collector.collect(jsonObject.toJSONString());
                    // Extract display records, enriching each with the page_id.
                    JSONArray displays = jsonObject.getJSONArray("displays");
                    if (displays != null && displays.size() > 0) {
                        // Only look up the page object when it is actually needed,
                        // and guard against a missing "page" field so a malformed
                        // record cannot NPE and kill the task.
                        JSONObject page = jsonObject.getJSONObject("page");
                        String pageId = page == null ? null : page.getString("page_id");
                        for (int i = 0; i < displays.size(); i++) {
                            JSONObject display = displays.getJSONObject(i);
                            display.put("page_id", pageId);
                            context.output(displayTag, display.toJSONString());
                        }
                    }
                }
            }
        });

        // 6. Extract the side outputs.
        DataStream<String> startDS = sinkDS.getSideOutput(startTag);
        DataStream<String> displayDS = sinkDS.getSideOutput(displayTag);

        // 7. Sink each stream to its DWD topic.
        startDS.print("start>>>");
        displayDS.print("display>>>");
        sinkDS.print("page>>>");
        sinkDS.addSink(MyKafkaUtil.getKafkaProducer(TOPIC_PAGE));
        startDS.addSink(MyKafkaUtil.getKafkaProducer(TOPIC_START));
        displayDS.addSink(MyKafkaUtil.getKafkaProducer(TOPIC_DISPLAY));

        // 8. Execute the job.
        env.execute("BaseLogApp");
    }
}
