package com.atguigu.app.dwd.log;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.common.Constant;
import com.atguigu.util.KafkaUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SideOutputDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

//Data flow: web/app -> nginx -> log server (log files) -> Flume -> Kafka(ODS) -> FlinkApp -> Kafka(DWD)
//Programs:  Mock -> Flume(f1.sh) -> Kafka(ZK) -> Dwd01_TrafficBaseLogSplit -> Kafka(ZK)
/**
 * DWD traffic base-log splitter.
 *
 * <p>Reads raw behavior logs from the ODS Kafka topic, drops malformed JSON,
 * repairs the new-visitor flag ({@code common.is_new}) with per-device keyed
 * state, and splits each record into five DWD Kafka topics: page logs go to the
 * main stream, while start, display, action and error data go to side outputs.
 */
public class Dwd01_TrafficBaseLogSplit {

    public static void main(String[] args) throws Exception {

        //TODO 1. Execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // In production, keep parallelism equal to the partition count of the Kafka source topic.
        env.setParallelism(1);

        //1.1 Checkpointing — intentionally disabled for local testing; enable in production.
        //env.enableCheckpointing(60000 * 5);
        //env.setStateBackend(new HashMapStateBackend());

        //1.2 Checkpoint tuning (production settings, kept here for reference)
        //CheckpointConfig checkpointConfig = env.getCheckpointConfig();
        //checkpointConfig.setCheckpointTimeout(10000L);
        //checkpointConfig.setCheckpointStorage("hdfs://hadoop102:8020/flink-ck");
        //Retain the last checkpoint when the job is cancelled
        //checkpointConfig.setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        //checkpointConfig.setMinPauseBetweenCheckpoints(5000L);
        //Restart strategy
        //env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 1000L));

        //TODO 2. Create a stream from the Kafka topic_log topic
        KafkaSource<String> kafkaSource = KafkaUtil.getKafkaSource(Constant.TOPIC_ODS_LOG, "dwd01_traffic_base_log_split_230524");
        DataStreamSource<String> kafkaDS = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "kafka-source");

        //TODO 3. Drop dirty records and parse the rest into JSON objects
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) {
                try {
                    out.collect(JSONObject.parseObject(value));
                } catch (JSONException e) {
                    // Malformed JSON is dropped; in production route this to a side
                    // output / dead-letter topic instead of stdout.
                    System.out.println("脏数据：" + value);
                }
            }
        });

        //TODO 4. Key by device id (mid) and repair the new-visitor flag via keyed state
        KeyedStream<JSONObject, String> keyedStream = jsonObjDS.keyBy(json -> json.getJSONObject("common").getString("mid"));
        SingleOutputStreamOperator<JSONObject> jsonObjWithNewFlagDS = keyedStream.map(new RichMapFunction<JSONObject, JSONObject>() {

            // Initialized in open(); transient so the serialized function object
            // does not try to carry runtime-only state across the wire.
            private transient ValueState<String> firstVisitDtState;
            private transient DateTimeFormatter dtf;

            @Override
            public void open(Configuration parameters) {
                firstVisitDtState = getRuntimeContext().getState(
                        new ValueStateDescriptor<>("first-visit-dt", String.class));
                // DateTimeFormatter is immutable and thread-safe, unlike SimpleDateFormat;
                // system default zone matches the previous SimpleDateFormat behavior.
                dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd").withZone(ZoneId.systemDefault());
            }

            @Override
            public JSONObject map(JSONObject value) throws Exception {

                // Extract the is_new flag, the first-visit date kept in state,
                // and the event date of the current record.
                String isNew = value.getJSONObject("common").getString("is_new");
                String firstDt = firstVisitDtState.value();
                Long ts = value.getLong("ts");
                String curDt = dtf.format(Instant.ofEpochMilli(ts));

                if ("1".equals(isNew)) {
                    if (firstDt == null || firstDt.equals(curDt)) {
                        // First time we see this mid (or another visit on the same
                        // first day): remember the first-visit date.
                        firstVisitDtState.update(curDt);
                    } else {
                        // The device was first seen on an earlier day, so it is
                        // actually an old visitor — correct the flag.
                        value.getJSONObject("common").put("is_new", "0");
                    }
                } else { // is_new == "0"
                    if (firstDt == null) {
                        // Old visitor with no state (state predates this job):
                        // park a sentinel date so later "1" records get corrected.
                        firstVisitDtState.update("1970-01-01");
                    }
                }

                return value;
            }
        });

        //TODO 5. Split: page logs to the main stream; start/display/action/error to side outputs
        OutputTag<String> startTag = new OutputTag<String>("start") {
        };
        OutputTag<String> displayTag = new OutputTag<String>("display") {
        };
        OutputTag<String> actionTag = new OutputTag<String>("action") {
        };
        OutputTag<String> errorTag = new OutputTag<String>("error") {
        };
        SingleOutputStreamOperator<String> pageDS = jsonObjWithNewFlagDS.process(new ProcessFunction<JSONObject, String>() {
            @Override
            public void processElement(JSONObject value, Context ctx, Collector<String> out) {

                // Error info can ride along with any record type: emit the whole
                // record to the error stream first, then strip the err field.
                String err = value.getString("err");
                if (err != null) {
                    ctx.output(errorTag, value.toJSONString());
                    value.remove("err");
                }

                // A record with a "start" field is a start-up log; everything else
                // is expected to be a page log (possibly with displays/actions).
                String start = value.getString("start");
                if (start != null) {
                    ctx.output(startTag, value.toJSONString());
                    return;
                }

                JSONObject page = value.getJSONObject("page");
                if (page == null) {
                    // Neither start nor page: nothing to split. Guard avoids the
                    // NullPointerException the previous version threw here.
                    return;
                }

                // Shared fields copied onto each exploded display/action record.
                JSONObject common = value.getJSONObject("common");
                Long ts = value.getLong("ts");
                String pageId = page.getString("page_id");

                // Explode display entries into individual records.
                JSONArray displays = value.getJSONArray("displays");
                if (displays != null) {
                    for (int i = 0; i < displays.size(); i++) {
                        JSONObject display = displays.getJSONObject(i);
                        display.put("common", common);
                        display.put("page_id", pageId);
                        display.put("ts", ts);
                        ctx.output(displayTag, display.toJSONString());
                    }
                    value.remove("displays");
                }

                // Explode action entries into individual records. Note: actions
                // carry their own "ts", so only common/page_id are attached.
                JSONArray actions = value.getJSONArray("actions");
                if (actions != null) {
                    for (int i = 0; i < actions.size(); i++) {
                        JSONObject action = actions.getJSONObject(i);
                        action.put("common", common);
                        action.put("page_id", pageId);
                        ctx.output(actionTag, action.toJSONString());
                    }
                    value.remove("actions");
                }

                // The stripped-down page log goes to the main stream.
                out.collect(value.toJSONString());
            }
        });

        //TODO 6. Extract the side outputs and write all five streams to Kafka
        SideOutputDataStream<String> startDS = pageDS.getSideOutput(startTag);
        SideOutputDataStream<String> displayDS = pageDS.getSideOutput(displayTag);
        SideOutputDataStream<String> actionDS = pageDS.getSideOutput(actionTag);
        SideOutputDataStream<String> errorDS = pageDS.getSideOutput(errorTag);
        pageDS.print("pageDS>>>");
        startDS.print("startDS>>>");
        displayDS.print("displayDS>>>");
        actionDS.print("actionDS>>>");
        errorDS.print("errorDS>>>");

        pageDS.sinkTo(KafkaUtil.getKafkaSink(new KafkaRecordSerializationSchema<String>() {
            @Nullable
            @Override
            public ProducerRecord<byte[], byte[]> serialize(String element, KafkaSinkContext context, Long timestamp) {
                return new ProducerRecord<>(Constant.TOPIC_DWD_TRAFFIC_PAGE, element.getBytes());
            }
        }));
        startDS.sinkTo(KafkaUtil.getKafkaSink(Constant.TOPIC_DWD_TRAFFIC_START));
        displayDS.sinkTo(KafkaUtil.getKafkaSink(Constant.TOPIC_DWD_TRAFFIC_DISPLAY));
        actionDS.sinkTo(KafkaUtil.getKafkaSink(Constant.TOPIC_DWD_TRAFFIC_ACTION));
        errorDS.sinkTo(KafkaUtil.getKafkaSink(Constant.TOPIC_DWD_TRAFFIC_ERR));

        //TODO 7. Submit the job
        env.execute("Dwd01_TrafficBaseLogSplit");

    }
}
