package net.bwie.realtime.jtp.dwd.log.job;


import com.alibaba.fastjson.JSON;
import net.bwie.realtime.jtp.common.utils.KafkaUtil;
import net.bwie.realtime.jtp.dwd.log.function.AdjustIsNewProcessFuncion;
import net.bwie.realtime.jtp.dwd.log.function.LogSplitProcessFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * DWD-layer traffic-log ETL job.
 *
 * <p>Pipeline: consume raw behavior logs from the Kafka topic {@code topic-log},
 * drop malformed JSON records (routed to a dirty-log topic), correct the
 * {@code is_new} flag per device ({@code common.mid}), split the remaining
 * records by log type (error / start / display / action / page), and write
 * each split to its own DWD Kafka topic.
 *
 * @author LiLi
 * @version 1.0
 * @date 2025/05/18 19:08:54
 */
public class JtpLogEtlJob {

    /**
     * Job entry point: builds the Flink pipeline and submits it for execution.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // 1. Execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Flush network buffers after at most 1 ms to minimize end-to-end latency,
        // at the cost of throughput. NOTE(review): confirm 1 ms is intentional and
        // not a leftover debugging setting.
        env.setBufferTimeout(1);

        // 2. Source: raw JSON log lines from Kafka.
        DataStream<String> dataStream = KafkaUtil.consumerKafka(env, "topic-log");

        // 3/4. Transformations and sinks are wired inside processLog.
        processLog(dataStream);

        env.execute("JtpLogEtlJob");
    }

    /**
     * Wires the full ETL pipeline: clean -> is_new adjustment -> split -> sink.
     *
     * <p>The main (page-log) stream is written to {@code dwd_traffic_page_log};
     * the error/start/display/action side outputs are sunk inside
     * {@link #splitStream(DataStream)}.
     *
     * @param dataStream raw JSON log lines from Kafka
     */
    private static void processLog(DataStream<String> dataStream) {
        DataStream<String> jsonStream = logCleaned(dataStream);

        DataStream<String> etlStream = processIsNew(jsonStream);

        DataStream<String> pageStream = splitStream(etlStream);

        // NOTE(review): this topic uses an underscore delimiter while the
        // side-output topics use hyphens ("dwd_traffic-error-log" etc.);
        // confirm the naming inconsistency is intended before changing either,
        // since downstream consumers depend on these exact names.
        KafkaUtil.producerKafka(pageStream, "dwd_traffic_page_log");
    }

    /**
     * Splits the cleaned log stream by log type using side outputs.
     *
     * <p>The main output of the returned stream contains page logs; error,
     * start, display and action logs are emitted through side outputs and
     * written directly to their respective Kafka topics here.
     *
     * @param jsonStream cleaned, is_new-adjusted JSON log stream
     * @return the main output stream (page logs)
     */
    private static DataStream<String> splitStream(DataStream<String> jsonStream) {

        // Anonymous subclasses are required so Flink can capture the generic
        // type information of each OutputTag at runtime.
        OutputTag<String> errorTag = new OutputTag<String>("error_log") {
        };

        OutputTag<String> startTag = new OutputTag<String>("start_log") {
        };

        OutputTag<String> displayTag = new OutputTag<String>("displays_log") {
        };

        OutputTag<String> actionTag = new OutputTag<String>("actions_log") {
        };

        // LogSplitProcessFunction routes each record to the matching side
        // output (or the main output for page logs).
        SingleOutputStreamOperator<String> pageStream = jsonStream.process(new LogSplitProcessFunction(
                errorTag, startTag, displayTag, actionTag
        ));

        // Error logs -> Kafka topic "dwd_traffic-error-log".
        DataStream<String> sideOutput = pageStream.getSideOutput(errorTag);
        KafkaUtil.producerKafka(sideOutput, "dwd_traffic-error-log");

        // Start logs -> Kafka topic "dwd_traffic-start-log".
        DataStream<String> sideOutput1 = pageStream.getSideOutput(startTag);
        KafkaUtil.producerKafka(sideOutput1, "dwd_traffic-start-log");

        // Display logs -> Kafka topic "dwd_traffic-display-log".
        DataStream<String> sideOutput2 = pageStream.getSideOutput(displayTag);
        KafkaUtil.producerKafka(sideOutput2, "dwd_traffic-display-log");

        // Action logs -> Kafka topic "dwd_traffic-action-log".
        DataStream<String> sideOutput3 = pageStream.getSideOutput(actionTag);
        KafkaUtil.producerKafka(sideOutput3, "dwd_traffic-action-log");

        return pageStream;
    }

    /**
     * Adjusts the {@code is_new} flag of each record, keyed by device id.
     *
     * <p>Keys the stream by {@code common.mid} so that
     * {@link AdjustIsNewProcessFuncion} can keep per-device state to decide
     * whether a record really belongs to a new visitor.
     *
     * @param jsonStream cleaned JSON log stream
     * @return stream with the is_new flag corrected
     */
    private static DataStream<String> processIsNew(DataStream<String> jsonStream) {

        // An anonymous KeySelector (rather than a lambda) is used so Flink's
        // type extraction can resolve the key type reliably.
        KeyedStream<String, String> midStream = jsonStream.keyBy(new KeySelector<String, String>() {
            @Override
            public String getKey(String s) throws Exception {
                return JSON.parseObject(s).getJSONObject("common").getString("mid");
            }
        });
        SingleOutputStreamOperator<String> isNewStream = midStream.process(new AdjustIsNewProcessFuncion());

        return isNewStream;

    }

    /**
     * Filters out records that are not valid JSON.
     *
     * <p>Valid records pass through on the main output; malformed ones are
     * routed via a side output to the Kafka topic {@code dwd_traffic-dirty-log}
     * for later inspection instead of being silently dropped.
     *
     * @param logStream raw log lines from Kafka
     * @return stream containing only parseable JSON records
     */
    private static DataStream<String> logCleaned(DataStream<String> logStream) {

        // Anonymous subclass so Flink can capture the OutputTag's generic type.
        OutputTag<String> dirtyTag = new OutputTag<String>("dirty-log") {
        };

        SingleOutputStreamOperator<String> cleanedStream = logStream.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String s, Context context, Collector<String> collector) throws Exception {
                try {
                    // Parse only to validate; the original string is forwarded
                    // unchanged so downstream operators see the raw record.
                    JSON.parse(s);
                    collector.collect(s);
                } catch (Exception e) {
                    // Malformed JSON -> dirty side output (not dropped).
                    context.output(dirtyTag, s);
                }
            }
        });

        DataStream<String> dirtyStream = cleanedStream.getSideOutput(dirtyTag);
        KafkaUtil.producerKafka(dirtyStream, "dwd_traffic-dirty-log");

        return cleanedStream;
    }
}
