package net.bwie.realtime.jtp.dwd.log.job;

import com.alibaba.fastjson.JSON;
import net.bwie.realtime.jtp.common.utils.KafkaUtil;
import net.bwie.realtime.jtp.dwd.log.function.AdjustIsNewProcessFuncation;
import net.bwie.realtime.jtp.dwd.log.function.LogSplitProcessFuncation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * DWD-layer application: consumes the raw log data collected into the ODS layer,
 * classifies and processes it, and stores the results in Kafka queues.
 * Data flow: kafka -> flink datastream -> kafka
 *
 * @Author: FuHe
 * @Date: 2025/5/18
 */
public class JtpLogEtlJob {

    /**
     * Job entry point: builds the streaming pipeline and triggers execution.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        // 1. Execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps record order simple for this job; raise for production throughput.
        env.setParallelism(1);
        // 2. Source: raw app log JSON strings from the Kafka topic "topic-log"
        DataStream<String> kafkaDataStream = KafkaUtil.consumerKafka(env, "topic-log");
        // 3/4. Transformations and sinks are all wired up inside processLog
        processLog(kafkaDataStream);
        // 5. Trigger execution
        env.execute("JtpLogEtlJob");
    }

    /**
     * ETL pipeline for real-time APP traffic logs:
     * 1 - data cleansing (non-JSON records routed to a side output)
     * 2 - repair of the new/returning-visitor flag (is_new)
     * 3 - stream splitting by log type (error/start/display/action side outputs)
     * 4 - persistence of the results
     *
     * @param logStream raw app log stream, one JSON string per record
     */
    private static void processLog(DataStream<String> logStream) {
        // 1 - data cleansing
        DataStream<String> jsonStream = logCleaned(logStream);
        // 2 - new/returning-visitor flag repair
        DataStream<String> etlStream = processIsNew(jsonStream);
        // 3 - stream splitting; the main output carries page logs
        DataStream<String> pageStream = splitStream(etlStream);
        // 4 - persist page logs to ClickHouse
        // NOTE(review): the former Kafka sink (topic "dwd-traffic-page-log") was replaced by ClickHouse.
        JtpLogClickHouse.savePageLog(pageStream);
    }

    /**
     * Step 2: repair the new/returning-visitor flag.
     * Records are keyed by device id ("common.mid" in the log JSON) so that
     * per-device keyed state can be used to validate and correct the is_new marker.
     *
     * @param jsonStream cleansed JSON log stream
     * @return stream with corrected is_new markers
     */
    private static DataStream<String> processIsNew(DataStream<String> jsonStream) {
        // a - key by device id (common.mid)
        KeyedStream<String, String> midStream = jsonStream.keyBy(new KeySelector<String, String>() {
            @Override
            public String getKey(String value) throws Exception {
                return JSON.parseObject(value).getJSONObject("common").getString("mid");
            }
        });
        // b - stateful validation/correction of the is_new flag
        return midStream.process(new AdjustIsNewProcessFuncation());
    }

    /**
     * Step 3: split the log stream by log type.
     * The main output carries page logs; error and start logs are emitted to side
     * outputs and persisted to ClickHouse. Display and action side outputs are
     * tagged but currently have no sink attached.
     *
     * @param jsonStream log stream with repaired is_new markers
     * @return main stream of page logs
     */
    private static DataStream<String> splitStream(DataStream<String> jsonStream) {
        // Step 0 - side-output tags, one per log type
        final OutputTag<String> errorTag = new OutputTag<String>("error-log"){};
        final OutputTag<String> startTag = new OutputTag<String>("start-log"){};
        final OutputTag<String> displayTag = new OutputTag<String>("display-log"){};
        final OutputTag<String> actionTag = new OutputTag<String>("action-log"){};
        // Step 1 - route each record to the main stream (page logs) or one of the side outputs
        SingleOutputStreamOperator<String> pageStream =
                jsonStream.process(new LogSplitProcessFuncation(errorTag, startTag, displayTag, actionTag));
        // Step 2 - persist the error and start side outputs to ClickHouse
        // NOTE(review): the former Kafka sinks ("dwd-traffic-error-log", "dwd-traffic-start-log")
        // were replaced by ClickHouse.
        JtpLogClickHouse.saveErrorLog(pageStream.getSideOutput(errorTag));
        JtpLogClickHouse.saveStartLog(pageStream.getSideOutput(startTag));
        // NOTE(review): display/action side outputs (displayTag/actionTag) have no sink yet;
        // the former Kafka sinks were "dwd-traffic-display-log" and "dwd-traffic-action-log".
        // Step 3 - return the main (page log) stream
        return pageStream;
    }

    /**
     * Step 1: data cleansing — keep only records that parse as JSON.
     * Unparseable ("dirty") records are routed to a side output; no sink is
     * currently attached to it, so dirty records are effectively dropped.
     *
     * @param logStream raw log stream
     * @return stream containing only valid JSON records
     */
    private static DataStream<String> logCleaned(DataStream<String> logStream) {
        // a - side-output tag for dirty (non-JSON) records
        final OutputTag<String> dirtyTag = new OutputTag<String>("dirty-log"){};
        // b - attempt to parse each record; valid JSON flows downstream, the rest is side-output
        SingleOutputStreamOperator<String> cleanedStream = logStream.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String value, Context ctx, Collector<String> out) {
                try {
                    // Parse only to validate; the original string is forwarded unchanged.
                    JSON.parseObject(value);
                    out.collect(value);
                } catch (Exception e) {
                    // Unparseable record: divert to the dirty side output.
                    ctx.output(dirtyTag, value);
                }
            }
        });
        // NOTE(review): dirty side output (dirtyTag) has no sink attached; the former
        // Kafka sink was "dwd-traffic-dirty-log". Re-attach via cleanedStream.getSideOutput(dirtyTag).
        return cleanedStream;
    }

}
