package net.bwie.jtp.dwd.log.job;


import com.alibaba.fastjson.JSON;
import net.bwie.jtp.dwd.log.function.AdjustIsNewPrcessFuncion;
import net.bwie.jtp.dwd.log.function.LogSplitProcessFuncion;
import net.bwie.realtime.guanjuntao.util.KafkaUtil;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
/**
 * Traffic-log ETL job: cleans raw logs, repairs the new/old-user flag,
 * splits the stream by log type, and sinks each result to Kafka (DWD layer).
 *
 * @author 🌹帅的一塌糊涂🌹
 * @date 2025/5/19 15:22
 */
public class JtpLogEtlJob {
    public static void main(String[] args) throws Exception {
        // TODO: 2025/5/18 创建filnk上下文 
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // TODO: 2025/5/18 设置并行数 
        env.setParallelism(1);

        // TODO: 2025/5/18 获取kafka数据 
        DataStream<String> kafkaDataStream = KafkaUtil.consumerKafka(env, "topic-log");
        kafkaDataStream.print();

        // TODO: 2025/5/18 数据转换(清洗,整合)
        // TODO: 2025/5/18 数据输出(sink)kafka
        processLog(kafkaDataStream);

        // TODO: 2025/5/18 触发执行
        env.execute("JtpLogEtlJob");

    }

    private static void processLog(DataStream<String> logstream) {
        // TODO: 2025/5/18 数据清洗
        DataStream<String> jsonStream = logCleaned(logstream);
        // TODO: 2025/5/18  新老用户状态标记修复
        DataStream<String> etlStreame = processIsNew(jsonStream);
        // TODO: 2025/5/18 数据分流
        DataStream<String> pageStream =  splitStream(etlStreame);
        // TODO: 2025/5/18 存储到kafka 
        KafkaUtil.producerKafka(pageStream,"dwd-traffic-page-log");

    }

    private static DataStream<String> splitStream(DataStream<String> etlStreame) {
        // TODO: 2025/5/18 输出topic
        final OutputTag<String> errorTag = new OutputTag<String>("error-log"){};
        final OutputTag<String> startTag = new OutputTag<String>("start-log"){};
        final OutputTag<String> displayTag = new OutputTag<String>("display-log"){};
        final OutputTag<String> actionTag = new OutputTag<String>("action-log"){};
        // TODO: 2025/5/18 日志分流处理
        SingleOutputStreamOperator<String> pageStream = etlStreame.process(
                new LogSplitProcessFuncion(errorTag, startTag, displayTag, actionTag));
        // TODO: 2025/5/18 输出分流
        DataStream<String> error = pageStream.getSideOutput(errorTag);
        KafkaUtil.producerKafka(error,"dwd-traffic-error-log");
        DataStream<String> start = pageStream.getSideOutput(startTag);
        KafkaUtil.producerKafka(start,"dwd-traffic-start-log");
        DataStream<String> display = pageStream.getSideOutput(displayTag);
        KafkaUtil.producerKafka(display,"dwd-traffic-display-log");
        DataStream<String> action = pageStream.getSideOutput(actionTag);
        KafkaUtil.producerKafka(action,"dwd-traffic-action-log");
        JtpLogClickHouse.saveStartLog(start);
        JtpLogClickHouse.savePageLog(pageStream);
        JtpLogClickHouse.saveErrLog(error);
        // TODO: 2025/5/18 输出主流
        return pageStream;

    }

    // TODO: 2025/5/18  新老用户状态标记修复
    private static DataStream<String> processIsNew(DataStream<String> jsonStream) {
        // TODO: 2025/5/18 按mid进行分组
        KeyedStream<String, String> midStream = jsonStream.keyBy(new KeySelector<String, String>() {
            @Override
            public String getKey(String value) throws Exception {
                return JSON.parseObject(value).getJSONObject("common").getString("mid");
            }
        });
        // TODO: 2025/5/18 对新老用户进行校验 
        SingleOutputStreamOperator<String> isNew = midStream.process(new AdjustIsNewPrcessFuncion());
        // TODO: 2025/5/18 返回校验结果 
        return isNew;
    }

    // TODO: 2025/5/18 数据清洗
    private static DataStream<String> logCleaned(DataStream<String> logstream) {
       final   OutputTag<String> drityTag = new OutputTag<String>("dirty-log"){};
        SingleOutputStreamOperator<String> cleanedStream = logstream.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String value, Context ctx, Collector<String> out) throws Exception {
                try {
                    // 解析json格式
                    JSON.parseObject(value);
                    // 没有异常,解析正确,正常输出
                    out.collect(value);
                }catch (Exception e){
                    // 捕获异常,侧边流输出数据
                    ctx.output(drityTag,value);
                }
            }
        });
        // 侧边流输出数据
        DataStream<String> drityStream = cleanedStream.getSideOutput(drityTag);
        KafkaUtil.producerKafka(drityStream,"dwd-traffic-dirty-log");

        // 返回正常数据流
        return cleanedStream;
    }
}


