package net.bwie.realtime.jtp.dwd.log.job;

import com.alibaba.fastjson.JSON;
import net.bwie.realtime.jtp.utils.KafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SideOutputDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * @BelongsProject: realtime-project-10zlq
 * @BelongsPackage: net.bwie.realtime.jtp.dwd.log.job
 * @Author: zhangleqing
 * @CreateTime: 2025-08-15  22:34
 * @Description: DWD-layer job: takes raw app log data collected into the ODS layer,
 *               classifies and processes it, and writes the results back to Kafka.
 *               Data flow: kafka -> flink datastream -> kafka
 * @Version: 1.0
 */
public class JtpAppLogEtlJob {

    /**
     * Entry point. Reads raw app log records from the ODS Kafka topic, cleans and
     * classifies them, and writes each category to its own DWD Kafka topic.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        // 1. Execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Source: raw log records from the ODS topic.
        DataStream<String> kafkaDataStream = KafkaUtil.consumerKafka(env, "topic-log");
        kafkaDataStream.print("kafka==>"); // debug sink; consider removing in production

        // 3. Transformation: clean, repair visitor flags (TODO), and split the stream.
        DataStream<String> pageStream = processLog(kafkaDataStream);

        // 4. Sink: page logs (the main stream) to the DWD page-log topic.
        KafkaUtil.producerKafka(pageStream, "dwd-traffic-page-log");

        // 5. Trigger execution.
        env.execute("JtpAppLogEtlJob");
    }

    /**
     * ETL pipeline for real-time app traffic logs:
     *   1. data cleansing (non-JSON records go to a dirty side output)
     *   2. new/returning-visitor flag repair (not yet enabled)
     *   3. stream splitting (start logs to a side output; page logs on the main stream)
     *
     * @param stream raw log records as JSON strings
     * @return the main (page-log) stream
     */
    private static DataStream<String> processLog(DataStream<String> stream) {
        // 1. Data cleansing.
        DataStream<String> jsonStream = appLogCleaned(stream);

        // 2. New/returning-visitor flag repair.
        // TODO: re-enable once processIsNew is implemented.
        //DataStream<String> etlStream = processIsNew(jsonStream);

        // 3. Stream splitting.
        DataStream<String> pageStream = splitStream(jsonStream);

        return pageStream;
    }

    /**
     * Splits the cleaned log stream. Records carrying a top-level {@code "start"}
     * key are treated as app-start logs and routed to a side output, which is
     * written to the "dwd-traffic-start-log" topic; all other records remain on
     * the main stream as page logs.
     *
     * NOTE(review): assumes start logs are identified by a top-level "start"
     * field in the JSON payload — confirm against the actual log schema.
     *
     * @param stream cleaned (valid-JSON) log records
     * @return the main stream containing page logs
     */
    private static DataStream<String> splitStream(DataStream<String> stream) {
        // Step 1: side-output tag for app-start logs. The anonymous subclass ({})
        // preserves the generic type information for Flink.
        final OutputTag<String> startTag = new OutputTag<String>("start-log"){};

        // Step 2: route each record by the presence of the "start" key.
        // Input is guaranteed parseable JSON because appLogCleaned runs first.
        SingleOutputStreamOperator<String> mainStream = stream.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String value, Context ctx, Collector<String> out) throws Exception {
                if (JSON.parseObject(value).containsKey("start")) {
                    // App-start log -> side output.
                    ctx.output(startTag, value);
                } else {
                    // Page log -> main stream.
                    out.collect(value);
                }
            }
        });

        // Step 3: write start logs to their own DWD topic.
        SideOutputDataStream<String> startStream = mainStream.getSideOutput(startTag);
        KafkaUtil.producerKafka(startStream, "dwd-traffic-start-log");

        // Step 4: return the main (page-log) stream.
        return mainStream;
    }

    /**
     * Cleans the raw log stream. Records that fail JSON parsing are routed to a
     * dirty-data side output and written to the "dwd-traffic-dirty-log" topic;
     * records that parse successfully pass through unchanged.
     *
     * @param stream raw log records
     * @return the stream of records that parsed as valid JSON
     */
    private static DataStream<String> appLogCleaned(DataStream<String> stream) {
        // Step 1: side-output tag for dirty (non-JSON) records.
        final OutputTag<String> dirtyTag = new OutputTag<String>("dirty-log"){};

        // Step 2: validate each record by attempting a JSON parse.
        SingleOutputStreamOperator<String> cleanedStream = stream.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String value, Context ctx, Collector<String> out) throws Exception {
                try {
                    // a. Try to parse; the result is discarded — only validity matters.
                    JSON.parseObject(value);
                    // b. No exception: valid JSON, forward on the main stream.
                    out.collect(value);
                } catch (Exception e) {
                    // c. Parse failure: route the raw record to the dirty side output.
                    ctx.output(dirtyTag, value);
                }
            }
        });

        // Step 3: write dirty records to their own topic for inspection/replay.
        SideOutputDataStream<String> dirtyStream = cleanedStream.getSideOutput(dirtyTag);
        KafkaUtil.producerKafka(dirtyStream, "dwd-traffic-dirty-log");

        // Step 4: return the valid-JSON main stream.
        return cleanedStream;
    }
}
