package net.bw.realtime.tmall.dwd.job;

import com.alibaba.fastjson.JSON;
import net.bw.realtime.tmall.common.utils.KafkaUtil;
import net.bw.realtime.tmall.dwd.function.sinkClickhouseFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/*
 * @ Author：liuyawei
 * @ Date：2025-05-26
 */
public class TmallLogEtlJob {

    /**
     * Entry point of the DWD-layer log ETL job.
     *
     * <p>Pipeline: read raw logs from the Kafka topic {@code tmall_log},
     * filter out records that are not valid JSON, then fan the cleaned
     * stream out to Kafka and ClickHouse.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {

        // 1. Create the streaming execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // 2. Set parallelism (single task — this job is low-volume ETL)
        env.setParallelism(1);

        // 3. Read the raw log stream from Kafka
        DataStream<String> tmallLogStream = KafkaUtil.consumerKafka(env, "tmall_log");

        // 4. Clean and route the data
        process(tmallLogStream);

        // 5. Launch the job
        env.execute("TmallLogEtlJob");

    }

    /**
     * Cleans the raw log stream and writes the result to both sinks.
     *
     * @param tmallLogStream raw log records consumed from Kafka
     */
    private static void process(DataStream<String> tmallLogStream) {

        // 1. Clean the data: filter out dirty (non-JSON) records
        DataStream<String> cleanedStream = logCleaned(tmallLogStream);

        // 2. Persist the cleaned data to Kafka
        KafkaUtil.producerKafka(cleanedStream, "dwd-tmall-log");

        // 3. Split by province / device / sku and persist to ClickHouse
        //    (the actual splitting happens inside sinkClickhouseFunction)
        sinkToClickhouse(cleanedStream);

    }

    /**
     * Persists the cleaned log stream to ClickHouse.
     *
     * @param cleanedStream validated JSON log records
     */
    private static void sinkToClickhouse(DataStream<String> cleanedStream) {

        sinkClickhouseFunction.saveTmallLog(cleanedStream);

    }


    /**
     * Separates valid JSON records from dirty ones.
     *
     * <p>Records that fail to parse as JSON are emitted to a side output
     * and forwarded to the Kafka topic {@code dwd-tmall-dirty-log};
     * valid records are returned on the main stream.
     *
     * @param tmallLogStream raw log records
     * @return the stream of records that parse as valid JSON objects
     */
    private static DataStream<String> logCleaned(DataStream<String> tmallLogStream) {

        // 1. Side-output tag for dirty records.
        //    The anonymous subclass ({}) is required so Flink can capture
        //    the generic type parameter at runtime.
        OutputTag<String> dirtyTag = new OutputTag<String>("dirty-log"){};

        // 2. Validate each record by attempting a JSON parse
        SingleOutputStreamOperator<String> cleanedStream = tmallLogStream.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String value, Context ctx, Collector<String> out) throws Exception {
                try {
                    // fastjson returns null (without throwing) for null/blank
                    // input, so an explicit null check is required — otherwise
                    // empty records would pass through as "clean".
                    if (JSON.parseObject(value) == null) {
                        ctx.output(dirtyTag, value);
                    } else {
                        // Valid JSON — emit on the main stream
                        out.collect(value);
                    }
                } catch (Exception e) {
                    // Malformed JSON — route to the dirty side output
                    ctx.output(dirtyTag, value);
                }
            }
        });

        // 3. Persist the dirty side output to Kafka.
        //    If there is no dirty data, it can be produced manually for testing:
        // kafka-console-producer.sh --broker-list node101:9092,node102:9092,node103:9092 --topic tmall-log
        DataStream<String> dirtyStream = cleanedStream.getSideOutput(dirtyTag);
        KafkaUtil.producerKafka(dirtyStream, "dwd-tmall-dirty-log");

        // 4. Return the cleaned main stream
        return cleanedStream;

    }

}
