package net.bw.realtime.tmall.dwd.job;

import com.alibaba.fastjson.JSON;
import net.bw.realtime.tmall.common.utils.KafkaUtil;
import net.bw.realtime.tmall.dwd.bean.TransactionEventBean;
import net.bw.realtime.tmall.dwd.function.TransactionEventBeanMapFunction;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.time.Duration;

/*
 * @author liuyawei
 * @since 2025-05-27
 */
/**
 * Flink streaming ETL job for Tmall log data.
 *
 * <p>Topology: Kafka source → JSON validity filter (dirty records routed to a
 * side output and persisted to Kafka) → key by device id → map to
 * {@link TransactionEventBean} → serialize back to JSON → Kafka sink.
 */
public class TmallLogEtlStreamJob {

    /**
     * Job entry point: builds the stream topology and starts execution.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {

        // 1. Create the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 2. Single parallel task (development/testing setting)
        env.setParallelism(1);

        // 3. Checkpoint every 5 seconds for fault tolerance
        env.enableCheckpointing(5000);

        // 4. Read raw log records from Kafka.
        // NOTE(review): topic is hard-coded to the "abnormal" test topic; the
        // regular topic is "tmall_log_test" — confirm before deploying.
        DataStream<String> kafkaStream = KafkaUtil.consumerKafka(env, "tmall_log_test_abnormal");

        // 5. Clean, convert and forward the records
        process(kafkaStream);

        // 6. Launch the job
        env.execute("TmallLogEtlStreamJob");
    }

    /**
     * Wires the processing pipeline: clean the raw JSON records, then convert
     * and sink them.
     *
     * @param kafkaStream raw JSON log records from Kafka
     */
    private static void process(DataStream<String> kafkaStream) {
        // 1> Filter out unparseable records
        SingleOutputStreamOperator<String> logCleanedStream = logCleaned(kafkaStream);
        // 2> Convert valid records and write them to Kafka
        logConvert(logCleanedStream);
    }

    /**
     * Keys the cleaned records by device id, maps each record to a
     * {@link TransactionEventBean}, serializes the bean back to JSON and
     * writes the result to Kafka.
     *
     * @param logCleanedStream stream of records already validated as JSON
     */
    private static void logConvert(SingleOutputStreamOperator<String> logCleanedStream) {

        // Group records by deviceInfo.deviceId.
        // NOTE(review): a record without a "deviceInfo" object throws an NPE
        // here — logCleaned only guarantees parseable JSON, not this field.
        // Confirm the upstream schema or harden this extractor.
        KeyedStream<String, String> deviceIdStream = logCleanedStream.keyBy(
                json -> JSON.parseObject(json).getJSONObject("deviceInfo").getString("deviceId")
        );

        // Convert raw JSON strings into TransactionEventBean objects
        SingleOutputStreamOperator<TransactionEventBean> beanStream =
                deviceIdStream.map(new TransactionEventBeanMapFunction());

        // Serialize the beans back to JSON strings for the Kafka sink
        SingleOutputStreamOperator<String> jsonStream = beanStream.process(
                new ProcessFunction<TransactionEventBean, String>() {
                    @Override
                    public void processElement(TransactionEventBean value, Context ctx, Collector<String> out) throws Exception {
                        out.collect(JSON.toJSONString(value));
                    }
                }
        );

        // NOTE(review): sink topic is hard-coded to the "abnormal" test topic;
        // the regular topic is "transaction_event_bean-topic" — confirm.
        KafkaUtil.producerKafka(jsonStream, "transaction_event_bean-topic-abnormal");
    }

    /**
     * Filters out records that are not valid JSON. Valid records pass through
     * on the main stream; unparseable ("dirty") records are routed to a side
     * output and forwarded to a dedicated Kafka topic for inspection.
     *
     * @param kafkaStream raw records read from Kafka
     * @return stream containing only records that parse as JSON objects
     */
    private static SingleOutputStreamOperator<String> logCleaned(DataStream<String> kafkaStream) {

        // Side-output tag for dirty records. The anonymous subclass ({}) is
        // required so Flink can capture the generic type at runtime.
        OutputTag<String> dirtyTag = new OutputTag<String>("dirty-log"){};

        // Validate each record by attempting to parse it as JSON
        SingleOutputStreamOperator<String> cleanedStream = kafkaStream.process(
                new ProcessFunction<String, String>() {
                    @Override
                    public void processElement(String value, ProcessFunction<String, String>.Context ctx, Collector<String> out) throws Exception {
                        try {
                            // Successful parse means the record is valid JSON
                            JSON.parseObject(value);
                            out.collect(value);
                        } catch (Exception e) {
                            // Unparseable record: route to the dirty side output
                            ctx.output(dirtyTag, value);
                        }
                    }
                }
        );

        // Persist dirty records to Kafka for later inspection.
        // To produce a test record manually:
        // kafka-console-producer.sh --broker-list node101:9092,node102:9092,node103:9092 --topic topic-log
        DataStream<String> dirtyStream = cleanedStream.getSideOutput(dirtyTag);
        KafkaUtil.producerKafka(dirtyStream, "dwd-tmall-dirty-log");

        // Main (clean) stream continues downstream
        return cleanedStream;
    }

}
