package sync.job;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import net.bwie.realtime.jtp.KafkaUtil;
import net.bwie.realtime.jtp.MysqlCdcUtil;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import sync.function.LogSplitFunction;

/**
 * @BelongsProject: realtime-project-10zlq
 * @BelongsPackage: sync.job
 * @Author: zhangleqing
 * @CreateTime: 2025-08-30  09:47
 * @Description: Flink job that reads MySQL CDC change streams for four
 *               transactions_ods tables, cleans and splits them, and writes the
 *               per-table records to Kafka ODS topics.
 * @Version: 1.0
 */
public class flinkCdcFromDorisToKafka {
    public static void main(String[] args) throws Exception {
        // 获取执行环境
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // 设置检查点
//        env.enableCheckpointing(5000);
//        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://node101:8020/sm-community-realtime/ckpt");
        //获取数据
        DataStream<String> stringDataorder_base = MysqlCdcUtil.cdcMysqlInitial(env, "transactions_ods", "order_base");
        DataStream<String> stringDatarefund_detail = MysqlCdcUtil.cdcMysqlInitial(env, "transactions_ods", "refund_detail");
        DataStream<String> stringDatalogistics_info = MysqlCdcUtil.cdcMysqlInitial(env, "transactions_ods", "logistics_info");
        DataStream<String> stringDataconsumer_behavior = MysqlCdcUtil.cdcMysqlInitial(env, "transactions_ods", "consumer_behavior");

        // todo 数据转换
        DataStream<String> CarStreamorder_base = processLog(stringDataorder_base);
        DataStream<String> CarStreamrefund_detail = processLog(stringDatarefund_detail);
        DataStream<String> CarStreamlogistics_info = processLog(stringDatalogistics_info);
        DataStream<String> CarStreamconsumer_behavior = processLog(stringDataconsumer_behavior);

        CarStreamorder_base.print("order_base");
        CarStreamrefund_detail.print("refund_detail");
        CarStreamlogistics_info.print("logistics_info");

        env.execute("flinkCdcFromDoris");
    }

    private static DataStream<String> processLog(DataStream<String> mysqlStream) {
        // todo 数据清洗
        DataStream<String> stream = streamClean(mysqlStream);
        // todo 数据分流
        DataStream<String> splitStream = splitStream(stream);
        return splitStream;
    }


    private static DataStream<String> splitStream(DataStream<String> stream) {
        // todo 分流
        OutputTag<String> orderBaseLog = new OutputTag<String>("order-base"){};
        OutputTag<String> refundDetailLog = new OutputTag<String>("refund-detail"){};
        OutputTag<String> logisticsInfoLog = new OutputTag<String>("logistics-info"){};
        OutputTag<String> consumerBehaviorLog = new OutputTag<String>("consumer-behavior"){};

        SingleOutputStreamOperator<String> splitStreamLog = stream.process(
                new LogSplitFunction( orderBaseLog, refundDetailLog, logisticsInfoLog, consumerBehaviorLog)
        );

        // todo 侧边流输出
        DataStream<String> orderBase = splitStreamLog.getSideOutput(orderBaseLog);
        DataStream<String> refundDetail = splitStreamLog.getSideOutput(refundDetailLog);
        DataStream<String> logisticsInfo = splitStreamLog.getSideOutput(logisticsInfoLog);
        DataStream<String> consumerBehavior = splitStreamLog.getSideOutput(consumerBehaviorLog);


        // 将数据写入kafka
        KafkaUtil.producerKafka(orderBase,"ods-order-base-Log");
        KafkaUtil.producerKafka(refundDetail,"ods-refund-detail-log");
        KafkaUtil.producerKafka(logisticsInfo,"ods-logistics-info-log");
        KafkaUtil.producerKafka(consumerBehavior,"ods-consumer-behavior-log");

        return splitStreamLog;
    }


    private static DataStream<String> streamClean(DataStream<String> stream) {
        // todo 过滤空数据
        OutputTag<String> dirtyLog = new OutputTag<String>("dirty-log") {
        };
        SingleOutputStreamOperator<String> cleanStream = stream.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String value,
                                       Context ctx,
                                       Collector<String> out) throws Exception {
                try {
                    JSONObject jsonObject = JSON.parseObject(value);
                    if(null!=jsonObject.getJSONObject("after")){
                        out.collect(value);
                    }
                } catch (Exception e) {
                    ctx.output(dirtyLog, value);
                }

            }
        });
        // 可以选择将过滤的脏数据写入一个单独存放脏数据的topic中 方便我们后期做脏数据分析
//        SideOutputDataStream<String> dirty = cleanStream.getSideOutput(dirtyLog);
//        KafkaUtil.producerKafka(dirty, "ods-dirty-log");

        // 正常返回数据流
        return cleanStream;
    }
}
