package net.bwie.realtime.jtp.ods.job;

import net.bwie.realtime.jtp.common.utils.KafkaUtil;
import net.bwie.realtime.jtp.ods.function.CdcDataEtlMapFunction;
import net.bwie.realtime.jtp.ods.utils.MysqlCdcUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * ODS-layer job: uses Flink CDC to capture data from the {@code jtp_mall} MySQL
 * database in real time, applies ETL cleansing, and writes the result to Kafka
 * (DataStream API).
 *
 * <p>Dimension tables are synchronized full + incremental (initial snapshot),
 * while business fact tables are synchronized incrementally only, so the two
 * groups are read by separate CDC sources and then unioned.
 *
 * @Author: FuHe
 * @Date: 2025/6/2
 */
public class JtpCdcMallDataOdsJob {

    /** Source MySQL database monitored by the CDC connectors. */
    private static final String SOURCE_DATABASE = "jtp_mall";

    /** Kafka topic that receives the cleansed change records. */
    private static final String SINK_TOPIC = "topic-db";

    public static void main(String[] args) throws Exception {
        // 1. Set up the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism keeps per-table change ordering simple for the ODS layer.
        env.setParallelism(1);
        // Enable checkpointing (every 5 s) so the CDC source can recover exactly-once state.
        env.enableCheckpointing(5000L);

        // 2. Read the MySQL CDC source: fact tables incrementally, dimension
        //    tables full + incremental (see cdcMysqlSource for the split).
        DataStream<String> cdcStream = cdcMysqlSource(env);

        // 3. Apply ETL cleansing to each raw change record.
        DataStream<String> etlStream = cdcStream.map(new CdcDataEtlMapFunction());
        etlStream.print();

        // 4. Publish the cleansed records to Kafka.
        KafkaUtil.producerKafka(etlStream, SINK_TOPIC);

        // 5. Trigger job execution.
        env.execute("JtpCdcMallDataOdsJob");
    }

    /**
     * Builds the combined CDC stream for the {@code jtp_mall} database.
     *
     * <p>Dimension tables use {@code initial} startup (full snapshot + incremental
     * changes); business fact tables use {@code earliest} startup (incremental
     * only). The two streams are unioned into a single stream of change records.
     *
     * @param env the streaming execution environment the sources are attached to
     * @return a unioned stream of serialized change records from all tables
     */
    private static DataStream<String> cdcMysqlSource(StreamExecutionEnvironment env) {
        // Dimension tables: full snapshot + incremental sync.
        String[] dimList = new String[]{
                "base_dic", "base_province", "base_region", "base_category1", "base_category2", "base_category3",
                "activity_info", "activity_rule", "base_trademark", "coupon_info", "sku_info", "spu_info", "user_info"
        };
        DataStream<String> cdcDimStream = MysqlCdcUtil.cdcMysqlInitial(env, SOURCE_DATABASE, dimList);

        // Business fact tables: incremental sync only.
        String[] factList = {
                "favor_info", "cart_info", "order_info", "order_detail", "order_detail_activity", "order_detail_coupon",
                "order_status_log", "payment_info", "order_refund_info", "refund_payment", "common_info", "coupon_use"
        };
        DataStream<String> cdcFactStream = MysqlCdcUtil.cdcMysqlEarliest(env, SOURCE_DATABASE, factList);

        // Merge the two streams into one for downstream ETL.
        return cdcDimStream.union(cdcFactStream);
    }
}
