package net.bwie.jtp.job;

import net.bwie.jtp.function.CdcDataEtlMapFunction;
import net.bwie.realtime.guanjuntao.util.KafkaUtil;
import net.bwie.realtime.guanjuntao.util.MysqlCdcUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class JtpCdcMallDataJob {

    /**
     * Entry point: wires up and runs the CDC-to-Kafka streaming pipeline.
     * Pipeline shape: MySQL CDC source -> record ETL map -> Kafka sink.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // Single-parallelism environment with a checkpoint every 5 seconds.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.enableCheckpointing(5000L);

        // Source: change records captured from MySQL (dimension + fact tables).
        DataStream<String> rawCdcStream = cdcMysqlSource(env);

        // Transform: per-record cleanup/normalization of the CDC payload.
        DataStream<String> cleanedStream = rawCdcStream.map(new CdcDataEtlMapFunction());

        // Sink: publish the transformed records to the "topic-db" Kafka topic.
        KafkaUtil.producerKafka(cleanedStream, "topic-db");

        // Submit the job to the cluster.
        env.execute("JtpCdcMallDataJob");
    }

    /**
     * Builds the combined MySQL CDC source stream for the {@code jtp_mall} database.
     * Dimension tables are captured with an initial full snapshot followed by
     * incremental changes ({@code cdcMysqlInitial}); business fact tables are
     * captured incrementally from the earliest available position
     * ({@code cdcMysqlEarliest}) — semantics per the Mysql CDC utility's naming;
     * confirm against {@code MysqlCdcUtil} if startup modes matter.
     *
     * @param env the streaming execution environment the CDC sources attach to
     * @return a single stream containing change records from both table groups
     */
    private static DataStream<String> cdcMysqlSource(StreamExecutionEnvironment env) {
        // Dimension tables: full snapshot plus incremental capture.
        String[] dimensionTables = {
                "base_dic", "base_category1", "base_category2", "base_category3",
                "base_province", "base_region", "base_trademark", "activity_info",
                "activity_rule", "coupon_info", "sku_info", "spu_info", "user_info"
        };
        DataStream<String> dimensionStream =
                MysqlCdcUtil.cdcMysqlInitial(env, "jtp_mall", dimensionTables);

        // Business fact tables: incremental capture only.
        String[] factTables = {
                "cart_info", "comment_info", "activity_sku", "tbl_example",
                "coupon_range", "coupon_use", "refund_payment",
                "payment_info", "favor_info", "order_status_log", "order_detail",
                "order_detail_activity", "order_detail_coupon",
                "order_refund_info", "order_info"
        };
        DataStream<String> factStream =
                MysqlCdcUtil.cdcMysqlEarliest(env, "jtp_mall", factTables);

        // Merge both change streams into one unified CDC stream.
        return dimensionStream.union(factStream);
    }
}
