package org.example.realtime.jtp.ods.job;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.example.realtime.jtp.common.utils.KafkaUtil;
import org.example.realtime.jtp.ods.function.CdcDateEtlMapFunction;
import org.example.realtime.jtp.ods.function.MysqlCdcUtil;

/**
 * ODS-layer ingestion job for the {@code jtp_mall} database.
 *
 * <p>Uses the DataStream API with Flink CDC to capture MySQL change events, parses and
 * re-shapes each change record into a JSON string via {@link CdcDateEtlMapFunction}, and
 * finally publishes the result to the Kafka topic {@code topic-db}.
 *
 * @author Lianzy
 * @since 2025/6/2
 */
public class JtpCdcMallDataOdsJob {

    /** Dimension tables: captured with a full initial snapshot plus ongoing incremental changes. */
    private static final String[] DIM_TABLES = {
            "base_dic", "base_province", "base_region", "base_category1", "base_category2", "base_category3",
            "activity_info", "activity_rule", "base_trademark", "coupon_info", "sku_info", "spu_info", "user_info"
    };

    /** Fact tables: captured incrementally from the earliest available binlog position. */
    private static final String[] FACT_TABLES = {
            "favor_info", "cart_info", "order_info", "order_detail", "order_detail_activity", "order_detail_coupon",
            "order_status_log", "payment_info", "order_refund_info", "refund_payment", "comment_info", "coupon_use"
    };

    public static void main(String[] args) throws Exception {
        // Execution environment: single parallelism, checkpoint every 5 seconds.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.enableCheckpointing(5000L);

        // Source: combined CDC stream of dimension and fact table change events.
        DataStream<String> rawCdcStream = cdcMysqlSource(env);

        // Transformation: parse/clean each CDC record into the ODS JSON format.
        DataStream<String> odsStream = rawCdcStream.map(new CdcDateEtlMapFunction());

        // Sink: publish to the Kafka message queue.
        KafkaUtil.producerKafka(odsStream, "topic-db");

        // Trigger job execution.
        env.execute("JtpCdcMallDataOdsJob");
    }

    /**
     * Builds the MySQL CDC source for the {@code jtp_mall} database.
     *
     * <p>Dimension tables are read snapshot-first then incrementally ({@code cdcMysqlInitial});
     * fact tables are read incrementally from the earliest binlog offset
     * ({@code cdcMysqlEarliest}). The two streams are unioned into a single stream of
     * change-event JSON strings.
     *
     * @param env the stream execution environment the sources are registered on
     * @return the unioned dimension + fact CDC stream
     */
    private static DataStream<String> cdcMysqlSource(StreamExecutionEnvironment env) {
        DataStream<String> dimStream = MysqlCdcUtil.cdcMysqlInitial(env, "jtp_mall", DIM_TABLES);
        DataStream<String> factStream = MysqlCdcUtil.cdcMysqlEarliest(env, "jtp_mall", FACT_TABLES);
        return dimStream.union(factStream);
    }
}