package net.lmc.realtime.jtp.ods.job;

import net.lmc.bwie.realtime.jtp.common.utils.KafkaUtil;
import net.lmc.bwie.realtime.jtp.common.utils.MysqlCdcUtil;
import net.lmc.realtime.jtp.ods.function.CdcDataEtlMapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Flink job that captures MySQL CDC change events from the {@code jtp_mall}
 * database, runs them through a lightweight ETL map, and publishes the
 * resulting records to the {@code topic_db} Kafka topic.
 */
public class JtpCdcMallDataJob {

    public static void main(String[] args) throws Exception {
        // Single-task pipeline with periodic checkpoints every 5 seconds.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.enableCheckpointing(5000);

        // Business fact tables are synced incrementally, while shared dimension
        // tables need a full snapshot plus incremental changes — so the two
        // source streams are built separately and merged inside buildCdcSource.
        DataStream<String> rawCdcStream = buildCdcSource(env);

        // Normalize the raw CDC records before publishing.
        DataStream<String> cleanedStream = rawCdcStream.map(new CdcDataEtlMapFunction());

        // Sink: push the cleaned change events to Kafka.
        KafkaUtil.producerKafka(cleanedStream, "topic_db");

        // Trigger job execution.
        env.execute("JtpCdcMallDataJob");
    }

    /**
     * Builds a unified CDC stream over the jtp_mall database: dimension tables
     * are captured from an initial snapshot (full + incremental), fact tables
     * from the earliest binlog offset (incremental only).
     *
     * @param env the streaming environment the sources are registered on
     * @return the union of the dimension and fact change streams
     */
    private static DataStream<String> buildCdcSource(StreamExecutionEnvironment env) {
        // Dimension tables: full snapshot followed by incremental changes.
        String[] dimensionTables = {
                "base_dic", "base_province", "base_region", "base_category1",
                "base_category2", "base_category3", "activity_info", "activity_rule",
                "base_trademark", "coupon_info", "sku_info", "spu_info", "user_info"
        };
        DataStream<String> dimensionStream =
                MysqlCdcUtil.cdcMysqlInitial(env, "jtp_mall", dimensionTables);

        // Fact tables: incremental only, read from the earliest binlog position.
        String[] factTables = {
                "favor_info", "cart_info", "order_info", "order_detail",
                "order_detail_activity", "order_detail_coupon", "order_status_log",
                "payment_info", "order_refund_info", "refund_payment",
                "comment_info", "coupon_use"
        };
        DataStream<String> factStream =
                MysqlCdcUtil.cdcMysqlEarliest(env, "jtp_mall", factTables);

        // Merge both sources into a single stream for downstream ETL.
        return dimensionStream.union(factStream);
    }
}
