package net.wlm.realtime.job;


import net.wlm.realtime.funcation.CdcDataEtlMapFunction;
import net.wlm.realtime.utils.KafkaUtil;
import net.wlm.realtime.utils.MysqlCdcUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;


/**
 * Flink CDC job: captures data from the {@code jtp_mall} MySQL database in real time,
 * applies an ETL transformation to each change record, and publishes the result to a
 * Kafka topic. Implemented with the Java DataStream API.
 */

public class JtpCdcMallDataJob {

    /** Source MySQL database containing the mall business tables. */
    private static final String SOURCE_DATABASE = "jtp_mall";

    /** Kafka topic receiving the transformed change records. */
    private static final String SINK_TOPIC = "topic-db";

    public static void main(String[] args) throws Exception {
        // 1. Execution environment; parallelism 1 keeps a single ordered CDC stream.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // 2. Source: MySQL CDC streams (dimension + fact tables).
        DataStream<String> cdcStream = cdcMysqlSource(env);
        // 3. Transformation: ETL each raw change record.
        DataStream<String> etlStream = cdcStream.map(new CdcDataEtlMapFunction());
        // 4. Sink: publish transformed records to Kafka.
        KafkaUtil.producerKafka(etlStream, SINK_TOPIC);
        // 5. Trigger job execution.
        env.execute("JtpCdcMallDataJob");
    }

    /**
     * Builds the MySQL CDC source stream: dimension tables are captured with an
     * initial snapshot plus incremental changes, while fact tables are captured
     * incrementally only (from the earliest available binlog offset).
     *
     * @param env the stream execution environment used to create the sources
     * @return the union of the dimension-table and fact-table change streams
     */
    private static DataStream<String> cdcMysqlSource(StreamExecutionEnvironment env) {
        // s1. Snapshot + incremental: base dimension tables.
        String[] dimList = new String[]{"base_dic","user_info","sku_info","spu_info","base_trademark",
                "base_category1","base_category2","base_category3","base_province",
                "base_region","activity_info","activity_rule","coupon_info"};
        DataStream<String> dimStream = MysqlCdcUtil.cdcMysqlInitial(env, SOURCE_DATABASE, dimList);
        // s2. Incremental only: business fact tables.
        // NOTE: the original list repeated order_info, order_status_log and refund_payment,
        // which would register those tables twice and double-capture their change records.
        String[] factList = new String[]{"order_info", "order_detail", "order_detail_activity",
                "order_detail_coupon", "order_status_log", "payment_info", "favor_info",
                "refund_payment", "comment_info", "order_refund_info",
                "coupon_use", "cart_info"};
        DataStream<String> factStream = MysqlCdcUtil.cdcMysqlEarliest(env, SOURCE_DATABASE, factList);
        // s3. Merge both streams into a single change-record stream.
        return dimStream.union(factStream);
    }
}