package mn10;

import com.bw.gmall.realtime.utils.MyKafkaUtil;
import com.bw.gmall.realtime.utils.MysqlUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class DemoTest01 {

    /** MySQL database that holds the business tables to capture. */
    private static final String SOURCE_DATABASE = "gmall";

    /** Kafka topic every captured change stream is written to. */
    private static final String SINK_TOPIC = "topic_db2";

    /** Business tables captured via Flink CDC (requirement: at least 5 tables). */
    private static final String[] SOURCE_TABLES = {
            "order_info",
            "order_detail",
            "comment_info",
            "payment_info",
            "refund_payment",
    };

    /**
     * Flink streaming job that uses Flink CDC to capture the full snapshot plus
     * incremental changes of the MySQL business tables listed in
     * {@link #SOURCE_TABLES}, and forwards each change stream to the Kafka
     * topic {@code topic_db2}.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // One CDC source per table; each stream gets its own Kafka producer
        // sink, preserving the original per-table wiring (5 sources, 5 sinks).
        for (String table : SOURCE_TABLES) {
            DataStream<String> changeStream = MysqlUtil.cdcMysql(env, SOURCE_DATABASE, table);
            changeStream.addSink(MyKafkaUtil.getFlinkKafkaProducer(SINK_TOPIC));
        }

        env.execute();
    }
}
