package com.gmall.app.ods;

import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import com.gmall.app.function.CustomerDeserialiazation;
import com.gmall.utils.MykafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class flinkCDC_CustomerDeserialization {

    /**
     * Entry point: captures MySQL binlog change events via Flink CDC from the
     * {@code mp_user} and {@code mp_exam} databases, prints each record for
     * inspection, and writes every record to the {@code ods_user_db} Kafka topic.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // Set up the streaming runtime; a single parallel task is used.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Attach the CDC source to the environment.
        DataStreamSource<Object> cdcStream = env.addSource(buildMySqlCdcSource());

        // Print for local debugging, then sink each change record to Kafka.
        cdcStream.print();
        String topic = "ods_user_db";
        cdcStream.addSink(MykafkaUtil.getKafkaProducer(topic));

        // Submit the job.
        env.execute("FlinkCDC");
    }

    /**
     * Builds the Debezium-based MySQL CDC source function.
     *
     * <p>Uses the custom deserializer to shape each change event, and
     * {@code StartupOptions.latest()} so only changes made after the job
     * starts are captured (no initial snapshot).
     *
     * @return a source function emitting deserialized change records
     */
    private static DebeziumSourceFunction<Object> buildMySqlCdcSource() {
        // NOTE(review): credentials are hardcoded in source — move host/user/password
        // to external configuration or a secret store before deploying.
        return MySQLSource.builder()
                .hostname("192.168.77.69")
                .port(3306)
                .username("nacos")
                .password("Zjhw@123")
                // Whole databases are monitored; add .tableList(...) to narrow
                // capture down to individual tables.
                .databaseList("mp_user", "mp_exam")
                .deserializer(new CustomerDeserialiazation())
                .startupOptions(StartupOptions.latest())
                .build();
    }
}
