package com.bw.app.ods;

import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.bw.util.MyDeserializationSchemaFunction;
import com.bw.util.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;

/**
 * ODS-layer ingestion job: captures MySQL binlog change events via Flink CDC
 * and forwards each raw change record (as a JSON string) to a Kafka topic.
 *
 * <p>Pipeline: MySQL binlog -> {@code MySQLSource} -> stdout (debug) + Kafka sink.
 */
public class Flink_CDC {

    // --- MySQL binlog source connection settings ---
    private static final String MYSQL_HOST = "hadoop101";
    private static final int MYSQL_PORT = 3306;
    private static final String MYSQL_DATABASE = "gmall-2022-realtime";
    private static final String MYSQL_USER = "root";
    // NOTE(review): credentials hard-coded in source — move to config/env before production use.
    private static final String MYSQL_PASSWORD = "123456";

    // Kafka topic that receives the raw change-log records (ODS layer).
    private static final String KAFKA_TOPIC = "ods_base_db";

    public static void main(String[] args) throws Exception {
        // Execution environment; parallelism 1 preserves binlog event order end-to-end.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // CDC source: streams change events for every table in the configured database.
        SourceFunction<String> sourceFunction = MySQLSource.<String>builder()
                .hostname(MYSQL_HOST)
                .port(MYSQL_PORT)
                .databaseList(MYSQL_DATABASE) // monitor all tables under this database
                .username(MYSQL_USER)
                .password(MYSQL_PASSWORD)
                .deserializer(new MyDeserializationSchemaFunction()) // converts SourceRecord to String
                // Uncomment to take an initial full snapshot before tailing the binlog:
                // .startupOptions(StartupOptions.earliest())
                .build();

        DataStreamSource<String> source = env.addSource(sourceFunction);

        // Debug sink — prints every record to stdout; consider removing in production.
        source.print(">>>>");

        // Forward raw change records to Kafka for downstream consumers.
        source.addSink(MyKafkaUtil.getKafkaSink(KAFKA_TOPIC));

        env.execute();
    }
}
