package com.qingyunge.app.ods;

import com.qingyunge.app.func.MysqlToKafkaDeserizlization;
import com.qingyunge.common.MysqlConfig;
import com.qingyunge.util.MyKafkaUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * ODS-layer Flink job: captures MySQL change-log events via Flink CDC and
 * forwards the serialized records to a Kafka topic.
 *
 * <p>Connection settings come from {@link MysqlConfig}; serialization of the
 * change events is delegated to {@link MysqlToKafkaDeserizlization}.
 */
public class FlinkMysqlToKafka {

    /** Kafka topic that receives the CDC change records. */
    private static final String TOPIC = "topic_db";

    /** Group id passed to the Kafka producer helper. */
    private static final String GROUP_ID = "odstokafka";

    public static void main(String[] args) throws Exception {
        // Parallelism 1 preserves the binlog event order end-to-end.
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.getExecutionEnvironment().setParallelism(1);

        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname(MysqlConfig.HOSTNAME)
                .port(MysqlConfig.PORT)
                .username(MysqlConfig.USERNAME)
                .password(MysqlConfig.PASSWARD)
                .databaseList(MysqlConfig.DATABASE)
                .tableList(MysqlConfig.DATABASE + ".*")   // capture every table in the database
                .startupOptions(StartupOptions.initial()) // full snapshot first, then stream binlog
                .deserializer(new MysqlToKafkaDeserizlization())
                .build();

        DataStreamSource<String> cdcStream =
                env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "MysqlSource");
        cdcStream.addSink(MyKafkaUtil.getFlinkKafkaProducer(TOPIC, GROUP_ID));
        // Debug output to stdout; consider removing for production runs.
        cdcStream.print();

        env.execute("FlinkMysqlToKafka");
    }
}
