package com.bw.yk06;

import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.Properties;

public class test1 {
    public static void main(String[] args) throws Exception {
        // Set up the streaming execution environment; run everything with a
        // single parallel task so change records print in order.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Build a MySQL CDC source: take an initial snapshot of every table in
        // the "tms001" database, then continue streaming binlog changes.
        // NOTE(review): host/credentials are hard-coded — move to configuration
        // before any real deployment.
        DebeziumSourceFunction<String> cdcSource = MySQLSource.<String>builder()
                .hostname("hadoop-single")
                .port(3306)
                .databaseList("tms001") // no tableList -> all tables in tms001
                .username("root")
                .password("root")
                .startupOptions(StartupOptions.initial())
                .deserializer(new MyDeserializationSchemaFunction()) // SourceRecord -> String
                .build();

        // Attach the CDC source and print each change record to stdout.
        DataStreamSource<String> changeStream = env.addSource(cdcSource);
        changeStream.print();

//        // Optional: forward the change stream to the "tms_ods" Kafka topic.
//        Properties properties = new Properties();
//        properties.setProperty("bootstrap.servers", "hadoop-single:9092");
//        properties.setProperty("group.id", "test1");
//        changeStream.addSink(new FlinkKafkaProducer<>("tms_ods", new SimpleStringSchema(), properties));

        // Submit the job; blocks until the (unbounded) streaming job terminates.
        env.execute();
    }
}
