package com.cssl.app.ods;

import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import com.cssl.app.function.CustomerDeserialization;
import com.cssl.utils.CommonUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @Author: chen
 * @Date: 2021/11/11 19:51
 * @Desc: Captures MySQL binlog changes via Flink CDC and writes them to the Kafka ODS topic.
 */
/**
 * Entry point that captures MySQL binlog change events via Flink CDC and
 * forwards them to a Kafka ODS topic.
 *
 * <p>Reads all tables of the {@code gmall-flink} database starting from the
 * latest binlog offset, deserializes each change event with
 * {@link CustomerDeserialization}, and sinks the resulting strings to the
 * Kafka topic configured under the {@code kafka.topic.db.ods} property.
 */
public class FlinkCDC {
    public static void main(String[] args) throws Exception {
        // Hadoop user for HDFS access; only takes effect once the HDFS
        // checkpoint state backend (see note below) is enabled.
        System.setProperty("HADOOP_USER_NAME", "atguigu");

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 preserves the global order of binlog events end-to-end.
        env.setParallelism(1);

        // Checkpointing / state backend intentionally left disabled here.
        // For exactly-once recovery, configure an FsStateBackend on HDFS
        // (e.g. hdfs://hadoop102:9820/gmall-flink/ck) and enable checkpointing
        // with CheckpointingMode.EXACTLY_ONCE plus timeout/concurrency limits.

        // CDC source: start from the current binlog position (no initial
        // snapshot), emit events through the custom deserializer.
        // NOTE(review): credentials and host are hard-coded — move them into
        // the properties file alongside the Kafka settings.
        DebeziumSourceFunction<String> sourceFunction = MySQLSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .username("root")
                .password("root")
                .databaseList("gmall-flink") // capture every table in this database
                .deserializer(new CustomerDeserialization())
                .startupOptions(StartupOptions.latest())
                .build();

        // Sink the change stream to the ODS Kafka topic read from configuration.
        String topic = CommonUtils.getProperties().getProperty("kafka.topic.db.ods");
        env.addSource(sourceFunction).addSink(CommonUtils.getKafkaProducer(topic));

        env.execute("flinkcdc-writer-kafka");
    }
}