package com.atguigu.app.ods;

import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import com.atguigu.app.function.FlinkSerializeDefine;
import com.atguigu.common.ODSStaticConstants;
import com.atguigu.utils.KafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @Author: GaoFei
 * @Description: Business-data collection — captures MySQL changes (CDC) and forwards them to the Kafka ODS layer
 * @Date: Created in 18:11
 * @Modified By:
 */
public class FlinkCDC {
    /**
     * Entry point: builds a MySQL CDC (Debezium) source, reads change records as
     * JSON strings, and sinks them into the ODS Kafka topic.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        /*
         * Checkpointing — required in production; disabled here because it needs a
         * running HDFS, which is inconvenient during the learning phase:
         *   System.setProperty("HADOOP_USER_NAME","atguigu");
         *   env.setStateBackend(new FsStateBackend(OdsStaticConstants.CHECK_POINT_URL));
         *   env.enableCheckpointing(5000L);                              // checkpoint interval
         *   env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
         *   env.getCheckpointConfig().setCheckpointTimeout(10000L);      // checkpoint timeout
         *   env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);    // at most 2 concurrent checkpoints
         *   env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L); // min pause between checkpoints
         */

        // Build the MySQL CDC source.
        // NOTE(review): credentials are hard-coded — move them into configuration
        // (e.g. ODSStaticConstants) before production use.
        DebeziumSourceFunction<String> mysqlSource = MySQLSource.<String>builder()
                .hostname(ODSStaticConstants.FREQUENTLY_USE_IP_ADDR)
                .port(3306)
                .username("root")
                .password("admin")
                .databaseList("gmall-0625-flink")
                //.tableList("gmall-0625-flink.base_trademark")
                .deserializer(new FlinkSerializeDefine())
                // initial(): snapshot all existing rows first, then continue
                // streaming from the binlog.
                .startupOptions(StartupOptions.initial())
                .build();

        DataStreamSource<String> changeRecordStream = env.addSource(mysqlSource);

        // Forward every change record to the ODS Kafka topic.
        changeRecordStream.addSink(KafkaUtil.getKafkaProducter(ODSStaticConstants.CDC_TO_ODS_KAFKA_TOPIC_DB));

        env.execute();
    }
}
