package yuekao3.ods;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;

import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.StringDebeziumDeserializationSchema;

import yuekao3.util.KafkaUtil;
import yuekao3.util.MyDeserializationSchemaFunction;

/**
 * ODS-layer Flink DataStream job: uses Flink CDC to capture the business tables
 * order_info, order_detail, favor_info, cart_info and payment_info from the
 * MySQL database {@code yuekao01} and streams the change records into the Kafka
 * topic {@code ods_db_data}, covering both the initial full snapshot and all
 * subsequent incremental (binlog) changes.
 */
public class ReadFilkCDC {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1); // parallelism 1 keeps message ordering through the Kafka sink

        // MySQL CDC source restricted to the five business tables listed below.
        SourceFunction<String> sourceFunction = MySQLSource.<String>builder()
                .hostname("hadoop-single")
                .port(3306)
                .databaseList("yuekao01") // capture is further restricted by tableList below
                .tableList("yuekao01.order_info,yuekao01.order_detail,yuekao01.favor_info,yuekao01.cart_info,yuekao01.payment_info")
                // initial() takes a full snapshot of the tables first, then switches to
                // binlog streaming — i.e. the required full + incremental synchronization.
                .startupOptions(StartupOptions.initial())
                // NOTE(review): credentials are hard-coded; move them to external
                // configuration (args/properties) before deploying.
                .username("root")
                .password("root")
                .deserializer(new MyDeserializationSchemaFunction()) // converts SourceRecord to String
                .build();

        DataStreamSource<String> streamSource = env.addSource(sourceFunction);
        streamSource.addSink(KafkaUtil.kafkaSink("ods_db_data"));
        // Named job so it is identifiable in the Flink web UI / job listings.
        env.execute("ods_db_data_sync");
    }
}
