package com.zzw.demo.text_A;

import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import com.zzw.demo.util.CastDebeziumDeserializationSchema;
import com.zzw.demo.util.KafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;


/**
 * Demo job: captures MySQL change data via Flink CDC (Debezium) and forwards
 * the raw change records as strings into the Kafka topic {@code ods_db_cdc}.
 *
 * <p>NOTE(review): credentials and host names are hard-coded for demo purposes
 * only — move them to configuration before any real deployment.
 */
public class Text02 {
    public static void main(String[] args) throws Exception {
        // Create the Flink streaming environment; Flink CDC is used to read
        // the MySQL binlog and filter out unusable records downstream.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism keeps record order and simplifies local debugging.
        env.setParallelism(1);

        // Build the Flink CDC MySQL source.
        // tableList entries must be fully qualified as "database.table";
        // the original "gmall.*" did not match database "gmall2", so no
        // tables would have been captured. Fixed to "gmall2.*".
        DebeziumSourceFunction<String> mysqlSource = MySQLSource.<String>builder()
                .hostname("hadoop-single")       // MySQL host
                .port(3306)                      // MySQL port
                .databaseList("gmall2")          // database(s) to monitor
                .tableList("gmall2.*")           // all tables of gmall2 (qualified pattern)
                .username("root")                // account
                .password("root")                // password
                .deserializer(new CastDebeziumDeserializationSchema())
                .build();

        // Turn the CDC source into a stream so it can be written to Kafka.
        DataStreamSource<String> cdcStream = env.addSource(mysqlSource);
        cdcStream.print();
        // Sink the change records into the ODS layer Kafka topic via the
        // project's Kafka utility class.
        cdcStream.addSink(KafkaUtil.kafkaProducer("ods_db_cdc"));

        // Launch the job.
        env.execute();
    }
}
