package com.xujian.cdc.sink.kafka;


import com.ververica.cdc.connectors.postgres.PostgreSQLSource;
import com.xujian.cdc.schema.TxlcCustomerSchema;
import com.xujian.common.util.EnviromentUtil;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;

import java.util.Properties;

/**
 * Flink CDC job: streams change events from a PostgreSQL table into a Kafka topic.
 *
 * @author star xu
 * @date 2023/1/31 17:12
 * @Description: // TODO: 2023/1/31 during the incremental (streaming) phase, date values arrive
 *               formatted as 2023-01-30T16:44:22.326Z — normalized via the custom DateTimeConverter
 * @Slogan: Honor the masters; honor your future self
 */
public class FlinkCdcPgToKafka {

    /**
     * Streams change-data-capture events from a PostgreSQL table to a Kafka topic
     * with exactly-once delivery.
     *
     * <p>All connection settings default to the original hard-coded values but may be
     * overridden positionally on the command line:
     * {@code [host] [port] [database] [schema] [table] [username] [password] [bootstrapServers]}.
     * Running with no arguments behaves exactly as before.
     *
     * @param args optional positional overrides (see above); missing/blank slots use the defaults
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {

        // 1. Obtain the execution environment via the project helper.
        // NOTE(review): semantics of "other" depend on EnviromentUtil.build — confirm it also
        // enables checkpointing, which EXACTLY_ONCE Kafka delivery requires.
        String storageType = "other";
        StreamExecutionEnvironment env = EnviromentUtil.build(storageType);
        env.setParallelism(1);

        // Connection parameters: CLI args override the defaults below.
        // NOTE(review): credentials are hard-coded as defaults — move them to secure
        // configuration (env vars / secrets manager) before any production use.
        String srcHost     = arg(args, 0, "127.0.0.1");
        int port           = Integer.parseInt(arg(args, 1, "5432"));
        String srcDatabase = arg(args, 2, "whe_member_meinian");
        String schemaName  = arg(args, 3, "public");
        String tableName   = arg(args, 4, "channel_order");
        String srcUsername = arg(args, 5, "postgres");
        String srcPassword = arg(args, 6, "qwer1234");
        String bootstrap   = arg(args, 7, "192.168.122.1:9092");

        // 2. Read change events via Flink CDC (Debezium engine underneath).
        Properties properties = new Properties();
        // Each job instance needs its own replication slot; two jobs sharing a slot will fail.
        properties.setProperty("slot.name", "flink_test");
        // Custom Debezium converter normalizing temporal columns (see the class-level TODO
        // about the 2023-01-30T16:44:22.326Z format seen during the incremental phase).
        properties.setProperty("converters", "dateConverters");
        properties.setProperty("dateConverters.type", "com.xujian.cdc.convert.DateTimeConverter");
        SourceFunction<String> pgSqlSource = PostgreSQLSource.<String>builder()
                .hostname(srcHost)
                .port(port)
                .database(srcDatabase)
                .schemaList(schemaName)
                .tableList(schemaName + "." + tableName)
                .username(srcUsername)
                .password(srcPassword)
                // "pgoutput" is PostgreSQL's built-in logical decoding plugin (10+),
                // so no server-side wal2json/decoderbufs install is needed.
                .decodingPluginName("pgoutput")
                .deserializer(new TxlcCustomerSchema())
                .debeziumProperties(properties)
                .build();
        DataStreamSource<String> streamSource = env.addSource(pgSqlSource);

        // 3. Write the change stream to Kafka (print() kept for local debugging).
        streamSource.print();
        String sinkTopic = srcDatabase + "_" + tableName + "2";
        KafkaSink<String> sink = KafkaSink.<String>builder()
                .setBootstrapServers(bootstrap)
                .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                        .setTopic(sinkTopic)
                        .setValueSerializationSchema(new SimpleStringSchema())
                        .build()
                )
                .setDeliveryGuarantee(DeliveryGuarantee.EXACTLY_ONCE)
                // A transactional-id prefix is mandatory in EXACTLY_ONCE mode.
                .setTransactionalIdPrefix(tableName)
                // 5 minutes; must not exceed the broker's transaction.max.timeout.ms
                // (Kafka broker default: 15 minutes).
                .setProperty("transaction.timeout.ms", String.valueOf(1000 * 60 * 5))
                .build();
        streamSource.sinkTo(sink);

        // 4. Launch the job.
        env.execute("FlinkCDC-pg-kafka" + tableName);
    }

    /** Returns {@code args[i]} when present and non-blank, otherwise {@code fallback}. */
    private static String arg(String[] args, int i, String fallback) {
        return (args.length > i && args[i] != null && !args[i].trim().isEmpty())
                ? args[i]
                : fallback;
    }
}