package com.tdy.cdc.util;

import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

/**
 * @author NanHuang
 * @Date 2023/1/24
 */
public final class FlinkSourceUtil {

    /** Utility class — static factories only, not meant to be instantiated. */
    private FlinkSourceUtil() {
    }

    /**
     * Builds a {@link KafkaSource} that reads raw String values from the given topic.
     *
     * <p>Values are decoded as UTF-8; {@code null} record values are passed through as
     * {@code null}. Consumption starts from the latest offsets and only reads records
     * from committed transactions ({@code read_committed}).
     *
     * @param topic   the Kafka topic to consume
     * @param groupId the Kafka consumer group id
     * @return a configured {@link KafkaSource} producing String records
     */
    public static KafkaSource<String> getKafkaSource(String topic, String groupId){
        return KafkaSource.<String>builder()
                // Broker list is resolved from the project configuration file.
                .setBootstrapServers(ConfigUtil.getProperty("kafka.bootstrap.servers"))
                .setTopics(topic)
                .setGroupId(groupId)
                // Start reading from the latest offsets.
                .setStartingOffsets(OffsetsInitializer.latest())
                // Null-safe, UTF-8 value-only deserializer.
                .setValueOnlyDeserializer(new DeserializationSchema<String>() {
                    @Override
                    public String deserialize(byte[] bytes) throws IOException {
                        // Explicit charset: avoids platform-default decoding surprises.
                        return bytes == null ? null : new String(bytes, StandardCharsets.UTF_8);
                    }

                    @Override
                    public boolean isEndOfStream(String s) {
                        return false; // unbounded stream — never ends
                    }

                    @Override
                    public TypeInformation<String> getProducedType() {
                        return TypeInformation.of(String.class);
                    }
                })
                // NOTE(review): the new KafkaSource commits offsets on Flink checkpoints itself;
                // enable.auto.commit here only affects group offsets visible to external tooling
                // when checkpointing is off — confirm this is intended.
                .setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,"true")
                .setProperty(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG,"500")
                // Only read records from committed transactions (for exactly-once producers).
                .setProperty(ConsumerConfig.ISOLATION_LEVEL_CONFIG,"read_committed")
                .build();
    }

    /**
     * Builds the SQL {@code WITH (...)} clause for a Flink Kafka table definition.
     *
     * @param topic   the Kafka topic to consume
     * @param groupId the Kafka consumer group id
     * @param format  optional record format; only the first element is used, defaults to
     *                {@code "json"} when none is supplied
     * @return the {@code with (...)} clause as a String
     */
    public static String getKafkaWith(String topic,String groupId,String... format) {
        // Default to JSON unless an explicit format is supplied.
        String formatValue = format.length == 0 ? "json" : format[0];
        return "with (" +
                "   'connector'='kafka'," +
                // NOTE(review): reads "kafka.bootstrap.servers.test" while getKafkaSource reads
                // "kafka.bootstrap.servers" — confirm the two keys are intentionally different.
                "   'properties.bootstrap.servers'='" + ConfigUtil.getProperty("kafka.bootstrap.servers.test") + "'," +
                "   'properties.group.id'='" + groupId +  "'," +
                // Table source starts from the earliest offsets (unlike getKafkaSource's latest).
                "   'scan.startup.mode'='earliest-offset'," +
                "   'topic'='" + topic + "'," +
                "   'format'='" + formatValue + "'" +
                ")";
    }
}
