package com.group1.realtime.util;

import com.group1.realtime.common.Constant;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.kafka.clients.consumer.ConsumerRecord;

import java.nio.charset.StandardCharsets;
import java.util.Properties;

public class FlinkSourceUtil {

    /** Utility class — static factory methods only; not instantiable. */
    private FlinkSourceUtil() {
    }

    /**
     * Builds a Flink source that consumes the given Kafka topic as UTF-8 strings.
     *
     * <p>The consumer reads only committed records ({@code isolation.level=read_committed}),
     * so it cooperates with exactly-once transactional producers upstream. Empty or
     * tombstone record values are mapped to {@code null}, which Flink silently skips.
     *
     * @param groupId Kafka consumer group id for offset tracking
     * @param topic   topic to subscribe to
     * @return an unbounded {@link SourceFunction} emitting one string per Kafka record
     */
    public static SourceFunction<String> getKafkaSource(String groupId,
                                                        String topic) {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", Constant.KAFKA_BROKERS);
        props.setProperty("group.id", groupId);
        // Read only committed transactional records (exactly-once semantics upstream).
        props.setProperty("isolation.level", "read_committed");

        return new FlinkKafkaConsumer<>(
                topic,
                new KafkaDeserializationSchema<String>() {
                    // The stream is unbounded; never signal end-of-stream.
                    @Override
                    public boolean isEndOfStream(String s) {
                        return false;
                    }

                    // Decode the record value as UTF-8. Null/empty values (e.g. Kafka
                    // tombstones) yield null, which Flink drops rather than emitting.
                    @Override
                    public String deserialize(ConsumerRecord<byte[], byte[]> record) throws Exception {
                        byte[] value = record.value();

                        if (value != null && value.length > 0) {
                            return new String(value, StandardCharsets.UTF_8);
                        }
                        return null;
                    }

                    @Override
                    public TypeInformation<String> getProducedType() {
                        return Types.STRING;
                    }
                },
                props
        );
    }
}
