package com.atguigu.realtime.util;

import com.atguigu.realtime.common.Constant;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * @Author lzc
 * @Date 2022/5/17 14:05
 */
public class FlinkSourceUtil {

    /** Utility class — not meant to be instantiated. */
    private FlinkSourceUtil() {
    }

    /**
     * Builds a Kafka source that reads the given topic as UTF-8 strings.
     *
     * <p>The consumer is configured to start from the latest offset when no
     * committed offset exists for the group, and to read only committed
     * records ({@code read_committed}), so uncommitted transactional data is
     * never emitted downstream.
     *
     * @param groupId Kafka consumer group id
     * @param topic   topic to subscribe to
     * @return a {@link SourceFunction} producing one String per Kafka record
     */
    public static SourceFunction<String> getKafkaSource(String groupId, String topic) {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", Constant.KAFKA_BROKERS);
        props.setProperty("group.id", groupId);
        // BUG FIX: the key was misspelled as "auto.reset.offsets", which Kafka
        // silently ignores; the correct consumer config key is "auto.offset.reset".
        props.setProperty("auto.offset.reset", "latest");
        // Do not read uncommitted (transactional) data.
        props.setProperty("isolation.level", "read_committed");

        return new FlinkKafkaConsumer<String>(
            topic,
            // SimpleStringSchema throws on null payloads (tombstones); this
            // schema decodes bytes to UTF-8 and tolerates null messages.
            new DeserializationSchema<String>() {
                // Deserialize the raw record bytes into a UTF-8 string.
                @Override
                public String deserialize(byte[] message) throws IOException {
                    return message == null ? null : new String(message, StandardCharsets.UTF_8);
                }

                // Kafka is an unbounded source — the stream never ends.
                @Override
                public boolean isEndOfStream(String nextElement) {
                    return false;
                }

                // Type information of the deserialized elements.
                @Override
                public TypeInformation<String> getProducedType() {
                    return Types.STRING;
                }
            },
            props
        );
    }
}
