package com.atguigu.gmall.realtime.common.util;

import com.atguigu.gmall.realtime.common.constant.Constant;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

/**
 * @author yhm
 * @create 2023-11-24 15:02
 */
public class FlinkSourceUtil {

    /** Utility class — static factories only; not instantiable. */
    private FlinkSourceUtil() {
    }

    /**
     * Builds a {@link KafkaSource} that reads UTF-8 string values from the given topic.
     *
     * <p>A custom {@link DeserializationSchema} is used instead of
     * {@code SimpleStringSchema}, because {@code SimpleStringSchema} throws on the
     * {@code null} payloads produced by Kafka tombstone (delete-marker) records.
     * Null/empty payloads are mapped to the empty string so downstream operators
     * never observe {@code null}.
     *
     * @param topicName Kafka topic to consume
     * @param groupId   consumer group id (supplied by the caller, never hard-coded)
     * @return a configured {@code KafkaSource<String>}
     */
    public static KafkaSource<String> getKafkaSource(String topicName, String groupId) {
        return KafkaSource.<String>builder()
                .setBootstrapServers(Constant.KAFKA_BROKERS)
                .setTopics(topicName)
                .setValueOnlyDeserializer(new DeserializationSchema<String>() {
                    @Override
                    public String deserialize(byte[] message) throws IOException {
                        // Tombstone records arrive as null; normalize null/empty to ""
                        // so no null ever flows downstream.
                        if (message == null || message.length == 0) {
                            return "";
                        }
                        // Decode explicitly as UTF-8 (never the platform default charset).
                        return new String(message, StandardCharsets.UTF_8);
                    }

                    @Override
                    public boolean isEndOfStream(String nextElement) {
                        // Unbounded Kafka stream — it never terminates.
                        return false;
                    }

                    @Override
                    public TypeInformation<String> getProducedType() {
                        return BasicTypeInfo.STRING_TYPE_INFO;
                    }
                })
                // The real-time warehouse does not need expired historical data.
                // NOTE(review): the original comment says production should use
                // OffsetsInitializer.latest(), yet earliest() is configured —
                // behavior preserved here; confirm the intended offset strategy
                // before deploying to production.
                .setStartingOffsets(OffsetsInitializer.earliest())
                // Group id must come from the caller — no hard-coded constant.
                .setGroupId(groupId)
                .build();
    }
}
