package cn.gwm.flink.streaming.sink.kafka;

import java.nio.charset.StandardCharsets;

import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.kafka.clients.consumer.ConsumerRecord;

/**
 * @Author: liangguang
 * @Date: 2022-09-01
 * @Description:
 */
@Slf4j
public class StringDeserializationSchema implements KafkaDeserializationSchema<String> {
    private final String encoding = "UTF8";
    private final boolean includeTopic;
    private final boolean includeTimestamp;

    public StringDeserializationSchema(boolean includeTopic, boolean includeTimestamp) {
        this.includeTopic = includeTopic;
        this.includeTimestamp = includeTimestamp;
    }

    @Override
    public boolean isEndOfStream(String jason) {
        return false;
    }

    @Override
    public String deserialize(ConsumerRecord<byte[], byte[]> consumerRecord) throws Exception {
        if (consumerRecord != null) {
            try {
                String value = new String(consumerRecord.value(), encoding);
                StringBuilder sb = new StringBuilder();
                if (includeTopic) {
                    String topic = consumerRecord.topic();
                    sb.append(topic).append("\t");
                }
                if (includeTimestamp) {
                    long timestamp = consumerRecord.timestamp();
                    sb.append(timestamp).append("\t");
                }
                return sb.toString() + value;
            } catch (Exception e) {
                log.error("deserialize failed : " + e.getMessage());
            }
        }
        return null;
    }

    @Override
    public TypeInformation<String> getProducedType() {
        return TypeInformation.of(String.class);
    }
}
