package cn.com.itg.function.process;

import cn.com.itg.pojo.kafka.KafkaConsumerRecord;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.Headers;

import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;

/**
 * Deserializes raw Kafka {@code byte[]} keys, values, and headers into a
 * {@link KafkaConsumerRecord} (key, value, timestamp, header map) for a Flink source.
 *
 * <p>All byte-to-String conversions use UTF-8 explicitly; relying on
 * {@code new String(byte[])} would pick up the platform-default charset and
 * behave differently across environments. Null keys, null header values, and
 * null record values (Kafka tombstones) are preserved as {@code null} rather
 * than throwing {@link NullPointerException}.
 */
public class PaperKafkaSchema implements KafkaDeserializationSchema<KafkaConsumerRecord>, Serializable {

    @Override
    public boolean isEndOfStream(KafkaConsumerRecord record) {
        // The Kafka stream is unbounded; no element ever marks end-of-stream.
        return false;
    }

    /**
     * Converts one consumed Kafka record into a {@link KafkaConsumerRecord}.
     *
     * @param consumerRecord the raw record; its key, value, and header values may be null
     * @return a record with UTF-8 decoded key/value (null-safe) plus a header map and timestamp
     */
    @Override
    public KafkaConsumerRecord deserialize(ConsumerRecord<byte[], byte[]> consumerRecord) throws Exception {
        Headers headers = consumerRecord.headers();
        HashMap<String, String> headerMap = new HashMap<>();
        for (Header header : headers) {
            headerMap.put(header.key(), bytesToUtf8(header.value()));
        }
        String key = bytesToUtf8(consumerRecord.key());
        // Tombstone records carry a null value; previously this threw NPE.
        String value = bytesToUtf8(consumerRecord.value());
        return new KafkaConsumerRecord(key, value, consumerRecord.timestamp(), headerMap);
    }

    /** Null-safe UTF-8 decoding of a byte array. */
    private static String bytesToUtf8(byte[] bytes) {
        return bytes == null ? null : new String(bytes, StandardCharsets.UTF_8);
    }

    @Override
    public TypeInformation<KafkaConsumerRecord> getProducedType() {
        // Lets Flink derive serializers for the produced POJO type.
        return TypeInformation.of(KafkaConsumerRecord.class);
    }
}
