package com.fanxuankai.kafka.connect.sink.redis.consumer;

import org.apache.kafka.connect.json.JsonConverter;
import org.apache.kafka.connect.sink.SinkRecord;
import org.apache.kafka.connect.storage.Converter;
import org.springframework.data.redis.core.RedisTemplate;

import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Collections;
import java.util.Objects;

/**
 * @author fanxuankai
 */
/**
 * Consumes Kafka Connect sink records and mirrors them into a Redis hash per topic:
 * the hash key is the record's topic, the hash field is the record key's string form,
 * and the hash value is the record value serialized as schemaless JSON. A record with
 * a {@code null} value (a Kafka tombstone) deletes the corresponding hash field.
 *
 * <p>Records with a {@code null} key are rejected with an {@link NullPointerException}
 * carrying the topic name, since Redis hash fields cannot be null.
 *
 * @author fanxuankai
 */
public class JsonSinkRecordConsumer implements SinkRecordConsumer {
    private final RedisTemplate<String, Object> redisTemplate;
    /** Shared converter, configured once in the static initializer; reused for every record. */
    private static final Converter JSON_CONVERTER;

    /**
     * @param redisTemplate the template used for all hash operations; must not be null
     */
    public JsonSinkRecordConsumer(RedisTemplate<String, Object> redisTemplate) {
        // Fail at construction rather than on the first record batch.
        this.redisTemplate = Objects.requireNonNull(redisTemplate, "redisTemplate");
    }

    static {
        JSON_CONVERTER = new JsonConverter();
        // Disable the schema envelope so only the plain JSON payload is emitted.
        JSON_CONVERTER.configure(Collections.singletonMap("schemas.enable", "false"), false);
    }

    /**
     * Applies each record in the batch to Redis: puts the JSON-serialized value under
     * {@code hash[topic][key]}, or deletes that field when the record value is null.
     *
     * @param sinkRecords the batch of records to apply, in iteration order
     * @throws NullPointerException if any record has a null key
     */
    @Override
    public void accept(Collection<SinkRecord> sinkRecords) {
        for (SinkRecord sinkRecord : sinkRecords) {
            // Redis hash fields require a non-null key; fail fast with context
            // instead of an anonymous NPE from key().toString().
            Object key = Objects.requireNonNull(sinkRecord.key(),
                    () -> "record key must not be null (topic=" + sinkRecord.topic() + ")");
            String hashField = key.toString();
            if (sinkRecord.value() == null) {
                // Kafka tombstone: remove the field from the topic's hash.
                redisTemplate.opsForHash().delete(sinkRecord.topic(), hashField);
            } else {
                byte[] rawJsonPayload = JSON_CONVERTER.fromConnectData(
                        sinkRecord.topic(), sinkRecord.valueSchema(), sinkRecord.value());
                // JsonConverter emits UTF-8 encoded JSON bytes.
                String json = new String(rawJsonPayload, StandardCharsets.UTF_8);
                redisTemplate.opsForHash().put(sinkRecord.topic(), hashField, json);
            }
        }
    }
}
