package com.vincent.service;

import com.vincent.util.PartitionOffsetContainer;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.CreateTopicsResult;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.KafkaFuture;
import org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.KafkaAdmin;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.listener.AcknowledgingConsumerAwareMessageListener;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.kafka.listener.config.ContainerProperties;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Service;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException;

/**
 * Created by vincent on 2018/8/3.
 */
@Service
public class KafkaService {
    private static final Logger logger = LoggerFactory.getLogger(KafkaService.class);

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;
    @Autowired
    private ConsumerFactory<String, String> consumerFactory;
    @Autowired
    private KafkaAdmin admin;

    /**
     * Publishes a single key/value record to the given topic (fire-and-forget;
     * the send result future from the template is intentionally ignored).
     *
     * @param topic destination topic
     * @param key   record key
     * @param value record value
     */
    public void send(String topic, String key, String value) {
        kafkaTemplate.send(topic, key, value);
    }

    /**
     * Creates a topic with 1 partition and replication factor 1, blocking until
     * the broker confirms creation.
     *
     * @param topic name of the topic to create
     * @throws ExecutionException   if topic creation fails on the broker
     *                              (e.g. it already exists)
     * @throws InterruptedException if interrupted while waiting for the result
     */
    public void createTopic(String topic) throws ExecutionException, InterruptedException {
        // AdminClient is AutoCloseable; without try-with-resources its network
        // threads and sockets leak on every call (the original leaked it).
        try (AdminClient client = AdminClient.create(admin.getConfig())) {
            NewTopic newTopic = new NewTopic(topic, 1, (short) 1);
            CreateTopicsResult result = client.createTopics(Collections.singletonList(newTopic));
            // Block until the broker acknowledges creation (or throw on failure).
            result.values().get(topic).get();
        }
    }

    /**
     * Starts a message listener container on the given topic/group that logs each
     * record and experiments with batching manual offset commits in groups of 3.
     *
     * @param topic   topic to subscribe to
     * @param groupId consumer group id for the subscription
     */
    public void receive(String topic, String groupId) {
        ContainerProperties containerProperties = new ContainerProperties(topic);
        containerProperties.setGroupId(groupId);
        containerProperties.setMessageListener(new AcknowledgingConsumerAwareMessageListener<String, String>() {
            @Override
            public void onMessage(ConsumerRecord<String, String> data, Acknowledgment acknowledgment, Consumer<?, ?> consumer) {
                try {
                    logger.info("key-{} value-{} offset={}", data.key(), data.value(), data.offset());
                    TopicPartition topicPartition = new TopicPartition(data.topic(), data.partition());
                    // NOTE(review): committing data.offset() marks THIS record as
                    // not-yet-consumed; Kafka convention is to commit offset + 1,
                    // otherwise the last record is redelivered after a rebalance.
                    // Left as-is because the commit below is commented out.
                    OffsetAndMetadata offsetAndMetadata = new OffsetAndMetadata(data.offset());
                    Map<TopicPartition, OffsetAndMetadata> map = new HashMap<>();
                    logger.info("test=={}", data.offset() % 3);
                    if ((data.offset() % 3) == 0) {
                        // Deliberately skip recording an offset for every 3rd
                        // offset, to test recovery when a commit is missing.
                        logger.info("不提交offset测试");
                    } else {
                        map.put(topicPartition, offsetAndMetadata);
                    }
                    // NOTE(review): PartitionOffsetContainer.list is static shared
                    // state; with multiple containers this batching is racy.
                    PartitionOffsetContainer.list.add(map);
                    if (PartitionOffsetContainer.list.size() == 3) {
                        logger.info("commit offset:{}", PartitionOffsetContainer.list);
                        //PartitionOffsetContainer.list.forEach(consumer::commitSync);
                        PartitionOffsetContainer.list.clear();
                    }
                } catch (Exception e) {
                    // Swallow per-record failures so one bad record does not
                    // kill the listener thread.
                    logger.error("error", e);
                }
            }
        });
        // NOTE(review): the container reference is discarded after start(), so it
        // can never be stopped; consider keeping it in a field or registry.
        KafkaMessageListenerContainer<String, String> container =
                new KafkaMessageListenerContainer<>(consumerFactory, containerProperties);
        container.setBeanName("testAuto");
        container.start();
    }

}
