package com.yq.canalkafka.consumer;

import com.alibaba.otter.canal.client.kafka.protocol.KafkaMessage;
import com.alibaba.otter.canal.protocol.Message;
import com.google.common.collect.Lists;
import com.yq.canalkafka.service.CanalDealService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;

import java.util.ArrayList;
import java.util.List;

/**
 * Kafka listener that consumes Alibaba Canal binlog {@code Message}s in batches
 * and delegates processing of each batch entry to {@code CanalDealService}.
 *
 * @author shizan
 * @date 2020/11/23 14:03
 */
@Component
@Slf4j
public class KafKaConsumer {
    @Autowired
    private CanalDealService canalDealService;

    /**
     * Consumes a batch of Canal binlog records from Kafka and hands every
     * non-empty message to {@link CanalDealService#printEntries}.
     * <p>
     * The whole batch — including the record-to-{@link KafkaMessage}
     * transformation — runs inside the try block so that the offset is
     * acknowledged in {@code finally} no matter where a failure occurs.
     * (Previously {@code transformMessage} ran before the try: an exception
     * there skipped the ack and the same batch was redelivered forever.)
     *
     * @param records raw consumer records delivered by the batch container
     * @param ack     manual-ack handle; always acknowledged, even on failure,
     *                so a poison batch cannot block the partition
     */
    @KafkaListener(topics = {"${spring.kafka.consumer.topic}"},
            groupId = "${spring.kafka.consumer.properties.group-id}",
            containerFactory = "batchFactory")
    public void listen(List<ConsumerRecord<String, Message>> records, Acknowledgment ack) {
        try {
            List<KafkaMessage> messages = transformMessage(records);
            for (KafkaMessage message : messages) {
                long batchId = message.getId();
                int size = message.getEntries().size();
                // batchId == -1 or an empty entry list means there is nothing to process.
                if (batchId == -1 || size == 0) {
                    continue;
                }
                canalDealService.printEntries(message);
            }
        } catch (Exception e) {
            log.error("kafka消息消费失败", e);
        } finally {
            // Commit the offset unconditionally; failed batches are logged, not retried.
            // https://blog.csdn.net/qq330983778/article/details/105937689/
            ack.acknowledge();
        }
    }

    /**
     * Converts raw Kafka consumer records into Canal {@link KafkaMessage}s,
     * pairing each record's {@code Message} payload with its offset.
     *
     * @param records records from the listener batch; may be empty
     * @return one {@code KafkaMessage} per record, in order; never {@code null}
     */
    private List<KafkaMessage> transformMessage(List<ConsumerRecord<String, Message>> records) {
        List<KafkaMessage> messages = new ArrayList<>(records.size());
        for (ConsumerRecord<String, Message> record : records) {
            messages.add(new KafkaMessage(record.value(), record.offset()));
        }
        return messages;
    }

}
