package xman.com.xmancloudserver.kafka;

import cn.hutool.core.collection.CollectionUtil;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.redisson.api.RLock;
import org.redisson.api.RedissonClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicLong;

/**
 * @Author shu
 * @Date: 2021/10/29/ 19:17
 * @Description 分布式锁解决kafka消费重复的问题
 **/
@Component
public class RedissonKafka {


    // Timestamp captured once at startup; shared by both scheduled methods.
    // NOTE(review): this value is never updated after construction, so once the
    // initial delay has elapsed the elapsed-time guards below always pass — confirm
    // whether it was meant to be refreshed on each received message.
    private static final AtomicLong lastRecieveMessage = new AtomicLong(System.currentTimeMillis());
    // Used both as the Kafka message key and as the Redis bucket name that tracks offsets.
    // (Replaces the hard-coded "key" literals that previously ignored this constant.)
    private static final String KEY_PREFIX = "key";
    // Buffer of received records: filled by the Kafka listener, drained by Consumer().
    private final ConcurrentLinkedQueue<ConsumerRecord<String, String>> concurrentLinkedQueue = new ConcurrentLinkedQueue<>();
    // JSON codec; Gson instances are thread-safe and may be shared.
    private final Gson gson = new GsonBuilder().create();
    // Kafka producer template.
    @Resource
    private KafkaTemplate<Object, Object> kafkaTemplate;
    // Redisson client providing the distributed lock and offset buckets.
    @Resource
    private RedissonClient redissonClient;
    // Supplies the topic name shared by producer and listener.
    @Resource
    private TopicHandler topicHandler;


    /**
     * Message consumer. The cron expression {@code 0/30 * * * * ?} fires every
     * 30 <em>seconds</em> (the original comment incorrectly said 30 minutes).
     * Drains at most one buffered record per run, processing it under a Redisson
     * distributed lock to avoid duplicate consumption across instances, then
     * stores the next expected offset for the record's key back into Redis.
     */
    @Scheduled(cron = "0/30 * * * * ?")
    public void Consumer() {
        long last = lastRecieveMessage.get();
        long current = System.currentTimeMillis();
        if ((current - last) > (30 * 1000)) {
            // Initialize the offset-tracking bucket.
            redissonClient.getBucket(KEY_PREFIX).set(1);
            if (CollectionUtil.isEmpty(concurrentLinkedQueue)) {
                return;
            }
            ConsumerRecord<String, String> consumerRecord = concurrentLinkedQueue.poll();
            if (consumerRecord == null) {
                // Queue was drained between the emptiness check and poll()
                // (the queue is concurrent, so this race is possible).
                return;
            }
            Object bucketValue = redissonClient.getBucket(consumerRecord.key()).get();
            if (bucketValue == null) {
                // No bucket value means no lock name to coordinate on; previously
                // this dereference would have thrown a NullPointerException.
                return;
            }
            RLock lock = redissonClient.getLock(bucketValue.toString());
            lock.lock();
            try {
                MsgInfo info = gson.fromJson(consumerRecord.value(), MsgInfo.class);
                System.out.println("消息：" + info);
                // Record the next expected offset for this key.
                redissonClient.getBucket(consumerRecord.key()).set(consumerRecord.offset() + 1);
            } finally {
                // Always release the lock, even if JSON deserialization throws —
                // previously an exception here would have leaked the lock.
                lock.unlock();
            }
        }
    }


    /**
     * Message producer. The cron expression {@code 0/1 * * * * ?} fires every
     * <em>second</em> (the original comment incorrectly said 30 s). Once the
     * startup delay has elapsed it publishes a {@link MsgInfo} to the configured
     * topic under the shared key.
     */
    @Scheduled(cron = "0/1 * * * * ? ")
    public void Provide() {
        long last = lastRecieveMessage.get();
        long current = System.currentTimeMillis();
        if ((current - last) > (1 * 1000)) {
            MsgInfo msgInfo = new MsgInfo(current - last, "消息服务", last, new Date());
            kafkaTemplate.send(topicHandler.getTopic(), KEY_PREFIX, gson.toJson(msgInfo));
        }

    }


    /**
     * Single-record Kafka listener. Buffers each record for the scheduled
     * {@link #Consumer()} to process, then acknowledges it manually.
     *
     * @param record the received Kafka record
     * @param ack    manual acknowledgment handle (ack mode must be MANUAL)
     */
    @KafkaListener(topics = "#{topicHandler.getTopic()}", groupId = "MyGroup1")
    public void listenGroup(ConsumerRecord<String, String> record, Acknowledgment ack) {
        concurrentLinkedQueue.offer(record);
        System.out.println("concurrentLinkedQueue size: " + concurrentLinkedQueue.size());
        ack.acknowledge();
    }

}

