package com.dfd.consumer.task;

import cn.hutool.core.collection.CollectionUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.BoundHashOperations;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Scheduled task that drains message entries stored in the Redis hash {@code rocketmq-log}.
 * Each run takes a bounded batch of hash keys, processes them in parallel on the injected
 * thread pool, and deletes the processed keys from the hash only after all workers finish.
 *
 * @Author: Fengdong.Duan
 * @Date: 2023/2/13 9:36
 */
@Slf4j
@Component
public class ConsumerTask {

    /** Redis hash key holding the pending message entries. */
    private static final String LOG_HASH_KEY = "rocketmq-log";
    /** Maximum number of entries processed per scheduled run. */
    private static final int BATCH_SIZE = 1000;
    /** Number of keys handed to each worker task. */
    private static final int PARTITION_SIZE = 100;

    @Autowired
    RedisTemplate<Object, Object> redisTemplate;

    @Autowired
    ThreadPoolExecutor executor;

    /**
     * Runs once a minute: takes up to {@value #BATCH_SIZE} keys from the hash, processes
     * them in partitions of {@value #PARTITION_SIZE} on the thread pool, then removes the
     * processed keys from the hash. If the wait is interrupted, the keys are NOT deleted,
     * so unprocessed entries are retried on the next run.
     */
    @Scheduled(cron = "0 0/1 * * * ?")
    public void dealMsg() {
        BoundHashOperations<Object, String, Object> ops = redisTemplate.boundHashOps(LOG_HASH_KEY);
        // Fetch all field names currently stored in the hash.
        Set<String> allKeys = ops.keys();
        if (CollectionUtil.isEmpty(allKeys)) {
            log.info("没有数据需要处理");
            return;
        }
        // Take at most BATCH_SIZE keys for this run.
        List<String> batch = CollectionUtil.sub(allKeys, 0, BATCH_SIZE);
        // Partition the batch so each worker handles PARTITION_SIZE keys.
        List<List<String>> partitions = CollectionUtil.split(batch, PARTITION_SIZE);
        AtomicInteger processed = new AtomicInteger();
        CountDownLatch latch = new CountDownLatch(partitions.size());
        for (List<String> partition : partitions) {
            executor.execute(() -> {
                try {
                    for (String key : partition) {
                        log.info("业务处理完成，业务ID：{}", key);
                        processed.incrementAndGet();
                    }
                } finally {
                    // Count down even if a key fails, so await() cannot hang forever.
                    latch.countDown();
                }
            });
        }
        try {
            latch.await();
        } catch (InterruptedException e) {
            // Restore the interrupt flag and bail out WITHOUT deleting: some keys may
            // not have been processed yet; they will be picked up by the next run.
            Thread.currentThread().interrupt();
            log.warn("等待处理被中断，本次不删除Redis中的数据", e);
            return;
        }
        // All partitions finished — remove the processed keys from the hash.
        ops.delete((Object[]) batch.toArray(new String[0]));
        log.info("一共处理了{}条数据", processed.get());
    }
}
