package com.example.producer;

import com.example.constant.RedisConstant;
import com.example.util.redis.RedisUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.connection.stream.MapRecord;
import org.springframework.data.redis.connection.stream.RecordId;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.StreamOperations;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

/**
 * Scheduled job that periodically trims the Redis comment stream
 * ({@code RedisConstant.COMMENT_KEY}) so it retains at most 3000 entries.
 *
 * @author LiYuhang
 * @version 0.1
 * @Date 2021/6/2 09:57
 */
@Component
@Slf4j
public class CleanStreamJob {

    /** Maximum number of entries to retain in the comment stream. */
    private static final long MAX_STREAM_LENGTH = 3000L;

    @Autowired
    private RedisTemplate redisTemplate;


    /**
     * Scheduled cleanup: runs every 5 seconds on the {@code forkJoinPool}
     * async executor and trims the Redis comment stream down to the most
     * recent {@value #MAX_STREAM_LENGTH} entries.
     */
    @Scheduled(cron = "0/5 * * * * ?")
    @Async(value = "forkJoinPool")
    public void reportCurrentTime() {
        // Periodically clean the stream, keeping only the newest 3000 records.
        StreamOperations<String, String, String> streamOperations =
                RedisUtil.getStringStringStringStreamOperations(redisTemplate);

        Long size = streamOperations.size(RedisConstant.COMMENT_KEY);
        // XLEN may return null (e.g. when executed inside a pipeline or
        // transaction) — guard before unboxing to avoid an NPE.
        if (size == null) {
            log.warn("key:{} size unavailable, skipping trim", RedisConstant.COMMENT_KEY);
            return;
        }
        log.info("key:{},size:{}", RedisConstant.COMMENT_KEY, size);
        if (size > MAX_STREAM_LENGTH) {
            Long trim = streamOperations.trim(RedisConstant.COMMENT_KEY, MAX_STREAM_LENGTH);
            log.warn("key:{}, size:{},trim:{}", RedisConstant.COMMENT_KEY, size, trim);
        }

    }
}
