package com.ghp.admin.job.cycle;

import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.ghp.admin.service.InterfaceInfoService;
import com.ghp.common.model.entity.InterfaceInfo;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.redis.core.HashOperations;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static com.ghp.common.constants.RedisConstants.INTERFACE_PATH_METHOD_KEY;

/**
 * Incrementally synchronizes interface data to Redis.
 *
 * <p>Every {@link #SYNC_WINDOW_MINUTES} minutes, loads the interfaces whose
 * {@code updateTime} falls within the last window and writes them into the
 * Redis hash keyed by {@code INTERFACE_PATH_METHOD_KEY}, mapping
 * {@code url + "-" + method} to the interface id.
 *
 * @author ghp
 */
@Component
@Slf4j
public class IncrementalSynchronizationDataToRedis {

    /**
     * Sync window in minutes. Used for BOTH the schedule rate and the
     * query look-back so the two can never drift apart.
     */
    private static final long SYNC_WINDOW_MINUTES = 3;

    @Resource
    private InterfaceInfoService interfaceInfoService;

    // Parameterized instead of raw-typed to avoid unchecked operations.
    @Resource
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Runs every {@link #SYNC_WINDOW_MINUTES} minutes: queries the rows
     * updated within the last window and bulk-writes them to the Redis hash.
     */
    @Scheduled(fixedRate = SYNC_WINDOW_MINUTES * 60 * 1000)
    public void run() {
        log.info("=============================Start:开始同步用户接口关系数据=============================");
        // Fetch only rows touched within the sync window (incremental sync).
        List<InterfaceInfo> interfaceInfoList = interfaceInfoService.list(new LambdaQueryWrapper<InterfaceInfo>()
                .ge(InterfaceInfo::getUpdateTime, LocalDateTime.now().minusMinutes(SYNC_WINDOW_MINUTES)));
        if (interfaceInfoList.isEmpty()) {
            // Nothing changed; HMSET with an empty map would throw, so stop here.
            log.info("=============================End:成功同步用户接口关系数据=============================");
            return;
        }
        // Key = "url-method", value = interface id. The merge function keeps
        // the latest row on duplicate keys — without it Collectors.toMap
        // throws IllegalStateException and aborts the whole sync.
        Map<String, Long> interfaceMap = interfaceInfoList.stream().collect(Collectors.toMap(
                interfaceInfo -> interfaceInfo.getUrl() + '-' + interfaceInfo.getMethod(),
                InterfaceInfo::getId,
                (existing, replacement) -> replacement
        ));
        // Single bulk write (HMSET) instead of one round-trip per entry.
        HashOperations<String, String, Long> hashOps = redisTemplate.opsForHash();
        hashOps.putAll(INTERFACE_PATH_METHOD_KEY, interfaceMap);
        log.info("=============================End:成功同步用户接口关系数据=============================");
    }
}
