package com.xkh.hadoop.controller;

import cn.hutool.core.lang.Snowflake;
import cn.hutool.core.util.IdUtil;
import com.xkh.hadoop.util.RedisLock;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * Demo controller that stress-tests a Redis-backed distributed lock: many
 * concurrent threads each acquire the lock, increment a shared counter, and
 * release it. If the lock works, the final count equals the thread count.
 *
 * @author xkh
 * @since 2021/6/29 0029
 */
@RestController
@RequestMapping("redis/hadoop")
@Api(tags = "redis分布式锁模块")
@Slf4j
public class RedisHadoopController {

    /** Number of simulated concurrent clients per request. */
    private static final int CLIENT_COUNT = 1000;

    /**
     * Single shared Snowflake generator (worker id 1, datacenter id 1).
     * Hoisted to a field: creating a new Snowflake per task (as the original
     * code did) can hand out duplicate IDs when two instances are built in the
     * same millisecond, breaking the uniqueness the lock value relies on.
     */
    private static final Snowflake SNOWFLAKE = IdUtil.getSnowflake(1, 1);

    @Autowired
    private RedisLock redisLock;

    // Shared counter; increments are only safe because each happens while the
    // distributed lock is held. await() establishes happens-before for the read.
    int count = 0;

    /**
     * Runs {@value #CLIENT_COUNT} concurrent lock/increment/unlock cycles and
     * logs the elapsed time and final counter value.
     *
     * @throws InterruptedException if the calling thread is interrupted while
     *                              waiting for the workers to finish
     */
    @GetMapping("/index")
    @ApiOperation("redis分布式锁")
    public void index() throws InterruptedException {
        count = 0;
        int clientCount = CLIENT_COUNT;
        CountDownLatch countDownLatch = new CountDownLatch(clientCount);
        ExecutorService executorService = Executors.newFixedThreadPool(clientCount);
        long start = System.currentTimeMillis();
        try {
            for (int i = 0; i < clientCount; i++) {
                executorService.execute(() -> {
                    // Unique lock value/key from the shared Snowflake generator.
                    String id = SNOWFLAKE.nextIdStr();
                    try {
                        // NOTE(review): tryLock's result is ignored here — if it
                        // returns a success flag, a failed acquisition would still
                        // increment count. Confirm RedisLock's contract.
                        redisLock.tryLock(id, id, 60 * 1000);
                        count++;
                    } finally {
                        try {
                            redisLock.unLock(id, id);
                        } finally {
                            // Count down in a finally so an exception in tryLock,
                            // the increment, or unLock can never leave await()
                            // blocked forever (the original counted down outside
                            // the try/finally and could hang on failure).
                            countDownLatch.countDown();
                        }
                    }
                });
            }
            countDownLatch.await();
        } finally {
            // The original leaked a 1000-thread pool on every request.
            executorService.shutdown();
        }
        long end = System.currentTimeMillis();
        // Presumably gives asynchronous unlock/Redis work time to settle before
        // logging — TODO confirm this pause is still needed.
        Thread.sleep(5000);
        log.info("执行线程数:{},总耗时:{},count数为:{}", clientCount, end - start, count);

    }

}
