package com.benchmark.springboot.controller;

import com.power.common.model.CommonResult;
import com.power.common.util.UUIDUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;
import java.util.stream.IntStream;

/**
 * Demonstrates correct and incorrect concurrent usage of {@link ConcurrentHashMap}.
 *
 * @author yu 2020/11/05
 */
@Slf4j
@RestController
@RequestMapping("concurrentHashMap")
public class ConcurrentHashMapController {

    /** Target number of entries the map should contain after filling. */
    private static final int ITEM_COUNT = 1000;

    /** Pool parallelism and number of concurrent filler tasks. */
    private static final int THREAD_COUNT = 10;

    /**
     * Incorrect usage example.
     *
     * @return the final map size wrapped in a {@code CommonResult}
     * @throws InterruptedException if interrupted while waiting for the pool to finish
     * @apiNote Multiple threads concurrently top up a ConcurrentHashMap toward
     * ITEM_COUNT entries. Each task reads the current size, computes the remaining
     * gap, and calls putAll to fill it. Because the check-then-act sequence
     * (size() followed by putAll) is not atomic, several tasks may observe the
     * same gap and over-fill the map — aggregate methods such as putAll do not
     * make a compound operation atomic, so the final size typically exceeds
     * ITEM_COUNT.
     */
    @GetMapping("wrong")
    public CommonResult<String> wrong() throws InterruptedException {
        // Start 100 entries short of the target.
        ConcurrentHashMap<String, Long> concurrentHashMap = getData(ITEM_COUNT - 100);
        log.info("init size:{}", concurrentHashMap.size());
        ForkJoinPool forkJoinPool = new ForkJoinPool(THREAD_COUNT);
        // Submitting via execute() makes the parallel stream run inside this custom pool.
        forkJoinPool.execute(() -> IntStream.rangeClosed(1, THREAD_COUNT).parallel().forEach(i -> {
            // Non-atomic read: how many elements are still missing.
            int gap = ITEM_COUNT - concurrentHashMap.size();
            log.info("gap size:{}", gap);
            // Fill the gap; concurrent tasks may each see the same gap and over-fill.
            concurrentHashMap.putAll(getData(gap));
        }));
        // Wait for all tasks to complete.
        forkJoinPool.shutdown();
        forkJoinPool.awaitTermination(1, TimeUnit.HOURS);

        log.info("finish size:{}", concurrentHashMap.size());
        return CommonResult.ok().setResult("finish size:" + concurrentHashMap.size());
    }

    /**
     * Correct usage example.
     *
     * @return the final map size wrapped in a {@code CommonResult}
     * @throws InterruptedException if interrupted while waiting for the pool to finish
     * @apiNote Same scenario as {@link #wrong()}, but the size check and the
     * putAll are wrapped in a synchronized block on the map, making the compound
     * check-then-act operation atomic so the map ends up with exactly ITEM_COUNT
     * entries. (Locking externally like this trades away ConcurrentHashMap's
     * lock-free read performance for correctness of the compound operation.)
     */
    @GetMapping("right")
    public CommonResult<String> right() throws InterruptedException {
        // Start 100 entries short of the target.
        ConcurrentHashMap<String, Long> concurrentHashMap = getData(ITEM_COUNT - 100);
        log.info("init size:{}", concurrentHashMap.size());
        ForkJoinPool forkJoinPool = new ForkJoinPool(THREAD_COUNT);
        forkJoinPool.execute(() -> IntStream.rangeClosed(1, THREAD_COUNT).parallel().forEach(i -> {
            // Lock the whole check-then-act sequence so it executes atomically.
            synchronized (concurrentHashMap) {
                // How many elements are still missing.
                int gap = ITEM_COUNT - concurrentHashMap.size();
                log.info("gap size:{}", gap);
                // Fill exactly the observed gap.
                concurrentHashMap.putAll(getData(gap));
            }
        }));
        // Wait for all tasks to complete.
        forkJoinPool.shutdown();
        forkJoinPool.awaitTermination(1, TimeUnit.HOURS);

        log.info("finish size:{}", concurrentHashMap.size());
        return CommonResult.ok().setResult("finish size:" + concurrentHashMap.size());
    }


    /**
     * Builds a map of {@code size} random-keyed entries.
     * A non-positive size yields an empty map (the loop body never runs),
     * which keeps callers safe if a computed gap happens to be negative.
     *
     * @param size number of entries to generate
     * @return a new ConcurrentHashMap with {@code size} entries keyed by random UUIDs
     */
    private ConcurrentHashMap<String, Long> getData(int size) {
        ConcurrentHashMap<String, Long> map = new ConcurrentHashMap<>();
        for (int i = 0; i < size; i++) {
            map.put(UUIDUtil.getUuid32(), (long) i);
        }
        return map;
    }
}
