package com.hou.controller;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.LongStream;

/**
 * @author: hbx
 * @create: 2024-07-30
 **/
@RestController
@RequestMapping("/api")
public class ConcurrentHashMapController {

    private static final Logger log = LoggerFactory.getLogger(ConcurrentHashMapController.class);
    /** Number of worker threads in the ForkJoinPool. */
    private static final int THREAD_COUNT = 10;
    /** Target number of map entries after the fill-up loop. */
    private static final int ITEM_COUNT = 1000;

    /**
     * Builds a {@link ConcurrentHashMap} with {@code count} entries, keyed by random
     * UUID strings, each mapped to its generation index (1..count).
     *
     * @param count number of entries to generate
     * @return a new concurrent map containing {@code count} random-keyed entries
     */
    private ConcurrentHashMap<String, Long> getData(int count) {
        return LongStream.rangeClosed(1, count)
                .boxed()
                .collect(Collectors.toConcurrentMap(i -> UUID.randomUUID().toString(), Function.identity(),
                        (o1, o2) -> o1, ConcurrentHashMap::new));
    }

    /**
     * Demonstrates that while individual ConcurrentHashMap operations are thread-safe,
     * composite check-then-act sequences (size() followed by putAll()) still need
     * external synchronization to stay atomic.
     *
     * @return the final element count (ITEM_COUNT when the lock works as intended)
     * @throws InterruptedException if interrupted while waiting for the pool to finish
     */
    @GetMapping("/hash")
    public int helloword() throws InterruptedException {
        // Start with 900 of the 1000 target elements.
        ConcurrentHashMap<String, Long> concurrentHashMap = getData(ITEM_COUNT - 100);
        log.info("init size:{}", concurrentHashMap.size());
        ForkJoinPool pool = new ForkJoinPool(THREAD_COUNT);

        // 10 parallel tasks each try to top the map up to ITEM_COUNT.
        pool.execute(() -> IntStream.rangeClosed(1, 10).parallel().forEach(i -> {
            // Lock around the composite "read size, then fill the gap" sequence —
            // without it, several threads could observe the same gap and overfill the map.
            synchronized (concurrentHashMap) {
                int gap = ITEM_COUNT - concurrentHashMap.size();
                log.info("gap size:{}", gap);
                // Fill in the missing elements (a no-op when gap is 0).
                concurrentHashMap.putAll(getData(gap));
            }
        }));
        // Wait for all tasks to complete; surface a timeout instead of ignoring it.
        pool.shutdown();
        if (!pool.awaitTermination(1, TimeUnit.HOURS)) {
            log.warn("pool did not terminate within the timeout");
        }
        // Report the final element count.
        log.info("finish size:{}", concurrentHashMap.size());

        return concurrentHashMap.size();
    }
}
