package com.hk.controller;

import com.hk.service.business.ConcurrencyBusiness;
import com.hk.utils.R;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.StopWatch;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.*;
import java.util.concurrent.*;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.LongStream;

/**
 * Concurrency demo controller.
 *
 * <p>Exposes endpoints that demonstrate common concurrent-collection pitfalls and
 * their fixes: the non-atomicity of check-then-act sequences on a
 * {@link ConcurrentHashMap} ({@code /wrong} vs {@code /right}), counter aggregation
 * strategies ({@code /good}), and the read/write performance trade-off of
 * {@link CopyOnWriteArrayList} vs {@code Collections.synchronizedList}
 * ({@code /write}, {@code /read}). Each variant is timed with Spring's StopWatch.
 *
 * @author dpb
 * @date 2022/11/25
 */
@RestController
@RequestMapping("current")
@Slf4j
public class ConcurrencyController {

    /** Number of worker threads for the ForkJoinPool demos. */
    private static final int THREAD_COUNT = 10;

    /** Target number of map entries the top-up demos try to reach. */
    private static final int ITEM_COUNT = 1000;

    /** Number of parallel top-up attempts per demo run. */
    private static final int LOOP_COUNT = 10;

    // NOTE(review): removed the unused static StopWatch field — it was shadowed by a
    // local in every method, and StopWatch is not thread-safe, so sharing one across
    // requests would have been a bug anyway. Likewise removed THREAD_COUNT_2 and
    // ITEM_COUNT_2, which were referenced only from commented-out code.

    @Autowired
    private ConcurrencyBusiness concurrencyBusiness;

    /**
     * Builds a {@link ConcurrentHashMap} with {@code count} random-UUID keys, each
     * mapped to its (boxed) sequence number.
     *
     * @param count number of entries to generate; values {@code < 1} yield an empty map
     * @return a new ConcurrentHashMap of size {@code max(count, 0)}
     */
    private ConcurrentHashMap<String, Long> getData(int count) {
        return LongStream.rangeClosed(1, count)
                .boxed()
                .collect(Collectors.toConcurrentMap(i -> UUID.randomUUID().toString(), Function.identity(),
                        (o1, o2) -> o1, ConcurrentHashMap::new));
    }

    /**
     * Demonstrates the BUG (intentionally kept): {@code size()} + {@code putAll()} on a
     * ConcurrentHashMap is a check-then-act sequence that is NOT atomic, even though
     * each individual operation is thread-safe.
     *
     * @return "OK" when all tasks have completed
     * @throws InterruptedException if interrupted while awaiting pool termination
     */
    @GetMapping("wrong")
    public String wrong() throws InterruptedException {
        // Seed the map with 900 of the target 1000 entries.
        ConcurrentHashMap<String, Long> concurrentHashMap = getData(ITEM_COUNT - 100);
        log.info("init size:{}", concurrentHashMap.size());

        ForkJoinPool forkJoinPool = new ForkJoinPool(THREAD_COUNT);
        forkJoinPool.execute(() -> IntStream.rangeClosed(1, LOOP_COUNT).parallel().forEach(i -> {
            // Race: several threads can read the same gap and each insert that many
            // entries, so the final size usually overshoots ITEM_COUNT.
            int gap = ITEM_COUNT - concurrentHashMap.size();
            log.info("gap size:{}", gap);
            // Top up the missing entries (a negative gap yields an empty map — harmless).
            concurrentHashMap.putAll(getData(gap));
        }));

        // Wait for all tasks to finish.
        forkJoinPool.shutdown();
        forkJoinPool.awaitTermination(1, TimeUnit.HOURS);

        // Will the final size be exactly 1000? Usually not — that is the demo.
        log.info("finish size:{}", concurrentHashMap.size());

        return "OK";
    }

    /**
     * The FIX for {@link #wrong()}: the size check and the {@code putAll()} are made
     * atomic by locking the map, so the final size is exactly {@code ITEM_COUNT}.
     *
     * @return "OK" when all tasks have completed
     * @throws InterruptedException if interrupted while awaiting pool termination
     */
    @GetMapping("right")
    public String right() throws InterruptedException {
        StopWatch stopWatch = new StopWatch();
        ConcurrentHashMap<String, Long> concurrentHashMap = getData(ITEM_COUNT - 100);
        // Seed the map with 900 of the target 1000 entries.
        log.info("init size:{}", concurrentHashMap.size());

        // Fix: the watch was previously never started and prettyPrint() ran before the
        // pool finished, logging an empty/meaningless report.
        stopWatch.start("rightTopUp");
        ForkJoinPool forkJoinPool = new ForkJoinPool(THREAD_COUNT);
        forkJoinPool.execute(() -> IntStream.rangeClosed(1, LOOP_COUNT).parallel().forEach(i -> {
            // Lock the map so the gap computation and the putAll execute atomically.
            synchronized (concurrentHashMap) {
                int gap = ITEM_COUNT - concurrentHashMap.size();
                log.info("gap size:{}", gap);
                // Top up the missing entries.
                concurrentHashMap.putAll(getData(gap));
            }
        }));

        // Wait for all tasks to finish, then report the measured time.
        forkJoinPool.shutdown();
        forkJoinPool.awaitTermination(1, TimeUnit.HOURS);
        stopWatch.stop();
        log.info(stopWatch.prettyPrint());

        // Final size is now exactly 1000.
        log.info("finish size:{}", concurrentHashMap.size());
        return "OK";
    }

    /**
     * Counts key occurrences in a map.
     *
     * <p>NOTE(review): still a stub — returns an empty success response only.
     *
     * @return an empty success response
     * @throws InterruptedException declared for future concurrent implementation
     */
    @GetMapping("/countKey")
    public R countKey() throws InterruptedException {
        return R.ok();
    }

    /**
     * Benchmarks the two counter implementations exposed by
     * {@link ConcurrencyBusiness}: {@code normaluse()} vs {@code goodUse()}, and logs
     * the StopWatch comparison.
     *
     * @return an empty success response; timings are written to the log
     * @throws Exception propagated from the business layer
     */
    @GetMapping("/good")
    public R goodUse() throws Exception {
        StopWatch stopWatch = new StopWatch();
        stopWatch.start("normaluse");
        Map<String, Long> normaluses = concurrencyBusiness.normaluse();
        stopWatch.stop();
        log.info("normaluse size:{}", normaluses.size());

        stopWatch.start("goodUse");
        Map<String, Long> goodUse = concurrencyBusiness.goodUse();
        stopWatch.stop();
        log.info("goodUse size:{}", goodUse.size());

        log.info(stopWatch.prettyPrint());
        return R.ok();
    }

    /**
     * Fills {@code list} with the integers 1..1,000,000 — fixture data for the read
     * benchmark.
     *
     * @param list the list to append to
     */
    private void addAll(List<Integer> list) {
        list.addAll(IntStream.rangeClosed(1, 1000000).boxed().collect(Collectors.toList()));
    }

    /**
     * Benchmarks concurrent WRITES: {@link CopyOnWriteArrayList} copies its whole
     * backing array on every {@code add}, so it is dramatically slower than a
     * synchronized list under write-heavy load.
     *
     * @return final sizes of both lists (each should equal the loop count)
     */
    @GetMapping("/write")
    public Map<String, Integer> testWrite() {
        CopyOnWriteArrayList<Integer> copyOnWriteArrayList = new CopyOnWriteArrayList<>();
        List<Integer> synchronizedList = Collections.synchronizedList(new ArrayList<>());
        StopWatch stopWatch = new StopWatch();

        int loopCount = 100000;
        // Fix: task name was missing its leading 'c' ("write:opyOnWriteArrayList").
        stopWatch.start("write:copyOnWriteArrayList");
        IntStream.rangeClosed(1, loopCount).parallel()
                .forEach(__ -> copyOnWriteArrayList.add(ThreadLocalRandom.current().nextInt(loopCount)));
        stopWatch.stop();

        stopWatch.start("write:synchronizedList");
        IntStream.rangeClosed(1, loopCount).parallel()
                .forEach(__ -> synchronizedList.add(ThreadLocalRandom.current().nextInt(loopCount)));
        stopWatch.stop();
        log.info(stopWatch.prettyPrint());

        Map<String, Integer> result = new HashMap<>();
        result.put("copyOnWriteArrayList", copyOnWriteArrayList.size());
        result.put("synchronizedList", synchronizedList.size());
        /*
         * Sample run:
         * ---------------------------------------------
         * ns          %     Task name
         * ---------------------------------------------
         * 3574213900  099%  write:copyOnWriteArrayList
         * 023648200   001%  write:synchronizedList
         */
        return result;
    }

    /**
     * Benchmarks concurrent READS: {@link CopyOnWriteArrayList} reads lock-free from
     * an immutable snapshot, so it is much faster than a synchronized list under
     * read-heavy load.
     *
     * @return final sizes of both lists (unchanged by the benchmark)
     */
    @GetMapping("/read")
    public Map<String, Integer> testRead() {
        CopyOnWriteArrayList<Integer> copyOnWriteArrayList = new CopyOnWriteArrayList<>();
        List<Integer> synchronizedList = Collections.synchronizedList(new ArrayList<>());

        // Pre-populate both lists with identical fixture data.
        addAll(copyOnWriteArrayList);
        addAll(synchronizedList);

        StopWatch stopWatch = new StopWatch();
        int loopCount = 1000000;
        int count = copyOnWriteArrayList.size();
        stopWatch.start("Read:copyOnWriteArrayList");
        IntStream.rangeClosed(1, loopCount).parallel()
                .forEach(__ -> copyOnWriteArrayList.get(ThreadLocalRandom.current().nextInt(count)));
        stopWatch.stop();

        stopWatch.start("Read:synchronizedList");
        // Fix: was rangeClosed(0, loopCount) — one extra iteration skewed the comparison.
        IntStream.rangeClosed(1, loopCount).parallel()
                .forEach(__ -> synchronizedList.get(ThreadLocalRandom.current().nextInt(count)));
        stopWatch.stop();
        log.info(stopWatch.prettyPrint());

        Map<String, Integer> map = new HashMap<>();
        map.put("copyOnWriteArrayList", copyOnWriteArrayList.size());
        map.put("synchronizedList", synchronizedList.size());

        /*
         * Sample run:
         * ---------------------------------------------
         * ns          %     Task name
         * ---------------------------------------------
         * 028599300   010%  Read:copyOnWriteArrayList
         * 253200200   090%  Read:synchronizedList
         */
        return map;
    }
}
