package com.tantou.java.study;

//import com.sun.org.slf4j.internal.Logger;
//import com.sun.org.slf4j.internal.LoggerFactory;
//
//import java.util.Map;
//import java.util.UUID;
//import java.util.concurrent.ConcurrentHashMap;
//import java.util.concurrent.ForkJoinPool;
//import java.util.concurrent.ThreadLocalRandom;
//import java.util.concurrent.TimeUnit;
//import java.util.function.Function;
//import java.util.stream.Collectors;
//import java.util.stream.IntStream;
//import java.util.stream.LongStream;

/**
 * Scratch/study class for experimenting with {@link ThreadLocal} and
 * {@code ConcurrentHashMap} behavior. The alternative experiments are kept
 * below as commented-out code; only the field declarations are live.
 */
public class ThreadLocalTest {

    // Per-thread user id holder; initialized to null for each thread on first access.
    // Parameterized as Integer to match the commented usage currentUser.set(10251).
    private static final ThreadLocal<Integer> currentUser = ThreadLocal.withInitial(() -> null);

//    public static void main(String[] args) {
//        String before = Thread.currentThread().getName() + ":" + currentUser.get();
//        currentUser.set(10251);
//
//        String after = Thread.currentThread().getName() + ":" + currentUser.get();
//
//        System.out.println("before:" + before + "---after:" + after);
//
//    }


//    private static int THREAD_COUNT = 10;
//    //总元素数量
//    private static int ITEM_COUNT = 1000;
//
//    //帮助方法，用来获得一个指定元素数量模拟数据的ConcurrentHashMap
//    private static ConcurrentHashMap getData(int count) {
//        return LongStream.rangeClosed(1, count)
//                .boxed()
//                .collect(Collectors.toConcurrentMap(i -> UUID.randomUUID().toString(), Function.identity(),
//                        (o1, o2) -> o1, ConcurrentHashMap::new));
//    }
//
//    private static final Logger log = LoggerFactory.getLogger(ThreadLocalTest.class);
//
//    public static void main(String[] args) throws InterruptedException {
//        ConcurrentHashMap concurrentHashMap = getData(ITEM_COUNT - 100);
//        //初始900个元素
//        System.out.println("init size:"+ concurrentHashMap.size());
//
//        ForkJoinPool forkJoinPool = new ForkJoinPool(THREAD_COUNT);
//        //使用线程池并发处理逻辑
//        forkJoinPool.execute(() -> IntStream.rangeClosed(1, 10).parallel().forEach(i -> {
//            //查询还需要补充多少个元素
//            synchronized(concurrentHashMap){
//                int gap = ITEM_COUNT - concurrentHashMap.size();
//                System.out.println("gap size:{}"+gap);
//                //补充元素
//                concurrentHashMap.putAll(getData(gap));
//            }
//
//        }));
//        //等待所有任务完成
//        forkJoinPool.shutdown();
//        forkJoinPool.awaitTermination(1, TimeUnit.HOURS);
//        //最后元素个数会是1000吗？
//        System.out.println("finish size:{}"+ concurrentHashMap.size());
//    }



    // Number of loop iterations for the (commented-out) frequency-count experiment.
    private static final int LOOP_COUNT = 10000000;
    // Number of worker threads for the ForkJoinPool.
    private static final int THREAD_COUNT = 10;
    // Number of distinct keys ("item0".."item9").
    private static final int ITEM_COUNT = 10;
//    public static Map normaluse() throws InterruptedException {
//        ConcurrentHashMap freqs = new ConcurrentHashMap<>(ITEM_COUNT);
//        ForkJoinPool forkJoinPool = new ForkJoinPool(THREAD_COUNT);
//        forkJoinPool.execute(() -> IntStream.rangeClosed(1, LOOP_COUNT).parallel().forEach(i -> {
//                    //获得一个随机的Key
//                    String key = "item" + ThreadLocalRandom.current().nextInt(ITEM_COUNT);
//                    synchronized (freqs) {
//                        if (freqs.containsKey(key)) {
//                            //Key存在则+1
//                            freqs.put(key, freqs.get(key) + 1);
//                        } else {
//                            //Key不存在则初始化为1
//                            freqs.put(key, 1L);
//                        }
//                    }
//                }
//        ));
//        forkJoinPool.shutdown();
//        forkJoinPool.awaitTermination(1, TimeUnit.HOURS);
//        return freqs;
//    }
//
//    public static void main(String[] args) throws InterruptedException {
//        ConcurrentHashMap freqs = (ConcurrentHashMap) normaluse();
//        System.out.println(freqs.size());
//
//    }
}
