package com.arong.concurrentHashMap;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import static java.util.concurrent.ConcurrentHashMap.MIN_TRANSFER_STRIDE;
import static java.util.concurrent.Phaser.NCPU;

/**
 * Differences between the JDK 1.7 and 1.8 implementations:
 * 1.7: segment locking — the map is split into Segments; the concurrency level
 *      equals the size of the segment array, which is fixed at construction
 *      time and can never grow.
 * 1.8: array + linked list + red-black tree storage; the table is resizable and
 *      locking is finer-grained, taken on the head node of each bin.
 * (The real class declares: extends AbstractMap<K,V>
 *     implements ConcurrentMap<K,V>, Serializable)
 */
public class ArongConHashMap<K,V> {

    // Maximum table capacity: must be a power of two, capped at 2^30.
    private static final int MAXIMUM_CAPACITY = 1 << 30;

    // Default initial table capacity.
    private static final int DEFAULT_CAPACITY = 16;

    // Largest possible array size (leaves headroom for VM array headers).
    static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;

    // Default concurrency level (kept for 1.7 compatibility; unused by the 1.8 design).
    private static final int DEFAULT_CONCURRENCY_LEVEL = 16;

    // Load factor: resize is triggered when count exceeds capacity * 0.75.
    private static final float LOAD_FACTOR = 0.75f;

    // Bin count at which a linked-list bin is converted to a red-black tree.
    static final int TREEIFY_THRESHOLD = 8;

    // Bin count below which a tree bin is converted back to a linked list.
    static final int UNTREEIFY_THRESHOLD = 6;

    // The bucket array; lazily initialized on first insertion (see initTable()).
    transient volatile Node<K,V>[] table;

    // Element count / table-control value.
    // NOTE(review): this single field plays the role of the real
    // ConcurrentHashMap's "sizeCtl" in initTable()/addCount()/transfer()
    // (negative = initializing/resizing, positive = next resize threshold),
    // yet the static initializer below reflects a field literally named
    // "sizeCtl" which is not declared here — confirm which name is intended.
    private transient volatile int size;

    // A single key/value node in a bin's linked list.
    static class Node<K, V> implements Map.Entry<K, V> {

        final int hash;     // spread hash of the key; negative values are sentinels (MOVED/TREEBIN/RESERVED)
        final K key;
        volatile V val;     // volatile so readers see the latest value without locking
        volatile Node<K,V> next;

        public Node(int hash, K key, V val, Node<K, V> next) {
            this.hash = hash;
            this.key = key;
            this.val = val;
            this.next = next;
        }

        @Override
        public K getKey() {
            return this.key;
        }

        @Override
        public V getValue() {
            return this.val;
        }

        // NOTE(review): returns the NEW value, but the Map.Entry contract
        // requires setValue to return the OLD value — confirm/fix callers.
        @Override
        public V setValue(V value) {
            return this.val = value;
        }

        // Entry equality: both key and value must match.
        public final boolean equals(Object o) {
            Object k, v, u; Map.Entry<?,?> e;
            return ((o instanceof Map.Entry) && // type check
                    (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
                    (v = e.getValue()) != null && // null keys/values never match
                    (k == key || k.equals(key)) &&
                    (v == (u = val) || v.equals(u))); // key and value must both be equal
        }

        // NOTE(review): equals is overridden without hashCode; the real
        // ConcurrentHashMap.Node also overrides hashCode — confirm.

        // Linear search along this node's chain for (hash h, key k).
        Node<K,V> find(int h, Object k) {
            Node<K,V> e = this;
            if(k != null) {
                do {
                    K ek;
                    // match requires equal hash AND equal key
                    if(e.hash == h &&
                            ((ek = e.key) == k || (ek != null && k.equals(ek)))) {
                        return e;
                    }
                } while((e = e.next) != null);
            }
            // not found
            return null;
        }
        // Placeholder for the red-black-tree bin; the real TreeBin keeps a
        // tree root and a read-write lock. putTreeVal is stubbed out here.
        static final class TreeBin<K,V> extends Node<K,V> {

            public TreeBin(int hash, K key, V val, Node<K, V> next) {
                super(hash, key, val, next);
            }

            // Tree insertion stub: always reports "key not present".
            public Node<K, V> putTreeVal(int hash, K key, V value) {
                return null;
            }
        }
    }

    /**
     * Rounds c up to the nearest power of two (capped at MAXIMUM_CAPACITY).
     * Works by smearing the highest set bit of (c - 1) into all lower bits,
     * then adding one.
     * @param c requested capacity
     * @return a power-of-two table size >= c
     */
    private static final int tableSizeFor(int c) {
        int n = c - 1;
        n |= n >>> 1;
        n |= n >>> 2;
        n |= n >>> 4;
        n |= n >>> 8;
        n |= n >>> 16;
        return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
    }

    // Sentinel hash values (all negative, so they can never collide with a
    // spread() result, which is always non-negative).
    static final int MOVED     = -1; // hash for forwarding nodes
    static final int TREEBIN   = -2; // hash for roots of trees
    static final int RESERVED  = -3; // hash for transient reservations
    static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash

    /**
     * Spreads (mixes) a raw hashCode into a usable table hash.
     * @param h the raw hash code
     * @return a non-negative mixed hash
     */
    static final int spread(int h) {
        // XOR the high 16 bits into the low 16 bits so that high-order
        // entropy still influences small table indices.
        // HASH_BITS (0x7fffffff) clears the sign bit, guaranteeing the
        // result is non-negative (negative hashes are reserved sentinels).
        return (h ^ (h >>> 16)) & HASH_BITS;
    }


    // Constructors

    public ArongConHashMap() {
    }

    public ArongConHashMap(int initialCapacity) {
        // validate the requested capacity
        if(initialCapacity < 0) {
            throw new IllegalArgumentException();
        }
        // Compute the table capacity: oversize by 1.5x + 1 before rounding up
        // to a power of two, so initialCapacity elements fit without resizing.
        int cap = ((initialCapacity >= MAXIMUM_CAPACITY >>> 1)) ?
                MAXIMUM_CAPACITY :
                tableSizeFor(initialCapacity + (initialCapacity >>> 1) + 1);
        // NOTE(review): the real implementation assigns this to sizeCtl (the
        // deferred initial capacity), not to the element count — see the note
        // on the 'size' field.
        this.size = cap;
    }

    /**
     * Looks up the value mapped to key, or null if absent.
     * Lock-free: relies on volatile reads via tabAt() and volatile Node fields.
     * NOTE(review): a null key throws NullPointerException here (key.hashCode()).
     * @param key the key to look up (must not be null)
     * @return the mapped value, or null if no mapping exists
     */
    public V get(Object key) {
        Node<K,V>[] tab; Node<K,V> e, p; int n, eh; K ek;
        // compute the spread hash
        int h = spread(key.hashCode());
        // proceed only if the table is initialized and the target bin is non-empty
        if ((tab = table) != null && (n = tab.length) > 0 &&
                (e = tabAt(tab, (n - 1) & h)) != null) {
            // check whether the bin's head node is the one we want
            if ((eh = e.hash) == h) {
                if ((ek = e.key) == key || (ek != null && key.equals(ek)))
                    return e.val;
            }
            // A negative hash marks a special node (forwarding node during a
            // resize, or a tree root); delegate to that node's find().
            else if (eh < 0)
                return (p = e.find(h, key)) != null ? p.val : null;
            // otherwise walk the rest of the linked list
            while ((e = e.next) != null) {
                if (e.hash == h &&
                        ((ek = e.key) == key || (ek != null && key.equals(ek))))
                    return e.val;
            }
        }
        // no node with an equal key was found
        return null;
    }

    // Maps key to value, replacing any existing mapping.
    public V put(K key, V value) {
        return putVal(key, value, false);
    }

    /**
     * Core insertion routine shared by put/putIfAbsent.
     * @param key          the key (must not be null)
     * @param value        the value (must not be null)
     * @param onlyIfAbsent if true, do not overwrite an existing mapping
     * @return the previous value, or null if the key was not present
     */
    final V putVal(K key, V value, boolean onlyIfAbsent) {
        // neither null keys nor null values are supported
        if (key == null || value == null) throw new NullPointerException();
        // compute the spread hash
        int hash = spread(key.hashCode());
        int binCount = 0;
        // retry loop: each iteration re-reads the bin and races via CAS/lock
        for (Node<K,V>[] tab = table;;) {
            Node<K,V> f; int n, i, fh;
            // lazy initialization: the table is created on first insertion
            if (tab == null || (n = tab.length) == 0)
                tab = initTable();
                // empty bin: install the new node as the head and we are done
            else if ((f = tabAt(tab, i = (n - 1) & hash)) == null) {
                // CAS the head in, since another thread may race for the same slot
                if (casTabAt(tab, i, null,
                        new Node<K,V>(hash, key, value, null)))
                    break;                   // no lock when adding to empty bin
            }
            // MOVED head means a resize is in progress: help migrate, then retry
            else if ((fh = f.hash) == MOVED)
                tab = helpTransfer(tab, f);
            else { // non-empty ordinary bin: insert under the head-node lock
                V oldVal = null;
                // lock the head node; no other thread can mutate this bin now
                synchronized (f) {
                    if (tabAt(tab, i) == f) {
                        // a non-negative hash means a regular linked-list node
                        if (fh >= 0) {
                            // binCount tracks the chain length; if it reaches
                            // TREEIFY_THRESHOLD the bin is converted to a tree
                            binCount = 1;
                            // walk the chain
                            for (Node<K,V> e = f;; ++binCount) {
                                K ek;
                                if (e.hash == hash &&
                                        ((ek = e.key) == key ||
                                                (ek != null && key.equals(ek)))) {
                                    // key already present: overwrite unless
                                    // onlyIfAbsent (put() passes false)
                                    oldVal = e.val;
                                    if (!onlyIfAbsent)
                                        e.val = value;
                                    break;
                                }
                                // reached the tail without a match: the key is
                                // new, so append a node at the end of the chain
                                Node<K,V> pred = e;
                                if ((e = e.next) == null) {
                                    pred.next = new Node<K,V>(hash, key,
                                            value, null);
                                    break;
                                }
                            }
                        }
                        // tree bin head: delegate to the red-black-tree insert
                        else if (f instanceof Node.TreeBin) {
                            Node<K,V> p;
                            binCount = 2;
                            if ((p = ((Node.TreeBin<K,V>)f).putTreeVal(hash, key,
                                    value)) != null) {
                                oldVal = p.val;
                                if (!onlyIfAbsent)
                                    p.val = value;
                            }
                        }
                    }
                }
                if (binCount != 0) {
                    // chain grew to the threshold: convert this bin to a tree
                    if (binCount >= TREEIFY_THRESHOLD)
                        treeifyBin(tab, i);
                    if (oldVal != null)
                        return oldVal;
                    break;
                }
            }
        }
        // a new node was added: bump the count and maybe trigger a resize
        addCount(1L, binCount);
        return null;
    }

    /**
     * Adds to the element count and triggers/helps a resize when needed.
     * NOTE(review): this method does not compile as written — it references
     * 'x' and 'check', but the parameters are named 'l' and 'binCount'
     * (rename l -> x, binCount -> check to match the JDK original). It also
     * references members not declared in this file: counterCells, baseCount,
     * fullAddCount, sumCount, resizeStamp, MAX_RESIZERS, RESIZE_STAMP_SHIFT,
     * nextTable, transferIndex, and ThreadLocalRandom (not imported).
     */
    private void addCount(long l, int binCount) {
        CounterCell[] as; long b, s;
        // Striped counting: first try a CAS on baseCount; if counter cells
        // exist or the CAS fails (contention), fall into per-cell updates.
        if ((as = counterCells) != null ||
                !U.compareAndSwapLong(this, BASECOUNT, b = baseCount, s = b + x)) {
            CounterCell a; long v; int m;
            boolean uncontended = true;
            // pick a cell by the thread's probe value and CAS into it;
            // on any failure, fall back to the slow path (fullAddCount)
            if (as == null || (m = as.length - 1) < 0 ||
                    (a = as[ThreadLocalRandom.getProbe() & m]) == null ||
                    !(uncontended =
                            U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))) {
                fullAddCount(x, uncontended);
                return;
            }
            if (check <= 1)
                return;
            // total node count across base + all cells
            s = sumCount();
        }
        // check >= 0 means this was an insertion, so a resize may be due
        if (check >= 0) {
            Node<K,V>[] tab, nt; int n, sc;
            // resize while the count exceeds the threshold (sizeCtl) and the
            // table can still grow
            while (s >= (long)(sc = size) && (tab = table) != null &&
                    (n = tab.length) < MAXIMUM_CAPACITY) {
                // rs is the resize stamp: a generation tag for this table size,
                // packed into the high bits of sizeCtl during a resize
                int rs = resizeStamp(n);
                if (sc < 0) {
                    // a resize is already running: join it unless it is
                    // finished, saturated with helpers, or fully claimed
                    if ((sc >>> RESIZE_STAMP_SHIFT) != rs || sc == rs + 1 ||
                            sc == rs + MAX_RESIZERS || (nt = nextTable) == null ||
                            transferIndex <= 0)
                        break;
                    if (U.compareAndSwapInt(this, SIZECTL, sc, sc + 1))
                        transfer(tab, nt);
                }
                // no resize running: claim initiation by stamping sizeCtl
                else if (U.compareAndSwapInt(this, SIZECTL, sc,
                        (rs << RESIZE_STAMP_SHIFT) + 2))
                    transfer(tab, null);
                s = sumCount();
            }
        }
    }

    // Converts the bin at index i to a red-black tree (stubbed out here).
    private void treeifyBin(Node<K, V>[] tab, int i) {
        // treeification omitted
    }

    /**
     * Moves every bin from tab into nextTab (twice the size). Threads claim
     * disjoint [bound, i] ranges of bins via CAS on transferIndex, so helpers
     * can join an in-progress resize.
     * NOTE(review): NCPU / MIN_TRANSFER_STRIDE are private members of JDK
     * classes, so the static imports at the top of this file do not compile;
     * sizeCtl, nextTable, transferIndex, resizeStamp, RESIZE_STAMP_SHIFT and
     * ForwardingNode are also not declared in this file.
     */
    private final void transfer(Node<K,V>[] tab, Node<K,V>[] nextTab) {
        int n = tab.length, stride;
        // NCPU is the number of CPU cores; stride is the number of bins one
        // thread migrates per claimed range, with a floor of MIN_TRANSFER_STRIDE (16)
        if ((stride = (NCPU > 1) ? (n >>> 3) / NCPU : n) < MIN_TRANSFER_STRIDE)
            stride = MIN_TRANSFER_STRIDE; // subdivide range
        // only the initiating thread allocates the new table
        if (nextTab == null) {            // initiating
            try {
                // new table is twice the old size
                Node<K, V>[] nt = (Node<K, V>[]) new Node<?, ?>[n << 1];
                nextTab = nt;
                // the catch below guards against OutOfMemoryError here
            } catch (Throwable ex) {
                sizeCtl = Integer.MAX_VALUE;
                return;
            }
            nextTable = nextTab;
            // migration proceeds from the highest bin index downward
            transferIndex = n;
        }
        int nextn = nextTab.length;
        // forwarding node left in migrated old bins; its hash is MOVED (-1)
        ForwardingNode<K, V> fwd = new ForwardingNode<K, V>(nextTab);
            // core migration loop
            boolean advance = true;
            boolean finishing = false; // to ensure sweep before committing nextTab
            for (int i = 0, bound = 0;;) {
                Node<K,V> f; int fh;
                // range-claiming loop: read the last else-if first
                while (advance) {
                    int nextIndex, nextBound;
                    // current bin done: step down to the next bin in our range
                    if (--i >= bound || finishing)
                        advance = false;
                        // transferIndex exhausted: no ranges left to claim
                    else if ((nextIndex = transferIndex) <= 0) {
                        i = -1;
                        advance = false;
                    }
                    // First pass usually lands here (the branch above loaded
                    // nextIndex = transferIndex). This thread claims bins
                    // [bound, nextIndex - 1]; since transferIndex is shared and
                    // updated by CAS, every thread gets a disjoint range.
                    else if (U.compareAndSwapInt
                            (this, TRANSFERINDEX, nextIndex,
                                    nextBound = (nextIndex > stride ?
                                            nextIndex - stride : 0))) {
                        bound = nextBound;
                        i = nextIndex - 1;
                        advance = false;
                    }
                }
                // per-bin processing
                    // all bins migrated: publish the new table on this map
                    if (i < 0 || i >= n || i + n >= nextn) {
                        int sc;
                        if (finishing) {
                            nextTable = null;
                            table = nextTab;
                            // new threshold = 2n * 0.75 = (n << 1) - (n >>> 1)
                            size = (n << 1) - (n >>> 1);
                            return;
                        }
                        // this helper is done: decrement the helper count in sizeCtl
                        if (U.compareAndSwapInt(this, SIZECTL, sc = size, sc - 1)) {
                            // if other helpers remain, leave; otherwise this was
                            // the last one, so do a final sweep and commit
                            if ((sc - 2) != resizeStamp(n) << RESIZE_STAMP_SHIFT)
                                return;
                            finishing = advance = true;
                            i = n; // recheck before commit
                        }
                        // empty old bin: plant the forwarding node directly
                    } else if ((f = tabAt(tab, i)) == null)
                        advance = casTabAt(tab, i, null, fwd);
                        // bin already carries a forwarding node: skip it
                    else if ((fh = f.hash) == MOVED)
                        advance = true; // already processed
                 else {
                    // lock the old bin's head node for the migration
                    synchronized (f) {
                        // re-check that the head did not change before we locked
                        if (tabAt(tab, i) == f) {
                            Node<K,V> ln, hn;
                            // non-negative hash: ordinary list bin, not yet moved
                            if (fh >= 0) {
                                // Split by the bit that distinguishes index i
                                // from index i + n in the doubled table.
                                int runBit = fh & n;
                                Node<K,V> lastRun = f;
                                // lastRun optimization: find the tail run whose
                                // nodes all land in the same target bin, so that
                                // whole suffix can be reused without copying
                                for (Node<K,V> p = f.next; p != null; p = p.next) {
                                    int b = p.hash & n;
                                    if (b != runBit) {
                                        runBit = b;
                                        lastRun = p;
                                    }
                                }
                                // bit 0 -> low list (index i), bit 1 -> high list (index i + n)
                                if (runBit == 0) {
                                    ln = lastRun;
                                    hn = null;
                                }
                                else {
                                    hn = lastRun;
                                    ln = null;
                                }
                                // copy the remaining nodes, prepending each to
                                // its target list (0 -> low, 1 -> high)
                                for (Node<K,V> p = f; p != lastRun; p = p.next) {
                                    int ph = p.hash; K pk = p.key; V pv = p.val;
                                    if ((ph & n) == 0)
                                        ln = new Node<K,V>(ph, pk, pv, ln);
                                    else
                                        hn = new Node<K,V>(ph, pk, pv, hn);
                                }
                                setTabAt(nextTab, i, ln);
                                setTabAt(nextTab, i + n, hn);
                                // mark the old bin as migrated
                                setTabAt(tab, i, fwd);
                                advance = true;
                            }
                            // tree bin head
                            else if (f instanceof Node.TreeBin) {
                                // same split as for lists; omitted in this copy
                            }
                        }
                    }

                }
            }
        }


    /**
     * Called when putVal hits a forwarding node: joins the in-progress resize
     * and returns the new table so the caller can retry its insertion there.
     */
    final Node<K,V>[] helpTransfer(Node<K,V>[] tab, Node<K,V> f) {
        Node<K,V>[] nextTab; int sc;
        if (tab != null && (f instanceof ForwardingNode) &&
                (nextTab = ((ForwardingNode<K,V>)f).nextTable) != null) {
            int rs = resizeStamp(tab.length);
            // keep re-checking that the same resize is still in progress
            while (nextTab == nextTable && table == tab &&
                    (sc = size) < 0) {
                // bail out if the resize finished, is saturated, or has no
                // unclaimed ranges left
                if ((sc >>> RESIZE_STAMP_SHIFT) != rs || sc == rs + 1 ||
                        sc == rs + MAX_RESIZERS || transferIndex <= 0)
                    break;
                // register as one more helper, then migrate
                if (U.compareAndSwapInt(this, SIZECTL, sc, sc + 1)) {
                    transfer(tab, nextTab);
                    break;
                }
            }
            return nextTab;
        }
        return table;
    }

    /**
     * Lazily creates the bucket array; safe to call from multiple threads.
     * Exactly one thread wins the CAS and allocates; the rest spin/yield.
     * @return the initialized table
     */
    private final Node<K,V>[] initTable() {
        Node<K,V>[] tab; int sc;
        while ((tab = table) == null || tab.length == 0) {
            // negative control value: another thread is already initializing,
            // so just yield and re-check
            if ((sc = size) < 0)
                Thread.yield(); // lost initialization race; just spin
                // CAS the control value to -1 to claim initialization
            else if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
                try {
                    // double-checked: the table may have appeared meanwhile
                    if ((tab = table) == null || tab.length == 0) {
                        // a positive prior value is the capacity requested by
                        // the constructor; otherwise use the default (16)
                        int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
                        @SuppressWarnings("unchecked")
                        Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n];
                        table = tab = nt;
                        // sc = n - n/4 = 0.75n, the resize threshold
                        // (0.75 is the default load factor)
                        sc = n - (n >>> 2);
                    }
                } finally {
                    // publish the threshold (or restore sc if we lost the race)
                    size = sc;
                }
                break;
            }
        }
        return tab;
    }

    


    // One stripe of the element counter. @Contended pads the cell to avoid
    // false sharing between stripes updated by different threads.
    // NOTE(review): @sun.misc.Contended only takes effect with
    // -XX:-RestrictContended outside the JDK itself.
    @sun.misc.Contended static final class CounterCell {
        volatile long value;
        CounterCell(long x) { value = x; }
    }

    // Volatile read of tab[i] (array elements are not volatile by themselves).
    static final <K,V> Node<K,V> tabAt(Node<K,V>[] tab, int i) {
        return (Node<K,V>)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
    }

    // Atomically sets tab[i] from expected value c to v (CAS).
    static final <K,V> boolean casTabAt(Node<K,V>[] tab, int i,
                                        Node<K,V> c, Node<K,V> v) {
        return U.compareAndSwapObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
    }

    // Volatile write of tab[i].
    static final <K,V> void setTabAt(Node<K,V>[] tab, int i, Node<K,V> v) {
        U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
    }

    // Unsafe machinery: raw field offsets used by the CAS operations above.
    private static final sun.misc.Unsafe U;
    private static final long SIZECTL;
    private static final long TRANSFERINDEX;
    private static final long BASECOUNT;
    private static final long CELLSBUSY;
    private static final long CELLVALUE;
    private static final long ABASE;
    private static final int ASHIFT;

    static {
        try {
            // NOTE(review): Unsafe.getUnsafe() throws SecurityException for
            // classes not loaded by the bootstrap loader; application code
            // normally obtains the instance reflectively via "theUnsafe".
            U = sun.misc.Unsafe.getUnsafe();
            Class<?> k = ArongConHashMap.class;
            // NOTE(review): the fields "sizeCtl", "transferIndex", "baseCount"
            // and "cellsBusy" are not declared in this class, so these
            // getDeclaredField calls throw NoSuchFieldException and class
            // initialization fails — declare the fields (or reflect the ones
            // that exist, e.g. "size") to make this runnable.
            SIZECTL = U.objectFieldOffset
                    (k.getDeclaredField("sizeCtl"));
            TRANSFERINDEX = U.objectFieldOffset
                    (k.getDeclaredField("transferIndex"));
            BASECOUNT = U.objectFieldOffset
                    (k.getDeclaredField("baseCount"));
            CELLSBUSY = U.objectFieldOffset
                    (k.getDeclaredField("cellsBusy"));
            Class<?> ck = CounterCell.class;
            CELLVALUE = U.objectFieldOffset
                    (ck.getDeclaredField("value"));
            Class<?> ak = Node[].class;
            ABASE = U.arrayBaseOffset(ak);
            int scale = U.arrayIndexScale(ak);
            // the element scale must be a power of two for the shift-based
            // offset arithmetic in tabAt/casTabAt/setTabAt
            if ((scale & (scale - 1)) != 0)
                throw new Error("data type scale not a power of two");
            ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
        } catch (Exception e) {
            throw new Error(e);
        }
    }
}

