/**
 * A simple hash table mapping int keys to int values, using separate
 * chaining (an array of singly linked lists) for collision resolution.
 *
 * <p>Grows automatically: when the load factor reaches
 * {@link #LOAD_FACTOR_THRESHOLD} after an insertion, the bucket array is
 * doubled and every entry is rehashed.
 *
 * <p>Not thread-safe. {@code get} returns {@code -1} for a missing key, so
 * callers cannot distinguish "absent" from a stored value of -1.
 */
public class HashBuck {
    /** One entry in a bucket's chain. */
    static class Node {
        public int key;
        public int val;
        public Node next;

        public Node(int key, int val) {
            this.key = key;
            this.val = val;
        }
    }

    /** Resize once usedSize / array.length reaches this ratio. */
    private static final double LOAD_FACTOR_THRESHOLD = 0.75;

    public Node[] array;
    public int usedSize;

    public HashBuck() {
        array = new Node[10];
    }

    /**
     * Maps {@code key} onto a valid bucket index.
     *
     * <p>Uses {@link Math#floorMod} rather than {@code %} so that negative
     * keys still produce an index in {@code [0, length)} — a plain
     * {@code key % length} is negative for negative keys and would throw
     * {@link ArrayIndexOutOfBoundsException}.
     */
    private static int indexFor(int key, int length) {
        return Math.floorMod(key, length);
    }

    /**
     * Inserts the key/value pair, or overwrites the value if the key is
     * already present. Triggers a resize when the load factor threshold
     * is reached.
     *
     * @param key the key to store
     * @param val the value associated with the key
     */
    public void put(int key, int val) {
        int index = indexFor(key, array.length);

        // If the key already exists, just update its value in place.
        for (Node cur = array[index]; cur != null; cur = cur.next) {
            if (cur.key == key) {
                cur.val = val;
                return;
            }
        }

        // New key: head-insert into the bucket's chain.
        Node node = new Node(key, val);
        node.next = array[index];
        array[index] = node;
        usedSize++;

        if (loadFactor() >= LOAD_FACTOR_THRESHOLD) {
            resize();
        }
    }

    /**
     * Returns the current load factor: entries divided by bucket count.
     */
    public double loadFactor() {
        return usedSize * 1.0 / array.length;
    }

    /**
     * Doubles the bucket array and rehashes every entry.
     *
     * <p>Rehashing is mandatory: bucket indices depend on the array length,
     * so entries generally land in different buckets after growth. Nodes
     * are reused (relinked via head insertion), not reallocated.
     */
    public void resize() {
        Node[] newArray = new Node[array.length * 2];

        for (int i = 0; i < array.length; i++) {
            Node cur = array[i];
            while (cur != null) {
                // Save the successor before relinking cur into newArray.
                Node next = cur.next;
                int index = indexFor(cur.key, newArray.length);
                cur.next = newArray[index];
                newArray[index] = cur;
                cur = next;
            }
        }
        array = newArray;
    }

    /**
     * Looks up the value stored under {@code key}.
     *
     * @param key the key to search for
     * @return the stored value, or {@code -1} if the key is absent
     */
    public int get(int key) {
        int index = indexFor(key, array.length);
        for (Node cur = array[index]; cur != null; cur = cur.next) {
            if (cur.key == key) {
                return cur.val;
            }
        }
        return -1;
    }
}