/**
 * A simple hash map from int keys to int values using separate chaining.
 *
 * Each bucket holds a singly-linked list of nodes; collisions are resolved
 * by head-insertion into the chain. When the load factor reaches 0.75 the
 * bucket array is doubled and all entries are rehashed.
 */
public class HashBuck {
    /** One key/value entry in a bucket's singly-linked chain. */
    static class Node {
        public int key;
        public int val;
        public Node next;

        public Node(int key, int val) {
            this.key = key;
            this.val = val;
        }
    }

    public Node[] array;
    public int usedSize;

    /** Creates an empty map with 10 buckets. */
    public HashBuck() {
        array = new Node[10];
    }

    /**
     * Maps a key to a bucket index in [0, length).
     * Math.floorMod is used instead of % so that negative keys do not
     * produce a negative index (the original `key % length` would throw
     * ArrayIndexOutOfBoundsException for negative keys).
     */
    private static int indexFor(int key, int length) {
        return Math.floorMod(key, length);
    }

    /**
     * Inserts or updates the mapping for {@code key}.
     * If the key already exists its value is overwritten in place;
     * otherwise a new node is head-inserted and the table may grow.
     *
     * @param key the key (negative keys are supported)
     * @param val the value to associate with the key
     */
    public void put(int key, int val) {
        int index = indexFor(key, array.length);
        // First scan the chain: if the key is already present, update it.
        Node cur = array[index];
        while (cur != null) {
            if (cur.key == key) {
                cur.val = val;
                return;
            }
            cur = cur.next;
        }
        // Key not present: head-insert a new node into the bucket.
        Node node = new Node(key, val);
        node.next = array[index];
        array[index] = node;
        usedSize++;
        if (loadFactor() >= 0.75) {
            // Load factor too high: double the table and rehash.
            resize();
        }
    }

    /**
     * Looks up {@code key}.
     *
     * @param key        the key to search for
     * @param defaultVal value returned when the key is absent
     * @return the mapped value, or {@code defaultVal} if the key is not present
     */
    public int getOrDefault(int key, int defaultVal) {
        Node cur = array[indexFor(key, array.length)];
        while (cur != null) {
            if (cur.key == key) {
                return cur.val;
            }
            cur = cur.next;
        }
        return defaultVal;
    }

    /**
     * Doubles the bucket array and rehashes every entry into the new table.
     * Nodes are reused (re-linked), not reallocated.
     */
    public void resize() {
        Node[] tmpArr = new Node[array.length * 2];
        // Walk every old bucket and move each node into its new bucket.
        for (int i = 0; i < array.length; i++) {
            Node cur = array[i];
            while (cur != null) {
                // Save the next node before we re-link cur into tmpArr.
                Node curNext = cur.next;
                int newIndex = indexFor(cur.key, tmpArr.length);
                // Head-insert into the new bucket.
                cur.next = tmpArr[newIndex];
                tmpArr[newIndex] = cur;
                cur = curNext;
            }
        }
        array = tmpArr;
    }

    /** @return the current load factor: entries divided by bucket count. */
    public double loadFactor() {
        return usedSize * 1.0 / array.length;
    }
}
