public class HashBuck {

    /** Singly-linked node holding one key/value pair in a bucket chain. */
    static class Node {
        public int key;
        public int val;
        public Node next;

        public Node(int key, int val) {
            this.key = key;
            this.val = val;
        }
    }

    /** Bucket array; each slot is the head of a chain of nodes. */
    public Node[] array;
    /** Number of key/value pairs currently stored. */
    public int usedSize;

    /** Creates a table with an initial capacity of 10 buckets. */
    public HashBuck() {
        array = new Node[10];
    }

    /**
     * Inserts a key/value pair, or updates the value if the key is already
     * present. Grows the table once the load factor reaches 0.75.
     *
     * @param key the key (negative keys are supported via {@link Math#floorMod})
     * @param val the value to associate with the key
     */
    public void put(int key, int val) {
        // floorMod keeps the bucket index non-negative even for negative keys;
        // plain % would produce a negative index and throw.
        int index = Math.floorMod(key, array.length);

        // Walk the chain first: an existing key gets its value updated in place.
        for (Node cur = array[index]; cur != null; cur = cur.next) {
            if (cur.key == key) {
                cur.val = val;
                return;
            }
        }

        // Key absent: head-insert a new node into this bucket's chain.
        Node node = new Node(key, val);
        node.next = array[index];
        array[index] = node;
        usedSize++;

        // Grow once the load factor hits the 0.75 threshold.
        if (loadFactor() >= 0.75) {
            resize();
        }
    }

    /**
     * Doubles the bucket array and rehashes every existing node, because each
     * node's bucket index depends on the table length.
     */
    private void resize() {
        Node[] newArr = new Node[array.length * 2];
        for (int i = 0; i < array.length; i++) {
            Node cur = array[i];
            while (cur != null) {
                Node next = cur.next; // save before relinking into the new table
                int newIndex = Math.floorMod(cur.key, newArr.length);
                cur.next = newArr[newIndex];
                newArr[newIndex] = cur;
                cur = next;
            }
        }
        array = newArr;
    }

    /** @return the current load factor: stored pairs divided by bucket count */
    private double loadFactor() {
        return usedSize * 1.0 / array.length;
    }

    /**
     * Looks up the value associated with a key.
     *
     * @param key the key to search for
     * @return the associated value, or -1 if the key is absent
     */
    public int get(int key) {
        int index = Math.floorMod(key, array.length);
        for (Node cur = array[index]; cur != null; cur = cur.next) {
            if (cur.key == key) {
                return cur.val;
            }
        }
        return -1;
    }

}
