/**
 * A simple hash map from int keys to int values using separate chaining.
 * Buckets are singly-linked lists; the table doubles in size once the
 * load factor reaches 0.75 (classic space-for-time trade-off).
 */
public class HashBuck {

    /** Singly-linked list node for one key/value pair in a bucket chain. */
    static class Node {
        public int key;
        public int val;
        public Node next;

        public Node(int key, int val) {
            this.key = key;
            this.val = val;
        }
    }

    public Node[] array;
    public int usedSize;

    public HashBuck() {
        array = new Node[10];
    }

    /**
     * Computes the bucket index for a key against a given table length.
     * Math.floorMod keeps the index non-negative even for negative keys
     * (plain % would yield a negative index and crash on array access).
     */
    private static int bucketIndex(int key, int length) {
        return Math.floorMod(key, length);
    }

    /**
     * Inserts the key/value pair, or updates the value if the key is
     * already present. New nodes are pushed onto the head of the chain.
     * Triggers a resize when the load factor reaches 0.75.
     *
     * @param key the key to insert or update
     * @param val the value to associate with the key
     */
    public void put(int key, int val) {
        int index = bucketIndex(key, array.length);
        Node cur = array[index];
        while (cur != null) {
            if (cur.key == key) {
                cur.val = val;  // key already present: update in place
                return;
            }
            cur = cur.next;
        }
        // Key not found: head-insert a new node in front of the chain.
        // FIX: the original did `cur.next = array[index]` here, but `cur`
        // is always null after the loop above -> NullPointerException on
        // every first insertion. The new node must be linked instead.
        Node node = new Node(key, val);
        node.next = array[index];
        array[index] = node;
        usedSize++;
        if (loadFactor() >= 0.75) {
            resize();
        }
    }

    /**
     * Doubles the table capacity and rehashes every node, since each
     * node's bucket index depends on the table length.
     */
    private void resize() {
        Node[] tmpArr = new Node[array.length * 2];
        for (int i = 0; i < array.length; i++) {
            Node cur = array[i];
            while (cur != null) {
                // Save the successor before relinking this node.
                Node curNext = cur.next;
                int newIndex = bucketIndex(cur.key, tmpArr.length);
                cur.next = tmpArr[newIndex];
                tmpArr[newIndex] = cur;
                cur = curNext;
            }
        }
        array = tmpArr;
    }

    /** Current load factor: stored entries divided by table capacity. */
    private double loadFactor() {
        return usedSize * 1.0 / array.length;
    }
}
