package ferp.core.ai.tree2;

import java.util.Arrays;

/**
 * User: olegs
 * Date: 1/13/13 11:10 AM
 */
/**
 * A fixed-capacity, intrusive hash set of {@code Node}s.
 *
 * <p>Chaining links ({@code nextInHashset}, {@code previousInHashset}) and the
 * bucket index are stored on the nodes themselves, so membership operations
 * allocate nothing. Not thread-safe. The bucket count is always a power of two
 * because {@link #indexFor} computes the bucket with a bitmask.
 *
 * <p>NOTE(review): the name intentionally shadows {@code java.util.HashSet};
 * this class is a specialized intrusive container, not a Collection.
 */
public class HashSet
{
  // Sentinel value of Node.bucketIndex meaning "not currently in this set".
  static final int NO_BUCKET = -1;

  // Default bucket count; must be a power of two so indexFor() can mask.
  private static final int DEFAULT_CAPACITY = 65536;

  /** Creates a set with the default bucket count (65536). */
  public HashSet()
  {
    this(DEFAULT_CAPACITY);
  }

  /**
   * Creates a set with at least {@code capacity} buckets. The requested value
   * is rounded up to the next power of two, which {@link #indexFor} relies on.
   *
   * @param capacity requested bucket count, must be positive
   * @throws IllegalArgumentException if {@code capacity <= 0}
   */
  public HashSet(int capacity)
  {
    if (capacity <= 0)
      throw new IllegalArgumentException("capacity must be positive: " + capacity);

    int size = 1;
    while (size < capacity)
      size <<= 1;

    buckets = new Node[size];
  }

  /**
   * Drops every bucket head. NOTE(review): nodes that were in the set keep
   * their stale bucketIndex/link fields after clear(); they are repaired on
   * the next put() — confirm callers never call remove() on such nodes.
   */
  public final void clear()
  {
    Arrays.fill(buckets, null);
  }

  /**
   * Inserts {@code node} at the head of its hash bucket.
   *
   * <p>The caller must ensure the node is not already a member; inserting a
   * member twice corrupts the chain (same contract as the original code).
   *
   * @param node node to insert; its intrusive link fields are overwritten
   */
  public final void put(Node node)
  {
    int hash = hash(node.hashCode());
    node.bucketIndex = indexFor(hash, buckets.length);

    // Defensive reset: a node re-inserted without an intervening remove()
    // may carry stale links from a previous membership; clearing them keeps
    // the chain well-formed and prevents remove() from later touching an
    // unrelated node through a stale back-link.
    node.previousInHashset = null;
    node.nextInHashset = null;

    Node head = buckets[node.bucketIndex];

    if (head != null)
    {
      node.nextInHashset = head;
      head.previousInHashset = node;
    }

    buckets[node.bucketIndex] = node;
  }

  /**
   * Looks up a member equal to {@code key} (per {@code Node.equals}).
   *
   * @param key probe node; only its hashCode()/equals() are used
   * @return the matching member, or {@code null} if none is present
   */
  public final Node get(Node key)
  {
    for (Node node = buckets[indexFor(hash(key.hashCode()), buckets.length)]; node != null; node = node.nextInHashset)
      if (node.equals(key))
        return node;

    return null;
  }

  /**
   * Unlinks {@code node} from its bucket and resets its intrusive fields.
   *
   * <p>A node that is not a member ({@code bucketIndex == NO_BUCKET}) is
   * ignored; previously this indexed {@code buckets[-1]} and threw
   * {@code ArrayIndexOutOfBoundsException}.
   *
   * @param node node to remove
   */
  public final void remove(Node node)
  {
    if (node.bucketIndex == NO_BUCKET)
      return;

    Node head = buckets[node.bucketIndex];
    Node previous = node.previousInHashset;
    Node next = node.nextInHashset;

    if (previous != null)
      previous.nextInHashset = next;

    if (next != null)
      next.previousInHashset = previous;

    // Only the bucket head needs the array slot updated.
    if (head == node)
      buckets[node.bucketIndex] = next;

    node.nextInHashset = null;
    node.previousInHashset = null;
    node.bucketIndex = NO_BUCKET;
  }

  /**
   * Supplemental hash spreader (same mixing as pre-Java-8 java.util.HashMap):
   * folds higher-order bits downward so the power-of-two mask in
   * {@link #indexFor} still sees them, bounding collisions for hashCodes
   * that differ only in high bits.
   */
  private static int hash(int h)
  {
    h ^= (h >>> 20) ^ (h >>> 12);
    return h ^ (h >>> 7) ^ (h >>> 4);
  }

  /** Maps a spread hash to a bucket index; {@code length} must be a power of two. */
  private static int indexFor(int h, int length)
  {
    return h & (length - 1);
  }

  // Bucket heads of the intrusive doubly-linked chains; length is a power of two.
  private final Node[] buckets;
}
