package de.lgohlke.collection;

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import de.lgohlke.compress.CompressableValue;
import de.lgohlke.concurrent.ThreadPool;

/**
 * A {@link CompressedHashMap} variant that compresses keys and values
 * asynchronously on a shared worker pool.
 *
 * <p>{@link #put(Object, Object)} stores the entry uncompressed immediately and
 * enqueues it for background compression; worker threads later replace the
 * uncompressed entry with its compressed form. Until {@link #flush()} returns,
 * a read may therefore observe an entry in either state.</p>
 *
 * <p>NOTE(review): {@link #remove(Object)}, {@link #clear()} and
 * {@link #printStats()} stop and restart the whole worker pool; they should not
 * be called concurrently with {@code put} — confirm callers respect this.</p>
 *
 * @author lars
 * @version $Id: $
 */
public class ParallelCompressedHashMap<T1, T2> extends CompressedHashMap<T1, T2>
{
  private static final Logger                   log              = LoggerFactory.getLogger(ParallelCompressedHashMap.class);
  private static final long                     serialVersionUID = 6779165949662087204L;

  /** Shared pool that executes the background compression jobs. */
  private final ThreadPool                      workerPool       = ThreadPool.getInstance();

  /** Work queue of entries awaiting compression; a (null, null) pair is the poison pill. */
  private final BlockingQueue<KeyValue<T1, T2>> queue            = new LinkedBlockingQueue<KeyValue<T1, T2>>();

  /**
   * <p>Constructor for ParallelCompressedHashMap.</p>
   *
   * @param initialCapacity the initial capacity of the backing map.
   * @param loadFactor the load factor of the backing map.
   */
  public ParallelCompressedHashMap(final int initialCapacity, final float loadFactor)
  {
    // the temporary map only supplies the default key/value compressors
    this(initialCapacity, loadFactor, new CompressedHashMap<T1, T2>(initialCapacity, loadFactor));
  }

  /**
   * <p>Constructor for ParallelCompressedHashMap.</p>
   *
   * @param initialCapacity the initial capacity of the backing map.
   */
  public ParallelCompressedHashMap(final int initialCapacity)
  {
    this(initialCapacity, DEFAULT_LOAD_FACTOR);
  }

  /**
   * <p>Constructor for ParallelCompressedHashMap.</p>
   */
  public ParallelCompressedHashMap()
  {
    this(DEFAULT_INITIAL_CAPACITY);
  }

  /**
   * <p>Constructor for ParallelCompressedHashMap.</p>
   *
   * @param map a {@link de.lgohlke.collection.CompressedHashMap} supplying the compressors.
   */
  public ParallelCompressedHashMap(final CompressedHashMap<T1, T2> map)
  {
    this(DEFAULT_INITIAL_CAPACITY, map);
  }

  /**
   * <p>Constructor for ParallelCompressedHashMap.</p>
   *
   * @param defaultInitialCapacity the initial capacity of the backing map.
   * @param map a {@link de.lgohlke.collection.CompressedHashMap} supplying the compressors.
   */
  public ParallelCompressedHashMap(final int defaultInitialCapacity, final CompressedHashMap<T1, T2> map)
  {
    this(defaultInitialCapacity, DEFAULT_LOAD_FACTOR, map);
  }

  /**
   * <p>Constructor for ParallelCompressedHashMap.</p>
   *
   * @param initialCapacity the initial capacity of the backing map.
   * @param loadFactor the load factor of the backing map.
   * @param map a {@link de.lgohlke.collection.CompressedHashMap} supplying the compressors.
   */
  public ParallelCompressedHashMap(final int initialCapacity, final float loadFactor, final CompressedHashMap<T1, T2> map)
  {
    super(initialCapacity, loadFactor);

    addKeyCompressor(map.getKeyCompressor());
    addValueCompressor(map.getValueCompressor());

    // concurrency level sized to the worker pool so all workers can write in parallel
    setDataBackend(new ConcurrentHashMap<CompressableValue<T1>, CompressableValue<T2>>(initialCapacity, loadFactor, workerPool.getThreadPoolSize()));
    startWorkerPool();
  }

  /**
   * Stops all workers by enqueuing one poison pill per worker thread, then
   * blocks until the queue has been drained.
   */
  private synchronized void stopWorkerPool()
  {
    for (int i = 0; i < workerPool.getThreadPoolSize(); i++)
    {
      queue.add(new KeyValue<T1, T2>(null, null));
    }
    flush();
  }

  /**
   * Submits one {@link QueueJob} per worker thread to the shared pool.
   */
  private synchronized void startWorkerPool()
  {
    for (int i = 0; i < workerPool.getThreadPoolSize(); i++)
    {
      workerPool.submit(new QueueJob());
    }
  }

  /**
   * {@inheritDoc}
   *
   * <p>Shuts the workers down before the map is reclaimed. Finalization is not
   * guaranteed to run; prefer explicit shutdown where life-cycle matters.</p>
   */
  @Override
  protected void finalize()
  {
    stopWorkerPool();
    try
    {
      super.finalize();
    }
    catch (Throwable ignored)
    {
      // a finalizer must never propagate exceptions
    }
  }

  /**
   * Immutable key/value holder queued for asynchronous compression.
   * Static so instances carry no hidden reference to the enclosing map.
   */
  private static final class KeyValue<K, V>
  {
    private final K key;
    private final V value;

    KeyValue(final K key, final V value)
    {
      this.key = key;
      this.value = value;
    }
  }

  /**
   * Background job: takes entries off the work queue, compresses them and
   * replaces the uncompressed backend entry with the compressed one. Terminates
   * when it receives the (null, null) poison pill.
   */
  private class QueueJob implements Runnable
  {
    private boolean shouldEnd = false;

    public void run()
    {
      try
      {
        while (!shouldEnd)
        {
          KeyValue<T1, T2> element = queue.take();
          if ((element.key == null) && (element.value == null))
          {
            // poison pill: end this worker
            shouldEnd = true;
          }
          else
          {
            compressEntry(element);
          }
        }
      }
      catch (InterruptedException e)
      {
        // restore the interrupt status so the pool can observe the interruption
        Thread.currentThread().interrupt();
      }
      catch (Exception e)
      {
        log.error(e.getMessage(), e);
      }
    }

    /** Compresses one entry and updates the backend accordingly. */
    private void compressEntry(final KeyValue<T1, T2> element)
    {
      CompressableValue<T1> compressedKey = getKeyCompressor().compress(element.key);
      CompressableValue<T2> compressedValue = getValueCompressor().compress(element.value);

      if (compressedKey.isCompressed())
      {
        // store under the compressed key and drop the uncompressed placeholder that put() created
        getDataBackend().put(compressedKey, compressedValue);
        getDataBackend().remove(new CompressableValue<T1>(element.key, false));
      }
      else if (compressedValue.isCompressed())
      {
        // key not compressible: overwrite the placeholder with the compressed value
        getDataBackend().put(compressedKey, compressedValue);
      }
      // neither compressible: keep the uncompressed entry put() already stored
    }
  }

  /**
   * {@inheritDoc}
   *
   * <p>Stores the entry uncompressed immediately and schedules it for
   * asynchronous compression. NOTE(review): the previous value is looked up
   * under the uncompressed key only, so an already-compressed previous entry
   * yields {@code null} — confirm callers tolerate this.</p>
   */
  @Override
  public T2 put(final T1 key, final T2 value)
  {
    CompressableValue<T1> compressableKey = new CompressableValue<T1>(key, false);
    CompressableValue<T2> compressableValue = new CompressableValue<T2>(value, false);

    CompressableValue<T2> result = getDataBackend().put(compressableKey, compressableValue);

    queue.add(new KeyValue<T1, T2>(key, value));

    return result == null ? null : result.getValue();
  }

  /** {@inheritDoc} */
  @Override
  public T2 remove(final Object key)
  {
    // drain pending work so no worker re-inserts the entry after removal
    stopWorkerPool();
    T2 result = super.remove(key);
    startWorkerPool();
    return result;
  }

  /** {@inheritDoc} */
  @Override
  public void clear()
  {
    stopWorkerPool();
    super.clear();
    startWorkerPool();
  }

  /** {@inheritDoc} */
  @Override
  public void printStats()
  {
    stopWorkerPool();
    super.printStats();
    startWorkerPool();
  }

  /**
   * Blocks until the work queue is empty.
   *
   * <p>Sleeps between polls instead of busy-spinning so waiting does not burn a
   * CPU core. Returns early (with the interrupt status set) if the calling
   * thread is interrupted.</p>
   */
  public void flush()
  {
    while (!queue.isEmpty())
    {
      try
      {
        Thread.sleep(1);
      }
      catch (InterruptedException e)
      {
        Thread.currentThread().interrupt();
        return;
      }
    }
  }

  /**
   * {@inheritDoc}
   *
   * @throws UnsupportedOperationException always — reconfiguration is not supported.
   */
  @Override
  public ParallelCompressedHashMap<T1, T2> configure(final CompressedHashMap<T1, T2> map)
  {
    throw new UnsupportedOperationException("not supported operation");
  }
}
