/*
 *  Copyright 2013 National Institute of Advanced Industrial Science and Technology
 *  
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *  
 *      http://www.apache.org/licenses/LICENSE-2.0
 *  
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package org.sss.client;

import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Queue;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sss.mapreduce.Configuration;
import org.sss.mapreduce.GroupID;
import org.sss.mapreduce.SssException;
import org.sss.mapreduce.StorageNode;
import org.sss.mapreduce.Tuple;
import org.sss.mapreduce.TupleGroupScaner;
import org.sss.mapreduce.datatype.Packable;

/**
 * Getter of data in SSS servers.
 */
public class DataGetter<K extends Packable, V extends Packable> implements Iterable<Tuple<K, V>>, Closeable {
  private static Logger logger = LoggerFactory.getLogger(DataGetter.class);
  private boolean done = false;
  private boolean closed = false;
  private final Thread scanThread;
  private Throwable exception = null;
  private final long queueLimitNBytes;
  private final long cacheLimitNBytes;
  private long queueNBytes = 0;
  private Queue<Cache> queue = new LinkedList<Cache>();
  private ThreadLocal<Cache> cache = new ThreadLocal<Cache>();;
  private Map<Thread, Cache> cacheMap = new HashMap<Thread, Cache>();

  /**
   * Create <code>DataGetter</code> object which can access to all the
   * storage nodes that <code>SssClient</code> knows.
   *
   * @param   client     the <code>SssClient</code>
   * @param   keyClass   Class of key type.
   * @param   valueClass Class of value type.
   * @param   groupID    <code>GroupID</code> to retrieve the tuples
   * @throws  SssException
   * @see     org.sss.client.SssClient
   */
  public static <K extends Packable, V extends Packable>
  DataGetter<K, V> create(SssClient client,
                          Class<K> keyClass,
                          Class<V> valueClass,
                          GroupID groupID) throws SssException {
    return new DataGetter<K, V>(client, keyClass, valueClass, groupID, client.getClusterManager().getStorageNodes());
  }

  /**
   * Create <code>DataGetter</code> object which can access to
   * the given storage nodes.
   *
   * @param   client       the <code>SssClient</code>
   * @param   keyClass     Class of key type.
   * @param   valueClass   Class of value type.
   * @param   groupID      <code>GroupID</code> to retrieve the recoreds
   * @param   storageNodes the list of storage nodes to be accessed.
   * @throws  SssException
   * @see     org.sss.client.SssClient
   */
  public static <K extends Packable, V extends Packable>
  DataGetter<K, V> create(SssClient client,
                          Class<K> keyClass,
                          Class<V> valueClass,
                          GroupID groupID,
                          List<StorageNode> storageNodes) throws SssException {
    return new DataGetter<K, V>(client, keyClass, valueClass, groupID, storageNodes);
  }

  private DataGetter(SssClient client,
                     Class<K> keyClass,
                     Class<V> valueClass,
                     GroupID groupID,
                     List<StorageNode> storageNodes) throws SssException {

    Configuration conf = client.getConfiguration();
    this.cacheLimitNBytes = conf.getInt("data_getter.cache.nbytes", 16 * 1024);
    this.queueLimitNBytes = conf.getInt("data_getter.queue.nbytes", 16 * 16 * 1024);
    final DataScaner<K, V> scaner = new DataScaner<K, V>(client, keyClass, valueClass, groupID, new TupleGroupScaner<K, V>() {
      @Override
      public void set(K key, V value) throws SssException {
        put(key, value);
      }
    }, storageNodes);
    this.scanThread = new Thread(new Runnable() {
      public void run() { scan(scaner); }
    });
    scanThread.start();
  }

  private void scan(DataScaner<K, V> scaner) {
    try {
      scaner.scan();
      putEndMark();
    }
    catch (Throwable e) {
      synchronized (this) {
        if (exception == null) {
          exception = e;
          notifyAll();
        }
      }
    }
    finally {
      try {
        scaner.close();
      }
      catch (IOException e) {
        logger.error("", e);
      }
    }
  }

  /**
   * Retrieve all the tuples from SSS storage servers.
   *
   * @return an <tt>Iterator</tt> to access <code>Tuple</code> objects
   */
  @Override
  public Iterator<Tuple<K, V>> iterator() {
    return new TupleIterator();
  }

  /**
   * Close this <code>DataGetter</code>.
   *
   * This function releases resources to get data.
   */
  @Override
  public void close() {
    synchronized(this) {
      closed = true;
      notifyAll();
    }
    try {
      scanThread.join();
    }
    catch (InterruptedException e) {
    }
  }

  private void put(K key, V value) throws SssException {
    Cache c = cache.get();
    if (c == null) {
      c = createCache();
    }
    c.add(new Tuple<K, V>(key, value));
    if (c.isFull()) {
      flushCache(c, false);
    }
  }

  private synchronized Cache createCache() {
    Cache c = new Cache();
    cacheMap.put(Thread.currentThread(), c);
    cache.set(c);
    return c;
  }

  private synchronized void flushCache(Cache c, boolean last) throws SssException {
    while (queueNBytes >= queueLimitNBytes && exception == null && !closed) {
      try {
        wait();
      }
      catch (InterruptedException e) {
      }
    }
    if (exception != null) {
      throw new SssException(exception);
    }
    if (closed) {
      ClosedException e = new ClosedException();
      exception = e;
      throw e;
    }
    queueNBytes += c.nbytes;
    queue.add(c);
    if (!last) {
      createCache();
    }
    notifyAll();
  }

  private synchronized void putEndMark() throws SssException {
    for (Cache c: cacheMap.values()) {
      flushCache(c, true);
    }
    done = true;
    notifyAll();
  }

  private synchronized Iterator<Tuple<K, V>> getFromQueue() {
    while (queue.isEmpty() && exception == null && !done) {
      try {
        wait();
      }
      catch (InterruptedException e) {
      }
    }
    if (exception != null) {
      throw new RuntimeException(exception);
    }
    if (queue.isEmpty()) {
      assert done;
      return null;
    }
    Cache c = queue.poll();
    assert queueNBytes >= c.nbytes;
    queueNBytes -= c.nbytes;
    notifyAll();
    return c.data.iterator();
  }

  private class Cache {
    public List<Tuple<K, V>> data = new ArrayList<Tuple<K, V>>();
    public long nbytes = 0;

    public void add(Tuple<K, V> tuple) {
      data.add(tuple);
      nbytes += tuple.getRoughSize();
    }

    public boolean isFull() {
      return nbytes >= cacheLimitNBytes;
    }
  };

  private class TupleIterator implements Iterator<Tuple<K, V>> {
    private Iterator<Tuple<K, V>> it;

    public TupleIterator() {
      this.it = getFromQueue();
    }

    @Override
    public boolean hasNext() {
      while (it != null && !it.hasNext()) {
        it = getFromQueue();
      }
      return it != null;
    }

    @Override
    public Tuple<K, V> next() {
      return it.next();
    }

    @Override
    public void remove() {
      throw new UnsupportedOperationException();
    }
  };

  @SuppressWarnings("serial")
  /**
   * Exception expressing that DataGetter already has been closed.
   */
  public static class ClosedException extends SssException {
    public ClosedException() { super("closed"); }
  }
}
