/*
 *  Copyright 2013 National Institute of Advanced Industrial Science and Technology
 *  
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *  
 *      http://www.apache.org/licenses/LICENSE-2.0
 *  
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package org.sss.server;

import static org.sss.util.Util.orElse;
import static org.sss.common.Utils.loadPackableClass;
import static org.sss.common.Utils.loadPartitionerClass;
import static org.sss.common.Utils.loadReducerClass;
import static org.sss.common.Utils.newInstance;

import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;

import org.slf4j.Logger;
import org.sss.common.Bucket;
import org.sss.common.CanceledException;
import org.sss.common.CommonStatus;
import org.sss.common.DataReceiver;
import org.sss.common.ErrorDataReceiverException;
import org.sss.common.Inlet;
import org.sss.common.SentinelThreadPool;
import org.sss.common.io.DataReader;
import org.sss.common.io.IOManager;
import org.sss.mapreduce.Configuration;
import org.sss.mapreduce.Decoder;
import org.sss.mapreduce.GroupID;
import org.sss.mapreduce.Partitioner;
import org.sss.mapreduce.Reducer;
import org.sss.mapreduce.SssException;
import org.sss.mapreduce.StorageNode;
import org.sss.mapreduce.datatype.Packable;
import org.sss.util.CallbackInterface;
import org.sss.util.FactoryThreadLocal;

/**
 * Base class for a server-side task. A task reads tuples from an input group,
 * feeds them through user-defined combiner/partitioner logic, and writes the
 * results to one or more outlets managed by {@link OutletManager}.
 *
 * <p>Every instance is registered in a process-wide, weakly-referenced
 * {@link TaskTable} so that all tasks belonging to a job can be looked up (and
 * e.g. canceled) by job UUID via {@link #get(UUID)}.
 *
 * <p>Thread-safety: cancellation, sleeper bookkeeping, and read-request
 * submission are {@code synchronized} on this instance; the outlet completion
 * count uses an {@link AtomicInteger} because outlet callbacks arrive on I/O
 * threads.
 */
public abstract class Task {
  // Shared across all Task instances. Declared final: neither field is ever
  // reassigned, and final guarantees safe publication of the registry that is
  // accessed from multiple threads.
  private static final Logger logger = SssServer.getLogger(Task.class);
  private static final TaskTable taskTable = new TaskTable();

  private final TaskSet taskSet;
  private final SssServerResource sssResource;
  private final StorageNode storageNode;
  private final Configuration conf;
  private final GroupID inputGID;
  private final OutletManager outletManager = new OutletManager();
  private final List<DataReader> dataReaders = new ArrayList<DataReader>();
  // Thread-local decoders for the input key/value classes; null when the
  // corresponding class was not supplied (see Input(GroupID) constructor).
  private ThreadLocal<Decoder> keyDecoder = null;
  private ThreadLocal<Decoder> valueDecoder = null;
  private boolean isCanceled = false;
  private final UUID id;
  // Number of outlets that have not yet completed writing; when it reaches
  // zero the ioWriteComplete() hook fires.
  private final AtomicInteger numActiveOutlets = new AtomicInteger(0);

  /**
   * Creates a task bound to one input group and registers it in the global
   * task table.
   *
   * @param taskSet  the task set this task belongs to
   * @param jobID    UUID identifying the owning job (used for lookup/cancel)
   * @param resource shared server resources (thread pools, I/O manager, ...)
   * @param conf     job configuration (supplies the user class loader)
   * @param node     storage node this task runs against
   * @param input    input group descriptor; its key/value classes may be null
   * @throws SssException if a decoder factory cannot be created
   */
  // NOTE(review): this constructor publishes "this" to taskTable before the
  // subclass constructor has finished, and calls the overridable
  // loadThreadLocalDecoder(); both are accepted here but worth confirming no
  // subclass depends on fully-initialized state at registration time.
  public Task(TaskSet taskSet,
              UUID jobID,
              SssServerResource resource,
              Configuration conf,
              StorageNode node,
              Input input) throws SssException {
    this.taskSet = taskSet;
    this.id = jobID;
    this.sssResource = resource;
    this.storageNode = node;
    this.conf = conf;
    this.inputGID = input.GID;
    this.keyDecoder = loadThreadLocalDecoder(input.keyClass);
    this.valueDecoder = loadThreadLocalDecoder(input.valueClass);
    taskTable.register(this);
  }
  public TaskSet getTaskSet() {
    return taskSet;
  }
  public UUID getId() {
    return id;
  }
  /**
   * Returns the per-thread decoder for input keys.
   * NOTE(review): throws NullPointerException when the task was built from an
   * Input without a key class — confirm callers only use this on typed inputs.
   */
  public Decoder getKeyDecoder() {
    return keyDecoder.get();
  }
  /** Returns the per-thread decoder for input values (same caveat as keys). */
  public Decoder getValueDecoder() {
    return valueDecoder.get();
  }
  public StorageNode getStorageNode() {
    return storageNode;
  }
  public Configuration getConfiguration() {
    return conf;
  }
  public GroupID getInputGID() {
    return inputGID;
  }
  public SssServerConfiguration getSssServerConfiguration() {
    return sssResource.getServerConfiguration();
  }
  public SentinelThreadPool getIOTPRead() {
    return sssResource.getIOTPRead();
  }
  public SentinelThreadPool getIOTPWrite() {
    return sssResource.getIOTPWrite();
  }
  public SentinelThreadPool getWKTP() {
    return sssResource.getWKTP();
  }
  public IOManager getIOManager() {
    return sssResource.getIOManager();
  }
  public SssServerResource getSssServerResource() {
    return sssResource;
  }
  public List<DataReader> getDataReaders() {
    return dataReaders;
  }

  /**
   * Resets this task for another execution pass: drops finished readers and
   * renews every outlet (re-arming the active-outlet counter).
   */
  public synchronized void renew() throws SssException {
    this.dataReaders.clear();
    outletManager.renew();
  }

  /** Records the reader for cancellation/status and schedules it on the read pool. */
  protected synchronized void submitReadRequest(DataReader r) {
    dataReaders.add(r);
    getIOTPRead().execute(r);
  }

  /** @throws SssException (CanceledException) if {@link #cancel()} was called. */
  protected synchronized void checkCanceled() throws SssException {
    if (isCanceled) {
      throw new CanceledException("Task is canceled.");
    }
  }
  protected OutletManager getOutlets() {
    return outletManager;
  }

  /**
   * Cancel this task: interrupts any threads parked via {@link #goToSleep()},
   * marks the task canceled, and cancels all submitted readers. Idempotent.
   */
  public synchronized void cancel() {
    if (!isCanceled) {
      logger.error("cancel Task \"{}\".", getId());
      disturbSleepers();
      isCanceled = true;
      for (DataReader r: dataReaders) {
        r.cancel();
      }
    }
  }

  /**
   * Central error handler for exceptions thrown while handling data.
   * Unwraps reflective InvocationTargetException, propagates the error to all
   * outlets, and cancels the task — unless the error is an
   * ErrorDataReceiverException (already reported downstream) or a
   * CanceledException (cancellation is not an error worth logging).
   */
  protected void handleExceptionInHandle(Throwable e, String message) {
    if ((e instanceof InvocationTargetException) && e.getCause() != null) {
      e = e.getCause();
    }
    if (e instanceof ErrorDataReceiverException) {
      // Do nothing
    }
    else {
      getOutlets().setError(e);
      if (!(e instanceof CanceledException)) {
        logger.error(message, e);
        cancel();
      }
    }
  }

  /**
   * Hook invoked when writing to all outlets is completed (or an outlet
   * failed). Subclasses that create outlets must override this.
   *
   * @param e the failure, or null on clean completion
   */
  public void ioWriteComplete(Throwable e) {
    throw new UnsupportedOperationException("Does not override ioWriteComplete().");
  }

  /**
   * Immutable descriptor of a task's input: group ID plus optional
   * key/value Packable classes (null means untyped input).
   */
  public static class Input {
    public final GroupID GID;
    public final Class<? extends Packable> keyClass;
    public final Class<? extends Packable> valueClass;

    public Input(GroupID GID, Class<? extends Packable> k, Class<? extends Packable> v) {
      this.GID = GID;
      this.keyClass = k;
      this.valueClass = v;
    }

    /** Untyped input: no key/value classes, so no decoders will be built. */
    public Input(GroupID GID) {
      this(GID, null, null);
    }

    /** Builds an Input by resolving the inlet's class names via the job class loader. */
    public Input(Configuration conf, Inlet inlet) throws SssException {
      this(inlet.GID,
          loadPackableClass(conf.getClassLoader(), inlet.keyClassName),
          loadPackableClass(conf.getClassLoader(), inlet.valueClassName));
    }
  }

  /** Reducer.Context adapter handed to user-defined combiners. */
  private class CombinerContext extends ContextBase implements Reducer.Context {
    public CombinerContext(ContextImpl impl) {
      super(impl);
    }
  }

  /**
   * Creates and tracks this task's outlets (optionally wrapped in a combiner
   * stage) and fans control operations (setError/close/renew) out to them.
   *
   * NOTE: creation and accessors are not synchronized; callers are expected to
   * create all outlets before concurrent use begins.
   */
  protected class OutletManager {
    private final List<TaskOutput> outlets = new ArrayList<TaskOutput>();
    /**
     * Creates an outlet for {@code outputGID}. If {@code combinerClass} is
     * non-null the raw outlet is wrapped in a CombinerOutput that runs the
     * user combiner before forwarding. Each created outlet increments
     * numActiveOutlets; its completion callback decrements it and fires
     * ioWriteComplete() when the count reaches zero or an error occurred.
     *
     * NOTE(review): on errors from several outlets the callback condition
     * (count==0 || e!=null) can invoke ioWriteComplete() more than once —
     * confirm overrides tolerate repeated invocation.
     */
    public TaskOutput create(GroupID outputGID,
                       Class<? extends Packable> keyClass,
                       Class<? extends Packable> valClass,
                       Class<? extends Reducer> combinerClass,
                       Class<? extends Partitioner> partitionerClass,
                       int combineCacheSize,
                       long multiplicity,
                       long queueLimitNBytes) throws SssException {
      numActiveOutlets.incrementAndGet();
      TaskOutput out = new TaskOutputImpl(outputGID, 
          taskSet.getEncodings().getEncoderFactory(keyClass),
          taskSet.getEncodings().getEncoderFactory(valClass),
          partitionerClass, multiplicity, queueLimitNBytes,
          sssResource, new CallbackInterface<Throwable>() {
            @Override
            public void callback(Throwable e) {
              if (numActiveOutlets.decrementAndGet() == 0 || e != null) {
                ioWriteComplete(e);
              }
            }
          });
      if (combinerClass != null) {
        CombinerContext context = new CombinerContext(
            new ContextImpl(
              getSssServerResource(),
              getStorageNode(), getTaskSet(), conf));
        Reducer combiner = newInstance(combinerClass);
        try {
          // User code: any failure is wrapped so the job reports a clear cause.
          combiner.configure(context);
        } catch (Throwable e) {
          throw new SssException("The error occurs in user-defined combiner initialization.", e);
        }
        TaskOutput com = new CombinerOutput(
            combiner, context,
            sssResource.getCombinerTP(), 
            getSssServerConfiguration().combine_queue_limit_nbytes, 
            combineCacheSize, out);
        out = com;
      }
      outlets.add(out);
      return out;
    }
    /** Name-based overload: resolves all classes via the job class loader first. */
    public TaskOutput create(GroupID outputGID,
                       String keyClass,
                       String valClass,
                       String combinerClass,
                       String partitionerClass,
                       int combineCacheSize,
                       long multiplicity,
                       long queueLimitNBytes) throws SssException {
      ClassLoader cl = conf.getClassLoader();
      return create(outputGID,
          loadPackableClass(cl, keyClass),
          loadPackableClass(cl, valClass),
          loadReducerClass(cl, combinerClass),
          loadPartitionerClass(cl, partitionerClass), 
          combineCacheSize, multiplicity, queueLimitNBytes);
    }
    public List<TaskOutput> get() {
      return outlets;
    }
    public TaskOutput get(int i) {
      return outlets.get(i);
    }
    /** Propagates an error to every outlet. */
    public synchronized void setError(Throwable e) {
      for (TaskOutput o: outlets) {
        o.setError(e);
      }
    }
    /**
     * Closes every outlet. The first SssException is rethrown after all
     * outlets have been closed; later exceptions with a different cause are
     * logged and dropped (same-cause duplicates are silently ignored).
     */
    public synchronized void closeAll() throws SssException {
      SssException exp = null;
      for (TaskOutput o: outlets) {
        try {
          o.close();
        } catch (SssException e) {
          if (exp == null) {
            exp = e;
          } else if (exp.getCause() != e.getCause()) {
            logger.error("Ignore error", e);
          }
        }
      }
      if (exp != null) {
        throw exp;
      }
    }

    /** Re-arms every outlet for another pass and restores the active count. */
    public void renew() throws SssException {
      for (TaskOutput o: outlets) {
        numActiveOutlets.incrementAndGet();
        o.renew();
      }
    }

    public int size() {
      return outlets.size();
    }

    /** Returns one thread-local putter per outlet, each with the given cache size. */
    public List<DataReceiver<Bucket>> getTLPutters(int cacheSize) throws SssException {
      List<DataReceiver<Bucket>> cache = new ArrayList<DataReceiver<Bucket>>();
      for (TaskOutput o: outlets) {
        cache.add(o.getTLPutter(cacheSize));
      }
      return cache;
    }
  }

  /**
   * Returns all live tasks registered under the given job UUID.
   */
  public static List<Task> get(UUID id) {
    return taskTable.get(id);
  }

  /**
   * Process-wide registry of tasks held via WeakReference so that finished
   * tasks can be garbage-collected; dead entries are pruned lazily during
   * register() and get().
   */
  private static class TaskTable {
    private final List<WeakReference<Task>> taskList = new LinkedList<WeakReference<Task>>();

    public synchronized void register(Task t) {
      for (Iterator<WeakReference<Task>> it = taskList.iterator();it.hasNext();) {
        Task task = it.next().get();
        if (task == null) {
          it.remove();
        }
        // A task must never be registered twice (checked only with -ea).
        assert task != t;
      }
      taskList.add(new WeakReference<Task>(t));
    }

    /** Collects every live task whose job UUID equals {@code id}. */
    public synchronized List<Task> get(UUID id) {
      List<Task> ret = new ArrayList<Task>();
      for (Iterator<WeakReference<Task>> it = taskList.iterator();it.hasNext();) {
        Task task = it.next().get();
        if (task != null) {
          if (task.id.equals(id)) {
            ret.add(task);
          }
        } else {
          it.remove();
        }
      }
      return ret;
    }
  }

  // Threads that announced they are about to block; cancel() interrupts them.
  private final Set<Thread> sleepers = new HashSet<Thread>();
  /** Registers the current thread so that cancel() can interrupt its sleep. */
  public synchronized void goToSleep() {
    sleepers.add(Thread.currentThread());
  }
  /** Unregisters the current thread after it wakes up. */
  public synchronized void comeOutOfSleep() {
    sleepers.remove(Thread.currentThread());
  }
  /** Interrupts every registered sleeper and clears the set. */
  public synchronized void disturbSleepers() {
    for (Thread t: sleepers) {
      t.interrupt();
    }
    sleepers.clear();
  }

  // Utility
  /**
   * Resolves {@code className} via the configuration's class loader and builds
   * a thread-local decoder for it; returns null when {@code className} is null.
   */
  public ThreadLocal<Decoder> loadThreadLocalDecoder(Configuration conf, String className) throws SssException {
    if (className == null) {
      return null;
    }
    return loadThreadLocalDecoder(loadPackableClass(conf.getClassLoader(), className));
  }

  /** Builds a thread-local decoder for {@code klass}; null in, null out. */
  public ThreadLocal<Decoder> loadThreadLocalDecoder(Class<? extends Packable> klass) throws SssException {
    ThreadLocal<Decoder> ret = null;
    if (klass != null) {
      ret = new FactoryThreadLocal<Decoder>(taskSet.getEncodings().getDecoderFactory(klass));
    }
    return ret;
  }

  /**
   * Builds the argument array passed to a user method by reflection:
   * slot 0 is the context, slots 1..nInputs are left null (the caller is
   * expected to fill them with input iterators), and the remaining slots are
   * Output wrappers around the given putters.
   */
  protected static Object[] createArguments(Object context, int nInputs, List<DataReceiver<Bucket>> cache) {
    Object[] args = new Object[1 + nInputs + cache.size()];
    args[0] = context;
    for (int i = 0;i < cache.size(); ++i) {
      args[1 + nInputs + i] = new Output(cache.get(i));
    }
    return args;
  }

  /**
   * Output implementation that forwards each (key, value) pair as a Bucket to
   * the wrapped receiver.
   *
   * NOTE: This is not thread-safe.
   */
  protected static class Output implements org.sss.mapreduce.Output<Packable, Packable> {
    private final DataReceiver<Bucket> output;

    public Output(DataReceiver<Bucket> output) {
      this.output = output;
    }

    @Override
    public void write(Packable key, Packable value) throws SssException {
      output.put(new Bucket(key, value));
    }
  }

  /**
   * Fills the status object with this task's input/output counters, or with
   * the supplied error. Any failure while gathering counters is itself
   * recorded as the status exception rather than propagated.
   */
  protected void setValueToStatus(CommonStatus s, Throwable e) {
    if (e == null) {
      try {
        for (DataReader r: getDataReaders()) {
          GroupID gid = r.getSource();
          s.completedSubTaskCount.put(gid, orElse(s.completedSubTaskCount.get(gid), 0L) + 1L);
          s.inputTupleCount.put(gid, orElse(s.inputTupleCount.get(gid), 0L) + r.getInputCount());
          s.inputTupleNBytes.put(gid, orElse(s.inputTupleNBytes.get(gid), 0L) + r.getInputNBytes());
        }

        List<StorageNode> nodes = getSssServerResource().getStorageNodeList();
        for (TaskOutput outlet: getOutlets().get()) {
          s.outputTupleCount.put(
            outlet.getGroupID(),
            ServerUtil.convertLongArrayToStorageNodeKeyMap(nodes, outlet.getOutputCountCopy()));
          s.outputTupleNBytes.put(
            outlet.getGroupID(),
            ServerUtil.convertLongArrayToStorageNodeKeyMap(nodes, outlet.getOutputNBytesCopy()));
        }
      } catch (Throwable ee) {
        s.exception = ee;
      }
    } else {
      s.exception = e;
    }
  }
}
