/*
 *  Copyright 2013 National Institute of Advanced Industrial Science and Technology
 *  
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *  
 *      http://www.apache.org/licenses/LICENSE-2.0
 *  
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package org.sss.client;

import static org.sss.util.Util.orElse;

import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;

import org.slf4j.Logger;
import org.sss.common.CommonStatus;
import org.sss.common.Inlet;
import org.sss.common.Outlet;
import org.sss.common.Reflection;
import org.sss.common.SssServerRemote;
import org.sss.mapreduce.Configuration;
import org.sss.mapreduce.GroupID;
import org.sss.mapreduce.HashPartitioner;
import org.sss.mapreduce.KeyFormat;
import org.sss.mapreduce.Mapper;
import org.sss.mapreduce.Partitioner;
import org.sss.mapreduce.Reducer;
import org.sss.mapreduce.SssException;
import org.sss.mapreduce.StorageNode;
import org.sss.mapreduce.datatype.Packable;
import org.sss.util.BinaryCallable;
import org.sss.util.OptionBox;
import org.sss.util.StopWatch;

/**
 * <code>Job</code> represents a job node in a job tree.
 *
 * {@link Job.Builder} is available to create instance of this class.
 */
public class Job {
  /** Class-wide logger, obtained through the SSS client logging facility. */
  private static Logger logger = SssClient.getLogger(Job.class);
  /** Lifecycle state of a job: 'initial' (not yet executed) or 'done' (finished). */
  private static enum Status { initial, done };
  /**
   * Immutable descriptor of one job input: an optional display name, the
   * tuple group the input is read from, and optional key/value class
   * overrides (null means "use the types declared by the proc").
   */
  private static class Input {
    public final String name;
    public final TupleGroupStatus tupleGroupStatus;
    public final Class<? extends Packable> keyClass;
    public final Class<? extends Packable> valueClass;

    public Input(String name,
                 TupleGroupStatus tupleGroupStatus,
                 Class<? extends Packable> keyClass,
                 Class<? extends Packable> valueClass) {
      assert tupleGroupStatus != null;
      this.name = name;
      this.tupleGroupStatus = tupleGroupStatus;
      this.keyClass = keyClass;
      this.valueClass = valueClass;
    }

    /** Shorthand for the {@link GroupID} of the backing tuple group. */
    public GroupID getGroupID() {
      return tupleGroupStatus.getGroupID();
    }
  }
  /**
   * Immutable descriptor of one job output: an optional display name, the
   * destination tuple group, an optional combiner and partitioner class,
   * and optional key/value class overrides (null means "use the types
   * declared by the proc").
   */
  private static class Output {
    public final String name;
    public final TupleGroupStatus tupleGroupStatus;
    public final Class<? extends Reducer> combiner;
    public final Class<? extends Partitioner> partitioner;
    public final Class<? extends Packable> keyClass;
    public final Class<? extends Packable> valueClass;

    public Output(String name,
                  TupleGroupStatus tupleGroupStatus,
                  Class<? extends Reducer> combiner,
                  Class<? extends Partitioner> partitioner,
                  Class<? extends Packable> keyClass,
                  Class<? extends Packable> valueClass) {
      assert tupleGroupStatus != null;
      this.name = name;
      this.tupleGroupStatus = tupleGroupStatus;
      this.combiner = combiner;
      this.partitioner = partitioner;
      this.keyClass = keyClass;
      this.valueClass = valueClass;
    }

    /** Shorthand for the {@link GroupID} of the destination tuple group. */
    public GroupID getGroupID() {
      return tupleGroupStatus.getGroupID();
    }
  }
  private Status status;                      // lifecycle state; mutated by setStatus()/reset()
  private final UUID jobID;                   // random unique id assigned at construction
  private final String name;                  // display name, also used as execInfo key prefix
  private final Reflection.ProcInfo procInfo; // reflective info about the proc (type, arities, tuple types)
  private final Class<?> proc;                // user's map/reduce class; its name is sent to remote servers
  private final List<Input> inputs;
  private final List<Output> outputs;
  private final Configuration conf;           // per-job conf, merged with the engine-wide one in the ctor
  private final JobEngine engine;
  private final ClusterManager clusterManager;
  private final SssClient client;

  // Counters accumulated from all storage nodes; replaced by handleCommonStatus().
  private Map<String, Long> counterMap = new HashMap<String, Long>();

  /**
   * Allocate a <code>Job</code> object and initialize it based on
   * the specified <code>Job.Builder</code>'s properties.
   *
   * Validates input/output types and registers the new job with the
   * builder's {@link JobEngine}.
   *
   * @param builder the builder carrying name, proc, inputs, outputs and conf
   * @throws SssException if input or output arities/types are inconsistent
   */
  private Job(Builder builder) throws SssException {
    this.status = Status.initial;
    this.jobID  = UUID.randomUUID();
    this.name = builder.name;
    this.procInfo = builder.procInfo;
    this.proc = builder.proc;

    // inputs/outputs must be assigned before the check methods run,
    // because checkInputTypes()/checkOutputTypes() read these fields.
    this.inputs = builder.inputs;
    checkInputTypes();

    this.outputs = builder.outputs;
    checkOutputTypes();

    this.conf = builder.conf;
    conf.merge(builder.engine.getConfiguration()); // merge job tree wide configuration

    this.engine = builder.engine;
    this.client = engine.getClient();
    this.clusterManager = engine.getClusterManager();
    this.engine.addJob(this);
  }
  /**
   * Check if input types are valid or not.
   *
   * Validates the input arity against the proc's declared arity, checks the
   * 'broadcast' attribute of each input's GroupID according to the proc type
   * (map/reduce), and finally checks key/value type compatibility per input.
   *
   * @throws SssException if arity, broadcast attributes or types are invalid
   */
  private void checkInputTypes() throws SssException {
    int arity = procInfo.getInputTypes().size();

    if (arity != inputs.size()) {
      throw new SssException("input arity and inlets mismatch");
    }
    switch (procInfo.getType()) {
    case MAP:
      switch (arity) {
      case 2:
        // Second input(driving input of 'merge') must be 'broadcast' type.
        if (!inputs.get(1).getGroupID().isBroadcast()) {
          throw new SssException("second input is not 'broadcast':" + inputs.get(1));
        }
        // intentionally fall through here
      case 1:
        // First input must not be 'broadcast' type.
        if (inputs.get(0).getGroupID().isBroadcast()) {
          throw new SssException("first input is 'broadcast': " + inputs.get(0));
        }
        break;
      default:
        throw new SssException("too many inlets for map proc");
      }
      break;
    case REDUCE:
      //
      // TBD: Is it necessary to check attribute of GroupIDs for reduce?
      //
      switch (arity) {
      case 2:
        // second input must not be 'broadcast' type
        if (inputs.get(1).getGroupID().isBroadcast()) {
          throw new SssException("second input is 'broadcast': " + inputs.get(1));
        }
        // intentionally fall through here
      case 1:
        // first input must not be 'broadcast' type
        if (inputs.get(0).getGroupID().isBroadcast()) {
          throw new SssException("first input is 'broadcast': " + inputs.get(0));
        }
        break;
      default:
        throw new SssException("too many inlets for reduce proc");
      }
      for (Input c: inputs) {
        if (c.getGroupID().getKeyFormat() == KeyFormat.ORDER) {
          // FIX: inserted the missing space between "for sort." and "Therefore,"
          // (the concatenated literals previously produced "sort.Therefore").
          logger.warn(String.format(
                "Key format of reducer input(%s) is ORDER. "  +
                "In this case, reducer will read all tuples for sort. " +
                "Therefore, OutOfMemoryError may occur in remote server.", c.getGroupID()));
        }
      }
      break;
    }
    for (int i = 0; i < arity; i++) {
      Input input = inputs.get(i);
      Reflection.TupleType t = procInfo.getInputTypes().get(i);
      // Fall back to the positional index when no explicit name was given.
      checkTupleGroupType(name, (input.name != null)?input.name:("" + i), "input",
         input.tupleGroupStatus,
         input.keyClass, t.keyClass,
         input.valueClass, t.valueClass);
    }
  }
  /**
   * Check if output types are valid or not.
   *
   * Also increments each output tuple group's "upper" counter as a side
   * effect, and — when a combiner is attached — validates that the combiner's
   * input and output tuple types match the output group's types.
   * @throws SssException if arity or any key/value type mismatches
   */
  private void checkOutputTypes() throws SssException {
    int arity = procInfo.getOutputTypes().size();

    if (arity != outputs.size()) {
      throw new SssException("output arity and outlets mismatch");
    }
    for (int i = 0; i < arity; i++) {
      Output output = outputs.get(i);
      // NOTE(review): side effect inside a "check" method — presumably records
      // this job as an upstream producer of the group; confirm against TupleGroupStatus.
      output.tupleGroupStatus.incUpper();
      Class<? extends Packable> keyType = procInfo.getOutputTypes().get(i).keyClass;
      Class<? extends Packable> valueType = procInfo.getOutputTypes().get(i).valueClass;
      String outName = (output.name != null)?output.name:"" + i;

      checkTupleGroupType(name, outName, "output",
          output.tupleGroupStatus,
          output.keyClass,
          keyType, output.valueClass, valueType);

      if (output.combiner != null) {
        // A combiner must consume and produce exactly the tuple types of the
        // output group it is attached to (tupleGroupStatus is typed by the
        // checkTupleGroupType call just above).
        Reflection.ProcInfo pi = Reflection.createProcInfo(output.combiner);
        checkTupleGroupType(name, outName, "output and combiner input",
            null,
            output.tupleGroupStatus.getKeyClass(),
            pi.getInputTypes().get(0).keyClass,
            output.tupleGroupStatus.getValueClass(),
            pi.getInputTypes().get(0).valueClass);
        checkTupleGroupType(name, outName, "combiner output",
            null,
            output.tupleGroupStatus.getKeyClass(),
            pi.getOutputTypes().get(0).keyClass,
            output.tupleGroupStatus.getValueClass(),
            pi.getOutputTypes().get(0).valueClass);
      }
    }
  }
  /**
   * Check if given <code>TupleGroup</code> could accept tuples
   * in the form of specified key class, value class pair.
   *
   * When explicit ("real") key/value classes are given, they must be
   * assignable to the proc-declared types, and then replace them for the
   * tuple-group check.
   *
   * @param jobName       job name, used only in error messages
   * @param groupName     input/output name or index, used only in error messages
   * @param wayName       human-readable direction ("input", "output", ...)
   * @param c             tuple group whose types are checked or recorded;
   *                      may be null to check only the class pairs
   * @param realKeyType   explicitly requested key class, or null
   * @param keyType       key class declared by the proc
   * @param realValueType explicitly requested value class, or null
   * @param valueType     value class declared by the proc
   * @throws SssException on any incompatibility
   */
  private static void checkTupleGroupType(String jobName,
                                          String groupName,
                                          String wayName,
                                          TupleGroupStatus c,
                                          Class<? extends Packable> realKeyType,
                                          Class<? extends Packable> keyType,
                                          Class<? extends Packable> realValueType,
                                          Class<? extends Packable> valueType) throws SssException {
    String where = jobName + " " + groupName + " :" + wayName;
    if (realKeyType != null) {
      if (!keyType.isAssignableFrom(realKeyType)) {
        throw new SssException(
            String.format("%s: key types: Key type in proc class \"%s\" is not assignable from \"%s\".",
              where, keyType.getName(), realKeyType.getName()));
      }
      keyType = realKeyType; // narrow to the explicitly requested key class
    }

    if (realValueType!= null) {
      if (!valueType.isAssignableFrom(realValueType)) {
        throw new SssException(
            String.format("%s: value types: Value type in proc class \"%s\" is not assignable from \"%s\".",
              where, valueType.getName(), realValueType.getName()));
      }
      valueType = realValueType; // narrow to the explicitly requested value class
    }
    if (c != null) {
      if (c.isTyped()) {
        // Group already has fixed types: they must match exactly.
        checkKeyValueType(where, c.getKeyClass(), c.getValueClass(), keyType, valueType);
      }
      else {
        // Untyped group: record (or verify) the types on first use.
        c.setOrCheckTypes(keyType, valueType);
      }
    }
  }
  /**
   * Check if given key types and value types are matched or not.
   *
   * @param where context string used to prefix error messages
   * @throws SssException if either the key types or the value types differ
   */
  private static void checkKeyValueType(String where,
    Class<? extends Packable> keyType1, Class<? extends Packable> valueType1,
    Class<? extends Packable> keyType2, Class<? extends Packable> valueType2)
    throws SssException {
    if (keyType1 != keyType2) {
      throw new SssException
        (String.format("%s: key types mismatch: %s != %s.",
                       where, keyType1.getName(), keyType2.getName()));
    }
    if (valueType1 != valueType2) {
      // FIX: this message previously said "key types mismatch" (copy-paste
      // error) although value types are being compared here.
      throw new SssException
        (String.format("%s: value types mismatch: %s != %s.",
                       where, valueType1.getName(), valueType2.getName()));
    }
    logger.debug(where + ": types matched.");
  }
  /**
   * Set this <code>Job</code> object's status.
   *
   * Currently the only transition recorded through this method is
   * initial -&gt; done (see {@code runProc}/{@code go_}).
   * @param s status
   */
  private void setStatus(Status s) {
    this.status = s;
  }

  /**
   * Get Job ID.
   */
  public UUID getJobID() {
    return jobID;
  }
  /**
   * Returns the name of this <code>Job</code>.
   *
   * The name is also used as the key prefix for execution-info properties.
   *
   * @return the name of this <code>Job</code>
   */
  public String getName() {
    return name;
  }
  /**
   * Returns the <code>Configuration</code> object for this <code>Job</code>.
   *
   * The returned object is live: changes to it affect this job.
   *
   * @return the <code>Configuration</code> object for this <code>Job</code>
   */
  public Configuration getConfiguration() {
    return conf;
  }
  /**
   * Returns a <code>String</code> of the form "name(status)" for
   * this <code>Job</code>.
   *
   * @return a <code>String</code> "name(status)" for this <code>Job</code>
   */
  @Override
  public String toString() {
    return String.format("%s(%s)", name, status);
  }
  /**
   * Indicates whether this <code>Job</code> is runnable.
   *
   * A job is runnable when it has not run yet (status is 'initial') and
   * every input tuple group already has data.
   *
   * NOTE: This method used by only JobEngine.
   *
   * @return <tt>true</tt> if this <code>Job</code> is runnable, otherwise <tt>false</tt>.
   */
  boolean isReadyToGo() {
    boolean runnable = (status == Status.initial);
    // Deliberately query every input (no early exit), preserving the
    // original accumulate-with-& behavior.
    for (Input in : inputs) {
      if (!in.tupleGroupStatus.hasData()) {
        runnable = false;
      }
    }
    return runnable;
  }

  /**
   * Strategy interface: submits one task of a specific kind (map, merge map,
   * reduce, merge reduce) to a remote SSS server for one storage node.
   */
  private interface Submitter {
    void submit(SssServerRemote server, UUID taskSetID, UUID jobID, StorageNode sn, Configuration conf,
        List<Inlet> inputs, String procClassName, List<Outlet> outputs, JobWaiter.Listener<CommonStatus> listener)
        throws RemoteException;
  }

  /**
   * Accumulate the per-node {@link CommonStatus} results of a finished run
   * into the engine's execution-info properties (tuple counts/sizes per
   * input and output, per node and in total) and into this job's counters.
   *
   * @param status per-storage-node completion status
   *               (NOTE: the parameter shadows the {@code status} field)
   * @throws SssException declared for uniformity; no checked failure path here
   */
  private void handleCommonStatus(Map<StorageNode, CommonStatus> status) throws SssException {
    final boolean countInputSize = conf.getBoolean("count.input.size", true);
    final Properties execInfo = engine.execInfo;
    final CommonStatus total = new CommonStatus();

    // Fold all per-node statuses into one aggregate.
    for (CommonStatus c: status.values()) {
      total.accumulate(c, false);
    }

    // Input
    for (int i = 0;i < inputs.size();++i) {
      GroupID id = inputs.get(i).tupleGroupStatus.getGroupID();

      // Per-node counts (and sizes, when enabled) ...
      for (Map.Entry<StorageNode, CommonStatus> e: status.entrySet()) {
        StorageNode node = e.getKey();
        CommonStatus s = e.getValue();

        execInfo.setProperty(name("input", i, node, "count"), "" + s.inputTupleCount.get(id));
        if (countInputSize) {
          execInfo.setProperty(name("input", i, node, "size"), "" + s.inputTupleNBytes.get(id));
        }
      }
      // ... followed by the cluster-wide totals.
      execInfo.setProperty(name("input", i, "total", "count"), "" + total.inputTupleCount.get(id));
      if (countInputSize) {
        execInfo.setProperty(name("input", i, "total", "size"), "" + total.inputTupleNBytes.get(id));
      }
    }

    // Output
    for (int i = 0;i < outputs.size();++i) {
      TupleGroupStatus o = outputs.get(i).tupleGroupStatus;
      GroupID id = o.getGroupID();

      /* number of tuples */
      long nOutput = 0L;
      for (Map.Entry<StorageNode, Long> e: total.outputTupleCount.get(id).entrySet()) {
        execInfo.setProperty(name("output", i, e.getKey(), "count"), "" + e.getValue());
        nOutput += e.getValue();
      }
      execInfo.setProperty(name("output", i, "total", "count"), "" + nOutput);

      if (conf.getBoolean("count.output.size", true)) {
        /* number of bytes */
        long nBytes = 0L;
        for (Map.Entry<StorageNode, Long> e: total.outputTupleNBytes.get(id).entrySet()) {
          execInfo.setProperty(name("output", i, e.getKey(), "size"), "" + e.getValue());
          nBytes += e.getValue();
        }
        execInfo.setProperty(name("output", i, "total", "size"), "" + nBytes);
      }
    }

    // Counter: expose the aggregated counters through getCounter().
    counterMap = total.counter;
  }
  /**
   * Submitter for a single-input map task.
   */
  private Submitter mapSubmitter = new Submitter() {
    @Override
    public void submit(SssServerRemote server,
                       UUID taskSetID,
                       UUID jobID,
                       StorageNode sn,
                       Configuration conf,
                       List<Inlet> inputs,
                       String procClassName,
                       List<Outlet> outputs,
                       JobWaiter.Listener<CommonStatus> listener) throws RemoteException {
      server.submitMapTask(taskSetID, jobID, sn, conf, inputs, procClassName, outputs, listener);
    }
  };
  /**
   * Submitter for a two-input merge-map task.
   */
  private Submitter mergeMapSubmitter = new Submitter() {
    @Override
    public void submit(SssServerRemote server,
                       UUID taskSetID,
                       UUID jobID,
                       StorageNode sn,
                       Configuration conf,
                       List<Inlet> inputs,
                       String procClassName,
                       List<Outlet> outputs,
                       JobWaiter.Listener<CommonStatus> listener) throws RemoteException {
      server.submitMMapTask(taskSetID, jobID, sn, conf, inputs, procClassName, outputs, listener);
    }
  };
  /**
   * Submitter for a single-input reduce task.
   */
  private Submitter reduceSubmitter = new Submitter() {
    @Override
    public void submit(SssServerRemote server,
                       UUID taskSetID,
                       UUID jobID,
                       StorageNode sn,
                       Configuration conf,
                       List<Inlet> inputs,
                       String procClassName,
                       List<Outlet> outputs,
                       JobWaiter.Listener<CommonStatus> listener) throws RemoteException {
      server.submitReduceTask(taskSetID, jobID, sn, conf, inputs, procClassName, outputs, listener);
    }
  };
  /**
   * Submitter for a two-input merge-reduce task.
   */
  private Submitter mergeReduceSubmitter = new Submitter() {
    @Override
    public void submit(SssServerRemote server,
                       UUID taskSetID,
                       UUID jobID,
                       StorageNode sn,
                       Configuration conf,
                       List<Inlet> inputs,
                       String procClassName,
                       List<Outlet> outputs,
                       JobWaiter.Listener<CommonStatus> listener) throws RemoteException {
      server.submitMReduceTask(taskSetID, jobID, sn, conf, inputs, procClassName, outputs, listener);
    }
  };

  /**
   * Run this <code>Job</code>: submit one task per storage node through the
   * given {@link Submitter}, wait for all nodes, record timing and
   * per-input/output information in the engine's execInfo, then mark this
   * job done and accumulate the remote statistics.
   *
   * @param taskSetID id of the task set this job belongs to
   * @param submitter submits the proper task kind (map, merge map, reduce,
   *                  merge reduce) to a remote server
   * @throws SssException if any remote task reported an exception
   */
  private void runProc(final UUID taskSetID, final Submitter submitter) throws SssException, RemoteException, InterruptedException {
    Properties execInfo = engine.execInfo;
    StopWatch watch = StopWatch.start();
    final JobWaiter<CommonStatus> waiter = new JobWaiter<CommonStatus>(client, jobID);
    Map<StorageNode, CommonStatus> status = null;

    final List<Inlet> is = makeInletList();
    final List<Outlet> os = makeOutletList();

    try {
      clusterManager.callEachStorageInParallel(
          new BinaryCallable<Void, SssServerRemote, StorageNode>() {
            @Override
            public Void call(SssServerRemote server, StorageNode sn) throws Exception {
              submitter.submit(server, taskSetID, jobID, sn, conf,
                  is, proc.getName(), os, waiter.newListener(sn));
              return null;
            }
          });
      status = waiter.await();
    }
    finally {
      waiter.dispose(); // always release the waiter, even if submission failed
    }
    // Fail fast on the first remote exception.
    for (Map.Entry<StorageNode, CommonStatus> e: status.entrySet()) {
      CommonStatus s = e.getValue();
      if (s.exception != null) {
        // TODO: put in all exception?
        throw new SssException("Exception in " + e.getKey(), s.exception);
      }
    }
    watch.stop();

    execInfo.setProperty(name + ".time", watch.toString());
    for (int i = 0; i < inputs.size(); i++) {
      Input input = inputs.get(i);
      // FIX: the input name used to be appended twice (once via orElse(...)
      // and once via the ternary); record it once, matching the output format
      // used below.
      execInfo.setProperty(name("input", i),
          ((input.name != null)?(input.name + " "):"") + input.tupleGroupStatus);
    }
    setStatus(Status.done);
    for (int i = 0; i < outputs.size(); i++) {
      Output output = outputs.get(i);
      output.tupleGroupStatus.incDone();
      execInfo.setProperty(name("output", i),
          ((output.name != null)?(output.name + " "):"") + output.tupleGroupStatus);
    }

    handleCommonStatus(status);
  }

  /**
   * Build the {@link Inlet} list sent to remote servers: one per input,
   * using the explicitly requested key/value classes when present and the
   * proc-declared ones otherwise.
   */
  private List<Inlet> makeInletList() {
    final int arity = procInfo.getInputTypes().size();
    final List<Inlet> inlets = new ArrayList<Inlet>();
    for (int idx = 0; idx < arity; idx++) {
      Input in = inputs.get(idx);
      Reflection.TupleType declared = procInfo.getInputTypes().get(idx);
      Class<? extends Packable> key = (in.keyClass != null) ? in.keyClass : declared.keyClass;
      Class<? extends Packable> value = (in.valueClass != null) ? in.valueClass : declared.valueClass;
      inlets.add(new Inlet(in.getGroupID(), key, value));
    }
    return inlets;
  }

  /**
   * Build the {@link Outlet} list sent to remote servers: one per output,
   * defaulting the partitioner to {@link HashPartitioner} and dropping the
   * combiner when "job.force_skip_mode" is enabled.
   */
  private List<Outlet> makeOutletList() throws SssException {
    final int arity = procInfo.getOutputTypes().size();
    final List<Outlet> outlets = new ArrayList<Outlet>(arity);
    for (int idx = 0; idx < arity; idx++) {
      Output out = outputs.get(idx);
      Reflection.TupleType declared = procInfo.getOutputTypes().get(idx);
      Class<? extends Packable> key = (out.keyClass != null) ? out.keyClass : declared.keyClass;
      Class<? extends Packable> value = (out.valueClass != null) ? out.valueClass : declared.valueClass;
      Class<? extends Partitioner> part =
          (out.partitioner != null) ? out.partitioner : HashPartitioner.class;
      Class<? extends Reducer> comb =
          conf.getBoolean("job.force_skip_mode", false) ? null : out.combiner;
      outlets.add(new Outlet(out.getGroupID(), key, value, part, comb));
    }
    return outlets;
  }

  /**
   * Entry point to execute this <code>Job</code> on SSS.
   *
   * Selects the submitter matching the proc type and input arity, then
   * delegates to {@code runProc}.
   *
   * NOTE: This method used by only JobEngine.
   *
   * @param  taskSetID  task set id
   */
  void go(UUID taskSetID) throws SssException, RemoteException, InterruptedException {
    final int inputArity = inputs.size();
    final Submitter submitter;
    switch (procInfo.getType()) {
    case MAP:
      if (inputArity == 1) {
        submitter = mapSubmitter;
      } else if (inputArity == 2) {
        submitter = mergeMapSubmitter;
      } else {
        throw new SssException("unsupported input arity");
      }
      break;
    case REDUCE:
      if (inputArity == 1) {
        submitter = reduceSubmitter;
      } else if (inputArity == 2) {
        submitter = mergeReduceSubmitter;
      } else {
        throw new SssException("unsupported input arity");
      }
      break;
    default:
      // The original switch had no default clause: any other proc type is a no-op.
      return;
    }
    runProc(taskSetID, submitter);
  }
  /**
   * Dummy entry point to mimic the execution of this <code>Job</code> on SSS:
   * logs, marks the job done, and bumps every output group's done counter
   * without submitting anything remotely.
   *
   * NOTE: This method used by only JobEngine.
   */
  void go_() {
    // Log first: toString() shows the pre-transition status, as in the original.
    logger.info(this + " executing ... done");
    setStatus(Status.done);
    for (int i = 0; i < outputs.size(); i++) {
      outputs.get(i).tupleGroupStatus.incDone();
    }
  }

  /**
   * Marker for optional parameters accepted by job inputs.
   *
   * @see Builder#addInput
   */
  public interface InputOption {}

  /**
   * Marker for optional parameters accepted by job outputs.
   *
   * @see Builder#addOutput
   */
  public interface OutputOption {}

  /**
   * Wrapper class to specify key class to input/output.
   *
   * {@link Job#keyClass} is available to create instance of this class.
   */
  public static class KeyClassOption extends OptionBox<Class<? extends Packable>, KeyClassOption> implements InputOption, OutputOption {}

  /**
   * Wrapper class to specify value class to input/output.
   *
   * {@link Job#valueClass} is available to create instance of this class.
   */
  public static class ValueClassOption extends OptionBox<Class<? extends Packable>, ValueClassOption> implements InputOption, OutputOption {}

  /**
   * Wrapper class to specify partitioner class to output.
   *
   * {@link Job#partitioner} is available to create instance of this class.
   */
  public static class PartitionerClassOption extends OptionBox<Class<? extends Partitioner>, PartitionerClassOption> implements OutputOption {}

  /**
   * Make key class option for {@link Builder#addInput} or {@link Builder#addOutput}.
   */
  public static KeyClassOption keyClass(Class<? extends Packable> v) { return new KeyClassOption().set(v); }

  /**
   * Make value class option for {@link Builder#addInput} or {@link Builder#addOutput}.
   */
  public static ValueClassOption valueClass(Class<? extends Packable> v) { return new ValueClassOption().set(v); }

  /**
   * Make partitioner class option for {@link Builder#addOutput}.
   */
  public static PartitionerClassOption partitioner(Class<? extends Partitioner> v) { return new PartitionerClassOption().set(v); }

  /**
   * <code>Builder</code> class is used to construct a new {@link Job}.
   */
  public static class Builder {
    private final String name;
    private final Reflection.ProcInfo procInfo;
    private final Class<?> proc;
    private final List<Input> inputs = new ArrayList<Input>();
    private final List<Output> outputs = new ArrayList<Output>();
    private final JobEngine engine;
    private final Configuration conf = new Configuration();
    /**
     * Allocates a <code>Builder</code> object which will create a new <code>Job</code>
     * instance which has the given name, procedure.
     *
     * NOTE: This used by only JobEngine.
     *
     * @param  name     the name for a new <code>Job</code>
     * @param  proc     the class object which implements <tt>map</tt> or <tt>reduce</tt> method
     * @param  engine   an instance of job engine
     * @throws SssException if proc cannot be analyzed as a map/reduce class
     */
    Builder(String name, Class<?> proc, JobEngine engine)
      throws SssException {
      this.name = name;
      this.procInfo = Reflection.createProcInfo(proc);
      this.proc = proc;
      this.engine = engine;
    }

    /**
     * Add input {@link GroupID}.
     *
     * @param   input   the {@link GroupID} added to this as input.
     * @param   options options. {@link Job#keyClass}, {@link Job#valueClass} are available.
     * @return  this <code>Builder</code> object
     * @throws SssException
     */
    public Builder addInput(GroupID input, InputOption... options) throws SssException {
      return addInput(null, input, options);
    }
    /**
     * Add input {@link GroupID}.
     *
     * @param   name  name. This used in only log.
     * @param   input the {@link GroupID} added to this as input.
     * @param   options options. {@link Job#keyClass} and {@link Job#valueClass} are available.
     * @return  this <code>Builder</code> object
     * @throws SssException if an option is given more than once
     */
    public Builder addInput(String name, GroupID input, InputOption... options) throws SssException {
      KeyClassOption   key = new KeyClassOption();
      ValueClassOption value = new ValueClassOption();
      for (InputOption opt: options) {
        OptionBox.setSingleOption("input key class",   key, opt);
        OptionBox.setSingleOption("input value class", value, opt);
      }

      inputs.add(new Input(name, engine.getTupleGroupStatus(input), key.get(), value.get()));
      return this;
    }
    /**
     * Add output {@link GroupID}.
     *
     * @param   name        name. This used in only log.
     * @param   output      the {@link GroupID} added to this as output.
     * @param   combiner    the Class object of combiner.
     *                      This can be specified only when the proc of this job is {@link Mapper},
     *                      otherwise, this must be null.
     *                      If the proc of this job is {@link Mapper} and this is null,
     *                      "combiner" phase of this output is skipped.
     * @param   options     options. {@link Job#keyClass}, {@link Job#valueClass} and {@link Job#partitioner} are available.
     * @return  this <code>Builder</code> object
     * @throws SssException if the combiner class is invalid or an option is duplicated
     */
    public Builder addOutput(String name,
                             GroupID output,
                             Class<? extends Reducer> combiner,
                             OutputOption... options) throws SssException {
      if (combiner != null) {
        Reflection.ProcInfo pi = Reflection.createProcInfo(combiner);
        if (!pi.isAvailableAsCombiner()) {
          throw new SssException(combiner.getName() + " is invalid combiner class");
        }
      }
      KeyClassOption key = new KeyClassOption();
      ValueClassOption value = new ValueClassOption();
      PartitionerClassOption partitioner = new PartitionerClassOption();
      // FIX: option labels previously said "input key class" / "input value
      // class" / "partitioner value class" (copy-pasted from addInput),
      // producing misleading error messages for duplicated output options.
      for (OutputOption opt: options) {
        OptionBox.setSingleOption("output key class",   key, opt);
        OptionBox.setSingleOption("output value class", value, opt);
        OptionBox.setSingleOption("partitioner class",  partitioner, opt);
      }
      outputs.add(new Output(name, engine.getTupleGroupStatus(output), combiner, partitioner.get(), key.get(), value.get()));
      return this;
    }
    /**
     * Add output {@link GroupID}.
     *
     * @param   output      the {@link GroupID} added to this as output.
     * @param   combiner    the Class object of combiner.
     *                      This can be specified only when the proc of this job is {@link Mapper},
     *                      otherwise, this must be null.
     *                      If the proc of this job is {@link Mapper} and this is null,
     *                      "combiner" phase of this output is skipped.
     * @param   options     options. {@link Job#keyClass}, {@link Job#valueClass} and {@link Job#partitioner} are available.
     * @return  this <code>Builder</code> object
     * @throws SssException
     */
    public Builder addOutput(GroupID output, Class<? extends Reducer> combiner, OutputOption... options) throws SssException {
      return addOutput(null, output, combiner, options);
    }
    /**
     * Add output {@link GroupID}.
     *
     * @param   name        name. This used in only log.
     * @param   output      the {@link GroupID} added to this as output.
     * @param   options     options. {@link Job#keyClass}, {@link Job#valueClass} and {@link Job#partitioner} are available.
     * @return  this <code>Builder</code> object
     * @throws SssException
     */
    public Builder addOutput(String name, GroupID output, OutputOption... options) throws SssException {
      return addOutput(name, output, null, options);
    }
    /**
     * Add output {@link GroupID}.
     *
     * @param   output      the {@link GroupID} added to this as output.
     * @param   options     options. {@link Job#keyClass}, {@link Job#valueClass} and {@link Job#partitioner} are available.
     * @return  this <code>Builder</code> object
     * @throws SssException
     */
    public Builder addOutput(GroupID output, OutputOption... options) throws SssException {
      return addOutput(null, output, null, options);
    }
    /**
     * Get the <code>Configuration</code> object for a <code>Job</code> to be created.
     *
     * @return the <code>Configuration</code> object which used by <code>Job</code>.
     */
    public Configuration getConfiguration() {
      return conf;
    }
    /**
     * Allocates a <code>Job</code> object and initialize it so that
     * it has the given properties determined by this <code>Builder</code> object.
     *
     * @return  a new <code>Job</code> object that is created by this <code>Builder</code>
     * @throws SssException if the configured inputs/outputs are inconsistent
     */
    public Job build() throws SssException {
      return new Job(this);
    }
  }

  /**
   * Get value of counter.
   *
   * Counter value is set in remote job.
   *
   * @param counterName identifier to specify counter.
   * @return the accumulated counter value, or null when the counter was never set
   *
   * @see org.sss.mapreduce.Context#incrCounter
   */
  public Long getCounter(Enum<?> counterName) {
    // Counters are keyed by the enum constant's name; the map is filled in
    // handleCommonStatus() after the job has run.
    return counterMap.get(counterName.name());
  }

  /**
   * Join the job name and the given items with '.' separators, producing
   * execInfo property keys such as "jobName.input.0.count".
   */
  private String name(Object... item) {
    final StringBuilder key = new StringBuilder(name);
    for (Object part : item) {
      key.append('.').append(part);
    }
    return key.toString();
  }

  /**
   * Return this job to the 'initial' state and reset every input and
   * output tuple group so the job tree can be executed again.
   */
  void reset() {
    status = Status.initial;
    for (int i = 0; i < inputs.size(); i++) {
      inputs.get(i).tupleGroupStatus.reset();
    }
    for (int i = 0; i < outputs.size(); i++) {
      outputs.get(i).tupleGroupStatus.reset();
    }
  }
}
