/*
 *  Copyright 2013 National Institute of Advanced Industrial Science and Technology
 *  
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *  
 *      http://www.apache.org/licenses/LICENSE-2.0
 *  
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package org.sss.server;

import static org.sss.common.Utils.closeDataReceivers;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;

import org.slf4j.Logger;
import org.sss.common.Bucket;
import org.sss.common.CanceledException;
import org.sss.common.DataReceiver;
import org.sss.common.Inlet;
import org.sss.common.InputDrivenTask;
import org.sss.common.KeyValues;
import org.sss.common.TaskSliceSubmitter;
import org.sss.common.CommonStatus;
import org.sss.common.io.DataReader;
import org.sss.common.io.KeyFormatter;
import org.sss.mapreduce.Configuration;
import org.sss.mapreduce.Decoder;
import org.sss.mapreduce.GroupID;
import org.sss.mapreduce.Mapper;
import org.sss.mapreduce.SssException;
import org.sss.mapreduce.StorageNode;
import org.sss.mapreduce.datatype.Packable;
import org.sss.util.CallbackInterface;

/**
 * Map task that performs a merge-style join: it first reads an entire
 * "driving table" input into memory, then runs the user's map method once
 * for every (main-input tuple, driving-table tuple) pair.
 *
 * Execution proceeds in two phases:
 * <ol>
 *   <li>{@link #submitReadTaskSlices()} schedules one {@link DataReader} per
 *       driving-table slot; a shared {@link Getter} decodes and buffers all
 *       driving-table key/value pairs. The last reader to finish (tracked by
 *       an {@link AtomicInteger} latch) triggers phase two.</li>
 *   <li>{@link #submitMain()} schedules readers for the main input; each
 *       slice is joined against the buffered driving table in
 *       {@link #handle(List)}.</li>
 * </ol>
 */
public class MergeMapTask extends MapTaskBase {
  private static final Logger logger = SssServer.getLogger(MergeMapTask.class);

  // Driving-table read statistics. Written only by the final Getter callback
  // (guarded by the readLatch count) and read from ioWriteComplete().
  // NOTE(review): cross-thread visibility appears to rely on the framework's
  // happens-before ordering between the read phase and ioWriteComplete —
  // confirm against MapTaskBase's threading contract.
  private int numDoneGetter = 0;
  private long drivingTableTupleCount = 0L;
  private long drivingTableTupleNBytes = 0L;

  // Parallel lists: drivingTableKeys.get(i) pairs with drivingTableValues.get(i).
  // Populated by Getter.handle() under the drivingTableKeys monitor.
  private final List<Packable> drivingTableKeys;
  private final List<Packable> drivingTableValues;

  // Group ID of the driving-table input (distinct from the main input's GID).
  private final GroupID dtInputGID;

  // One decoder instance per worker thread; decoders are presumably not
  // thread-safe, hence the ThreadLocal wrapping.
  private final ThreadLocal<Decoder> dtKeyDecoder;
  private final ThreadLocal<Decoder> dtValueDecoder;

  /**
   * Creates a merge map task.
   *
   * @param taskSet       task set this task belongs to
   * @param jobID         owning job's identifier
   * @param sssResource   shared server resources
   * @param conf          job configuration
   * @param storageNode   storage node to read inputs from
   * @param inlet         main input descriptor (handled by the superclass)
   * @param drivingTable  driving-table input descriptor; fully buffered in memory
   * @param mapperClass   user mapper implementation
   * @param outletList    output destinations
   * @param doneCallback  invoked with the final status when the task completes
   * @throws SssException if decoder classes cannot be loaded
   */
  public MergeMapTask(TaskSet taskSet,
                   UUID jobID,
                   SssServerResource sssResource,
                   Configuration conf,
                   StorageNode storageNode,
                   Inlet inlet,
                   Inlet drivingTable,
                   Class<? extends Mapper> mapperClass,
                   List<org.sss.common.Outlet> outletList,
                   CallbackInterface<CommonStatus> doneCallback)
    throws SssException {

    super(taskSet, jobID, sssResource, conf, storageNode, inlet,
        mapperClass, outletList, doneCallback);

    this.drivingTableKeys = new ArrayList<Packable>();
    this.drivingTableValues = new ArrayList<Packable>();
    this.dtInputGID = drivingTable.GID;
    this.dtKeyDecoder = loadThreadLocalDecoder(conf, drivingTable.keyClassName);
    this.dtValueDecoder = loadThreadLocalDecoder(conf, drivingTable.valueClassName);
  }

  /**
   * Receives driving-table slices, decodes them into the shared buffers, and
   * kicks off the main-input phase once every slot's reader has finished.
   */
  private class Getter implements InputDrivenTask<List<KeyValues>> {
    private final Logger logger = SssServer.getLogger(Getter.class);
    // Counts down once per slot's noMoreTaskSlice; the thread that takes it
    // to zero performs the phase transition.
    private final AtomicInteger readLatch = new AtomicInteger(dtInputGID.getNSlot());
    private final List<DataReader> dataReaders = new ArrayList<DataReader>(dtInputGID.getNSlot());

    /**
     * Decodes one batch of key/values into the driving-table buffers.
     * Multiple reader threads may call this concurrently, so both parallel
     * lists are appended under the drivingTableKeys monitor to keep them
     * index-aligned.
     */
    @Override
    public void handle(List<KeyValues> kvss) {
      try {
        Decoder kDecoder = dtKeyDecoder.get();
        Decoder vDecoder = dtValueDecoder.get();

        synchronized (drivingTableKeys) {
          for (KeyValues kvs: kvss) {
            Iterator<byte[]> it = kvs.values;
            while (it.hasNext()) {
              checkCanceled();
              // The key repeats for every value it maps to.
              drivingTableKeys.add(kDecoder.decode(kvs.key));
              drivingTableValues.add(vDecoder.decode(it.next()));
            }
          }
        }
      }
      catch (CanceledException e) {
        // Cancellation was requested elsewhere; just propagate the status.
        errorCallback(e);
      }
      catch (Throwable e) {
        logger.error("handle caught an Exception. Cancels tasks.", e);
        cancel();
        errorCallback(e);
      }
      finally {
        logger.debug("completing a GetTaskSlice");
      }
    }

    /**
     * Called once per slot when its reader is exhausted (e == null) or
     * failed. The last successful completion aggregates reader statistics
     * and starts the main-input phase.
     */
    @Override
    public void noMoreTaskSlice(Throwable e) {
      if (e == null) {
        logger.debug("noMoreTaskSlice {}.", readLatch.get());
        if (readLatch.decrementAndGet() > 0) {
          return;
        }
        // Only the final completer reaches this point, so the unsynchronized
        // accumulation below is single-threaded.
        for (DataReader r: dataReaders) {
          numDoneGetter++;
          drivingTableTupleCount  += r.getInputCount();
          drivingTableTupleNBytes += r.getInputNBytes();
        }
        logger.debug("MergeStatus.drivingTableTupleCount {}.",
                     drivingTableTupleCount);
        logger.debug("drivingTableKeys.size = {}.", drivingTableKeys.size());
        logger.debug("drivingTableValues.size = {}.", drivingTableValues.size());

        submitMain();
      }
      else {
        if (!(e instanceof CanceledException)) {
          logger.error("error in Getter", e);
        }
        errorCallback(e);
      }
    }

    /** Registers the reader for statistics and schedules it on the read pool. */
    public void submitReadRequest(DataReader r) {
      dataReaders.add(r);
      getIOTPRead().execute(r);
    }
  }

  /**
   * Phase one: schedule one reader per driving-table slot. All readers share
   * a single Getter so the driving table is buffered in one place.
   */
  @Override
  protected synchronized void submitReadTaskSlices() throws SssException {
    Getter getter = new Getter();
    for (int i = 0; i < dtInputGID.getNSlot(); i++) {
      DataReader r = getIOManager().createKeyValueReader(
          getConfiguration(), getStorageNode(), dtInputGID, i,
          new TaskSliceSubmitter<List<KeyValues>>(getWKTP(), getter));
      getter.submitReadRequest(r);
    }
  }

  /**
   * Phase two: schedule readers for the main input; their slices are handled
   * by this task's {@link #handle(List)}.
   */
  private void submitMain() {
    try {
      for (int i = 0; i < getInputGID().getNSlot(); i++) {
        DataReader r = getIOManager().createDataReader(
            getConfiguration(), getStorageNode(), getInputGID(), i,
            new TaskSliceSubmitter<List<byte[]>>(getWKTP(), this));
        submitReadRequest(r);
      }
    } catch (Throwable e) {
      logger.error("error", e);
      errorCallback(e);
    }
  }

  /**
   * Joins one main-input slice against the buffered driving table and invokes
   * the user's map method for each pair.
   *
   * @param kvs flattened slice: even indices hold encoded keys, odd indices
   *            the corresponding encoded values (hence size()/2 tuples)
   */
  @Override
  public void handle(List<byte[]> kvs) {
    try {
      logger.debug("run. {} tuples.", kvs.size()/2);
      SssServerConfiguration sssConf = getSssServerConfiguration();
      Method mapMethod = getProcInfo().getMethod();
      Mapper mapper = getProc();

      List<DataReceiver<Bucket>> cache = getOutlets().getTLPutters(sssConf.map_output_cache_nbytes);
      // args layout: [0]=context, [1]=main key, [2]=main value,
      //              [3]=driving-table key, [4]=driving-table value
      Object[] args = createArguments(createContext(), 4, cache);

      Decoder kDecoder = getKeyDecoder();
      Decoder vDecoder = getValueDecoder();
      KeyFormatter kf = KeyFormatter.get(getInputGID().getKeyFormat());
      for (int j=0; j<drivingTableKeys.size(); j++) {
        for (int i=0; i<kvs.size(); i+=2) {
          checkCanceled();
          args[1] = kDecoder.decode(kf.getUserKey(kvs.get(i)));
          args[2] = vDecoder.decode(kvs.get(i + 1));
          args[3] = drivingTableKeys.get(j);
          args[4] = drivingTableValues.get(j);
          mapMethod.invoke(mapper, args);
        }
      }
      closeDataReceivers(cache);
    }
    catch (Throwable e) {
      handleExceptionInHandle(e, "Error in Map Merge Task, Cancels tasks");
    }
    finally {
      logger.debug("completing a MapTaskSlice (merge).");
    }
  }

  /**
   * Final callback after all output has been written. Augments the base
   * status with the driving-table input's statistics before reporting done.
   *
   * @param e non-null if writing failed; triggers cancellation of the task
   */
  @Override
  public synchronized void ioWriteComplete(Throwable e) {
    if (e != null) {
      cancel();
    }
    logger.info("ioWriteComplete.");
    CommonStatus s = new CommonStatus();
    setValueToStatus(s, e);
    // The base class only records the main input's GID, so these keys must
    // still be free for the driving table's statistics.
    assert !s.completedSubTaskCount.containsKey(dtInputGID);
    assert !s.inputTupleCount.containsKey(dtInputGID);
    assert !s.inputTupleNBytes.containsKey(dtInputGID);
    // Long.valueOf instead of the deprecated new Long(...) constructor.
    s.completedSubTaskCount.put(dtInputGID, Long.valueOf(numDoneGetter));
    s.inputTupleCount.put(dtInputGID,  drivingTableTupleCount);
    s.inputTupleNBytes.put(dtInputGID, drivingTableTupleNBytes);

    doneCallback(s);
  }
}
