/*
 *  Copyright 2013 National Institute of Advanced Industrial Science and Technology
 *  
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *  
 *      http://www.apache.org/licenses/LICENSE-2.0
 *  
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package org.sss.common;

import static org.sss.common.Utils.closeDataReceivers;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sss.common.io.DataWriter;
import org.sss.mapreduce.Broadcaster;
import org.sss.mapreduce.Encoder;
import org.sss.mapreduce.GroupID;
import org.sss.mapreduce.Partitioner;
import org.sss.mapreduce.SssException;
import org.sss.util.CallbackInterface;
import org.sss.util.Factory;
import org.sss.util.FactoryThreadLocal;
import org.sss.util.ThreadPerTaskExecutor;

/**
 * Receives {@link Bucket}s, encodes their key/value pairs with per-thread
 * encoders, and routes the encoded result to one of {@code numberOfQueues}
 * buffered queues (or to all of them in broadcast mode). Each queue merges
 * buckets into {@link DataChunk}s which are handed to a shared writer task
 * that persists them via the supplied {@link DataWriter}.
 */
public class EncodingOutputQueue implements DataReceiver<Bucket> {
  private static final Logger logger = LoggerFactory.getLogger(EncodingOutputQueue.class);

  // Encoders are not assumed thread-safe, so each worker thread gets its own
  // instance via FactoryThreadLocal.
  private final ThreadLocal<Encoder> keyEncoder;
  private final ThreadLocal<Encoder> valueEncoder;
  private final List<MultipleBufferedQueue<EncodedBucket, List<EncodedBucket>>> queueList;
  private final int numberOfQueues;
  private final Partitioner partitioner;
  private final DataReceiver<DataChunk> chunkSubmitter;
  private final GroupID gid;
  private final int nSlot;
  private final boolean broadcast;
  private final WriterCounter counter;
  private CallbackInterface<Throwable> onCompletion;

  /**
   * @param gid                group this output belongs to; also supplies the slot count
   * @param keyEncoderFactory  produces one key encoder per worker thread
   * @param valueEncoderFactory produces one value encoder per worker thread
   * @param numberOfQueues     number of partitioned output queues
   * @param partitioner        maps a bucket to a queue index; a {@link Broadcaster}
   *                           instance switches this receiver into broadcast mode
   * @param iotp               thread pool running the chunk-writing task
   * @param dataWriter         sink that persists merged chunks
   * @param multiplicity       buffer multiplicity of each queue
   * @param limitNBytes        per-buffer byte limit before a queue flushes
   * @param onCompletion       invoked once when all chunk writing has finished
   *                           (or failed, in which case the Throwable is non-null)
   */
  public EncodingOutputQueue(GroupID gid,
                             Factory<Encoder> keyEncoderFactory,
                             Factory<Encoder> valueEncoderFactory,
                             int numberOfQueues,
                             Partitioner partitioner,
                             SentinelThreadPool iotp,
                             DataWriter dataWriter,
                             int multiplicity,
                             int limitNBytes,
                             CallbackInterface<Throwable> onCompletion) {
    this.gid = gid;
    this.nSlot = gid.getNSlot();
    this.onCompletion = onCompletion;
    this.numberOfQueues = numberOfQueues;
    this.counter = new WriterCounter(numberOfQueues);
    this.partitioner = partitioner;
    // Broadcast mode is selected by the exact partitioner class, not instanceof;
    // subclasses of Broadcaster would NOT trigger it. Must agree with the gid flag.
    this.broadcast = (partitioner.getClass() == Broadcaster.class);
    assert broadcast == gid.isBroadcast();
    // The submitter is reference-counted: it closes the underlying writer task
    // only after all numberOfQueues upstream queues have closed.
    this.chunkSubmitter = new RefCountDataReceiver<DataChunk>(new TaskSliceSubmitter<DataChunk>(iotp, new ChunkHandler(dataWriter)), numberOfQueues);
    this.queueList = new ArrayList<MultipleBufferedQueue<EncodedBucket, List<EncodedBucket>>>(numberOfQueues);
    for (int i = 0; i < numberOfQueues; i++) {
      queueList.add(MultipleBufferedQueue.createList(
            multiplicity, limitNBytes, ThreadPerTaskExecutor.getInstance(), new PutBucketMerger(i)));
    }
    this.keyEncoder   = new FactoryThreadLocal<Encoder>(keyEncoderFactory);
    this.valueEncoder = new FactoryThreadLocal<Encoder>(valueEncoderFactory);
  }

  /**
   * Encodes {@code bucket} once and enqueues it: to every queue in broadcast
   * mode (all queues share the same EncodedBucket instance, as before), or to
   * the single partitioned queue otherwise.
   *
   * @throws SssException wrapping any {@link IOException} raised while encoding
   */
  @Override
  public void put(Bucket bucket) throws SssException {
    try {
      // BUG FIX: the original encoded the bucket a second time in the broadcast
      // branch and discarded the first result, serializing key and value twice
      // per broadcast put. Encode exactly once and reuse it in both branches.
      EncodedBucket encoded = encode(bucket);
      if (broadcast) {
        for (MultipleBufferedQueue<EncodedBucket, List<EncodedBucket>> q: queueList) {
          q.put(encoded);
        }
      }
      else {
        getQueue(bucket).put(encoded);
      }
    }
    catch (IOException e) {
      throw new SssException(e);
    }
  }

  /** Closes all partition queues; the ref-counted submitter closes downstream. */
  @Override
  public void close() throws SssException {
    closeDataReceivers(queueList);
  }

  /** Propagates an error to every partition queue. */
  @Override
  public void setError(Throwable e) {
    for (MultipleBufferedQueue<EncodedBucket, List<EncodedBucket>> q: queueList) {
      q.setError(e);
    }
  }

  /** @return the first error reported by any partition queue, or null if none */
  @Override
  public Throwable getError() {
    for (MultipleBufferedQueue<EncodedBucket, List<EncodedBucket>> q: queueList) {
      Throwable e = q.getError();
      if (e != null) {
        return e;
      }
    }
    return null;
  }

  /**
   * Serializes the bucket's key and value with this thread's encoders.
   * NOTE(review): the slot mask {@code nSlot-1} only works when nSlot is a
   * power of two — assumed guaranteed by GroupID; confirm.
   */
  private EncodedBucket encode(Bucket bucket) throws IOException {
    return new EncodedBucket(
        bucket.key.slot() & (nSlot-1),
        keyEncoder.get().encode(bucket.key),
        valueEncoder.get().encode(bucket.value));
  }

  /** @return the queue selected by the partitioner for this bucket */
  private MultipleBufferedQueue<EncodedBucket, List<EncodedBucket>> getQueue(Bucket data) {
    return queueList.get(getPartitionedIndex(data));
  }

  /**
   * Maps the bucket to a queue index in [0, numberOfQueues).
   * NOTE(review): {@code %} yields a negative result for a negative partition;
   * partitioners are presumed to return non-negative values — verify.
   */
  private int getPartitionedIndex(Bucket bucket) {
    return partitioner.getPartition(
        bucket.key, bucket.value, numberOfQueues) % numberOfQueues;
  }

  /**
   * Task body run on the I/O thread pool: writes each merged chunk through the
   * DataWriter and records per-partition record/byte counts. Non-static inner
   * class on purpose — it reads the outer {@code gid}, {@code counter} and
   * {@code onCompletion}.
   */
  private class ChunkHandler implements InputDrivenTask<DataChunk> {
    private DataWriter dataWriter;

    public ChunkHandler(DataWriter dataWriter) {
      this.dataWriter = dataWriter;
    }
    @Override
    public void handle(DataChunk chunk) throws SssException {
      long nbytes = dataWriter.write(gid, chunk);
      counter.countup(chunk.getPartition(), chunk.size(), nbytes);
    }
    @Override
    public void noMoreTaskSlice(Throwable e) {
      // e is non-null only when the task terminated with an error.
      onCompletion.callback(e);
    }
  }

  /** @return a snapshot of the per-partition output record counts */
  public long[] getOutputCountCopy() {
    return counter.getCount();
  }

  /** @return a snapshot of the per-partition output byte counts */
  public long[] getOutputNBytesCopy() {
    return counter.getNBytes();
  }

  /**
   * Bridges a flushed buffer (a list of encoded buckets) to the shared chunk
   * submitter, packaging it as a single DataChunk tagged with this queue's
   * partition index ({@code subhash}).
   */
  private class PutBucketMerger extends BridgeDataReceiver<List<EncodedBucket>, DataChunk> {
    private final int subhash;

    public PutBucketMerger(int subhash) {
      super(chunkSubmitter);
      this.subhash = subhash;
    }

    @Override
    public void put(List<EncodedBucket> bucketList) throws SssException {
      logger.debug("bucket merge {}", bucketList.size());
      substance.put(new DataChunk(subhash, bucketList.toArray(new EncodedBucket[bucketList.size()])));
    }
  }
}
