/*
 *  Copyright 2013 National Institute of Advanced Industrial Science and Technology
 *  
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *  
 *      http://www.apache.org/licenses/LICENSE-2.0
 *  
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package org.sss.server;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

import org.slf4j.Logger;
import org.sss.common.Bucket;
import org.sss.common.DataReceiver;
import org.sss.common.ErrorDataReceiverException;
import org.sss.common.Reflection;
import org.sss.mapreduce.GroupID;
import org.sss.mapreduce.Reducer;
import org.sss.mapreduce.SssException;
import org.sss.mapreduce.datatype.Packable;

/**
 * A {@link TaskOutput} stage that applies a combiner before forwarding data
 * to the next stage.
 *
 * <p>Incoming {@link Bucket}s are accumulated in an in-memory {@link Buffer},
 * grouped by key. Once the buffered payload reaches {@code limitNBytes} (or on
 * {@link #close()}), the buffer is released and the combiner's reduce method is
 * run over it asynchronously on the supplied {@link Executor}; the combined
 * output is pushed to {@code next}. The last asynchronous task to finish after
 * {@link #close()} has been called is responsible for closing {@code next}
 * (tracked via {@code nRunning} and {@code closed}).
 *
 * <p>Thread-safety: {@code put}, {@code close} and {@code flush} are
 * synchronized on this instance; {@code combine} runs on executor threads and
 * only touches the map handed to it plus the atomics and {@code next}.
 */
public class CombinerOutput implements TaskOutput, DataReceiver<List<Bucket>> {
  private static final Logger logger = SssServer.getLogger(CombinerOutput.class);
  private final Buffer buffer = new Buffer();
  private final Executor executor;
  private final long limitNBytes;
  private final TaskOutput next;
  // Number of combine tasks currently queued or running on the executor.
  private final AtomicInteger nRunning = new AtomicInteger(0);
  // Set once close() has been called; the task that drops nRunning to 0
  // after this is set closes the downstream output.
  private final AtomicBoolean closed = new AtomicBoolean(false);
  private final Reducer combiner;
  private final Reducer.Context context;
  private final Reflection.ProcInfo procInfo;
  private final int cacheSize;

  /**
   * @param combiner    user combiner whose reduce method is invoked reflectively
   * @param context     context object passed as the first reduce argument
   * @param executor    executor that runs the asynchronous combine tasks
   * @param limitNBytes rough buffered-byte threshold that triggers a flush
   * @param cacheSize   limit handed to {@code next.getTLPutter} for the
   *                    combiner's output cache
   * @param next        downstream output stage
   * @throws SssException if reflective inspection of the combiner class fails
   */
  public CombinerOutput(Reducer combiner,
                        Reducer.Context context,
                        Executor executor,
                        long limitNBytes,
                        int cacheSize,
                        TaskOutput next) throws SssException {
    this.combiner    = combiner;
    this.context     = context;
    this.executor    = executor;
    this.limitNBytes = limitNBytes;
    this.next        = next;
    this.procInfo    = Reflection.createProcInfo(combiner.getClass());
    this.cacheSize   = cacheSize;
  }

  /**
   * Returns a per-thread caching receiver that batches buckets before
   * delivering them to this output via {@link #put(List)}.
   */
  public DataReceiver<Bucket> getTLPutter(int limit) throws SssException {
    return new CacheDataReceiver<Bucket>(limit, this);
  }

  /**
   * Buffers the given buckets, flushing to the combiner whenever the rough
   * buffered size reaches {@code limitNBytes}.
   *
   * @throws ErrorDataReceiverException if the downstream stage has already
   *         recorded an error
   */
  @Override
  public synchronized void put(List<Bucket> buckets) throws SssException {
    checkAlreadyError();
    for (Bucket b: buckets) {
      buffer.add(b);
      if (buffer.getSizeNBytes() >= limitNBytes) {
        flush(false);
      }
    }
  }

  /**
   * Flushes any remaining buffered data and marks this output closed; the
   * downstream stage is closed by the last combine task to finish.
   */
  @Override
  public synchronized void close() throws SssException {
    checkAlreadyError();
    flush(true);
  }

  @Override
  public void setError(Throwable e) {
    next.setError(e);
  }

  @Override
  public Throwable getError() {
    return next.getError();
  }

  @Override
  public GroupID getGroupID() {
    return next.getGroupID();
  }

  /** Throws if an error has already been recorded downstream. */
  private void checkAlreadyError() throws ErrorDataReceiverException {
    Throwable e = getError();
    if (e != null) {
      throw new ErrorDataReceiverException(e);
    }
  }

  /**
   * Releases the current buffer and schedules a combine task for it.
   * {@code nRunning} is incremented (and {@code closed} set, when this is the
   * final flush) BEFORE submitting the task, so the task's finally-block sees
   * a consistent count even if it runs immediately.
   *
   * @param last {@code true} when called from {@link #close()}
   */
  private synchronized void flush(boolean last) {
    final Map<Packable, List<Packable>> tmp = buffer.release();
    nRunning.incrementAndGet();
    if (last) {
      closed.set(true);
    }
    executor.execute(new Runnable() {
      @Override
      public void run() {
        combine(tmp);
      }
    });
  }

  /**
   * Runs the combiner's reduce method over one released buffer (executor
   * thread). An {@link ErrorDataReceiverException} means the downstream stage
   * already holds an error, so it is deliberately ignored; any other failure
   * is recorded on {@code next}. The last task to finish after close() closes
   * the downstream output.
   */
  private void combine(Map<Packable, List<Packable>> buckets) {
    try {
      Method reduce = procInfo.getMethod();
      logger.debug("bucket merge {}", buckets.size());
      DataReceiver<Bucket> cache = next.getTLPutter(cacheSize);
      ReduceTask.Output o = new ReduceTask.Output(cache);
      Object[] args = new Object[]{ context, null, null, o};
      for (Map.Entry<Packable, List<Packable>> e: buckets.entrySet()) {
        args[1] = e.getKey();
        args[2] = e.getValue();
        reduce.invoke(combiner, args);
      }
      cache.close();
    } catch (ErrorDataReceiverException e) {
      // Downstream already failed; its error is propagated by other paths.
    } catch (InvocationTargetException e) {
      // Method.invoke wraps exceptions thrown inside the combiner; unwrap so
      // an ErrorDataReceiverException raised while emitting output is still
      // treated as "downstream already failed" rather than a new error.
      Throwable cause = (e.getCause() != null) ? e.getCause() : e;
      if (!(cause instanceof ErrorDataReceiverException)) {
        logger.error("Exception in combiner", cause);
        next.setError(cause);
      }
    } catch (Throwable e) {
      // Pass the throwable to SLF4J so the stack trace is logged.
      logger.error("Exception in combiner", e);
      next.setError(e);
    }
    finally {
      // The final task after close() owns shutting down the downstream stage.
      if (nRunning.decrementAndGet() == 0 && closed.get()) {
        try {
          next.close();
        }
        catch (Throwable e) {
          next.setError(e);
        }
      }
    }
  }

  @Override
  public long[] getOutputCountCopy() {
    return next.getOutputCountCopy();
  }

  @Override
  public long[] getOutputNBytesCopy() {
    return next.getOutputNBytesCopy();
  }

  /**
   * Key-grouped accumulation buffer with a rough byte-size counter.
   * NOTE: not locked — callers must synchronize externally (put/flush do).
   */
  private static class Buffer {
    private Map<Packable, List<Packable>> buffer = new HashMap<Packable, List<Packable>>();
    private int nBytes = 0;

    /** Appends the bucket's value under its key and grows the size estimate. */
    public void add(Bucket bucket) {
      List<Packable> vs = buffer.get(bucket.key);
      if (vs == null) {
        vs = new ArrayList<Packable>();
        buffer.put(bucket.key, vs);
      }
      vs.add(bucket.value);
      nBytes += bucket.getRoughSize();
    }

    /** Rough total size in bytes of the buffered buckets. */
    public int getSizeNBytes() {
      return nBytes;
    }

    /** Hands back the accumulated map and resets this buffer to empty. */
    public Map<Packable, List<Packable>> release() {
      Map<Packable, List<Packable>> tmp = this.buffer;
      this.buffer = new HashMap<Packable, List<Packable>>();
      this.nBytes = 0;
      return tmp;
    }
  }

  public void renew() throws SssException {
    next.renew();
  }
}
