package skewreduce.lsst.cook;

import java.io.IOException;
import java.nio.ByteOrder;
import java.util.Collections;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Job;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import skewreduce.framework.ArrayIndex;
import skewreduce.framework.ArrayInputFormat;
import skewreduce.framework.LoopingMapper;
import skewreduce.framework.MuxOutput;
import skewreduce.framework.WritableOutputFormat;
import skewreduce.framework.physical.PProcessOp;
import skewreduce.lsst.Image2DPartition;
import skewreduce.lsst.Observ;
import skewreduce.lsst.PixVal;


public class LocalCook extends PProcessOp {
    private static final Logger LOG = LoggerFactory.getLogger(LocalCook.class);

    /**
     * Input format for the raw pixel array: each record is a 4-byte,
     * little-endian float keyed by its {@link ArrayIndex} position.
     */
    public static class PixelInputFormat
    extends ArrayInputFormat<FloatWritable> {
        /** Fresh value holder for the record reader. */
        public FloatWritable createValue() { return new FloatWritable(); }
        @Override
		public ByteOrder getByteOrder() {
			return ByteOrder.LITTLE_ENDIAN;
		}
		/** Each pixel value is a single 4-byte float. */
		public int getValueSize() { return 4; }
    }

    // Primary output: (object id, detected Observ object) pairs.
    public static class MappingOutputFormat 
    extends WritableOutputFormat<LongWritable,Observ> {}

    // Side ("merge") output: boundary pixels of non-isolated objects,
    // written as (object id, PixVal) so a later merge phase can stitch
    // objects that straddle partition boundaries.
    public static class MergeOutputFormat 
    extends WritableOutputFormat<LongWritable,PixVal> {}

	@Override
	protected Job createJob(Configuration conf) throws IOException {
		// Map-only job: all detection work happens inside CookMapper,
		// so reduce tasks are disabled.
		Job job = new Job(conf);
		job.setJarByClass(LocalCook.class);
		job.setMapperClass(CookMapper.class);
		job.setNumReduceTasks(0);

		// Wire up the pixel reader and the (id, Observ) writer.
		job.setInputFormatClass(PixelInputFormat.class);
		job.setOutputFormatClass(MappingOutputFormat.class);
		job.setOutputKeyClass(LongWritable.class);
		job.setOutputValueClass(Observ.class);

		// LoopingMapper progress-monitoring knob.
		job.getConfiguration().setInt("skewreduce.monitoring.num.loops", 4);

		return job;
	}
	
    /**
     * Map-only connected-component labeler: scans pixels in row order and
     * groups above-threshold pixels into {@link Observ} objects, emitting
     * each object once no further pixels can join it.
     */
    public static class CookMapper
    extends LoopingMapper<ArrayIndex,FloatWritable,LongWritable,Observ> {
    	// Image geometry, read from configuration in setup().
    	int width;
    	int height;
    	// Pixels with flux below this value (or NaN) are ignored.
    	float threshold;
    	// Spatial bounds of this mapper's partition.
    	Image2DPartition partition;
    	// Sequential id counter (see newObjId; appears unused by ingest()).
    	long nextOid;
    	
        // Object ids of the previous and current scan lines (0 = no object).
        long[] previous;
        long[] current;
    	
    	// Objects not touched on the current line; flushed at the next line break.
    	HashSet<Long> finalizable = new HashSet<Long>();
    	// Objects touched on the current line; become 'finalizable' next line.
    	HashSet<Long> fresh = new HashSet<Long>();
    	// All live (not yet emitted) objects, keyed by object id.
    	HashMap<Long,Observ> obs = new HashMap<Long,Observ>();
		private int currentY;
		
		Context context;
		// Channel 1 carries boundary pixels of non-isolated objects ("merge" output).
		MuxOutput outputs;

        // Statistics reported in cleanup().
        int numObjects;
        int numSpilled;

        // When true, build() is called even for non-isolated objects.
        boolean alwaysBuild;
    	/**
    	 * Marks {@code oid} as touched on the current scan line, so it will
    	 * not be flushed at the next line break.
    	 */
    	private void updateFinalizable(long oid) {
    		// An object seen on this line cannot be finalized until a full
    		// line passes without it appearing.
    		finalizable.remove(oid);
    		fresh.add(oid);
    	}
    	
    	/**
    	 * Emits every object in {@code finalizable} (objects that gained no
    	 * pixel on the line just completed). Objects touching the partition
    	 * boundary additionally spill their boundary pixels to the merge
    	 * side-output (channel 1) so a later phase can stitch them; such
    	 * objects are not build()-finalized here unless {@code alwaysBuild}.
    	 * Finally swaps the fresh/finalizable sets for the next line.
    	 */
    	private void flushFinalizable() throws IOException, InterruptedException {
    		// we write all finalizable objects
    		// if the object contains coordinates at boundary, write it to merge output
    		// we do not do build.
    		LongWritable objId = new LongWritable();
    		ArrayList<PixVal> vbuf = new ArrayList<PixVal>();

    		for ( long oid : finalizable ) {
    			Observ o = obs.get(oid);
                if ( o == null ) continue; // merged into another object earlier
    			objId.set(oid);
    			
    			vbuf.clear();
    			
    			// isIsolated also collects boundary pixels into vbuf.
    			boolean isolated = o.isIsolated(partition, vbuf);

                if ( isolated || alwaysBuild )
    				o.build();

    			if ( ! isolated ) {
    				// spill boundary pixels to the merge output (channel 1)
    				for ( PixVal v : vbuf ) {
    					outputs.write(1, objId, v);
    				}
                    ++numSpilled;
    			}
    			
    			// write object to the primary output
    			context.write(objId,o);
                ++numObjects;
    		}
    		
    		// Drop emitted objects from the live table.
    		for ( long oid : finalizable ) {
    			obs.remove(oid);
    		}
    		
    		// Recycle sets: fresh becomes the next line's finalizable.
    		HashSet<Long> tmp = finalizable;
    		finalizable = fresh;
    		fresh = tmp;
    		fresh.clear();
    	}
    	
    	/**
    	 * Advances the scan to the next row: flushes objects that went
    	 * untouched on the completed line, then rotates the row buffers.
    	 */
    	private void nextLine() throws IOException, InterruptedException {
    		// Recycle the outgoing 'previous' buffer; allocate it on first use.
    		long[] recycled = ( previous != null ) ? previous : new long[width];
    		flushFinalizable();
    		previous = current;
    		current = recycled;
    		Arrays.fill(current, 0L);
    	}

    	/**
    	 * Validates the incoming pixel index and triggers a line break when
    	 * the row advances. Rows must arrive in consecutive increasing order.
    	 */
    	private void checkNext(ArrayIndex p) throws IOException, InterruptedException {
    		int col = p.get(0);
    		int row = p.get(1);
    		assert col >= 0;
    		if ( row != currentY ) {
    			// non-consecutive rows are not supported
    			assert row == currentY + 1;
    			nextLine();
    			currentY = row;
    		}
    		assert col < width; // column within image bounds
    	}
    	
		/** Object id of the pixel immediately left of {@code x} on the current row, or 0 if none. */
		private long leftNeighbor(int x) {
			return ( x > 0 ) ? current[x-1] : 0L;
		}
    	
    	/**
    	 * Smallest positive object id among the three upper neighbors
    	 * (previous row, columns x-1..x+1), or 0 when none exist.
    	 * Returns 0 on the first image row.
    	 */
    	private long upNeighbor(int x) {
    		if ( currentY <= 0 ) return 0L;
    		// 'best' tracks the minimum positive id seen so far (0 = none yet).
    		long best = previous[x];
    		if ( x > 0 ) {
    			long upLeft = previous[x-1];
    			if ( upLeft > 0L && (best == 0L || upLeft < best) )
    				best = upLeft;
    		}
    		if ( x+1 < width ) {
    			long upRight = previous[x+1];
    			if ( upRight > 0L && (best == 0L || upRight < best) )
    				best = upRight;
    		}
    		return best;
    	}
    	
    	// Allocates a sequential object id.
    	// NOTE(review): appears unused in this file — ingest() derives ids
    	// from pixel coordinates instead, and nextOid is never read elsewhere
    	// in this view; confirm before removing.
    	private long newObjId() {
    		return nextOid++;
    	}
    	

        /**
         * Merges object {@code minorid} into {@code majorid}: copies the
         * minor object's pixels into the major one, rewrites any of the
         * minor's ids still present in the current/previous row buffers,
         * and drops the minor object from the live table. Callers pass the
         * smaller id as {@code majorid}, so the surviving id is the minimum.
         */
        private void mergeObjects(long majorid, long minorid) {
        	if ( majorid == minorid ) return;
        	
        	Observ major = obs.get(majorid);
        	Observ minor = obs.get(minorid);
        	
        	// copy minor's pixels to major's
        	major.merge(minor);
        	
        	// patch previous and current row buffers so later neighbor
        	// lookups resolve to the surviving id
        	for ( PixVal p : minor.getPixels() ) {
        		if ( p.getY() == currentY ) {
        			current[p.getX()] = majorid;
        		}
        		if ( p.getY() == currentY-1 ) {
        			previous[p.getX()] = majorid;
        		}
        	}
        	
        	obs.remove(minorid);
		}
    	
    	/**
    	 * Classifies one pixel. NaN or below-threshold pixels are skipped;
    	 * otherwise the pixel joins the object of its left and/or upper
    	 * neighbors (merging two objects when they meet here), or starts a
    	 * new object keyed by its coordinates.
    	 *
    	 * @param flux pixel value
    	 * @param x column index (0-based, &lt; width)
    	 * @param y row index (equals currentY after checkNext)
    	 */
    	private void ingest(double flux,int x,int y) {
    		if ( Double.isNaN(flux) ) return;
    		
    		if ( flux < threshold ) return;
    		
    		long objid;
    		long left = leftNeighbor(x);
    		if ( left > 0 ) {
    			long up = upNeighbor(x);
    			if ( up > 0 ) {
    				if ( left != up ) {
    					// two distinct objects meet at this pixel: keep the smaller id
    					if ( left < up ) {
	    					objid = left;
	    					mergeObjects(left,up);
	    				} else {
	    					objid = up;
	    					mergeObjects(up,left);
	    				}
    				} else { // don't need to merge
    					objid = left;
    				}
    			} else {
    				objid = left;
    			}
    		} else { // no left object
    			long up = upNeighbor(x);
    			if ( up > 0 ) {
    				// the two diagonal upper neighbors may be distinct objects
    				// that this pixel connects
    				mergeUpper(x);
    				objid = up;
    			} else {
    				// New object: id encodes (y, x), offset by 1 so 0 stays "no object".
    				// BUG FIX: y must be widened to long BEFORE the shift — shifting
    				// an int by 32 is a no-op (JLS 15.19 masks the count to 5 bits),
    				// so the old (y << 32 | x) collapsed to (y | x) and produced
    				// colliding ids for distinct pixels.
    				objid = ( ((long) y) << 32 | x ) + 1;
    			}
    		}
    		
    		current[x] = objid;
    		addPixel(objid,x,y,flux);
    	}


		/**
		 * Merges the two diagonal upper neighbors (previous[x-1] and
		 * previous[x+1]) when both hold objects — the pixel at (x, currentY)
		 * connects them diagonally. The smaller id absorbs the larger.
		 */
		private void mergeUpper(int x) {
			// BUG FIX: the old right-edge guard tested x == width, but callers
			// guarantee x < width, so x == width-1 slipped through and
			// previous[x+1] read one past the end of the row buffer
			// (ArrayIndexOutOfBoundsException). Guard x+1 against width instead.
			if ( x < 1 || x + 1 >= width ) {
				return;
			}
			long leftoid = previous[x-1];
			long rightoid = previous[x+1];
			if ( leftoid == 0 || rightoid == 0 )
				return;
			if ( leftoid < rightoid )
				mergeObjects(leftoid,rightoid);
			else if ( rightoid < leftoid )
				mergeObjects(rightoid,leftoid);
		}

		/**
		 * Appends a pixel to the object {@code objid}, lazily creating its
		 * {@link Observ}, and marks the object as live on the current line.
		 */
		private void addPixel(long objid, int x, int y, double flux) {
			Observ target = obs.get(objid);
			if ( target == null ) {
				target = new Observ();
				obs.put(objid, target);
			}
			target.addPixel(new PixVal(x, y, flux));
			// keep the object out of the next flush
			updateFinalizable(objid);
		}

		/**
		 * Reads job configuration (image size, threshold, partition spec),
		 * allocates the first row buffer, and opens the merge side-output.
		 *
		 * @throws IOException if the configured image size is invalid
		 */
		@Override
        protected void setup(Context context)
        throws IOException, InterruptedException {
            super.setup(context);
            this.context = context;

            Configuration conf = context.getConfiguration();
            
            width = conf.getInt("lsst.cook.width",-1);
            height = conf.getInt("lsst.cook.height",-1);
            
            if ( width < 0 || height < 0 ) {
            	LOG.error("Invalid image size: width = {}, height = {}",width,height);
            	// BUG FIX: fail fast. Previously execution continued and later
            	// crashed with an opaque NegativeArraySizeException when the
            	// row buffer was allocated with a negative width.
            	throw new IOException("Invalid image size: width=" + width + ", height=" + height);
            }

            alwaysBuild = conf.getBoolean("lsst.cook.buildAlways",false);
            
    		partition = new Image2DPartition(conf.get("skewreduce.partition.spec"));
    		if ( LOG.isInfoEnabled() ) {
    			LOG.info("Partition Boundary = {}",partition);
    		}
    		
    		// Start one row before the partition so the first pixel triggers nextLine().
    		currentY = partition.getMin(1)-1;
            
            threshold = conf.getFloat("lsst.cook.threshold",0.0f);
            
            current = new long[width];
            
            // Channel 1 of this mux is the "merge" side-output.
            outputs = new MuxOutput(context,"merge",2);
         }
 
    	/**
    	 * Drives the scan: iterates pixels in (row, column) order, feeding
    	 * each through checkNext()/ingest(), and brackets the work with the
    	 * LoopingMapper begin/incr/end loop-monitoring hooks.
    	 */
    	@Override
		public void run(Context context) throws IOException,
				InterruptedException {
            setup(context);
            
            // loop index incr
            // loop 1 begin
            beginLoop(context);
            while ( context.nextKeyValue() ) {
            	ArrayIndex index = context.getCurrentKey();
            	float flux = context.getCurrentValue().get();

                assert partition.contains(index);
            	
            	// row break + bounds checks, then connected-component labeling
            	checkNext(index);
            	ingest(flux, index.get(0), index.get(1));

            	incrLoop(context);
            }
            endLoop(context);
            
            cleanup(context);
		}
    	
        /**
         * Flushes objects still pending after the final scan line, logs the
         * detection statistics, and closes the merge side-output.
         */
        @Override
        protected void cleanup(Context context)
        throws IOException, InterruptedException {
        	try {
        		flushFinalizable();

        		if ( LOG.isInfoEnabled() ) {
        			LOG.info("Total {} objects found. {} isolated and {} spilled.",new Object[] { numObjects, numObjects - numSpilled, numSpilled });
        		}

        		super.cleanup(context);
        	} finally {
        		// BUG FIX: close the side-output even when flushFinalizable()
        		// or super.cleanup() throws; previously a failure here leaked
        		// the merge output stream.
        		if ( outputs != null )
        			outputs.close(context);
        	}
        }
    }
    
    /** Convenience factory for drivers: builds the fully configured map-only cook job. */
    public static Job getJobInstance(Configuration conf) throws IOException {
    	LocalCook cook = new LocalCook();
    	return cook.createJob(conf);
    }
}
