package skewreduce.lsst.cook;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.ByteWritable;
import org.apache.hadoop.io.LongWritable;

import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;

import org.apache.hadoop.util.ReflectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import skewreduce.framework.MuxData;
import skewreduce.framework.MuxMapper;
import skewreduce.framework.MuxOutputFormat;
import skewreduce.framework.physical.PMergeOp;
import skewreduce.lib.Cube2;
import skewreduce.lib.IPoint3D;
import skewreduce.lib.KDTree;
import skewreduce.lib.KDTreeNode;
import skewreduce.lib.KDTreePredicate;
import skewreduce.lib.NodeList;
import skewreduce.lib.Point3D;
import skewreduce.lsst.Image2DPartition;
import skewreduce.lsst.PixVal;



public class CookMerge extends PMergeOp {
    private static final Logger LOG = LoggerFactory.getLogger(CookMerge.class);

    /**
     * Map-side operator that stitches together connected components ("objects")
     * that were split across the horizontal boundary between two adjacent image
     * partitions.  Each input carries two multiplexed streams: stream 0 holds
     * (oldId, newId) state mappings from earlier rounds, stream 1 holds
     * boundary pixels keyed by object id.  After loading everything, the mapper
     * unifies object ids along the split line and emits the updated mapping
     * table (stream 0) plus the still-unresolved skin pixels for the next merge
     * round (stream 1).
     */
    public static class CookMergeMapper
    extends MuxMapper<ByteWritable,MuxData> {
        // One iterator per physical input: index 0 wraps this task's own split,
        // the rest are the extra files named in "skewreduce.merge.extraInput".
        List<MergeDataIterator<ByteWritable,MuxData>> dataInputs = new ArrayList<MergeDataIterator<ByteWritable,MuxData>>(2);
        
        // Geometry of the 2D image partition being merged (parsed in setup()).
        Image2DPartition partition;
        
        // y coordinate of the row just above the split line; lineBuf rows are
        // addressed relative to this base (see addOutput()).
        int    splitBase;
        // lineBuf[0] = object ids on the row above the split (y == splitBase),
        // lineBuf[1] = object ids on the row below it (y == splitBase + 1);
        // both are indexed by x across the full partition width, 0 = no object.
        long[][] lineBuf;
        
        
        // Pairs a pixel with the object id it carried when it was read.
        // NOTE(review): hashCode() delegates to the pixel only and equals() is
        // not overridden — harmless for the ArrayList usage below, but this
        // class must not be placed in a hash-based collection as-is.
        class OidPixVal {
        	final long oid;
        	final PixVal val;
        	
        	OidPixVal(long o,PixVal v) { oid = o; val = v; }
        	
        	public long getOid() { return oid; }
        	public PixVal getValue() { return val; }
        	@Override
        	public int hashCode() { return val.hashCode(); }
        }
        
        // Pixels sitting on the remaining partition "skin" (outer boundary);
        // forwarded to the next merge round by writeNextMerge().
        ArrayList<OidPixVal> residue = new ArrayList<OidPixVal>();

        /**
         * Opens every merge input and allocates the two-row merge buffer.
         * Besides the task's own split (via TaskContextDataIterator), each file
         * listed in "skewreduce.merge.extraInput" is opened in full as a single
         * FileSplit using the job's configured InputFormat.
         *
         * @throws IllegalArgumentException if the partition is split along the
         *         X axis — the two-row buffer assumes a horizontal split line
         */
        @SuppressWarnings("unchecked")
		@Override
        protected void setup(Context context)
        throws IOException, InterruptedException {
            super.setup(context);
            setupCounters(context,2);

            Configuration conf = context.getConfiguration();
            
            partition = new Image2DPartition(conf.get(PARTITION_SPEC_ATTR));
            
            if ( LOG.isInfoEnabled() ) {
            	LOG.info("Partition = {}",partition);
            }
            
            // Only Y-axis splits are supported; the merge surface is one row
            // above and one row below a horizontal split line.
            if ( partition.getSplitAxis() != 1 ) {
            	throw new IllegalArgumentException("partitioning along X axis is not supported!");
            }

            dataInputs.add(new TaskContextDataIterator<ByteWritable,MuxData>(context));

            // open the other input file
            InputFormat<ByteWritable,MuxData> format = null;
            try {
                format = (InputFormat<ByteWritable,MuxData>)ReflectionUtils.newInstance(context.getInputFormatClass(),conf);
            } catch ( ClassNotFoundException ex ) {
                throw new IOException("Failed to load input format class",ex);
            }

            // NOTE(review): getStrings() returns null when the key is absent —
            // verify the driver always sets skewreduce.merge.extraInput, or the
            // loop below will throw a NullPointerException.
            String[] inputs = conf.getStrings("skewreduce.merge.extraInput");
            FileSystem fs = FileSystem.get(conf);

            for ( String input : inputs ) {
                Path path = new Path(input);
                FileStatus stat = fs.getFileStatus(path);
                // Read each extra input end-to-end as one split.
                FileSplit split = new FileSplit(path,0,stat.getLen(),null);

                RecordReader<ByteWritable,MuxData> otherInput = format.createRecordReader(split,context);
                otherInput.initialize(split,context);

                dataInputs.add(new RecordReaderDataIterator<ByteWritable,MuxData>(otherInput));
            }
            
            // setup merging surface buffer
            lineBuf = new long[2][];
            lineBuf[0] = new long[partition.getWidth()];
            lineBuf[1] = new long[partition.getWidth()];
            splitBase = partition.getSplit() - 1;
        }

        // Object-id renames carried in from previous rounds (stream 0 input).
        Map<Long,Long> oldMappings = new HashMap<Long,Long>();
        // Renames discovered by merge() in this round.
        Map<Long,Long> newMappings = new HashMap<Long,Long>();
        
        // Object id of the pixel immediately left of x on the bottom row,
        // or 0 (no object) when x is the first column.
		private long leftNeighbor(int x) {
    		if ( x > 0 ) return lineBuf[1][x-1];
    		return 0L;
    	}
    	
        // Smallest positive object id among the three top-row pixels that are
        // 8-connected to bottom-row pixel x (columns x-1, x, x+1); 0 if none.
    	private long upNeighbor(int x) {
			long current = lineBuf[0][x];
			long x_prev = ( x > 0 ) ? lineBuf[0][x-1] : 0L;
			long x_next = ( x+1 < partition.getWidth() ) ? lineBuf[0][x+1] : 0L;
			if ( x > 0 && x_prev > 0L && (x_prev < current || current == 0) )
				current = x_prev;
			if ( x+1 < partition.getWidth() && x_next > 0 && (x_next < current || current == 0) )
				current = x_next;
			return current;
    	}

        /**
         * Scans the bottom row once, left to right, unifying each pixel's
         * object id with its left and upper neighbors and recording every
         * rename in {@code newMappings}.  Where both neighbors exist, the
         * smaller id wins (same convention as the local cook phase).
         */
        private void merge(Context context) {
        	// a linear scan on bottom line and do merge as we do in local cook
            beginLoop(context, lineBuf[1].length);
        	for ( int x = 0; x < lineBuf[1].length; ++x ) {
        		long objid = lineBuf[1][x];
        		long left = leftNeighbor(x);
        		if ( left > 0 ) {
        			long up = upNeighbor(x);
        			if ( up > 0 ) {
        				if ( left != up ) {
        					// Left and up disagree: rename current id to the
        					// smaller of the two.
        					if ( left < up ) {
        						this.newMappings.put(objid,left);
    	    					objid = left; // new mapping
    	    				} else {
    	    					this.newMappings.put(objid,up);
    	    					objid = up;
    	    				}
        				} else { // don't need to merge
        					// stay as it is...
        				}
        			} else {
        				this.newMappings.put(objid, left);
        				objid = left;
        			}
        		} else { // no left object
        			long up = upNeighbor(x);
        			if ( up > 0 ) {
        				// NOTE(review): this branch maps up -> objid, whereas
        				// every other branch maps objid -> neighbor, and no
        				// magnitude comparison is done — confirm the asymmetry
        				// is intentional.
        				this.newMappings.put(up,objid);
        				objid = up;
        			} else {
        				// stay as it is...
        			}
        		}
        		lineBuf[1][x] = objid;

                incrLoop(context);
            }
            endLoop(context);
        }

        // to state
        //   output new mapping table
        // to disk
        //   filter larger state

        /**
         * Emits the rename table on stream 0: first the mappings discovered in
         * this round (skipping identity mappings), then the carried-in old
         * mappings, re-targeted through the new ones where applicable.
         */
        private void writeStateMapping(Context context)
        throws IOException,InterruptedException {
            LongWritable oldid = new LongWritable();
            LongWritable newid = new LongWritable();

            // first update all new mappings
            beginLoop(context, newMappings.size());
            for ( Map.Entry<Long,Long> e : newMappings.entrySet() ) {
            	long ogid = e.getKey();
            	long ngid = e.getValue();
            	            	
            	if ( ogid == ngid ) continue; // not mapped
            	
                oldid.set(ogid);
                newid.set(ngid);

                write(context,0,oldid,newid);
                
                incrLoop(context);
            }
            endLoop(context);

            LOG.info(String.format("%d mappings were found",newMappings.size()));

            // now write-out old mappings

            Set<Map.Entry<Long,Long>> entrySet = oldMappings.entrySet();
            beginLoop(context, entrySet.size());
            for ( Map.Entry<Long,Long> mapping : entrySet ) {
                // If this round renamed the old target, follow one level of
                // indirection and point the old source at the newest id.
                // NOTE(review): only a single hop is resolved — verify rename
                // chains longer than one cannot occur at this point.
                Long newId = newMappings.get(mapping.getValue());
                if ( newId == null ) {
                    newid.set(mapping.getValue());
                } else {
                    newid.set(newId);
                }
                oldid.set(mapping.getKey());

                write(context,0,oldid,newid);

                incrLoop(context);
            }
            endLoop(context);
        }

        /**
         * Forwards the residual skin pixels on stream 1, keyed by their object
         * id after applying this round's renames.
         */
        private void writeNextMerge(Context context) throws IOException,InterruptedException {
            LongWritable key = new LongWritable();

            beginLoop(context, residue.size() );
            for ( OidPixVal v : residue ) {
            	Long newGid = newMappings.get(v.getOid());
            	if ( newGid == null ) {
            		key.set(v.getOid());
            	} else {
            		key.set(newGid);
            	}
            	write(context,1,key,v.getValue());

                incrLoop(context);
            }
            endLoop(context);
        }

        // Scratch writable reused while decoding MuxData records.
        private LongWritable buf = new LongWritable();

        // Stream 0 record: an (oldId, newId) rename pair; accumulate it into
        // oldMappings for later rewrite in writeStateMapping().
        protected void addState(MuxData record) throws IOException {
            // accumulate input
            record.getKey(buf);
            long oldCid = buf.get();
            record.getValue(buf);
            long newCid = buf.get();
            oldMappings.put(oldCid,newCid);
        }

        // Stream 1 record: an (objectId, pixel) pair.  Pixels on the two rows
        // adjacent to the split line go into lineBuf for merge(); pixels on
        // the remaining partition skin are kept in residue for the next round.
        protected void addOutput(MuxData record) throws IOException {
        	PixVal v = new PixVal();
        	record.getKey(buf);
        	record.getValue(v);

            assert partition.contains(v);
        	
        	// off == 0 -> row above the split, off == 1 -> row below it.
        	int off = v.getY() - splitBase;
        	if ( off == 0 || off == 1 ) {
        		lineBuf[off][ v.getX() ] = buf.get();
        	}
        	
        	if ( partition.atSkin(v, 1) ) {
        		residue.add(new OidPixVal(buf.get(),v));
        	}
        }

        /**
         * Custom run loop (replaces the default map() dispatch): drains every
         * input, demultiplexing state vs. output records by stream id, then
         * merges along the split line and writes the updated state and the
         * residue for the next merge round.
         */
        @Override
        public void run(Context context) 
        throws IOException, InterruptedException {
            setup(context);

            beginLoop(context);
            for ( MergeDataIterator<ByteWritable,MuxData> i : dataInputs ) {
                while ( i.nextKeyValue() ) {
                    byte stream = i.getCurrentKey().get();
                    MuxData record = i.getCurrentValue();
                    if ( stream == 0 ) { // state
                        addState(record);
                    } else if ( stream == 1 ) { // output
                        addOutput(record);
                    } else {
                        throw new IOException("Unidentified stream: "+stream);
                    }
                    incrLoop(context);
                }

                LOG.info("Loaded data: {} state, {} output",oldMappings.size(),residue.size());
            }
            endLoop(context);

            LOG.info("Total loaded data: {} state, {} output",oldMappings.size(),residue.size());

            // do merge
            merge(context);

            // output
            writeStateMapping(context);
            writeNextMerge(context);

            cleanup(context);
        }

        // Closes all merge inputs before delegating to the framework cleanup.
        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            for ( MergeDataIterator<?,?> i : dataInputs ) {
                i.close();
            }
            super.cleanup(context);
        }
    }

    /**
     * Builds the map-only Hadoop job that runs {@link CookMergeMapper}.
     *
     * @param conf base configuration to clone into the job
     * @return a configured, unsubmitted {@link Job}
     * @throws IOException if the job cannot be created
     */
    protected Job createJob(Configuration conf) throws IOException {
        Job mergeJob = new Job(conf);
        mergeJob.setJarByClass(CookMerge.class);

        // Map-only job: all merging happens in the mapper, no reduce phase.
        mergeJob.setMapperClass(CookMergeMapper.class);
        mergeJob.setNumReduceTasks(0);

        // Multiplexed record streams on both sides of the mapper.
        mergeJob.setInputFormatClass(MergeInputFormat.class);
        mergeJob.setOutputFormatClass(MuxOutputFormat.class);
        mergeJob.setOutputKeyClass(ByteWritable.class);
        mergeJob.setOutputValueClass(MuxData.class);

        // The mapper reports progress over six instrumented loops.
        mergeJob.getConfiguration().setInt("skewreduce.monitoring.num.loops",6);

        return mergeJob;
    }

    /**
     * Static factory: creates a throwaway {@link CookMerge} operator and
     * returns the merge job it configures.
     *
     * @param conf base configuration for the job
     * @return a configured, unsubmitted {@link Job}
     * @throws IOException if the job cannot be created
     */
    public static Job getJobInstance(Configuration conf) throws IOException {
        CookMerge op = new CookMerge();
        return op.createJob(conf);
    }
}
