package skewreduce.dfof;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.ByteWritable;
import org.apache.hadoop.io.LongWritable;

import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import org.apache.hadoop.util.ReflectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import skewreduce.framework.MuxData;
import skewreduce.framework.MuxMapper;
import skewreduce.framework.MuxOutputFormat;
import skewreduce.framework.physical.PMergeOp;
import skewreduce.lib.Cube2;
import skewreduce.lib.IPoint3D;
import skewreduce.lib.KDTree;
import skewreduce.lib.KDTreeNode;
import skewreduce.lib.KDTreePredicate;
import skewreduce.lib.NodeList;
import skewreduce.lib.Point3D;
import skewreduce.seaflow.InternalParticle;




public class dFoFMerge3 extends PMergeOp {
    // Logger must be named after THIS class; the original referenced
    // dFoFMerge (copy-paste from the previous version), which routes all
    // log output to the wrong category.
    private static final Logger LOG = LoggerFactory.getLogger(dFoFMerge3.class);

    /**
     * KD-tree predicate that matches any particle, other than the seed
     * itself, lying strictly within {@code radius} of the seed particle.
     */
    public static final class RangeExistPredicate
    implements KDTreePredicate<InternalParticle> {
        private InternalParticle seed;
        private double radius;

        public void setRadius(double r) { radius = r; }
        public void set(InternalParticle seed) { this.seed = seed; }

        /** Never prune a subtree: every candidate must be examined. */
        @Override
        public boolean evalSkip(InternalParticle o) {
            return false;
        }

        /** Matches a particle that is not the seed and is closer than the radius. */
        @Override
        public boolean evalMatch(InternalParticle o) {
            if ( o == seed ) {
                return false;
            }
            return seed.distance(o) < radius;
        }

        /** Every node is eligible for evaluation. */
        @Override
        public boolean evalAdd(InternalParticle o) {
            return true;
        }
    }

    /**
     * KD-tree predicate matching cluster representatives that belong to a
     * different cluster which has not yet been merged with the seed cluster.
     */
    public static final class NeighborClusterPredicate
    implements KDTreePredicate<ClusterInfo> {
        private long seedCluster;
        private Cluster cluster;

        public void set(Cluster c) {
            seedCluster = c.getClusterID();
            cluster = c;
        }

        /** Never prune a subtree. */
        @Override
        public boolean evalSkip(ClusterInfo o) {
            return false;
        }

        /** Matches representatives of other clusters not already merged with the seed. */
        @Override
        public boolean evalMatch(ClusterInfo o) {
            if ( o.getClusterID() == seedCluster ) {
                return false;   // same cluster as the seed
            }
            // clusters that already share a union-find root are skipped
            return ! cluster.isMapped( o.getCluster() );
        }

        /** Every node is eligible for evaluation. */
        @Override
        public boolean evalAdd(ClusterInfo o) {
            return true;
        }
    }

    /** Immutable axis-aligned box described by its minimum and maximum corners. */
    static class Bound {
        final Point3D mp;   // minimum corner
        final Point3D Mp;   // maximum corner

        Bound(Point3D mp,Point3D Mp) {
            this.mp = mp;
            this.Mp = Mp;
        }

        public Point3D getMinBound() {
            return mp;
        }

        public Point3D getMaxBound() {
            return Mp;
        }
    }

    /**
     * A representative point of a {@link Cluster} (either an actual particle
     * or a bounding-box corner) that can live in a {@link KDTree}, so whole
     * clusters can be looked up spatially via their representatives.
     */
    static class ClusterInfo
    extends Point3D
    implements Iterable<InternalParticle>,KDTreeNode<ClusterInfo> {
        final Cluster cluster;  // owning cluster this point represents
        ClusterInfo l;          // left KD-tree child
        ClusterInfo r;          // right KD-tree child

        ClusterInfo(Cluster c,IPoint3D p) {
            super(p);
            cluster = c;
        }

        public boolean isLeaf() { return l == null && r == null; }
        public ClusterInfo getLeft() { return l; }
        public ClusterInfo getRight() { return r; }
        public void setLeft(ClusterInfo o) { l = o; }
        public void setRight(ClusterInfo o) { r = o; }

        // no-op; presumably required by the KDTreeNode contract
        public void leave() {}

        public Cluster getCluster() {
            return cluster;
        }
        public long getClusterID() { return cluster.getClusterID(); }
        // delegates to the owning cluster's particle iterator
        public Iterator<InternalParticle> iterator() {
            return cluster.iterator();
        }

        // identity of a ClusterInfo is the identity of its owning cluster,
        // so all representatives of one cluster collapse to a single
        // HashSet entry (see the neighbor set in merge())
        @Override
        public int hashCode() { return cluster.hashCode(); }

        // NOTE(review): accepting a bare Cluster here makes equals
        // asymmetric (Cluster does not override equals, so
        // Cluster.equals(ClusterInfo) is identity-based). This violates the
        // equals contract; confirm nothing relies on the symmetric case
        // before changing it.
        @Override
        public boolean equals(Object o) {
            if ( this == o ) {
                return true;
            } else if ( o instanceof ClusterInfo ) {
                return cluster == ((ClusterInfo)o).cluster;
            } else if ( o instanceof Cluster ) {
                return cluster == o;
            }
            return false;
        }

        @Override
        public String toString() {
            return String.format("[%s] - %d",super.toString(),cluster.getClusterID());
        }
    }

    /**
     * One friends-of-friends cluster: its particle set, an axis-aligned
     * bounding box grown incrementally by add(), a KD-tree built by build()
     * for spatial queries, and a union-find parent link recording merges
     * with other clusters.
     */
    static class Cluster implements Iterable<InternalParticle> {
        final long cid;   // cluster id
        Point3D mp; // minimum bound
        Point3D Mp; // maximum bound
        
        KDTree<InternalParticle> tree;          // built once by build()
        InternalParticle[] particles;           // final particle storage, set by build()
        NodeList<InternalParticle> tmpParticles; // staging list, released by build()

        Cluster(long cid) {
            this.cid = cid;
            // inverted box (+inf min, -inf max) so the first add() sets both corners
            mp = Point3D.getInfinity();
            Mp = Point3D.getNegInfinity();
            tmpParticles = new NodeList<InternalParticle>();
        }

        // Stages a particle and grows the bounding box to cover it.
        // Only valid before build() (tmpParticles is null afterwards).
        public void add(InternalParticle p) {
            float x = p.getX();
            float y = p.getY();
            float z = p.getZ();
            if ( x < mp.getX() ) mp.setX(x);
            if ( y < mp.getY() ) mp.setY(y);
            if ( z < mp.getZ() ) mp.setZ(z);
            if ( Mp.getX() < x ) Mp.setX(x);
            if ( Mp.getY() < y ) Mp.setY(y);
            if ( Mp.getZ() < z ) Mp.setZ(z);
            tmpParticles.add(p);
        }

        // Freezes the particle set: copies the staging list into an array,
        // releases it, and builds the KD-tree used by the range queries.
        public void build() {
            particles = new InternalParticle[tmpParticles.size()];
            tmpParticles.toArray(particles);
            tmpParticles = null;
            tree = new KDTree<InternalParticle>(particles);
        }

        public Iterator<InternalParticle> iterator() {
            return Arrays.asList(particles).iterator();
        }

        public long getClusterID() { return cid; }
        public Point3D getMinBound() { return mp; }
        public Point3D getMaxBound() { return Mp; }
        public int size() { return particles.length; }
        // Representative points for the global cluster KD-tree: the particles
        // themselves when the cluster is tiny (< 8), otherwise the 8 corners
        // of the bounding box.
        public ClusterInfo[] getBoundingBox() {
            ClusterInfo[] info;

            if ( size() < 8 ) {
                info = new ClusterInfo[size()];
                int i = 0;
                for ( InternalParticle p : particles ) {
                    info[i++] = new ClusterInfo(this,p);
                }
            } else {
                float x = mp.getX(); float y = mp.getY(); float z = mp.getZ();
                float X = Mp.getX(); float Y = Mp.getY(); float Z = Mp.getZ();

                info = new ClusterInfo[] {
                    new ClusterInfo(this,mp),
                    new ClusterInfo(this,new Point3D(x,y,Z)),
                    new ClusterInfo(this,new Point3D(x,Y,z)),
                    new ClusterInfo(this,new Point3D(x,Y,Z)),
                    new ClusterInfo(this,new Point3D(X,y,z)),
                    new ClusterInfo(this,new Point3D(X,y,Z)),
                    new ClusterInfo(this,new Point3D(X,Y,z)),
                    new ClusterInfo(this,Mp)
                };
            }

            return info;
        }

        // Intersection of this cluster's box and c's box, each padded by R on
        // every side; any particle pair linking the two clusters must lie here.
        public Bound getOverlap(Cluster c,float R) {
            return new Bound(
                    new Point3D(
                            Math.max( mp.getX()-R, c.mp.getX()-R ),
                            Math.max( mp.getY()-R, c.mp.getY()-R ),
                            Math.max( mp.getZ()-R, c.mp.getZ()-R )
                    ),
                    new Point3D(
                            Math.min( Mp.getX()+R, c.Mp.getX()+R ),
                            Math.min( Mp.getY()+R, c.Mp.getY()+R ),
                            Math.min( Mp.getZ()+R, c.Mp.getZ()+R )
                    ));
        }

        // Collects this cluster's particles selected by the KD-tree range
        // query over bound b (R is passed through to KDTree.range) into buf.
        public void fetchBoundaryParticles(Bound b,float R,Collection<InternalParticle> buf) {
            tree.range(b.getMinBound(),b.getMaxBound(),R,null,buf);
        }

        // True if KDTree.rangeExist finds a particle satisfying pred within R of p.
        public boolean neighborExist(IPoint3D p,float R,RangeExistPredicate pred) {
            return tree.rangeExist(p,R,pred);
        }
        
        // closure maintenance -- union-find link over clusters
        Cluster parent;
        
        // Union-find "find" with path compression: returns the set
        // representative and re-points every node on the walked path
        // directly at the root.
        public Cluster getRoot() {
        	if ( parent == null ) return this;
        	else if ( parent.parent == null ) return parent;
        	ArrayList<Cluster> path = new ArrayList<Cluster>(16);
        	Cluster node = this;
        	while ( node.parent != null ) {
        		path.add(node);
        		node = node.parent;
        	}
        	for ( Cluster c : path ) {
        		c.parent = node;
        	}
        	return node;
        }
        
        // Union: the root with the smaller cluster id becomes the parent,
        // so the representative id of a merged set is its smallest cid.
        public void merge(Cluster o) {
        	Cluster r1 = getRoot();
        	Cluster r2 = o.getRoot();
        	if ( r1.cid < r2.cid ) {
        		r2.parent = r1;
        	} else {
        		r1.parent = r2;
        	}
        }
        
        // True when both clusters already belong to the same merged set.
        public boolean isMapped(Cluster o) {
        	Cluster r1 = getRoot();
        	Cluster r2 = o.getRoot();
        	return r1 == r2;
        }
    }


    public static class dFoFMergeMapper
    extends MuxMapper<ByteWritable,MuxData> {

        /** All record sources for this task: its own split plus any extra inputs. */
        List<MergeDataIterator<ByteWritable,MuxData>> dataInputs = new ArrayList<MergeDataIterator<ByteWritable,MuxData>>(2);

        /**
         * Reads configuration (partition cube, FoF linking radius) and opens
         * every input: the split assigned to this task plus each file listed
         * under {@code skewreduce.merge.extraInput}.
         */
        @Override
        protected void setup(Context context)
        throws IOException, InterruptedException {
            super.setup(context);
            setupCounters(context,2);

            Configuration conf = context.getConfiguration();

            cube = new Cube2( conf.get(PARTITION_SPEC_ATTR) );

            // use the class logger instead of stdout so the message lands in
            // the task log like every other diagnostic
            LOG.info("partition cube: {}", cube);

            // if prefix bit is set, 
            //   override the cube
            //   get subpartition information

            // get partition id
            RADIUS = conf.getFloat("dfof.params.eps",0.00026042f);

            dataInputs.add(new TaskContextDataIterator<ByteWritable,MuxData>(context));

            // open the other input file(s)
            InputFormat<ByteWritable,MuxData> format = null;
            try {
                format = (InputFormat<ByteWritable,MuxData>)ReflectionUtils.newInstance(context.getInputFormatClass(),conf);
            } catch ( ClassNotFoundException ex ) {
                throw new IOException("Failed to load input format class",ex);
            }

            // getStrings returns null when the key is absent; without this
            // guard the loop below would throw NullPointerException
            String[] inputs = conf.getStrings("skewreduce.merge.extraInput");
            if ( inputs != null ) {
                FileSystem fs = FileSystem.get(conf);

                for ( String input : inputs ) {
                    Path path = new Path(input);
                    FileStatus stat = fs.getFileStatus(path);
                    // hosts == null: locality hints are unnecessary for a direct read
                    FileSplit split = new FileSplit(path,0,stat.getLen(),null);

                    RecordReader<ByteWritable,MuxData> otherInput = format.createRecordReader(split,context);
                    otherInput.initialize(split,context);

                    dataInputs.add(new RecordReaderDataIterator<ByteWritable,MuxData>(otherInput));
                }
            }
        }

        Map<Long,Long> oldMappings = new HashMap<Long,Long>();   // mappings carried in from previous rounds
        Map<Long,Long> newMappings = new HashMap<Long,Long>();   // mappings discovered in this round
        NodeList<InternalParticle> extraParticles = new NodeList<InternalParticle>(); // particles away from the merge surface

        HashMap<Long,Cluster> clusters = new HashMap<Long,Cluster>(); // surface clusters keyed by cluster id
        KDTree<ClusterInfo> clusterTree; // spatial index over cluster representatives
        Cube2 cube;     // partition geometry
        float RADIUS;   // FoF linking length (eps)

        /**
         * Core merge: builds a KD-tree per cluster plus a global KD-tree of
         * cluster representatives, then probes each cluster's neighborhood;
         * two clusters are unioned when any particle pair across them lies
         * within RADIUS.
         */
        private void merge(Context context) {
            // loop 2 end
            // use the class logger instead of stderr so the message lands in the task log
            LOG.info("{} clusters were loaded",clusters.size());
            NodeList<ClusterInfo> tmpClusterList = new NodeList<ClusterInfo>();
            beginLoop(context,clusters.size());
            for ( Cluster c : clusters.values() ) {
                c.build();  // build KD-tree
                // append bounding box information
                for ( ClusterInfo i : c.getBoundingBox() ) {
                    tmpClusterList.add(i);
                }
                incrLoop(context);
            }
            endLoop(context);

            ClusterInfo[] ci = new ClusterInfo[tmpClusterList.size()];
            tmpClusterList.toArray(ci);
            tmpClusterList = null;
            clusterTree = new KDTree<ClusterInfo>(ci);


            HashSet<ClusterInfo> q = new HashSet<ClusterInfo>();       // neighbor candidates
            ArrayList<InternalParticle> q2 = new ArrayList<InternalParticle>(); // overlap-region particles

            RangeExistPredicate rangeExistPred = new RangeExistPredicate();
            rangeExistPred.setRadius(RADIUS);

            NeighborClusterPredicate neighborPred = new NeighborClusterPredicate();

            beginLoop(context, clusters.size() );
            for ( Cluster c : clusters.values() ) {
                // collect all neighboring potentially connecting clusters
                q.clear();
                neighborPred.set(c);

                // FIXME: should reduce the size
                clusterTree.range(
                        c.getMinBound(),c.getMaxBound(),RADIUS,neighborPred,q
                );

                /*
                if ( LOG.isDebugEnabled() ) {
                    LOG.debug(String.format("%d (%d;%s;%s) : %d neighbors",c.getClusterID(),c.size(),c.getMinBound(),c.getMaxBound(),q.size()));
                }
                */

                // returned clusters are all neighboring clusters
                int con = 0;    // connections found for this cluster (diagnostic only)
                for ( ClusterInfo info : q ) {
                    Cluster x = info.getCluster();
                    if ( c.isMapped(x) ) continue;  // already in the same merged set

                    // FIXME: did we compared it before? -- check false neighbors
                    Bound bound = c.getOverlap(x,RADIUS);
                    // probe with the smaller cluster against the larger one's KD-tree
                    Cluster probe = ( c.size() > x.size() ) ? x : c;
                    Cluster build = ( probe == x ) ? c : x;

                    q2.clear();
                    probe.fetchBoundaryParticles(bound,RADIUS,q2);
                    if ( q2.isEmpty() ) {
                        // FIXME: there is no particle in overlapped region.
                        // false neighbor. add to the list, continue
                        continue;
                    }

                    boolean connected = false;
                    for ( InternalParticle p : q2 ) {
                        rangeExistPred.set(p);

                        // check whether there is a particle within the range
                        if ( build.neighborExist(p,RADIUS,rangeExistPred) ) {
                            connected = true;
                            // if so, the two clusters are connected
                            break;
                        }
                    }

                    if ( connected ) {
                        //LOG.info(String.format("<%d,%d>",c.getClusterID(),x.getClusterID()));
                        // add new mapping
                        ++con;
                        c.merge(x);
                    }
                }

                incrLoop(context);
            }
            endLoop(context);
        }

        // Output plan:
        //   to state (stream 0): the old->new cluster id mapping table
        //   to disk  (stream 1): particles filtered for the next merge round
        //                        (see writeNextMerge)

        /**
         * Emits the cluster-id mapping table on stream 0.  First every
         * cluster merged in THIS round is written as
         * (its id -> union-find root id); then the mappings carried over
         * from previous rounds are re-emitted, with targets forwarded
         * through the new mappings when the old target was merged again.
         */
        private void writeStateMapping(Context context)
        throws IOException,InterruptedException {
            LongWritable oldid = new LongWritable();
            LongWritable newid = new LongWritable();

            // first update all new mappings
            beginLoop(context, clusters.size());
            for ( Map.Entry<Long,Cluster> e : clusters.entrySet() ) {
            	Cluster c = e.getValue();
            	Cluster r = c.getRoot();
            	
            	if ( c == r ) continue; // not mapped
            	
                long ogid = c.cid;
                long ngid = r.cid;

                oldid.set(ogid);
                newid.set(ngid);

                newMappings.put(ogid,ngid);
                write(context,0,oldid,newid);
                
                incrLoop(context);
            }
            endLoop(context);

            LOG.info("{} mappings were found. {} mappings from previous merge.",newMappings.size(),oldMappings.size());
            LOG.info("after merge, there are {} distinct groups",new HashSet<Long>(newMappings.values()).size());

            // now write-out old mappings; a single lookup hop suffices
            // because newMappings targets are root ids from this round

            Set<Map.Entry<Long,Long>> entrySet = oldMappings.entrySet();
            beginLoop(context, entrySet.size());
            for ( Map.Entry<Long,Long> mapping : entrySet ) {
                Long newId = newMappings.get(mapping.getValue());
                if ( newId == null ) {
                    newid.set(mapping.getValue());
                } else {
                    newid.set(newId);
                }
                oldid.set(mapping.getKey());

                write(context,0,oldid,newid);

                incrLoop(context);
            }
            endLoop(context);
        }

        /**
         * Emits on stream 1 every particle sitting on the "skin" of the
         * partition cube (i.e. one that may still connect across partitions
         * in a later merge round), rewriting its cluster id through the
         * newly discovered mappings.  Covers both the merged clusters and
         * the particles that sat out this round.
         */
        private void writeNextMerge(Context context) throws IOException,InterruptedException {
            LongWritable key = new LongWritable();

            beginLoop(context, clusters.size() );
            for ( Cluster c : clusters.values() ) {
                for ( InternalParticle p : c ) {
                    emitIfAtSkin(context,key,p);
                }
                incrLoop(context);
            }
            endLoop(context);
            
            LOG.info("{} particles did not participate in this merge",extraParticles.size());
            
            beginLoop(context, extraParticles.size() );
            for ( InternalParticle p : extraParticles ) {
                emitIfAtSkin(context,key,p);
                incrLoop(context);
            }
            endLoop(context);
        }

        /**
         * Writes {@code p} to stream 1 (keyed by particle id) if it lies on
         * the cube skin, remapping its cluster id first.  Factored out of
         * the two formerly-duplicated loops in writeNextMerge.
         */
        private void emitIfAtSkin(Context context,LongWritable key,InternalParticle p)
        throws IOException,InterruptedException {
            if ( cube.atSkin(p,RADIUS) ) {
                long g = p.getCluster();
                Long ng = newMappings.get(g);   // single lookup instead of containsKey+get
                if ( ng != null ) {
                    p.setCluster(ng);
                }
                key.set( p.getID() );
                write(context,1,key,p);
            }
        }

        // Scratch writable reused when decoding the long key/value of a
        // record (shared by addState and addOutput).
        private LongWritable buf = new LongWritable();

        /**
         * Consumes one record from the state stream (stream 0): a pair of
         * longs mapping an old cluster id to the id it was merged into in a
         * previous round.
         */
        protected void addState(MuxData record) throws IOException {
            // accumulate input
            record.getKey(buf);
            long oldCid = buf.get();
            record.getValue(buf);
            long newCid = buf.get();
            oldMappings.put(oldCid,newCid);
        }

        /**
         * Routes one particle from the output stream (stream 1).  Particles
         * near the merge surface are grouped into per-cluster containers;
         * all others are parked in {@code extraParticles} untouched.
         */
        protected void addOutput(MuxData record) throws IOException {
            InternalParticle particle = new InternalParticle();
            record.getKey(buf);         // key must be consumed, value is unused here
            record.getValue(particle);

            if ( ! cube.atMergeSurface(particle, RADIUS) ) {
                // far from the surface: cannot connect across partitions this round
                extraParticles.add(particle);
                return;
            }

            long cid = particle.getCluster();
            Cluster owner = clusters.get(cid);
            if ( owner == null ) {
                owner = new Cluster(cid);
                clusters.put(cid,owner);
            }
            owner.add(particle);
        }

        /**
         * Drives the whole merge: ingest every record from every input,
         * run the cluster merge, then emit the mapping table (stream 0)
         * and the particles for the next round (stream 1).
         */
        @Override
        public void run(Context context) 
        throws IOException, InterruptedException {
            setup(context);

            // phase 1: ingest all inputs, demultiplexing by stream id
            beginLoop(context);
            for ( MergeDataIterator<ByteWritable,MuxData> input : dataInputs ) {
                while ( input.nextKeyValue() ) {
                    byte stream = input.getCurrentKey().get();
                    MuxData record = input.getCurrentValue();
                    switch ( stream ) {
                        case 0:     // state: mapping table from a previous merge
                            addState(record);
                            break;
                        case 1:     // output: particle records
                            addOutput(record);
                            break;
                        default:
                            throw new IOException("Unidentified stream: "+stream);
                    }
                    incrLoop(context);
                }
            }
            endLoop(context);

            // phase 2: connect clusters across partition boundaries
            merge(context);

            // phase 3: emit results
            writeStateMapping(context);
            writeNextMerge(context);

            cleanup(context);
        }

        /**
         * Closes every opened input, then delegates to the superclass.
         * The try/finally guarantees super.cleanup runs even if a close
         * throws (the original skipped it in that case).  Note: a failing
         * close still skips the readers after it in the list.
         */
        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            try {
                for ( MergeDataIterator<?,?> i : dataInputs ) {
                    i.close();
                }
            } finally {
                super.cleanup(context);
            }
        }
    }

    /**
     * Builds the Hadoop job for this merge step: a map-only job that reads
     * multiplexed merge input and writes multiplexed output.
     */
    protected Job createJob(Configuration conf) throws IOException {
        Job job = new Job(conf);

        job.setJarByClass(dFoFMerge3.class);

        // map-only: every merge task runs independently, no reduce phase
        job.setMapperClass(dFoFMergeMapper.class);
        job.setNumReduceTasks(0);

        //job.setInputFormatClass(SequenceFileInputFormat.class);
        job.setInputFormatClass(MergeInputFormat.class);
        job.setOutputFormatClass(MuxOutputFormat.class);

        job.setOutputKeyClass(ByteWritable.class);
        job.setOutputValueClass(MuxData.class);

        // the mapper reports progress through 7 instrumented loops
        job.getConfiguration().setInt("skewreduce.monitoring.num.loops",7);

        return job;
    }

    /** Static factory: returns a fully configured merge job for the given configuration. */
    public static Job getJobInstance(Configuration conf) throws IOException {
        dFoFMerge3 op = new dFoFMerge3();
        return op.createJob(conf);
    }
}
