package skewreduce.dfof;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.ByteWritable;
import org.apache.hadoop.io.LongWritable;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;

import skewreduce.framework.MuxData;
import skewreduce.framework.MuxOutputFormat;
import skewreduce.framework.MuxReducer;
import skewreduce.framework.physical.PMergeOp;
import skewreduce.lib.Cube2;
import skewreduce.lib.IPoint3D;
import skewreduce.lib.KDTree;
import skewreduce.lib.KDTreeNode;
import skewreduce.lib.KDTreePredicate;
import skewreduce.lib.NodeList;
import skewreduce.lib.Point3D;




public class dFoFMerge2 extends PMergeOp {
    // Logger for this class. Was registered under dFoFMerge.class (copy-paste
    // from the previous version), which mis-attributed all log output.
    private static final Log LOG = LogFactory.getLog(dFoFMerge2.class);

    /**
     * KD-tree predicate that matches any particle, other than the seed
     * itself, lying strictly within {@code radius} of the seed.
     */
    public static final class RangeExistPredicate
    implements KDTreePredicate<InternalParticle> {
        private InternalParticle seed;
        private double radius;

        public void setRadius(double r) { this.radius = r; }

        public void set(InternalParticle seed) { this.seed = seed; }

        @Override
        public boolean evalSkip(InternalParticle o) {
            // never prune subtrees during traversal
            return false;
        }

        @Override
        public boolean evalMatch(InternalParticle o) {
            // the seed never matches itself; anything else matches when it
            // is closer than the search radius
            if ( o == seed ) {
                return false;
            }
            return seed.distance(o) < radius;
        }

        @Override
        public boolean evalAdd(InternalParticle o) {
            // every visited node is a candidate
            return true;
        }
    }

    /**
     * A set of cluster ids known to be transitively connected.  The
     * closure's canonical id is the minimum id in the set, computed lazily
     * and cached in {@link #id}.
     */
    static final class Closure {
        HashSet<Long> entries = new HashSet<Long>();
        // lazily computed canonical id; < 0 means "not computed yet"
        long id = -1;

        /**
         * take union of this closure and c and share the set object, so
         * both closures observe all future additions.
         */
        public void merge(Closure c) {
            entries.addAll(c.entries);
            c.entries = this.entries;
            // the minimum of either set may have changed: drop any cached
            // canonical ids so getID() recomputes them
            this.id = -1;
            c.id = -1;
        }

        public void add(long l) {
            // invalidate the cached canonical id only if the set grew
            if ( entries.add(l) ) {
                id = -1;
            }
        }

        /** @return the smallest id in the closure (the canonical id) */
        public long getID() {
            if ( id < 0 ) {
                id = Collections.min(entries);
            }
            return id;
        }

        public boolean contains(long l) { return entries.contains(l); }

        public int size() { return entries.size(); }
    }

    /**
     * Tracks the transitive closure of cluster-id merges.  Ids that have
     * been mapped together share a single {@link Closure} object -- and,
     * after merge(), even a single entry set -- so connectivity queries
     * reduce to hash lookups.
     */
    static class ClosureTracker implements Iterable<Map.Entry<Long,Closure>> {
        HashMap<Long,Closure> closures=new HashMap<Long,Closure>();

        /**
         * Returns the closure containing id {@code l}, creating a singleton
         * closure for it on first sight.
         */
        public Closure getClosure(long l) {
            Closure c = closures.get(l);
            if ( c == null ) {
                c = new Closure();
                c.add(l);
                closures.put(l,c);
            }
            return c;
        }

        /**
         * Records that {@code oldId} and {@code newId} refer to the same
         * physical cluster, unioning their closures.
         */
        public void map(long oldId,long newId) {
            Closure oldc = getClosure(oldId);
            Closure newc = closures.get(newId);
            if ( newc == null ) {
                // newId is unseen: fold it into oldId's closure directly
                oldc.add(newId);
                closures.put(newId,oldc);
            } else {
                // Closure.merge makes newc share oldc's entry set, so any
                // id still mapped to the newc object also sees the union
                oldc.merge(newc); // the hash entry will be updated
            }
        }

        /** @return true if the two ids are already known to be connected */
        public boolean isMapped(long oldId,long newId) {
            return getClosure(oldId).contains(newId);
        }

        public Iterator<Map.Entry<Long,Closure>> iterator() {
            return closures.entrySet().iterator();
        }

        /** @return number of distinct ids tracked (not distinct closures) */
        public int size() { return closures.size(); }
    }

    /**
     * KD-tree predicate that matches representative points of clusters
     * other than the seed cluster which are not yet in the seed's closure
     * (i.e. not already known to be connected to it).
     */
    public static final class NeighborClusterPredicate
    implements KDTreePredicate<ClusterInfo> {
        private long seedCluster;
        private Closure closure;
        private ClosureTracker tracker;

        /** Sets the seed cluster and caches its closure from the tracker. */
        public void set(Cluster c) {
            this.seedCluster = c.getClusterID();
            this.closure = tracker.getClosure(seedCluster);
        }

        public void setTracker(ClosureTracker ct) { tracker = ct; }

        @Override
        public boolean evalSkip(ClusterInfo o) {
            // never prune subtrees during traversal
            return false;
        }

        @Override
        public boolean evalMatch(ClusterInfo o) {
            long otherId = o.getClusterID();
            if ( otherId == seedCluster ) {
                return false;
            }
            // skip clusters already merged into the seed's closure
            return !closure.contains(otherId);
        }

        @Override
        public boolean evalAdd(ClusterInfo o) {
            // every visited node is a candidate
            return true;
        }
    }

    /** Axis-aligned box described by its minimum and maximum corner points. */
    static class Bound {
        final Point3D mp;
        final Point3D Mp;

        Bound(Point3D mp,Point3D Mp) {
            this.mp = mp;
            this.Mp = Mp;
        }

        /** @return the corner with the smallest coordinates */
        public Point3D getMinBound() { return mp; }

        /** @return the corner with the largest coordinates */
        public Point3D getMaxBound() { return Mp; }
    }

    /**
     * A representative point standing in for a {@link Cluster} in the
     * cluster-level KD-tree.  A cluster contributes either each of its
     * particles (small clusters) or its 8 bounding-box corners -- see
     * Cluster.getBoundingBox().
     */
    static class ClusterInfo
    extends Point3D
    implements Iterable<InternalParticle>,KDTreeNode<ClusterInfo> {
        final Cluster cluster;  // the cluster this point stands in for
        ClusterInfo l;          // left child in the KD-tree
        ClusterInfo r;          // right child in the KD-tree

        ClusterInfo(Cluster c,IPoint3D p) {
            super(p);
            cluster = c;
        }

        public boolean isLeaf() { return l == null && r == null; }
        public ClusterInfo getLeft() { return l; }
        public ClusterInfo getRight() { return r; }
        public void setLeft(ClusterInfo o) { l = o; }
        public void setRight(ClusterInfo o) { r = o; }

        // no cleanup needed when a node is evicted from the tree
        public void leave() {}

        public Cluster getCluster() {
            return cluster;
        }
        public long getClusterID() { return cluster.getClusterID(); }
        public Iterator<InternalParticle> iterator() {
            return cluster.iterator();
        }

        // identity is the backing cluster, not the point coordinates --
        // all corner points of one cluster hash/compare equal
        @Override
        public int hashCode() { return cluster.hashCode(); }

        @Override
        public boolean equals(Object o) {
            if ( this == o ) {
                return true;
            } else if ( o instanceof ClusterInfo ) {
                return cluster == ((ClusterInfo)o).cluster;
            } else if ( o instanceof Cluster ) {
                // NOTE(review): equality with a raw Cluster makes equals()
                // asymmetric (a Cluster never equals a ClusterInfo);
                // presumably relied on for one-directional lookups --
                // confirm before changing.
                return cluster == o;
            }
            return false;
        }

        @Override
        public String toString() {
            return String.format("[%s] - %d",super.toString(),cluster.getClusterID());
        }
    }

    /**
     * All particles belonging to one cluster id, together with an
     * axis-aligned bounding box and (after build()) a KD-tree for range
     * queries over the particles.
     */
    static class Cluster implements Iterable<InternalParticle> {
        final long cid;   // cluster id
        Point3D mp; // minimum bound
        Point3D Mp; // maximum bound

        KDTree<InternalParticle> tree;              // built by build()
        InternalParticle[] particles;               // frozen by build()
        NodeList<InternalParticle> tmpParticles;    // staging list; dropped by build()

        Cluster(long cid) {
            this.cid = cid;
            // NOTE(review): add() mutates these points in place -- assumes
            // getInfinity()/getNegInfinity() return fresh mutable instances,
            // not shared singletons; confirm in Point3D.
            mp = Point3D.getInfinity();
            Mp = Point3D.getNegInfinity();
            tmpParticles = new NodeList<InternalParticle>();
        }

        /** Adds a particle and grows the bounding box to cover it. */
        public void add(InternalParticle p) {
            float x = p.getX();
            float y = p.getY();
            float z = p.getZ();
            if ( x < mp.getX() ) mp.setX(x);
            if ( y < mp.getY() ) mp.setY(y);
            if ( z < mp.getZ() ) mp.setZ(z);
            if ( Mp.getX() < x ) Mp.setX(x);
            if ( Mp.getY() < y ) Mp.setY(y);
            if ( Mp.getZ() < z ) Mp.setZ(z);
            tmpParticles.add(p);
        }

        /**
         * Freezes the particle set and builds the KD-tree.  Must be called
         * (once) before iterator(), size(), getBoundingBox() or the range
         * queries; until then {@code particles}/{@code tree} are null.
         */
        public void build() {
            particles = new InternalParticle[tmpParticles.size()];
            tmpParticles.toArray(particles);
            tmpParticles = null;
            tree = new KDTree<InternalParticle>(particles);
        }

        public Iterator<InternalParticle> iterator() {
            return Arrays.asList(particles).iterator();
        }

        public long getClusterID() { return cid; }
        public Point3D getMinBound() { return mp; }
        public Point3D getMaxBound() { return Mp; }
        public int size() { return particles.length; }

        /**
         * Representative points contributed to the cluster-level KD-tree:
         * the particles themselves for tiny clusters (fewer than 8),
         * otherwise the 8 corners of the bounding box.  Note the mp/Mp
         * point objects themselves are reused as two of the corners.
         */
        public ClusterInfo[] getBoundingBox() {
            ClusterInfo[] info;

            if ( size() < 8 ) {
                info = new ClusterInfo[size()];
                int i = 0;
                for ( InternalParticle p : particles ) {
                    info[i++] = new ClusterInfo(this,p);
                }
            } else {
                float x = mp.getX(); float y = mp.getY(); float z = mp.getZ();
                float X = Mp.getX(); float Y = Mp.getY(); float Z = Mp.getZ();

                info = new ClusterInfo[] {
                    new ClusterInfo(this,mp),
                    new ClusterInfo(this,new Point3D(x,y,Z)),
                    new ClusterInfo(this,new Point3D(x,Y,z)),
                    new ClusterInfo(this,new Point3D(x,Y,Z)),
                    new ClusterInfo(this,new Point3D(X,y,z)),
                    new ClusterInfo(this,new Point3D(X,y,Z)),
                    new ClusterInfo(this,new Point3D(X,Y,z)),
                    new ClusterInfo(this,Mp)
                };
            }

            return info;
        }

        /**
         * Intersection of the two clusters' bounding boxes, each expanded
         * by R on every side -- the only region in which particles of the
         * two clusters can possibly be within linking distance.
         */
        public Bound getOverlap(Cluster c,float R) {
            return new Bound(
                    new Point3D(
                            Math.max( mp.getX()-R, c.mp.getX()-R ),
                            Math.max( mp.getY()-R, c.mp.getY()-R ),
                            Math.max( mp.getZ()-R, c.mp.getZ()-R )
                    ),
                    new Point3D(
                            Math.min( Mp.getX()+R, c.Mp.getX()+R ),
                            Math.min( Mp.getY()+R, c.Mp.getY()+R ),
                            Math.min( Mp.getZ()+R, c.Mp.getZ()+R )
                    ));
        }

        /** Collects this cluster's particles inside box b into buf. */
        public void fetchBoundaryParticles(Bound b,float R,Collection<InternalParticle> buf) {
            tree.range(b.getMinBound(),b.getMaxBound(),R,null,buf);
        }

        /** @return true if any particle of this cluster matches pred within R of p */
        public boolean neighborExist(IPoint3D p,float R,RangeExistPredicate pred) {
            return tree.rangeExist(p,R,pred);
        }
    }


    public static class dFoFMergeReducer
    extends MuxReducer<ByteWritable,MuxData> {
        /**
         * Initializes the reducer: monitoring counters, the spatial
         * partition layout, and the friend-of-friends linking length.
         */
        @Override
        protected void setup(Context context)
        throws IOException, InterruptedException {
            super.setup(context);
            setupCounters(context,2);

            Configuration conf = context.getConfiguration();

            // setup multiple output
            /*
            LocalFileSystem fs = FileSystem.getLocal(conf);
            final Path pPath = new Path("partitionInfo");
            System.err.println(pPath + " exists? " + fs.exists(pPath) );
            FSDataInputStream fileIn = fs.open(pPath);
            cube = Cube2.read(fileIn);
            fileIn.close();
            */

            // partition layout is passed in-band through the configuration
            // (the commented block above is the old file-based mechanism)
            cube = new Cube2( conf.get(PARTITION_SPEC_ATTR) );

            // if prefix bit is set, 
            //   override the cube
            //   get subpartition information

            // get partition id

            // friend-of-friends linking length (eps); the hard-coded
            // fallback is presumably dataset-specific -- verify with the
            // job submitter before relying on the default
            RADIUS = conf.getFloat("dfof.params.eps",0.00026042f);
        }

        // renames carried in from the previous round (stream 0): old id -> id
        Map<Long,Long> oldMappings = new HashMap<Long,Long>();
        // renames discovered this round; filled by writeStateMapping(),
        // applied to boundary particles in writeNextMerge()
        Map<Long,Long> newMappings = new HashMap<Long,Long>();

        // key: type, value: byte array (or writable)
        // if output, wrap it in small key?
        /**
         * Loads stream 0: (oldClusterId, newClusterId) rename pairs
         * produced by the previous merge round, into {@code oldMappings}.
         */
        public void buildMappingState(Context context,Iterable<MuxData> muxData) throws IOException {
            LongWritable scratch = new LongWritable();

            beginLoop(context);
            for ( MuxData record : muxData ) {
                record.getKey(scratch);
                final long fromId = scratch.get();
                record.getValue(scratch);
                final long toId = scratch.get();

                oldMappings.put(fromId,toId);
                incrLoop(context);
            }
            endLoop(context);
        }

        HashMap<Long,Cluster> clusters = new HashMap<Long,Cluster>();   // cluster id -> its particles/KD-tree
        KDTree<ClusterInfo> clusterTree;    // index over all clusters' representative points
        Cube2 cube;                         // spatial partition layout
        float RADIUS;                       // friend-of-friends linking length (eps)

        ClosureTracker closureTracker;      // cluster equivalences discovered by merge()

        /**
         * Loads stream 1 particles, groups them by cluster id, builds a
         * per-cluster KD-tree, and indexes every cluster's representative
         * (bounding-box) points in {@code clusterTree}.
         */
        public void buildKDTree(Context context,Iterable<MuxData> muxData) throws IOException {
            LongWritable particleKey = new LongWritable();

            // pass 1: read particles and group them into Cluster objects
            beginLoop(context);
            for ( MuxData record : muxData ) {
                InternalParticle particle = new InternalParticle();
                record.getKey(particleKey);
                record.getValue(particle);

                long cid = particle.getCluster();
                Cluster cluster = clusters.get(cid);
                if ( cluster == null ) {
                    cluster = new Cluster(cid);
                    clusters.put(cid,cluster);
                }
                cluster.add(particle);
                incrLoop(context);
            }
            endLoop(context);

            System.err.println(clusters.size() + " clusters were loaded");

            // pass 2: freeze each cluster (builds its KD-tree) and collect
            // its representative corner points for the cluster-level index
            NodeList<ClusterInfo> corners = new NodeList<ClusterInfo>();
            beginLoop(context,clusters.size());
            for ( Cluster cluster : clusters.values() ) {
                cluster.build();
                for ( ClusterInfo corner : cluster.getBoundingBox() ) {
                    corners.add(corner);
                }
                incrLoop(context);
            }
            endLoop(context);

            ClusterInfo[] cornerArray = new ClusterInfo[corners.size()];
            corners.toArray(cornerArray);
            corners = null;     // release the staging list before tree build
            clusterTree = new KDTree<ClusterInfo>(cornerArray);
        }

        /**
         * For every cluster, finds neighboring clusters whose expanded
         * bounding boxes overlap its own, then checks whether any pair of
         * particles across the boundary lies within RADIUS; connected
         * cluster ids are recorded as equivalent in {@code closureTracker}.
         */
        private void merge(Context context) {
            HashSet<ClusterInfo> q = new HashSet<ClusterInfo>();
            ArrayList<InternalParticle> q2 = new ArrayList<InternalParticle>();
            closureTracker = new ClosureTracker();

            // shared, reconfigured per particle inside the loop below
            RangeExistPredicate rangeExistPred = new RangeExistPredicate();
            rangeExistPred.setRadius(RADIUS);

            // shared, reconfigured per cluster inside the loop below
            NeighborClusterPredicate neighborPred = new NeighborClusterPredicate();
            neighborPred.setTracker(closureTracker);


            beginLoop(context, clusters.size() );
            for ( Cluster c : clusters.values() ) {
                // collect all neighboring potentially connecting clusters
                q.clear();
                neighborPred.set(c);

                // FIXME: should reduce the size
                clusterTree.range(
                        c.getMinBound(),c.getMaxBound(),RADIUS,neighborPred,q
                );

                /*
                if ( LOG.isDebugEnabled() ) {
                    LOG.debug(String.format("%d (%d;%s;%s) : %d neighbors",c.getClusterID(),c.size(),c.getMinBound(),c.getMaxBound(),q.size()));
                }
                */

                long cid = c.getClusterID();

                // returned clusters are all neighboring clusters
                int con = 0;    // NOTE(review): incremented but never read
                for ( ClusterInfo info : q ) {
                    Cluster x = info.getCluster();
                    // skip pairs already known to be connected
                    if ( closureTracker.isMapped(cid,x.getClusterID()) ) continue;

                    // FIXME: did we compared it before? -- check false neighbors
                    Bound bound = c.getOverlap(x,RADIUS);
                    // probe with the smaller cluster against the larger one
                    Cluster probe = ( c.size() > x.size() ) ? x : c;
                    Cluster build = ( probe == x ) ? c : x;

                    q2.clear();
                    probe.fetchBoundaryParticles(bound,RADIUS,q2);
                    if ( q2.isEmpty() ) {
                        // FIXME: there is no particle in overlapped region.
                        // false neighbor. add to the list, continue
                        continue;
                    }

                    boolean connected = false;
                    for ( InternalParticle p : q2 ) {
                        rangeExistPred.set(p);

                        // check whether there is a particle within the range
                        if ( build.neighborExist(p,RADIUS,rangeExistPred) ) {
                            connected = true;
                            // if so, the two clusters are connected
                            break;
                        }
                    }

                    if ( connected ) {
                        //LOG.info(String.format("<%d,%d>",c.getClusterID(),x.getClusterID()));
                        // add new mapping
                        ++con;
                        closureTracker.map(c.getClusterID(),x.getClusterID());
                    }
                }

                incrLoop(context);
            }
            endLoop(context);
        }

        // to state
        //   output new mapping table
        // to disk
        //   filter larger state

        /**
         * Emits the cluster-id rename table on stream 0: first the renames
         * discovered this round (closure members -> canonical minimum id),
         * then the carried-in old renames rewritten through the new table.
         */
        private void writeStateMapping(Context context)
        throws IOException,InterruptedException {
            LongWritable keyOut = new LongWritable();
            LongWritable valOut = new LongWritable();

            // phase 1: renames produced by this round's closures
            beginLoop(context, closureTracker.size());
            for ( Map.Entry<Long,Closure> entry : closureTracker ) {
                long from = entry.getKey();
                long to = entry.getValue().getID();

                // only ids that actually change get a mapping record
                if ( from != to ) {
                    keyOut.set(from);
                    valOut.set(to);
                    newMappings.put(from,to);
                    write(context,0,keyOut,valOut);
                }
                incrLoop(context);
            }
            endLoop(context);

            LOG.info(String.format("%d mappings were found",newMappings.size()));

            // phase 2: rewrite previously known renames through this
            // round's table so chains collapse to the newest canonical id
            beginLoop(context, oldMappings.size());
            for ( Map.Entry<Long,Long> mapping : oldMappings.entrySet() ) {
                Long rewritten = newMappings.get(mapping.getValue());
                keyOut.set(mapping.getKey());
                valOut.set(rewritten == null ? mapping.getValue() : rewritten);

                write(context,0,keyOut,valOut);
                incrLoop(context);
            }
            endLoop(context);
        }

        /**
         * Emits on stream 1 every particle lying within RADIUS of its
         * partition boundary, with its cluster id rewritten through the
         * renames discovered this round -- input for the next merge level.
         */
        private void writeNextMerge(Context context) throws IOException,InterruptedException {
            LongWritable outKey = new LongWritable();

            beginLoop(context, clusters.size() );
            for ( Cluster cluster : clusters.values() ) {
                for ( InternalParticle particle : cluster ) {
                    // interior particles are final; only skin particles
                    // can still merge with neighboring partitions
                    if ( !cube.atSkin(particle,RADIUS) ) {
                        continue;
                    }
                    Long renamed = newMappings.get(particle.getCluster());
                    if ( renamed != null ) {
                        particle.setCluster(renamed);
                    }
                    outKey.set( particle.getID() );
                    write(context,1,outKey,particle);
                }
                incrLoop(context);
            }
            endLoop(context);
        }

        /**
         * Custom reduce driver: demultiplexes the two input streams by
         * their key byte (0 = rename state, 1 = particles), then runs the
         * merge and writes both output streams.
         */
        @Override
        public void run(Context context)
        throws IOException, InterruptedException {
            setup(context);

            while ( context.nextKey() ) {
                byte streamId = context.getCurrentKey().get();
                switch ( streamId ) {
                case 0:     // previous round's rename table
                    System.err.println("loading state...");
                    buildMappingState(context, context.getValues() );
                    break;
                case 1:     // particle data to index
                    System.err.println("loading kdtree...");
                    buildKDTree(context, context.getValues() );
                    break;
                default:
                    throw new IOException("Unidentified stream: "+streamId);
                }
            }

            // do merge
            merge(context);

            // output
            writeStateMapping(context);
            writeNextMerge(context);

            cleanup(context);
        }

    }

    /**
     * Builds the merge job: identity map over sequence-file input, a
     * single demultiplexing reducer, and MuxOutputFormat output.
     */
    protected Job createJob(Configuration conf) throws IOException {
        Job job = new Job(conf);
        job.setJarByClass(dFoFMerge2.class);

        // identity map; all real work happens in the reducer, which must
        // be a single task to see every cluster boundary
        job.setMapperClass(Mapper.class);
        job.setReducerClass(dFoFMergeReducer.class);
        job.setNumReduceTasks(1);

        job.setInputFormatClass(SequenceFileInputFormat.class);
        job.setOutputFormatClass(MuxOutputFormat.class);
        job.setOutputKeyClass(ByteWritable.class);
        job.setOutputValueClass(MuxData.class);

        // the reducer reports seven instrumented loops for monitoring
        job.getConfiguration().setInt("skewreduce.monitoring.num.loops",7);

        return job;
    }

    /** Convenience factory: constructs the operator and returns its configured job. */
    public static Job getJobInstance(Configuration conf) throws IOException {
        dFoFMerge2 op = new dFoFMerge2();
        return op.createJob(conf);
    }
}
