package skewreduce.dfof;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.ByteWritable;
import org.apache.hadoop.io.LongWritable;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;

import skewreduce.framework.MuxData;
import skewreduce.framework.MuxOutputFormat;
import skewreduce.framework.MuxReducer;
import skewreduce.framework.physical.PMergeOp;
import skewreduce.lib.Cube2;
import skewreduce.lib.KDTree;
import skewreduce.lib.KDTreePredicate;
import skewreduce.lib.NodeList;




public class dFoFMerge extends PMergeOp {
    private static final Log LOG = LogFactory.getLog(dFoFMerge.class);

    static class Predicate implements KDTreePredicate<InternalParticle> {
        // Particle currently being expanded; neighbors are measured from it.
        private InternalParticle seed;
        // Friend-of-friend linking length (search radius).
        private double radius;

        Predicate(double r) { radius = r; }

        /** Re-aims the predicate at a new query particle. */
        public void set(InternalParticle seed) { this.seed = seed; }

        /** Subtree pruning hook: defer to the particle's own skip flag. */
        @Override
        public boolean evalSkip(InternalParticle o) {
            return o.canSkip();
        }

        /** True when {@code o} is a distinct particle within the linking radius. */
        @Override
        public boolean evalMatch(InternalParticle o) {
            return o != seed && seed.distance(o) < radius;
        }

        /**
         * Admits a particle into the expansion queue at most once: only
         * never-visited particles qualify, and they are marked pending so a
         * later range query cannot enqueue them again.
         */
        @Override
        public boolean evalAdd(InternalParticle o) {
            if ( ! o.isNotVisitedYet() ) return false;
            o.pending();
            return true;
        }
    }

    public static class dFoFMergeReducer
    extends MuxReducer<ByteWritable,MuxData> {
        @Override
        protected void setup(Context context)
        throws IOException, InterruptedException {
            super.setup(context);
            // Two monitored output streams: 0 = state mapping, 1 = particles.
            setupCounters(context,2);

            Configuration conf = context.getConfiguration();

            // Partition geometry travels in the job configuration (a string
            // spec) rather than in a side file.
            cube = new Cube2( conf.get(PARTITION_SPEC_ATTR) );

            // Friend-of-friend linking length (epsilon); the fallback is the
            // canonical default used by this pipeline.
            RADIUS = conf.getFloat("dfof.params.eps",0.00026042f);

            pred = new Predicate(RADIUS);
        }

        // Cluster-id renames carried over from the previous merge level.
        Map<Long,Long> oldMappings = new HashMap<Long,Long>();
        // Cluster-id renames discovered during this level's merge.
        Map<Long,Long> newMappings = new HashMap<Long,Long>();

        /**
         * Loads the (oldCid -&gt; newCid) rename table from the "state" input
         * stream into {@code oldMappings}. Each record carries the pair as
         * its key/value longs.
         */
        public void buildMappingState(Context context,Iterable<MuxData> muxData) throws IOException {
            LongWritable scratch = new LongWritable();

            beginLoop(context);
            for ( MuxData record : muxData ) {
                record.getKey(scratch);
                long from = scratch.get();
                record.getValue(scratch);
                oldMappings.put(from, scratch.get());
                incrLoop(context);
            }
            endLoop(context);
        }

        // Every particle loaded for this merge level; backs the kd-tree.
        InternalParticle[] particles;
        // Particles bucketed by their current cluster id.
        HashMap<Long,List<InternalParticle>> groups = new HashMap<Long,List<InternalParticle>>();
        KDTree<InternalParticle> tree;
        Predicate pred;
        Cube2 cube;
        float RADIUS;

        /**
         * Materializes every particle from the "output" input stream, indexes
         * each by its current cluster id, and builds the kd-tree used for
         * range queries in the merge phase.
         */
        public void buildKDTree(Context context,Iterable<MuxData> muxData) throws IOException {
            NodeList<InternalParticle> loaded = new NodeList<InternalParticle>();
            LongWritable id = new LongWritable();

            beginLoop(context);
            for ( MuxData record : muxData ) {
                InternalParticle p = new InternalParticle();
                record.getKey(id);
                record.getValue(p);

                List<InternalParticle> members = groups.get(p.getCluster());
                if ( members == null ) {
                    members = new ArrayList<InternalParticle>();
                    groups.put(p.getCluster(),members);
                }
                members.add(p);
                loaded.add(p);
                incrLoop(context);
            }
            endLoop(context);

            System.err.println(loaded.size() + " particles were loaded");

            // Copy into a plain array and drop the temporary list before
            // handing the particles to the kd-tree constructor.
            particles = new InternalParticle[loaded.size()];
            loaded.toArray(particles);
            loaded = null;
            tree = new KDTree<InternalParticle>(particles);
        }

        /**
         * Core merge phase: flood-fills connected components of particles
         * (stack-based expansion through the kd-tree within RADIUS) and, when
         * a component spans several cluster ids, records that those ids must
         * collapse onto the smallest id seen, in {@code newMappings}.
         */
        private void merge(Context context) {
            // Cluster ids touched while expanding the current group.
            HashSet<Long> gids = new HashSet<Long>();
            // gids plus every id reachable through existing rename chains.
            HashSet<Long> mappedgids = new HashSet<Long>();

            // Work stack for the expansion; presized to a fraction of the
            // tree (0.2965 looks like an empirically chosen tuning constant).
            ArrayList<InternalParticle> q = new ArrayList<InternalParticle>( (int)(tree.size() * 0.2965f));

            beginLoop(context, groups.values().size() );
            for ( List<InternalParticle> group : groups.values() ) {
                gids.clear();
                // Groups are only ever created non-empty (buildKDTree), so
                // get(0) is safe; seed the set with the group's own id.
                gids.add(group.get(0).getCluster());
                for ( InternalParticle seed : group ) {
                    if ( seed.isVisited() ) continue;
                    q.add(seed);
                    while ( ! q.isEmpty() ) {
                        // Pop from the tail; evalAdd guarantees each particle
                        // is pushed at most once.
                        InternalParticle p = q.remove(q.size()-1);
                        if ( p.isVisited() ) continue;
                        p.visit();
                        // Every cluster id reached in this component belongs
                        // to the same physical cluster.
                        gids.add(p.getCluster());
                        pred.set(p);
                        // Range query pushes unvisited neighbors onto q.
                        tree.range(p,RADIUS,pred,q);
                    }
                }

                // Component spans multiple cluster ids: unify them (and any
                // ids already chained from them) onto the minimum id.
                if ( gids.size() > 1 ) {
                    mappedgids.clear();
                    long newGid = Long.MAX_VALUE;
                    for ( long g : gids ) {
                        mappedgids.add(g);
                        if ( g < newGid ) newGid = g;
                        // Follow any existing rename chain so transitively
                        // connected ids end up with the same target.
                        if ( newMappings.containsKey(g) ) {
                            long ngid = g;
                            do {
                                ngid = newMappings.get(ngid);
                                if ( ngid < newGid ) newGid = ngid;
                                mappedgids.add(ngid);
                            } while ( newMappings.containsKey(ngid) );
                        }
                    }
                    // Point every collected id at the minimum (the minimum
                    // itself is skipped so it never maps to itself).
                    for ( long g : mappedgids ) {
                        if ( g > newGid )
                            newMappings.put(g,newGid);
                    }
                }

                incrLoop(context);
            }
            endLoop(context);

            LOG.info(String.format("%d new mappings were found",newMappings.size()));
            // First phase done; output generation follows in
            // writeStateMapping / writeNextMerge.
        }

        /**
         * Emits the consolidated rename table to output stream 0.
         *
         * Pass 1 path-compresses {@code newMappings} so each key points
         * directly at its terminal id, emitting (old, terminal) pairs.
         * Pass 2 re-emits the carried-over {@code oldMappings}, redirected
         * through the freshly compressed table.
         */
        private void writeStateMapping(Context context) throws IOException,InterruptedException {
            LongWritable oldid = new LongWritable();
            LongWritable newid = new LongWritable();

            ArrayList<Long> path = new ArrayList<Long>();

            // Pass 1: flush the mappings found at this level.

            Set<Long> keySet = newMappings.keySet();

            beginLoop(context, keySet.size() );
            for ( long g : keySet ) {
                // Walk to the end of the rename chain...
                long ngid = newMappings.get(g);
                while ( newMappings.containsKey(ngid) ) {
                    path.add(ngid);
                    ngid = newMappings.get(ngid);
                }
                // ...then rewrite each intermediate hop to point straight at
                // the terminal id (path compression). Every id on the path is
                // already a key of newMappings, so these puts only replace
                // values — no structural modification of the map being
                // iterated, hence no ConcurrentModificationException.
                while ( ! path.isEmpty() ) {
                    newMappings.put( path.remove(path.size()-1), ngid );
                }

                oldid.set(g);
                newid.set(ngid);

                write(context,0,oldid,newid);
                incrLoop(context);
            }
            endLoop(context);

            // Pass 2: forward the previous level's mappings, redirecting any
            // whose target was itself renamed at this level.

            Set<Map.Entry<Long,Long>> entrySet = oldMappings.entrySet();
            beginLoop(context, entrySet.size());
            for ( Map.Entry<Long,Long> mapping : entrySet ) {
                Long newId = newMappings.get(mapping.getValue());
                if ( newId == null ) {
                    newid.set(mapping.getValue());
                } else {
                    newid.set(newId);
                }
                oldid.set(mapping.getKey());

                write(context,0,oldid,newid);

                incrLoop(context);
            }
            endLoop(context);
        }

        /**
         * Emits to stream 1 every particle lying within RADIUS of its
         * partition boundary (the "skin"), with its cluster id rewritten
         * through the newly discovered mappings. Only skin particles are
         * relevant to the next merge level.
         */
        private void writeNextMerge(Context context) throws IOException,InterruptedException {
            LongWritable pid = new LongWritable();

            beginLoop(context, particles.length);
            for ( InternalParticle p : particles ) {
                if ( cube.atSkin(p,RADIUS) ) {
                    // newMappings never stores null values, so a single get
                    // doubles as the membership test.
                    Long renamed = newMappings.get(p.getCluster());
                    if ( renamed != null ) {
                        p.setCluster(renamed);
                    }
                    pid.set( p.getID() );
                    write(context,1,pid,p);
                }
                incrLoop(context);
            }
            endLoop(context);
        }

        /**
         * Custom reduce driver: demultiplexes the two input streams (0 =
         * rename state from the previous level, 1 = particles), then runs the
         * merge and writes both outputs.
         */
        @Override
        public void run(Context context) 
        throws IOException, InterruptedException {
            setup(context);

            while ( context.nextKey() ) {
                ByteWritable stream = context.getCurrentKey();
                switch ( stream.get() ) {
                case 0:  // state stream
                    System.err.println("loading state...");
                    buildMappingState(context, context.getValues() );
                    break;
                case 1:  // particle stream
                    System.err.println("loading kdtree...");
                    buildKDTree(context, context.getValues() );
                    break;
                default:
                    throw new IOException("Unidentified stream: "+stream.get());
                }
            }

            // Cluster unification across partition boundaries.
            merge(context);

            // Stream 0: rename table; stream 1: skin particles for the next level.
            writeStateMapping(context);
            writeNextMerge(context);

            cleanup(context);
        }

    }

    /**
     * Builds the merge job: identity mapper feeding a single multiplexed
     * reducer, sequence-file input, multiplexed (state + particle) output.
     */
    protected Job createJob(Configuration conf) throws IOException {
        Job job = new Job(conf);
        job.setJarByClass(dFoFMerge.class);

        // I/O formats and the reducer's output key/value types.
        job.setInputFormatClass(SequenceFileInputFormat.class);
        job.setOutputFormatClass(MuxOutputFormat.class);
        job.setOutputKeyClass(ByteWritable.class);
        job.setOutputValueClass(MuxData.class);

        // Identity map; every record must reach the one reducer that
        // performs the global merge.
        job.setMapperClass(Mapper.class);
        job.setReducerClass(dFoFMergeReducer.class);
        job.setNumReduceTasks(1);

        // The reducer instruments six monitored loops.
        job.getConfiguration().setInt("skewreduce.monitoring.num.loops",6);

        return job;
    }

    /** Convenience factory: returns a fully configured merge job for {@code conf}. */
    public static Job getJobInstance(Configuration conf) throws IOException {
        dFoFMerge op = new dFoFMerge();
        return op.createJob(conf);
    }
}
