package edu.cornell.cs.lsi.mapreduce.pass3;

import static edu.cornell.cs.lsi.mapreduce.Constants.COLUMN_COUNT;
import static edu.cornell.cs.lsi.mapreduce.Constants.G;
import static edu.cornell.cs.lsi.mapreduce.Constants.Lmax_SQUARED;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.SortedSet;
import java.util.StringTokenizer;
import java.util.TreeSet;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

import edu.cornell.cs.lsi.mapreduce.Constants;
import edu.cornell.cs.lsi.mapreduce.pass1.DoublePoint;

public class ComputeComponents {
    private static final String THIRD_PASS = "third pass";

    public static class Map extends Mapper<Object, Text, Text, Text> {

        /**
         * Routes each input record to the reducer(s) for the grid column(s) it belongs to.
         *
         * Two record formats are accepted:
         * - vertex records ("x y weight", 3 tokens): keyed by the column containing x; points
         *   that fall in a column's border strip are additionally replicated to the adjacent
         *   column so cross-column edges can be discovered there.
         * - union-find pairs from the second pass ("x1 y1 x2 y2", 4 tokens): broadcast to
         *   every column, since any column may need the vertex→parent mapping.
         * Any other token count (e.g. a blank line) is skipped; the original code indexed
         * tokens 0-3 unconditionally in the fallback branch and would throw
         * IndexOutOfBoundsException on such lines.
         */
        @Override
        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {

            // read the line and tokenize on whitespace
            String line = value.toString();
            StringTokenizer tokenizer = new StringTokenizer(line);
            List<String> tokens = new ArrayList<String>();
            while (tokenizer.hasMoreTokens()) {
                tokens.add(tokenizer.nextToken());
            }
            if (tokens.size() == 3) {
                // vertex record from the production file: <x> <y> <weight>
                double x = Double.parseDouble(tokens.get(0));
                double y = Double.parseDouble(tokens.get(1));
                double weight = Double.parseDouble(tokens.get(2));

                // only vertices inside the configured weight window participate
                if (Constants.MIN_WEIGHT <= weight && weight <= Constants.MAX_WEIGHT) {
                    double colNumDouble = x / G;
                    int colNumInt = (int) colNumDouble;
                    // distance of x from the left edge of its column
                    double offset = x - ((double) colNumInt) * G;
                    Text pointText = new Text(x + " " + y);
                    if (offset >= Constants.BORDERLESS_COL_WIDTH && colNumInt < COLUMN_COUNT - 1) {
                        // in the border strip shared with the next column: replicate right
                        context.write(new Text(String.valueOf(colNumInt + 1)), pointText);
                    } else if (colNumDouble == Math.ceil(colNumDouble) && colNumInt > 0) {
                        // exactly on the column's left edge: replicate to the previous column
                        context.write(new Text(String.valueOf(colNumInt - 1)), pointText);
                    }
                    // always emit to the point's own column
                    context.write(new Text(String.valueOf(colNumInt)), pointText);
                }
            } else if (tokens.size() == 4) {
                // union-find pair from the second pass: broadcast to all columns
                Text outputValue = new Text(tokens.get(0) + " " + tokens.get(1) + " " + tokens.get(2) + " " + tokens.get(3));
                for (int count = 0; count < COLUMN_COUNT; count++) {
                    context.write(new Text(String.valueOf(count)), outputValue);
                }
            }
            // silently ignore blank or otherwise malformed lines
        }
    }

    /**
     * Reducer for one grid column: rebuilds union-find state from second-pass pairs,
     * links every pair of vertices within distance Lmax of each other via a sweep over
     * x-sorted vertices, then emits each vertex with its component representative.
     */
    public static class Reduce extends Reducer<Text, Text, Text, Text> {

        // Orders points by (x, y). Used only for deduplicating lookups in the
        // 'vertices' TreeSet — as the inline note says, any total order works.
        private static Comparator<DoublePoint> comp = new Comparator<DoublePoint>() {
            public int compare(DoublePoint point1, DoublePoint point2) {
                // only care if x and y are equal, other ordering doesnt matter
                // because just looking up points
                int compareVal = 0;
                if (point1.getX() < point2.getX()) {
                    compareVal = -1;
                } else if (point1.getX() > point2.getX()) {
                    compareVal = 1;
                } else if (point1.getY() < point2.getY()) {
                    compareVal = -1;
                } else if (point1.getY() > point2.getY()) {
                    compareVal = 1;
                }
                return compareVal;
            }
        };

        /**
         * Interns a point: parses (xStr, yStr) and returns the canonical DoublePoint
         * instance from 'vertices' if one with equal coordinates already exists
         * (floor() + equals() check), otherwise registers and returns the new instance.
         * Interning matters because union-find state lives on the instances themselves.
         */
        private static DoublePoint getPoint(String xStr, String yStr, TreeSet<DoublePoint> vertices) {
            DoublePoint point = new DoublePoint(Double.parseDouble(xStr), Double.parseDouble(yStr));
            DoublePoint alreadyCreatedPoint = vertices.floor(point);
            if (point.equals(alreadyCreatedPoint)) {
                point = alreadyCreatedPoint;
            } else {
                // have not seen this point yet
                vertices.add(point);
            }
            return point;
        }

        /**
         * Processes all records for one column (key = column number).
         *
         * Phase 1: partition the values — 2-token records are vertices (interned and
         * collected), 4-token records are vertex→parent pairs from the second pass,
         * replayed into the union-find structure via union().
         * Phase 2: sweep-line pass linking every vertex pair within Lmax.
         * Phase 3: emit "vertex  representative" for vertices owned by this column.
         */
        @Override
        public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            // canonical instances keyed by (x, y); see getPoint()
            TreeSet<DoublePoint> vertices = new TreeSet<DoublePoint>(comp);
            // vertex records in arrival order (may alias entries of 'vertices')
            List<DoublePoint> allVertices = new ArrayList<DoublePoint>();

            // read the key and tokenize to extract the key vertex
            for (Text value : values) {
                StringTokenizer tokenizer = new StringTokenizer(value.toString());
                List<String> tokens = new ArrayList<String>();
                while (tokenizer.hasMoreTokens()) {
                    tokens.add(tokenizer.nextToken());
                }
                // populate the vertices in the set
                if (tokens.size() == 2) {
                    // this is a vertex
                    DoublePoint vertex = getPoint(tokens.get(0), tokens.get(1), vertices);
                    allVertices.add(vertex);
                } else {
                    // populate the edges from the second pass in the UF table
                    String[] vertexParentPair = value.toString().trim().split("[\\s]+");
                    DoublePoint vertex = getPoint(vertexParentPair[0], vertexParentPair[1], vertices);
                    DoublePoint parent = getPoint(vertexParentPair[2], vertexParentPair[3], vertices);
                    // NOTE(review): union/find are assumed to implement union-find on
                    // DoublePoint (defined in pass1) — semantics not visible here.
                    vertex.union(parent);
                }
            }

            // compute the edges between the single vertices...
            int colNum = Integer.parseInt(key.toString());
            TreeSet<DoublePoint> tree;          // active sweep window, ordered by y
            int seekIndex;                      // index of the vertex being processed
            int remIndex;                       // trailing index for evicting stale points
            DoublePoint curPoint;
            SortedSet<DoublePoint> pointsInSpan;

            DoublePoint minPoint;
            DoublePoint maxPoint;
            double leftColBoundary = ((double) (colNum)) * G;
            double rightColBoundary = leftColBoundary + Constants.BORDERLESS_COL_WIDTH;

            // y-major order for the sweep window so subSet() selects a y-range
            Comparator<DoublePoint> yComp = new Comparator<DoublePoint>() {

                @Override
                public int compare(DoublePoint arg0, DoublePoint arg1) {
                    if (arg0.getY() < arg1.getY())
                        return -1;
                    if (arg0.getY() > arg1.getY())
                        return 1;
                    if (arg0.getX() < arg1.getX())
                        return -1;
                    if (arg0.getX() > arg1.getX())
                        return 1;
                    return 0;
                }

            };

            // x-major order for the sweep itself
            Comparator<DoublePoint> xComp = new Comparator<DoublePoint>() {

                @Override
                public int compare(DoublePoint arg0, DoublePoint arg1) {
                    if (arg0.getX() < arg1.getX())
                        return -1;
                    if (arg0.getX() > arg1.getX())
                        return 1;
                    if (arg0.getY() < arg1.getY())
                        return -1;
                    if (arg0.getY() > arg1.getY())
                        return 1;
                    return 0;
                }

            };

            tree = new TreeSet<DoublePoint>(yComp);
            remIndex = 0;
            Collections.sort(allVertices, xComp);

            // Sweep left-to-right: for each vertex, union it with every earlier vertex
            // whose x is within Lmax (maintained by the eviction loop below) and whose
            // y is within Lmax (selected by subSet), then filtered by true Euclidean
            // distance via withinRange.
            for (seekIndex = 0; seekIndex < allVertices.size(); seekIndex++) {
                curPoint = allVertices.get(seekIndex);
                // NOTE(review): the sentinel x-bounds -1f and 2f only cover the whole
                // x-range if coordinates lie in [0, 1] — TODO confirm against Constants.
                minPoint = new DoublePoint(-1f, curPoint.getY() - Constants.Lmax);
                maxPoint = new DoublePoint(2f, curPoint.getY() + Constants.Lmax);
                pointsInSpan = tree.subSet(minPoint, maxPoint);

                for (DoublePoint pointInSpan : pointsInSpan) {
                    if (pointInSpan.withinRange(curPoint, Lmax_SQUARED)) {
                        pointInSpan.union(curPoint);
                    }
                }

                // evict window members more than Lmax to the left of the current point
                if (seekIndex + 1 < allVertices.size()) {
                    while (remIndex < seekIndex && allVertices.get(remIndex).getX() < curPoint.getX() - Constants.Lmax) {
                        tree.remove(allVertices.get(remIndex));
                        remIndex++;
                    }
                }
                tree.add(curPoint);
            }

            // emit the vertices.
            // Ownership rule: emit points from just left of this column through its
            // borderless width; the last column also emits its right border strip so
            // every point is emitted by exactly one reducer.
            Text outputKey;
            Text outputValue;
            for (DoublePoint vertex : vertices) {
                if (vertex.getX() >= leftColBoundary - Constants.Lmax) {
                    if (vertex.getX() < rightColBoundary || colNum == COLUMN_COUNT - 1) {
                        outputKey = new Text(vertex.toString());
                        // find() is assumed to return the component representative
                        outputValue = new Text(vertex.find().toString());
                        context.write(outputKey, outputValue);
                    }
                }
            }
        }
    }

    /**
     * Configures and submits the third-pass job.
     *
     * Usage: args[0] = vertex input path, args[1] = output path,
     * args[2] (optional) = second-pass union-find output path (added as extra input).
     * Exits with status 0 on job success, 1 on job failure, 2 on bad arguments —
     * the original discarded waitForCompletion's result, so failed jobs looked
     * successful to the caller.
     */
    public static void main(String[] args) throws Exception {
        if (args.length < 2) {
            System.err.println("Usage: ComputeComponents <vertex input path> <output path> [<pass-2 input path>]");
            System.exit(2);
        }
        Configuration conf = new Configuration();
        Job job = new Job(conf, THIRD_PASS);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        job.setJarByClass(ComputeComponents.class);
        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        // one reducer per grid column so each column's key lands in its own task
        job.setNumReduceTasks(COLUMN_COUNT);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        if (args.length > 2) {
            FileInputFormat.addInputPath(job, new Path(args[2]));
        }
        // propagate job outcome as the process exit status
        System.exit(job.waitForCompletion(false) ? 0 : 1);
    }

}
