package edu.cornell.cs.lsi.mapreduce.pass2;

import java.io.IOException;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

import edu.cornell.cs.lsi.mapreduce.pass1.DoublePoint;

public class BoundaryComponents {

    public static class CombineToOneKeyMapper extends Mapper<LongWritable, Text, IntWritable, Text> {
        private static IntWritable oneKey = new IntWritable(0);

        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // send all points to the same reducer iteration
            context.write(oneKey, value);
        }
    }

    public static class BoundaryComponentsReducer extends Reducer<IntWritable, Text, Text, Text> {
        private static Comparator<DoublePoint> comp = new Comparator<DoublePoint>() {
            public int compare(DoublePoint point1, DoublePoint point2) {
                // only care if x and y are equal, other ordering doesnt matter
                // because just looking up points
                int compareVal = 0;
                if (point1.getX() < point2.getX()) {
                    compareVal = -1;
                } else if (point1.getX() > point2.getX()) {
                    compareVal = 1;
                } else if (point1.getY() < point2.getY()) {
                    compareVal = -1;
                } else if (point1.getY() > point2.getY()) {
                    compareVal = 1;
                }
                return compareVal;
            }
        };

        private static DoublePoint getPoint(String xStr, String yStr, TreeSet<DoublePoint> vertices) {
            DoublePoint point = new DoublePoint(Double.parseDouble(xStr), Double.parseDouble(yStr));
            DoublePoint alreadyCreatedPoint = vertices.floor(point);
            if (point.equals(alreadyCreatedPoint)) {
                point = alreadyCreatedPoint;
            } else {
                // have not seen this point yet
                vertices.add(point);
            }
            return point;
        }

        public void reduce(IntWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            Set<DoublePoint> boundaryVertices = new HashSet<DoublePoint>();
            TreeSet<DoublePoint> vertices = new TreeSet<DoublePoint>(comp);

            for (Text value : values) {
                String[] vertexParentPair = value.toString().trim().split("[\\s]+");
                DoublePoint vertex = getPoint(vertexParentPair[0], vertexParentPair[1], vertices);
                // only output boundary vertices
                boundaryVertices.add(vertex);
                DoublePoint parent = getPoint(vertexParentPair[2], vertexParentPair[3], vertices);
                vertex.union(parent);
            }
            for (DoublePoint vertex : boundaryVertices) {
                context.write(new Text(vertex.toString()), new Text(vertex.find().toString()));
            }
        }
    }

    /**
     * Configures and runs the single-reducer job that merges the per-block
     * boundary union-find records produced by pass 1.
     *
     * @param args args[0] = input path, args[1] = output path
     */
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        // Job.getInstance() replaces the deprecated new Job() constructor.
        Job job = Job.getInstance();
        job.setJobName("BoundaryComponents");
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        job.setJarByClass(BoundaryComponents.class);
        job.setMapperClass(CombineToOneKeyMapper.class);
        job.setReducerClass(BoundaryComponentsReducer.class);
        job.setMapOutputKeyClass(IntWritable.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        // the global union-find requires all records in a single reduce call
        job.setNumReduceTasks(1);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        // Propagate job success/failure through the exit code; the original
        // discarded waitForCompletion's boolean, so a failed job exited with 0.
        System.exit(job.waitForCompletion(false) ? 0 : 1);
    }
}
