package edu.maxflow.mapreduce;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;

import edu.maxflow.Edge;
import edu.maxflow.Vertex;
import edu.maxflow.mapreduce.util.CounterEnum;
import edu.maxflow.mapreduce.util.MRConstants;

/**
 * Driver for an iterative max-flow computation on Hadoop MapReduce.
 *
 * <p>Round 0 is a map-only job ({@link GraphMapper}) that converts the plain-text
 * input graph into the serialized {@link Vertex} model. Every later round runs one
 * augmenting step ({@link MaxFlowMapper} / {@link MaxFlowReducer}) over the previous
 * round's output. The loop stops when a round reports no moves touching the source
 * or the sink (read back via job counters), then prints the per-round augmented
 * edges and the accumulated max-flow value.
 *
 * <p>Usage: {@code MapReduceJob [sourceVertexId targetVertexId]} — both ids must be
 * supplied to override the defaults; a single argument is ignored.
 */
public class MapReduceJob {

	public static void main(String[] args) throws Exception {
		long startTime = System.currentTimeMillis();

		// Running total of flow pushed across all rounds (from the MAXFLOW counter).
		long maxFlow = 0;

		// Default endpoint ids; overridden when both are given on the command line.
		int sourceVertexId = 1;
		int targetVertexId = 10000;

		if (args.length > 1) {
			sourceVertexId = Integer.parseInt(args[0]);
			targetVertexId = Integer.parseInt(args[1]);
		}

		System.out.println("sourceVertexId : " + sourceVertexId);
		System.out.println("targetVertexId : " + targetVertexId);

		int round = 0;
		while (true) {
			Configuration conf = new Configuration();
			// NOTE(review): "fs.default.name" is the deprecated spelling of
			// "fs.defaultFS"; kept as-is to match the cluster config this targets.
			conf.set("fs.default.name", MRConstants.FILE_SYSTEM_DEFAULT_NAME);
			conf.setInt(MRConstants.ROUND, round);
			conf.setInt(MRConstants.SOURCE_VERTEX_ID, sourceVertexId);
			conf.setInt(MRConstants.TARGET_VERTEX_ID, targetVertexId);

			Job job = new Job(conf, "maximumflow");
			job.setJarByClass(MapReduceJob.class);

			if (round == 0) {
				// Round 0: parse the text input into the graph model. Map-only,
				// so no reducer is configured.
				FileInputFormat.addInputPath(job, new Path(MRConstants.PATH_INPUT));

				job.setNumReduceTasks(0);
				job.setMapperClass(GraphMapper.class);

				job.setInputFormatClass(TextInputFormat.class);
				job.setOutputFormatClass(SequenceFileOutputFormat.class);
			} else {
				// Later rounds: one augmenting step over the previous round's output.
				FileInputFormat.addInputPath(job, new Path(MRConstants.PATH_OUTPUT + round));

				job.setInputFormatClass(SequenceFileInputFormat.class);
				job.setOutputFormatClass(SequenceFileOutputFormat.class);
				job.setMapperClass(MaxFlowMapper.class);
				job.setReducerClass(MaxFlowReducer.class);
			}

			// Each round writes to a fresh directory consumed by the next round.
			FileOutputFormat.setOutputPath(job, new Path(MRConstants.PATH_OUTPUT + (round + 1)));

			job.setMapOutputKeyClass(LongWritable.class);
			job.setMapOutputValueClass(Vertex.class);

			job.setOutputKeyClass(LongWritable.class);
			job.setOutputValueClass(Vertex.class);

			// BUG FIX: the original ignored the success flag, so a failed round was
			// indistinguishable from a converged one and could loop forever.
			if (!job.waitForCompletion(true)) {
				throw new IllegalStateException("MapReduce job failed at round " + round);
			}

			// Counters carry the per-round progress signals out of the tasks.
			Counters counters = job.getCounters();
			long sourceMoves = counters.findCounter(CounterEnum.SOURCE_MOVE).getValue();
			long sinkMoves = counters.findCounter(CounterEnum.SINK_MOVE).getValue();
			maxFlow += counters.findCounter(CounterEnum.MAXFLOW).getValue();

			// Converged: no augmenting move reached the source or the sink this round
			// (round 0 is only the format-conversion job, so it never terminates).
			if (round > 0 && (sourceMoves == 0 || sinkMoves == 0)) {
				FileSystem fileSystem = FileSystem.get(conf);

				// Debug dump of the augmented edges written by each round.
				// It's better to remove this part before prod.
				for (int i = 1; i <= round; i++) {
					System.out.println("ROUND " + i);
					System.out.println("----------------");

					Path path = new Path(MRConstants.PATH_AUGMENTED_EDGE + i);
					// File layout: an int count followed by that many serialized Edges.
					try (FSDataInputStream inputStream = fileSystem.open(path)) {
						int size = inputStream.readInt();
						System.out.println("Size : " + size);

						for (int j = 0; j < size; j++) {
							Edge edge = new Edge();
							edge.readFields(inputStream);

							// Print the smaller vertex id first so both directions of
							// an edge render identically.
							if (edge.getFromVertexId() < edge.getToVertexId()) {
								System.out.println("Edge Flow " + edge.getFromVertexId() + "-" + edge.getToVertexId() + " : " + edge.getFlow());
							} else {
								System.out.println("Edge Flow " + edge.getToVertexId() + "-" + edge.getFromVertexId() + " : " + edge.getFlow());
							}
						}
					} catch (Exception e) {
						// Best effort: a missing/corrupt debug file must not abort the
						// run, but record which file and why instead of a bare message.
						System.err.println("File read exception occured for " + path + " : " + e);
					}
				}

				// Final summary: round count, wall-clock time, accumulated max flow.
				System.out.println("ROUND : " + round);
				System.out.println("TOTAL TIME : " + ((System.currentTimeMillis() - startTime) / 1000) + " seconds");
				System.out.println("MAXFLOW : " + maxFlow);

				break;
			}

			round++;
		}
	}
}
