package org.myorg;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.*;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;

import org.myorg.GA;

/**
 * Hadoop (old "mapred" API) driver for a parallel genetic algorithm.
 * Each map task runs one GA "island" via {@link GA#ga()} and emits its best
 * individual; the reduce phase passes the results through to the output.
 */
public class Island {

	// Was LogFactory.getLog(PGAmain.class) — a copy-paste leftover from
	// another driver. Use this class so log output is attributed correctly.
	private static final Log LOG = LogFactory.getLog(Island.class);

	/**
	 * Dummy split: the job synthesizes its own records, so a split carries
	 * no data, has zero length, and no preferred locations.
	 */
	public static class PGASplit implements InputSplit {
		public void write(DataOutput out) throws IOException { }
		public void readFields(DataInput in) throws IOException { }
		public long getLength() { return 0L; }
		public String[] getLocations() { return new String[0]; }
	}

	/**
	 * Synthetic input format: instead of reading files, it feeds each map
	 * task the counter sequence 0..(PGA.job.map.count - 1) as both key and
	 * value.
	 */
	public static class PGAInputFormat extends Configured
	implements InputFormat<IntWritable,IntWritable> {
		public InputSplit[] getSplits(JobConf conf, int numSplits) {
			InputSplit[] ret = new InputSplit[numSplits];
			for (int i = 0; i < numSplits; ++i) {
				ret[i] = new PGASplit();
			}
			return ret;
		}

		/**
		 * @throws IOException if either configured count is negative
		 */
		public RecordReader<IntWritable,IntWritable> getRecordReader(
				InputSplit ignored, JobConf conf, Reporter reporter)
				throws IOException {

			final int mapCount = conf.getInt("PGA.job.map.count", 1);
			if (mapCount < 0) throw new IOException("Invalid map count: " + mapCount);

			final int reduceCount = conf.getInt("PGA.job.reduce.count", 1);
			// FIX: this message previously said "Invalid map count" even
			// though it reports the reduce-count value.
			if (reduceCount < 0) throw new IOException("Invalid reduce count: " + reduceCount);

			return new RecordReader<IntWritable,IntWritable>() {
				private int count = 0;

				// Emits (count, count) and reports true exactly mapCount
				// times (count runs 0..mapCount-1), then false.
				public boolean next(IntWritable key, IntWritable value)
				throws IOException {
					key.set(count);
					value.set(count);
					return count++ < mapCount;
				}
				public IntWritable createKey() { return new IntWritable(); }
				public IntWritable createValue() { return new IntWritable(); }
				public long getPos() throws IOException { return count; }
				public void close() throws IOException { }
				public float getProgress() throws IOException {
					// Guard the zero-record case: the original divided by
					// mapCount unconditionally, yielding NaN when it is 0.
					return mapCount == 0 ? 1.0f : count / ((float) mapCount);
				}
			};
		}
	}

	/**
	 * Runs one GA island per input record and emits the best individual
	 * (its textual representation) keyed to its fitness value.
	 */
	public static class Map extends MapReduceBase implements
	Mapper<IntWritable, IntWritable, Text, DoubleWritable> {
		public void map(IntWritable key, IntWritable value,
				OutputCollector<Text, DoubleWritable> output, Reporter reporter)
		throws IOException {

			// FIX: previously logged "Reduce" from the map phase.
			LOG.info("Map");
			GA.ga();
			// NOTE(review): GA exposes its result via static fields — assumes
			// one GA run per task JVM; verify GA is not shared across tasks.
			Text person = new Text(GA.best_people.getPeople()[GA.best_p]);
			DoubleWritable fitness = new DoubleWritable(GA.best_v);
			output.collect(person, fitness);
		}
	}

	/**
	 * Collapses duplicate individuals to a single record, keeping the first
	 * fitness value seen. Also used as the combiner, which is safe only if
	 * equal individuals always carry equal fitness — presumably true since
	 * fitness is a function of the individual; TODO confirm.
	 */
	public static class Reduce extends MapReduceBase implements
	Reducer<Text, DoubleWritable, Text, DoubleWritable> {
		private static final Log LOG = LogFactory.getLog(Reduce.class);
		public void reduce(Text key, Iterator<DoubleWritable> values,
				OutputCollector<Text, DoubleWritable> output, Reporter reporter)
		throws IOException {

			// Debug residue removed: System.out.println and LOG.error("test").
			LOG.info("Reduce");
			output.collect(key, new DoubleWritable(values.next().get()));
		}
	}

	/**
	 * Configures and submits the PGA job.
	 *
	 * @param args args[1] is the HDFS output directory (args[0] is reserved
	 *             for an input path, currently unused)
	 */
	public static void main(String[] args) throws Exception {
		LOG.info("Enter main");
		if (args.length < 2) {
			throw new IllegalArgumentException(
					"Usage: Island <unused-input> <output-path>");
		}

		// FIX: was new JobConf(NewWC.class), a leftover from a word-count
		// example; the driver class must be this one so Hadoop can locate
		// the containing jar.
		JobConf conf = new JobConf(Island.class);
		conf.setJobName("PGA");

		conf.setNumMapTasks(1);
		conf.setNumReduceTasks(1);

		conf.setOutputKeyClass(Text.class);
		conf.setOutputValueClass(DoubleWritable.class);

		conf.setMapperClass(Map.class);
		conf.setCombinerClass(Reduce.class);
		conf.setReducerClass(Reduce.class);

		// Input comes from PGAInputFormat's synthetic records, so no input
		// path is set.
		conf.setInputFormat(PGAInputFormat.class);
		conf.setOutputFormat(TextOutputFormat.class);

		FileOutputFormat.setOutputPath(conf, new Path(args[1]));

		JobClient.runJob(conf);

		LOG.info("Leave main");
	}
}

