package org.myorg.v4;

/*
 * Compare democratic model's PGA with serial GA
 */

import java.io.*;
import java.util.*;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FSInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;

import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.FSDataInputStream;


/*import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.BlockLocation;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;

import org.apache.hadoop.mapred.FileInputFormat;*/
/*import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;*/
import org.myorg.FGene;
import org.myorg.GA;



public class I4 extends Para4 {

	private static final Log LOG = LogFactory.getLog(I4.class);





	/**
	 * InputFormat for the island-model PGA. Each input file stores imn
	 * serialized FPeopleWritable4 sub-populations back to back; the file is
	 * cut into imn equal byte ranges so that every map task deserializes
	 * exactly one sub-population from the start of its split.
	 */
	public static class IPGAInputFormat extends 
	FileInputFormat<IntWritable,FPeopleWritable4>{

		@Override
		public InputSplit[] getSplits(JobConf job, int numSplits)
		throws IOException {
			// numSplits is ignored: the split count is always imn per input file.
			FileStatus[] files = listStatus(job);
			// generate splits
			// NOTE(review): the capacity hint looks inverted (files.length/imn
			// rather than files.length*imn); harmless since ArrayList grows.
			ArrayList<FileSplit> splits = new ArrayList<FileSplit>(files.length/imn);

			for (FileStatus file: files) {
				Path path = file.getPath();
				FileSystem fs = path.getFileSystem(job);
				long length = file.getLen();
				LOG.info("file length is " + length);

				// One split per island. Assumes each serialized sub-population
				// starts exactly at byte offset length*i/imn, i.e. all records
				// have the same serialized size — TODO confirm against the writer.
				for (int i = 0; i < imn; i++) {
					//long length = file.getLen();
					//LOG.info("put start is " + length * i/imn);
					splits.add(new FileSplit(path, length * i/imn, length/imn, new String[0]));
				}

			}
			LOG.info("stanley Total # of splits: " + splits.size());
			/*			      for (int t = 0; t < splits.size(); t++ ) {
			    	  LOG.info("start is " + splits.get(t).getStart());
			      }*/

			return splits.toArray(new FileSplit[splits.size()]);

		}

		/**
		 * Returns a reader that deserializes a single FPeopleWritable4 record
		 * from the start of the split and then reports end-of-input.
		 */
		@Override
		public RecordReader<IntWritable, FPeopleWritable4> getRecordReader(
				InputSplit split, JobConf job, Reporter reporter)
				throws IOException {
			FileSplit fsplit = (FileSplit)split;
			final Path file = fsplit.getPath();
			// open the file and seek to the start of the split
			FileSystem fs = file.getFileSystem(job);
			final FSDataInputStream fileIn = fs.open(fsplit.getPath());
			fileIn.seek(fsplit.getStart());
			//final ObjectInputStream object = new ObjectInputStream(fileIn);
			//FPeopleWritable4 fp = new FPeopleWritable4(peopleNum/imn,geneLength);
			//fp.readFields(fileIn);
			LOG.info("XXX: " + fsplit.getStart());

			// map_c is validated but no longer used by the single-record reader
			// below (only by the commented-out count-based next()).
			final int map_c = job.getInt("PGA.job.map.count", 1);
			if (map_c < 0) throw new IOException("Invalid map count: " + map_c);
			/*				//for debug
				try {
					FPeople4 p = (FPeople4)object.readObject();
					LOG.info("stanley test" + p.p_num);
				} catch (ClassNotFoundException e) {
					// TODO Auto-generated catch block
					e.printStackTrace();
				}*/

			return new RecordReader<IntWritable, FPeopleWritable4>() {
				// true until the single record of this split has been returned
				private boolean is = true;

				public boolean next(IntWritable key,  FPeopleWritable4 value)
				throws IOException {
					//key.set(Integer.parseInt(file.getName()));
					if (is) {
						key.set(1);
						value.readFields(fileIn);
						is = false;
						return true;						
					} else {							
						return false;
					}


					//value.set(fp);

					//return count++ < map_c;

				}

				public IntWritable createKey() { return new IntWritable(); }

				// Value sized for one island: ipn/imn individuals of igl genes each.
				public FPeopleWritable4 createValue() { 
					/*						FPeopleWritable4 p = new FPeopleWritable4(peopleNum/imn,geneLength);
						try {
							p.readFields(fileIn);
						} catch (IOException e) {
							// TODO Auto-generated catch block
							e.printStackTrace();
						} 
						return p;*/
					return new FPeopleWritable4(ipn/imn,igl);
				}

				// Position tracking is not implemented.
				public long getPos() throws IOException { return 0; }

				// NOTE(review): fileIn is never closed here — close() is a no-op.
				public void close() throws IOException { }

				public float getProgress() throws IOException {
					return 1;
				}

			};
		}

	}

	/**
	 * OutputFormat for the island-model PGA: serializes each
	 * FPeopleWritable4 value raw into the task output file via its own
	 * write() method. Keys are NullWritable and are not written.
	 */
	public static class IPGAOutputFormat extends FileOutputFormat<NullWritable,FPeopleWritable4> {


		@Override
		public RecordWriter<NullWritable,FPeopleWritable4> getRecordWriter(FileSystem ignored,
				JobConf job, String name, Progressable progress)
				throws IOException {
			Path outputPath = FileOutputFormat.getTaskOutputPath(job, name);
			final FSDataOutputStream stream =
					outputPath.getFileSystem(job).create(outputPath, progress);

			return new RecordWriter<NullWritable, FPeopleWritable4>() {

				@Override
				public void write(NullWritable key,
						FPeopleWritable4 value) throws IOException {
					// The key carries no data; only the population is serialized.
					value.write(stream);
				}

				@Override
				public void close(Reporter reporter) throws IOException {
					stream.close();
				}

			};

		}

	}

	/**
	 * Map task: evolves one island's sub-population for up to moving_rate
	 * generations (select / crossover / mutation / re-evaluate), stopping
	 * early if the best fitness becomes numerically optimal, then emits the
	 * evolved population under a NullWritable key.
	 */
	public static class IMap extends MapReduceBase implements
	Mapper<IntWritable,FPeopleWritable4, NullWritable, FPeopleWritable4> {
		public void map(IntWritable key, FPeopleWritable4 value,
				OutputCollector<NullWritable, FPeopleWritable4> output, Reporter reporter)
		throws IOException {

			FEvolution4 evolver = new FEvolution4(0.1, p_cross, p_mut);

			// Evaluate fitness of the incoming population before evolving.
			ifc.caculate(value.getPersons());

			int generation = 0;
			while (generation < moving_rate) {
				evolver.select(value);
				evolver.crossOver(value);
				evolver.mutation(value);
				ifc.caculate(value.getPersons());
				// Stop once the best individual is (numerically) optimal.
				if ((1 - value.getBest().fitness) < 1.0E-5) {
					break;
				}
				generation++;
			}

			output.collect(NullWritable.get(), value);
		}
	}

	@SuppressWarnings("deprecation")
	/**
	 * Migration step of the island model: merges all island populations into
	 * one pool, sorts the pool by fitness and re-emits one copy of the best
	 * segment per island for the next round.
	 */
	public static class IReduce extends MapReduceBase implements
	Reducer<NullWritable, FPeopleWritable4, NullWritable, FPeopleWritable4> {
		private static final Log LOG = LogFactory.getLog(IReduce.class);
		public void reduce(NullWritable key, Iterator<FPeopleWritable4> values,
				OutputCollector<NullWritable, FPeopleWritable4> output, Reporter reporter)
		throws IOException {

			//moving: merge every island's population into one pool
			FPeopleWritable4 res = new FPeopleWritable4();
			while(values.hasNext()) {
				// NOTE(review): Hadoop reuses the object returned by values.next();
				// assumes FPeopleWritable4.add copies the individuals — TODO confirm.
				res.add(values.next());
			}

			// Sort the merged pool (FPeople4 presumably orders by fitness — verify).
			Arrays.sort(res.persons);

			// Take the last of imn segments — presumably the fittest slice — and
			// replicate it once per island so every map starts from it next round.
			FPeople4[] ap = res.seg(imn);
			FPeopleWritable4 np = new FPeopleWritable4(ap[ap.length - 1]);
			for (int i = 0; i < imn; i++) {
				output.collect(NullWritable.get(), np);
			}
		}
	}

	/**
	 * Debug reducer (normally disabled; see the commented setReducerClass
	 * call in island()): concatenates the first individual's fitness of every
	 * incoming population into one Text record keyed by 1, or emits key 0
	 * with an empty string when there is no input.
	 */
	public static class RTest extends MapReduceBase implements
	Reducer<NullWritable, FPeopleWritable4, IntWritable, Text> {
		private static final Log LOG = LogFactory.getLog(RTest.class);
		public void reduce(NullWritable key, Iterator<FPeopleWritable4> values,
				OutputCollector<IntWritable, Text> output, Reporter reporter)
		throws IOException {

			// evolution (debug summary of the inputs)
			IntWritable IntTest = new IntWritable(0);
			Text text = new Text("no input");
			int count = 0;
			// StringBuilder instead of repeated String concatenation in the loop.
			StringBuilder s = new StringBuilder();
			while(values.hasNext()) {

				IntTest.set(1);
				s.append("input").append(values.next().getPersons()[0].fitness).append(count);
				count++;
			}
			// NOTE: with no input this overwrites the "no input" placeholder with
			// an empty string — matching the original behavior.
			text.set(s.toString());
			output.collect(IntTest,text);

		}
	}

	/**
	 * Reads the current population file (iin + "/0") from HDFS, logs every
	 * individual's gene and fitness, and returns a copy of the best gene.
	 *
	 * Fixes over the original: the input stream is now always closed (it was
	 * leaked), and a single catch replaces the per-statement swallowed
	 * IOExceptions that previously fell through to a NullPointerException
	 * when fs.open failed.
	 *
	 * @return the best gene of the population, or null if the file could not
	 *         be read
	 */
	public static FGene4 get_best_recorder() {
		FGene4 gene = null;

		Configuration conf = new Configuration();
		FSDataInputStream in = null;
		try {
			FileSystem fs = FileSystem.get(conf);
			Path f = new Path(iin + "/0");
			in = fs.open(f);

			// Log only; available() returns the bytes readable without blocking,
			// not necessarily the full file length.
			LOG.info("file length is " + in.available());

			// Deserialize the single merged population written by the reducer.
			FPeopleWritable4 ap = new FPeopleWritable4();
			ap.readFields(in);

			for (int t = 0; t < ap.p_num; t++) {
				LOG.info("individ " + t + " is " + ap.personAt(t).gene + 
						" fitness is " + ap.personAt(t).fitness);
			}

			LOG.info("best " + ap.getBest().gene + " and fitness is " + ap.getBest().getFitness());

			// Copy the best individual so the caller owns an independent gene.
			gene = new FGene4(ap.getBest());
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			if (in != null) {
				try {
					in.close();
				} catch (IOException ignored) {
					// best-effort close; nothing useful to do here
				}
			}
		}

		return gene;
	}

	/**
	 * Runs the island-model PGA for up to ii_num migration rounds. Each round
	 * submits one MapReduce job (one map per island, a single reducer doing
	 * the migration), moves the reducer output back into the input directory
	 * via hadoop shell commands, and appends to the result logs. Stops early
	 * once the best fitness is numerically optimal.
	 *
	 * Fixes over the original: removed the no-op initialization loop
	 * {@code for (boolean bm : mark) bm = false;} (assigning an enhanced-for
	 * variable never writes to the array) together with the unused locals
	 * {@code mark}, {@code best} and {@code count}.
	 *
	 * @param value tab-separated log of best objective values (appended to)
	 * @param time  tab-separated log of cumulative run times in ms (appended to)
	 * @return the value, time and evaluation-count logs joined by newlines
	 */
	public static String island(String value, String time) throws InterruptedException {

		String cmd;

		long r_t = 0;    // cumulative job running time (ms)
		double c_b = 0;  // best fitness after the current round
		String it = "0"; // evaluation-count log line

		for (int i = 0; i < ii_num; i++ ) {

			long t1 = System.currentTimeMillis();
			// Clear the previous round's output directory.
			cmd = hp + "/bin/hadoop fs -rmr " + iout;
			RWF4.execmd(cmd);

			// Configure the Hadoop job.
			JobConf conf = new JobConf(I4.class);
			conf.setJobName("I4");

			// A single reducer performs the migration step.
			conf.setNumReduceTasks(1);

			conf.set("mapred.child.java.opts","-Xmx3000m");
			conf.set("mapred.reduce.max.attempts", "100");
			conf.set("mapred.task.timeout", "0");

			conf.setOutputKeyClass(NullWritable.class);
			conf.setOutputValueClass(FPeopleWritable4.class);

			conf.setMapOutputKeyClass(NullWritable.class);
			conf.setMapOutputValueClass(FPeopleWritable4.class);

			conf.setMapperClass(IMap.class);
			conf.setReducerClass(IReduce.class);

			conf.setInputFormat(IPGAInputFormat.class);
			conf.setOutputFormat(IPGAOutputFormat.class);

			FileInputFormat.setInputPaths(conf, new Path(iin));
			FileOutputFormat.setOutputPath(conf, new Path(iout));

			// Run the job; on failure we still attempt the bookkeeping below,
			// preserving the original control flow.
			try {
				JobClient.runJob(conf);
			} catch (IOException e1) {
				e1.printStackTrace();
			}
			long t3 = System.currentTimeMillis();

			// Delete the old input file.
			cmd = hp + "/bin/hadoop fs -rmr " + iin + "/*";
			RWF4.execmd(cmd);

			// Move the evolved population back as the next round's input.
			cmd = hp + "/bin/hadoop fs -mv " + iout + "/part-00000 " 
			+ iin + "/0";
			RWF4.execmd(cmd);

			// Record running time.
			LOG.info(t3-t1);
			r_t = r_t + (t3 -t1);

			// c_b: best fitness found so far this round.
			FGene4 gene = get_best_recorder();

			c_b = gene.getFitness();

			LOG.info(i + " the current best value is " + c_b);

			value = value + "\t" + -c_b;
			time = time + "\t" + r_t;
			it = it + "\t" + (i + 1)*moving_rate;

			// Stop once the best fitness is numerically optimal.
			if((1 - c_b) < 1.0E-05) {
				break;
			}

		}

		return value + "\n" + time + "\n" + it + "\n";
	}

	// NOTE(review): auto-generated stub (likely from an IDE "create method"
	// quick-fix); it is never called within this file and always returns null.
	public FPeopleWritable4 FPeopleWritable4(FPeopleWritable4 value) {
		return null;
	}



	/**
	 * Entry point. Parses the -mn/-ln flags, then repeats the experiment iln
	 * times: build a random initial population, evaluate it for a baseline,
	 * write it to HDFS split into imn island records, run the island PGA via
	 * island(), and append the resulting logs to a local result file.
	 */
	public static void main(String[] args) throws Exception {
		LOG.info("f Enter main");
		
		// Flags: -mn/-map_num = number of islands (map tasks),
		//        -ln/-loop_num = number of experiment repetitions.
		for (int i=0; args != null && i < args.length; i++) {
			if (args[i].equalsIgnoreCase("-mn") || args[i].equalsIgnoreCase("-map_num")) {
				i++;
				imn = Integer.parseInt(args[i]);
				System.out.println("map number is: " + args[i]);
			} else if (args[i].equalsIgnoreCase("-ln") ||args[i].equalsIgnoreCase("-loop_num")) {
				i++;
				iln = Integer.parseInt(args[i]);
				System.out.println("loop number is: " + args[i]);
			} else {
				System.out.println("Usage: -mn[map_num] 10 -ln[loop_num] 25");
				System.exit(-1);
			}
        }   
		
		String r = "";
		String value = "";
		String time = "";
		String it = "";  // NOTE(review): never used below

		String cmd = "mkdir -p ./res";
		RWF4.execmd(cmd);


		File res = null;  // NOTE(review): only the commented-out block below used this

		/*		res = new File(irp + "/i_r");
		if (res.exists()) {
			res.delete();
		}*/
		// Build a unique run marker from the fitness function plus run parameters.
		String mark = RWF4.mkMark("./res/imark", ifc);
		mark = mark + "_" + iln + "_" + imn + "_" + moving_rate;
		
		//input file
		iin = iin + "/" + mark;
		//output file
		iout = iout + "/" + mark;

		//result file
		//irp = irp;

		cmd = "mkdir -p " + irp;
		RWF4.execmd(cmd);

		/*		cmd = hp + "/bin/hadoop fs -mkdir " + iin;
		RWF4.execmd(cmd);*/


		//repeat the whole experiment iln times
		for (int i = 0; i < iln; i++) {

			//ifc.init();
			long start_time = System.currentTimeMillis();
			FPeopleWritable4 all_p = new FPeopleWritable4(ipn,igl);
			ifc.caculate(all_p.getPersons());
			// Seed the logs with the initial population's best value and setup time.
			value = Double.toString(-all_p.getBest().fitness);  
			time = Long.toString(System.currentTimeMillis() - start_time);
			
			// Split the population into imn islands, written back to back so
			// IPGAInputFormat can slice the file by byte offsets.
			FPeople4[] ap = all_p.seg(imn);

			Configuration conf = new Configuration();
			FileSystem fs = FileSystem.get(conf);
			Path f = new Path(iin + "/0");			
			// NOTE(review): bufferSize 0 is passed to fs.create — presumably the
			// FileSystem substitutes a default; confirm for the deployment FS.
			FSDataOutputStream out = fs.create(f, true, 0);
			for (int j = 0; j < imn; j++) {
				new FPeopleWritable4(ap[j]).write(out);
			}
			out.close();
			//run the parallel (island) GA
			r = island(value, time);

			RWF4.wlf(r.getBytes(), irp + "/i_r" + mark);

			System.out.println("I4 " + i + " Game over");
		}


		//best, worst and average solutions could be summarized here
		LOG.info("f Leave main");
	}

}


