package org.myorg;
/*
 * Compare democratic model's PGA with serial GA
 */

import java.io.*;
import java.util.*;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;

import org.myorg.GA;

/*
 * Test serial and democratic standalone GA.
 * 
 * Compare the running times, including wall-clock and iteration times;
 * so each mapper's output is the solution quality plus the wall-clock and iteration times.
 * 
 * The experiment is repeated 25 times; report the best, worst and mean times.
 * 
 * The number of mappers ranges from 2 to 16.
 */


/**
 * Benchmark harness comparing the running time of the "democratic" parallel
 * GA (a map-only Hadoop job) against a serial GA run on the same problem.
 * Each round appends the parallel result to {@link #drp} and the serial
 * result to {@link #srp}.
 */
public class TSDS {

	// Fixed: logger was created for PGAmain.class; it should name this class.
	private static final Log LOG = LogFactory.getLog(TSDS.class);

	// Local Hadoop installation root and the serial / democratic result files.
	public static String hp = "/home/stanley/Documents/workspace/hadoop/hadoop-0.20.2";
	public static String srp = hp + "/res/sres";
	public static String drp = hp + "/res/dres";

	// GA parameters shared by the serial run and the mappers.
	public static int geneLength = 1000;
	public static int peopleNum = 20;
	public static double p_cross = 0.5;
	public static double p_mut = 0.5;
	public static int i_num = 300000000;  // iteration budget passed to FGA.ga
	public static long rt = 120000000;    // run-time budget — units defined by FGA; TODO confirm

	public static int map_number = 4;     // number of map tasks created by DPGAInputFormat
	public static int ln = 2;             // number of experiment repetitions in main()

	// Magnitude thresholds forwarded to FGA.ga (semantics defined by FGA).
	public static double[] mag = {1.0E11, 1.0E10, 1.0E09, 1.0E08, 1.0E07 };


	/** Dummy split: carries no data — every mapper synthesizes its own input. */
	public static class DPGASplit implements InputSplit {
		public void write(DataOutput out) throws IOException { }
		public void readFields(DataInput in) throws IOException { }
		public long getLength() { return 0L; }
		public String[] getLocations() { return new String[0]; }
	}

	/**
	 * Synthetic input format: always produces {@code map_number} empty splits
	 * and a reader that emits {@code PGA.job.map.count} records whose key and
	 * value are both the record index.
	 */
	public static class DPGAInputFormat extends Configured
	implements InputFormat<IntWritable,IntWritable> {

		public InputSplit[] getSplits(JobConf conf, int numSplits) {
			// Ignore the framework's requested split count; map_number controls parallelism.
			numSplits = map_number;
			InputSplit[] ret = new InputSplit[numSplits];
			for (int i = 0; i < numSplits; ++i) {
				ret[i] = new DPGASplit();
			}
			return ret;
		}

		public RecordReader<IntWritable,IntWritable> getRecordReader(
				InputSplit ignored, JobConf conf, Reporter reporter)
				throws IOException {

			final int map_c = conf.getInt("PGA.job.map.count", 1);
			if (map_c < 0) throw new IOException("Invalid map count: " + map_c);

			final int red_c = conf.getInt("PGA.job.reduce.count", 1);
			// Fixed copy-paste bug: this message previously said "map count".
			if (red_c < 0) throw new IOException("Invalid reduce count: " + red_c);

			return new RecordReader<IntWritable,IntWritable>() {
				private int count = 0;

				public boolean next(IntWritable key, IntWritable value)
				throws IOException {

					key.set(count);
					value.set(count);

					return count++ < map_c;
				}

				public IntWritable createKey() { return new IntWritable(); }

				public IntWritable createValue() { return new IntWritable(); }

				public long getPos() throws IOException { return count; }

				public void close() throws IOException { }

				public float getProgress() throws IOException {
					return count / ((float)map_c);
				}

			};
		}
	}


	/**
	 * Mapper: runs one standalone GA instance over a 1000-dimensional domain
	 * ([-100, 100] per dimension) and emits the textual result together with
	 * the best individual's fitness.
	 */
	public static class DMap extends MapReduceBase implements
	Mapper<IntWritable, IntWritable, Text, DoubleWritable> {
		public void map(IntWritable key, IntWritable value,
				OutputCollector<Text, DoubleWritable> output, Reporter reporter)
		throws IOException {

			double[] low = new double[1000];
			double[] high = new double[1000];

			for (int i = 0; i < 1000; i++) {
				low[i] = -100;
				high[i] = 100;
			}

			String res =
				FGA.ga(geneLength, low, high, peopleNum, p_cross, p_mut, i_num, new FF1(), mag);
			// Fixed: compare string content, not references (was res == "").
			if (res.isEmpty()) {
				res = "no value";
			}

			FGene gene = FGA.best_people.getPersons()[FGA.best_people.getBest(new FF1())];

			Text person = new Text(res);
			DoubleWritable fitness = new DoubleWritable(gene.fitness);
			output.collect(person, fitness);
		}
	}

	/**
	 * Reducer (currently unused — the job runs with zero reduces): forwards
	 * only the first fitness value seen for each key.
	 */
	public static class DReduce extends MapReduceBase implements
	Reducer<Text, DoubleWritable, Text, DoubleWritable> {
		private static final Log LOG = LogFactory.getLog(DReduce.class);
		public void reduce(Text key, Iterator<DoubleWritable> values,
				OutputCollector<Text, DoubleWritable> output, Reporter reporter)
		throws IOException {

			output.collect(key, new DoubleWritable(values.next().get()));
		}
	}

	/**
	 * Executes a shell command and blocks until it terminates, logging whether
	 * the exit status was zero. {@code logPrefix} is prepended to the messages
	 * so call sites stay distinguishable in the log.
	 *
	 * Fixed: the previous inline version dereferenced a null Process in
	 * waitFor() when exec() itself threw; we now bail out instead.
	 * NOTE(review): Runtime.exec(String) with a concatenated command — the
	 * arguments are local constants here, but ProcessBuilder would be safer.
	 */
	private static void runCommand(String cmd, String logPrefix) {
		Process p = null;
		try {
			p = Runtime.getRuntime().exec(cmd);
		} catch (IOException e) {
			e.printStackTrace();
		}
		if (p == null) {
			LOG.info(logPrefix + "Command execute result is fail......");
			return;
		}
		try {
			if (p.waitFor() == 0) {
				LOG.info(logPrefix + "Command execute result is OK!");
			} else {
				LOG.info(logPrefix + "Command execute result is fail......");
			}
		} catch (InterruptedException e) {
			// Restore the interrupt flag instead of swallowing it.
			Thread.currentThread().interrupt();
			e.printStackTrace();
		}
	}

	/**
	 * Runs the GA as a map-only Hadoop job, times it, then copies the result
	 * out of HDFS and appends it to the democratic result file {@link #drp}.
	 *
	 * @param outp HDFS output path for the job
	 */
	public static void democratic(String outp) {
		// Delete Hadoop's previous output directory, otherwise Hadoop reports an error.
		runCommand(hp + "/bin/hadoop fs -rmr /user/stanley/output", "");

		// Configure the Hadoop job.
		// Fixed: use TSDS.class to locate the job jar; it previously
		// referenced the unrelated Democratic class.
		JobConf conf = new JobConf(TSDS.class);
		conf.setJobName("Democratic");

		// Map-only job: map parallelism is controlled by DPGAInputFormat.
		//conf.setNumMapTasks(map_number);
		conf.setNumReduceTasks(0);

		conf.setOutputKeyClass(Text.class);
		conf.setOutputValueClass(DoubleWritable.class);

		conf.setMapperClass(DMap.class);
		//conf.setCombinerClass(DReduce.class);
		//conf.setReducerClass(DReduce.class);

		conf.setInputFormat(DPGAInputFormat.class);
		conf.setOutputFormat(TextOutputFormat.class);


		FileOutputFormat.setOutputPath(conf, new Path(outp));

		long t1 = System.currentTimeMillis();
		// Run the job and measure its wall-clock time.
		try {
			JobClient.runJob(conf);
		} catch (IOException e1) {
			e1.printStackTrace();
		}
		long t2 = System.currentTimeMillis();

		LOG.info(t2 - t1);


		// Fetch the result from HDFS: first remove any stale local copy...
		String dpath = hp + "/res/part-00000";
		try {
			File deleFile = new File(dpath);
			if (deleFile.exists()) {
				deleFile.delete();
			}
		} catch (Exception ex) {
			System.out.println(ex.getMessage());
		}

		// ...then pull the fresh part file down to the local res directory.
		runCommand(hp + "/bin/hadoop fs " +
				"-get /user/stanley/output/part-00000 " +
				hp + "/res", "3 ");

		// Append this run's result to the cumulative democratic result file.
		RWF.wf(RWF.rfs(hp + "/res/part-00000").toString(), drp);
	}

	/**
	 * Runs the serial GA once over the same 1000-dimensional domain, prints
	 * its wall-clock time, and appends the result to {@link #srp}.
	 */
	public static void serial() {
		double[] low = new double[1000];
		double[] high = new double[1000];

		for (int i = 0; i < 1000; i++) {
			low[i] = -100;
			high[i] = 100;
		}


		long st = System.currentTimeMillis();
		String res = FGA.ga(geneLength, low, high, peopleNum, p_cross, p_mut, i_num, new FF1(), mag);
		// Renamed from 'rt', which shadowed the static field TSDS.rt.
		long elapsed = System.currentTimeMillis() - st;
		System.out.println("run time is " + elapsed);

		// Kept for its possible side effect of selecting the best individual
		// inside FGA; the local value itself is unused.
		FGene gene = FGA.best_people.getPersons()[FGA.best_people.getBest(new FF1())];
		RWF.wf(res, srp);
	}

	/**
	 * Entry point: clears the previous result files, then alternates
	 * {@link #ln} rounds of the democratic (MapReduce) run and the serial run.
	 *
	 * @param args args[0] is the HDFS output path for the MapReduce job
	 */
	public static void main(String[] args) throws Exception {
		LOG.info("f Enter main");

		// Remove the previous democratic result; a fresh run is about to start.
		File res = new File(drp);
		if (res.exists()) {
			res.delete();
		}

		// Remove the previous serial result.
		res = new File(srp);
		if (res.exists()) {
			res.delete();
		}

		// Repeat the experiment ln times.
		for (int i = 0; i < ln; i++) {
			// Parallel (MapReduce) run.
			democratic(args[0]);
			// Serial run.
			serial();
			System.out.println(i + " Game over");
		}
		// Best/worst/mean statistics are computed offline from the result files.
		LOG.info("f Leave main");
	}

}


