package org.myorg;
/*
 * Compare democratic model's PGA with serial GA
 */

import java.io.*;
import java.util.*;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;

import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.FSDataInputStream;


/*import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.BlockLocation;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;

import org.apache.hadoop.mapred.FileInputFormat;*/
/*import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;*/
import org.myorg.GA;



/**
 * Benchmark driver comparing a master-slave (democratic model) parallel GA,
 * run as a Hadoop MapReduce job, with a plain serial GA ({@link #serial()}).
 * Results are written to local files under {@code hp + "/res"}.
 */
public class CMMSGA {

	// BUG FIX(review): was LogFactory.getLog(PGAmain.class) — a copy-paste
	// from another driver; log under this class instead.
	private static final Log LOG = LogFactory.getLog(CMMSGA.class);

	/** Hadoop installation / workspace root (local filesystem). */
	public static String hp = "/home/stanley/Documents/workspace/hadoop/hadoop-0.20.2";
	/** Result file for the serial GA run. */
	public static String srp = hp + "/res/sres";
	/** Directory holding the per-map-task initial populations. */
	public static String mrp = hp + "/res/mres";

	// GA parameters: chromosome length, population size, crossover and
	// mutation probabilities, iteration count, and a run-time budget
	// (units defined by FGA.ga — TODO confirm; presumably microseconds).
	public static int geneLength = 1000;
	public static int peopleNum = 100;
	public static double p_cross = 0.5;
	public static double p_mut = 0.5;
	public static int i_num = 3000;
	public static long rt = 120000000;

	/** Number of map tasks == number of population input files generated. */
	public static int map_number = 2;
	/** Number of benchmark rounds executed by main(). */
	public static int ln = 1;

	/**
	 * Input format that maps each input file to exactly one split and yields
	 * exactly one record per split: key is the constant 1, value is the
	 * Java-serialized object stored in the file (an FPeople population
	 * written by RWF.wp in {@link #main}).
	 */
	public static class MPGAInputFormat extends
	FileInputFormat<IntWritable, ObjectWritable> {

		@Override
		public InputSplit[] getSplits(JobConf job, int numSplits)
				throws IOException {
			FileStatus[] files = listStatus(job);
			// One split per whole file — never cut at block boundaries,
			// so the reader can deserialize the complete object stream.
			ArrayList<FileSplit> splits = new ArrayList<FileSplit>(files.length);
			for (FileStatus file : files) {
				Path path = file.getPath();
				long length = file.getLen();
				// Empty hosts array: no data-locality hint is provided.
				splits.add(new FileSplit(path, 0, length, new String[0]));
			}
			LOG.info("stanley Total # of splits: " + splits.size());
			return splits.toArray(new FileSplit[splits.size()]);
		}

		@Override
		public RecordReader<IntWritable, ObjectWritable> getRecordReader(
				InputSplit split, JobConf job, Reporter reporter)
				throws IOException {
			FileSplit fsplit = (FileSplit) split;
			final Path file = fsplit.getPath();
			// Open the file; the split always starts at offset 0 (see
			// getSplits), so no seek is needed.
			FileSystem fs = file.getFileSystem(job);
			FSDataInputStream fileIn = fs.open(fsplit.getPath());
			// SECURITY NOTE(review): Java-native deserialization. Acceptable
			// only because the input files are produced by this program
			// itself (RWF.wp in main); never point this job at untrusted data.
			final ObjectInputStream object = new ObjectInputStream(fileIn);

			final int map_c = job.getInt("PGA.job.map.count", 1);
			if (map_c < 0) throw new IOException("Invalid map count: " + map_c);

			return new RecordReader<IntWritable, ObjectWritable>() {
				// True until the split's single record has been consumed.
				private boolean first = true;

				public boolean next(IntWritable key, ObjectWritable value)
						throws IOException {
					if (!first) {
						return false;
					}
					first = false;
					key.set(1);
					try {
						value.set(object.readObject());
					} catch (ClassNotFoundException e) {
						// BUG FIX(review): was swallowed with printStackTrace,
						// yielding a "successful" record with an empty value;
						// surface it as an I/O failure instead.
						throw new IOException(
								"Cannot deserialize record from " + file, e);
					}
					return true;
				}

				public IntWritable createKey() { return new IntWritable(); }

				public ObjectWritable createValue() { return new ObjectWritable(); }

				public long getPos() throws IOException { return 0; }

				public void close() throws IOException {
					// BUG FIX(review): was a no-op, leaking the underlying
					// HDFS stream; closing the ObjectInputStream closes the
					// wrapped FSDataInputStream as well.
					object.close();
				}

				public float getProgress() throws IOException {
					// The single record is produced immediately.
					return 1;
				}
			};
		}
	}

	/**
	 * Mapper: evaluates the fitness of the deserialized population and emits
	 * it under the constant key "1".
	 */
	public static class MMap extends MapReduceBase implements
	Mapper<IntWritable, ObjectWritable, Text, ObjectWritable> {
		private static final Log LOG = LogFactory.getLog(MMap.class);

		public void map(IntWritable key, ObjectWritable value,
				OutputCollector<Text, ObjectWritable> output, Reporter reporter)
				throws IOException {
			// Fitness computation is the parallelized (slave) part of the GA.
			LOG.error("enter mapper");
			FPeople res = (FPeople) value.get();

			res.caculateFitness(new FF1());

			Text person = new Text("1");

			output.collect(person, new ObjectWritable(new FPWritable(res)));
		}
	}

	/**
	 * Reducer: intended evolution step. NOTE(review): currently keeps only
	 * the LAST population seen for the key and forwards it; earlier values
	 * are discarded. (Unused by master_slave, which sets 0 reduce tasks.)
	 */
	public static class MReduce extends MapReduceBase implements
	Reducer<Text, ObjectWritable, NullWritable, ObjectWritable> {
		private static final Log LOG = LogFactory.getLog(MReduce.class);

		public void reduce(Text key, Iterator<ObjectWritable> values,
				OutputCollector<NullWritable, ObjectWritable> output, Reporter reporter)
				throws IOException {
			// Evolution step (placeholder): pass the last population through.
			FPeople res = null;
			while (values.hasNext()) {
				res = (FPeople) values.next().get();
			}

			output.collect(NullWritable.get(), new ObjectWritable(res));
		}
	}

	/**
	 * Runs the master-slave (map-only) GA job and logs its wall-clock time.
	 *
	 * @param inputp job input path (holds the serialized populations)
	 * @param outp   job output path (must not already exist)
	 */
	public static void master_slave(String inputp, String outp) {

		JobConf conf = new JobConf(CMMSGA.class);
		conf.setJobName("Master-slave");

		// Map-only job: fitness evaluation happens in the mappers; no
		// reduce phase. (Map count is driven by the number of input files.)
		conf.setNumReduceTasks(0);

		// Forced local runner — presumably for benchmarking on one machine;
		// remove to run on a real cluster. TODO confirm.
		conf.set("mapred.job.tracker", "local");
		conf.setOutputKeyClass(Text.class);
		conf.setOutputValueClass(ObjectWritable.class);

		conf.setMapperClass(MMap.class);

		conf.setInputFormat(MPGAInputFormat.class);

		FileInputFormat.setInputPaths(conf, new Path(inputp));
		FileOutputFormat.setOutputPath(conf, new Path(outp));

		// Time the whole job submission + execution.
		long t1 = System.currentTimeMillis();
		try {
			JobClient.runJob(conf);
		} catch (IOException e1) {
			// Benchmark driver: report and continue so the timing is logged.
			LOG.error("master_slave job failed", e1);
		}
		long t2 = System.currentTimeMillis();

		LOG.info(t2 - t1);
	}

	/**
	 * Runs the serial GA with the same parameters as the parallel run and
	 * appends the best fitness found to the result file {@link #srp}.
	 */
	public static void serial() {
		// Search-space bounds: every gene constrained to [-100, 100].
		// (Was hard-coded to 1000 entries; use geneLength for consistency.)
		double[] low = new double[geneLength];
		double[] high = new double[geneLength];

		for (int i = 0; i < geneLength; i++) {
			low[i] = -100;
			high[i] = 100;
		}

		long st = System.currentTimeMillis();
		FGA.ga(geneLength, low, high, peopleNum, p_cross, p_mut, i_num, new FF1(), rt);
		// BUG FIX(review): local was named `rt`, shadowing the static
		// run-time budget field used in the call above.
		long elapsed = System.currentTimeMillis() - st;
		System.out.println("run time is " + elapsed);

		FGene gene = FGA.best_people.getPersons()[FGA.best_people.getBest(new FF1())];
		RWF.wf(String.valueOf(gene.fitness), srp);
	}

	/**
	 * Entry point: clears previous results, writes one serialized initial
	 * population per map task, stages them into the job input directory,
	 * then runs the parallel benchmark {@link #ln} times.
	 *
	 * @param args args[0] = HDFS input path, args[1] = HDFS output path
	 */
	public static void main(String[] args) throws Exception {
		LOG.info("f Enter main");

		File res = null;

		// Remove the serial run's previous result file.
		res = new File(srp);
		if (res.exists()) {
			res.delete();
		}

		// Prepare initial data for master-slave mode:
		// one population file per map task (file_number == map_number).
		for (int i = 0; i < map_number; i++) {
			res = new File(mrp + "/" + i);
			if (res.exists()) {
				res.delete();
			}

			// Generate bounds for this population: every gene in [-100, 100].
			double[] low = new double[geneLength];
			double[] high = new double[geneLength];

			for (int j = 0; j < geneLength; j++) {
				low[j] = -100;
				high[j] = 100;
			}

			// BUG FIX(review): Math.round(peopleNum/map_number) rounded an
			// already-truncated integer division; divide as double first.
			int subPop = (int) Math.round((double) peopleNum / map_number);
			FPeople my_p = new FPeople(subPop, geneLength, low, high);
			RWF.wp(my_p, mrp + "/" + i);
		}

		// Delete any existing input and output directories on HDFS.
		RWF.delf(args[0]);
		RWF.delf(args[1]);

		// Stage the generated population files into the job input directory.
		RWF.inf(mrp, args[0]);

		// Run the benchmark ln times.
		for (int i = 0; i < ln; i++) {
			// Parallel run.
			master_slave(args[0], args[1]);
			// Serial run (disabled for this comparison).
			//serial();
			System.out.println(i + " Game over");
		}
		// TODO: aggregate best / worst / average solutions across rounds.
		LOG.info("f Leave main");
	}

}

