package org.myorg;
/*
 * Compare democratic model's PGA with serial GA
 */

import java.io.*;
import java.util.*;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FSInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;

import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.FSDataInputStream;


/*import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.BlockLocation;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;

import org.apache.hadoop.mapred.FileInputFormat;*/
/*import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;*/
import org.myorg.GA;



public class NCMMSGA {

	// Fix: logger was bound to PGAmain.class (copy-paste); bind to this class.
	private static final Log LOG = LogFactory.getLog(NCMMSGA.class);

	// Local Hadoop installation root and result paths.
	public static String hp = "/home/stanley/Documents/workspace/hadoop/hadoop-0.20.2";
	public static String srp = hp + "/res/sres";   // serial GA result file
	public static String mrp = hp + "/res/mres";   // master-slave (parallel) result dir

	// GA parameters shared by the serial and parallel runs.
	public static int geneLength = 1000;   // genes per individual
	public static int peopleNum = 100;     // population size
	public static double p_cross = 0.5;    // crossover probability
	public static double p_mut = 0.5;      // mutation probability
	public static int i_num = 3000;        // iteration count for the serial GA
	public static long rt = 120000000;     // run-time budget passed to FGA.ga

	public static int map_number = 4;      // map tasks = population segments per file
	public static int ln = 2;              // number of MapReduce rounds in main()

	// Fitness function used throughout.
	public static Fc function = new FF1(); 

	/**
	 * Input format that carves each serialized population file into
	 * {@code map_number} equal byte ranges, one split per map task.
	 *
	 * <p>Each record reader seeks to its split's start offset and reads exactly
	 * one {@code FPeopleWritable} (a population segment) from the raw stream,
	 * emitting a single (key=1, segment) record.
	 *
	 * <p>NOTE(review): split offsets are computed as {@code length*i/map_number}
	 * with integer division, which assumes all serialized segments have the same
	 * byte length — TODO confirm segments written by main() are fixed-size.
	 */
	public static class NMPGAInputFormat extends
			FileInputFormat<IntWritable, FPeopleWritable> {

		@Override
		public InputSplit[] getSplits(JobConf job, int numSplits)
				throws IOException {
			FileStatus[] files = listStatus(job);
			// Generate map_number splits per input file.
			ArrayList<FileSplit> splits = new ArrayList<FileSplit>(files.length / map_number);

			for (FileStatus file : files) {
				Path path = file.getPath();
				long length = file.getLen();
				LOG.info("file length is " + length);

				// No host locality hints (empty host array).
				for (int i = 0; i < map_number; i++) {
					LOG.info("put start is " + length * i / map_number);
					splits.add(new FileSplit(path, length * i / map_number,
							length / map_number, new String[0]));
				}
			}
			LOG.info("stanley Total # of splits: " + splits.size());
			for (int t = 0; t < splits.size(); t++) {
				LOG.info("start is " + splits.get(t).getStart());
			}

			return splits.toArray(new FileSplit[splits.size()]);
		}

		@Override
		public RecordReader<IntWritable, FPeopleWritable> getRecordReader(
				InputSplit split, JobConf job, Reporter reporter)
				throws IOException {
			FileSplit fsplit = (FileSplit) split;
			final Path file = fsplit.getPath();
			// Open the file and seek to the start of this split.
			FileSystem fs = file.getFileSystem(job);
			final FSDataInputStream fileIn = fs.open(file);
			fileIn.seek(fsplit.getStart());
			LOG.info("XXX: " + fsplit.getStart());

			// Sanity-check the configured map count (defaults to 1 when unset).
			final int map_c = job.getInt("PGA.job.map.count", 1);
			if (map_c < 0) throw new IOException("Invalid map count: " + map_c);

			return new RecordReader<IntWritable, FPeopleWritable>() {
				// Each split yields exactly one record.
				private boolean hasNext = true;

				public boolean next(IntWritable key, FPeopleWritable value)
						throws IOException {
					if (!hasNext) {
						return false;
					}
					key.set(1);
					value.readFields(fileIn);
					hasNext = false;
					return true;
				}

				public IntWritable createKey() { return new IntWritable(); }

				public FPeopleWritable createValue() {
					// Pre-size the segment: population is divided evenly across maps.
					return new FPeopleWritable(peopleNum / map_number, geneLength);
				}

				public long getPos() throws IOException { return 0; }

				public void close() throws IOException {
					// Fix: the opened stream was previously never closed (leak).
					fileIn.close();
				}

				public float getProgress() throws IOException {
					return 1;
				}
			};
		}
	}
	
	/**
	 * Output format that writes each value's raw serialized bytes to the task
	 * output file; the NullWritable keys carry no data and are not written.
	 */
	public static class NMPGAOutputFormat extends FileOutputFormat<NullWritable, FPeopleWritable> {

		@Override
		public RecordWriter<NullWritable, FPeopleWritable> getRecordWriter(FileSystem ignored,
				JobConf job, String name, Progressable progress)
				throws IOException {
			Path outputPath = FileOutputFormat.getTaskOutputPath(job, name);
			final FSDataOutputStream stream =
					outputPath.getFileSystem(job).create(outputPath, progress);

			return new RecordWriter<NullWritable, FPeopleWritable>() {

				@Override
				public void write(NullWritable key, FPeopleWritable value)
						throws IOException {
					// Only the value is serialized; the null key is dropped.
					value.write(stream);
				}

				@Override
				public void close(Reporter reporter) throws IOException {
					stream.close();
				}
			};
		}
	}

	/**
	 * Map phase: evaluates the fitness of every individual in the received
	 * population segment, then forwards the whole segment to the reducer.
	 */
	public static class NMMap extends MapReduceBase implements
			Mapper<IntWritable, FPeopleWritable, NullWritable, FPeopleWritable> {

		public void map(IntWritable key, FPeopleWritable value,
				OutputCollector<NullWritable, FPeopleWritable> output, Reporter reporter)
				throws IOException {
			// Fitness evaluation is the expensive, parallelized step.
			value.caculateFitness(function);
			output.collect(NullWritable.get(), value);
		}
	}

	/**
	 * Reduce phase: merges all fitness-evaluated segments into one population,
	 * runs a single GA generation (selection, crossover, mutation), then
	 * re-partitions the evolved population into map_number segments so the
	 * next round's maps each receive one.
	 */
	@SuppressWarnings("deprecation")
	public static class NMReduce extends MapReduceBase implements
			Reducer<NullWritable, FPeopleWritable, NullWritable, FPeopleWritable> {
		private static final Log LOG = LogFactory.getLog(NMReduce.class);

		public void reduce(NullWritable key, Iterator<FPeopleWritable> values,
				OutputCollector<NullWritable, FPeopleWritable> output, Reporter reporter)
				throws IOException {
			// Merge every incoming segment into a single population.
			FPeopleWritable merged = new FPeopleWritable();
			while (values.hasNext()) {
				merged.add(values.next());
			}

			// One evolutionary step over the full, merged population.
			FEvolution evolver = new FEvolution(0.1, p_cross, p_mut, merged, function);
			evolver.select();
			evolver.crossOver();
			evolver.mutation();

			// Emit one segment per map task for the next iteration.
			FPeople[] segments = merged.seg(map_number);
			for (int i = 0; i < map_number; i++) {
				output.collect(NullWritable.get(), new FPeopleWritable(segments[i]));
			}
		}
	}
	
	/**
	 * Debug reducer: instead of evolving, collects a summary string holding
	 * the first individual's fitness from every incoming segment. Only used
	 * when swapped in by hand in master_slave().
	 */
	public static class RTest extends MapReduceBase implements
			Reducer<NullWritable, FPeopleWritable, IntWritable, Text> {
		// Fix: logger was bound to NMReduce.class (copy-paste error).
		private static final Log LOG = LogFactory.getLog(RTest.class);

		public void reduce(NullWritable key, Iterator<FPeopleWritable> values,
				OutputCollector<IntWritable, Text> output, Reporter reporter)
				throws IOException {
			IntWritable flag = new IntWritable(0);  // 1 once any input was seen
			Text text = new Text("no input");
			int count = 0;
			// Fix: build the summary with StringBuilder instead of repeated
			// String concatenation (quadratic cost in the original).
			StringBuilder s = new StringBuilder("xxx");
			while (values.hasNext()) {
				flag.set(1);
				s.append("input").append(values.next().getPersons()[0].fitness).append(count);
				count++;
			}
			text.set(s.toString());
			output.collect(flag, text);
		}
	}

	/**
	 * Runs one master-slave MapReduce round: maps evaluate fitness in
	 * parallel, a single reducer evolves the merged population, then the
	 * result file is pulled back from HDFS and each segment's best fitness is
	 * logged.
	 *
	 * @param inputp HDFS input path holding the serialized population
	 * @param outp   HDFS output path for the evolved population
	 */
	public static void master_slave(String inputp, String outp) {

		// Configure the Hadoop job (old mapred API).
		JobConf conf = new JobConf(NCMMSGA.class);
		conf.setJobName("Master-slave");

		// A single reducer merges all segments and performs the GA step.
		conf.setNumReduceTasks(1);
		conf.set("mapred.child.java.opts", "-Xmx1000m");

		conf.setOutputKeyClass(NullWritable.class);
		conf.setOutputValueClass(FPeopleWritable.class);
		conf.setMapOutputKeyClass(NullWritable.class);
		conf.setMapOutputValueClass(FPeopleWritable.class);

		conf.setMapperClass(NMMap.class);
		conf.setReducerClass(NMReduce.class);

		conf.setInputFormat(NMPGAInputFormat.class);
		conf.setOutputFormat(NMPGAOutputFormat.class);

		FileInputFormat.setInputPaths(conf, new Path(inputp));
		FileOutputFormat.setOutputPath(conf, new Path(outp));

		// Run the job and log wall-clock time.
		long t1 = System.currentTimeMillis();
		try {
			JobClient.runJob(conf);
		} catch (IOException e1) {
			e1.printStackTrace();
		}
		long t2 = System.currentTimeMillis();
		LOG.info(t2 - t1);

		// Fetch the reducer output from HDFS into the local result directory,
		// replacing any stale copy.
		File res = new File(mrp + "/part-00000");
		if (res.exists()) {
			res.delete();
		}
		String cmd = hp + "/bin/hadoop fs " +
				"-get /user/stanley/output/part-00000 " +
				mrp;
		RWF.execmd(cmd);

		// Read each population segment back and log its best fitness.
		DataInputStream in = null;
		try {
			in = new DataInputStream(new FileInputStream(mrp + "/part-00000"));
			long length = in.available();
			LOG.info("file length is " + length);

			for (int i = 0; i < map_number; i++) {
				FPeopleWritable segment = new FPeopleWritable();
				LOG.info("put start is " + length * i / map_number);
				segment.readFields(in);

				int best = segment.getBest(function);
				FGene[] gene = segment.getPersons();
				LOG.info("best " + best + "and fitness is " + gene[best].getFitness());
			}
		} catch (IOException e) {
			// Fix: the original swallowed FileNotFoundException and then
			// dereferenced the null stream (guaranteed NPE when the -get
			// failed); abort the read phase of this round instead.
			e.printStackTrace();
		} finally {
			// Fix: the streams were previously never closed (descriptor leak).
			if (in != null) {
				try {
					in.close();
				} catch (IOException ignored) {
					// best-effort close
				}
			}
		}
	}

	/**
	 * Auto-generated stub (likely an IDE quick-fix for a commented-out call);
	 * never invoked from this class and always returns null.
	 * NOTE(review): candidate for deletion once confirmed unused elsewhere.
	 */
	public FPeopleWritable FPeopleWritable(FPeopleWritable value) {
		// TODO Auto-generated method stub
		return null;
	}

	/**
	 * Runs the serial (single-process) GA for comparison and appends the best
	 * fitness found to the serial result file {@code srp}.
	 */
	public static void serial() {
		// Search bounds: every gene ranges over [-100, 100].
		// Fix: the bounds arrays were sized with a hard-coded 1000; use
		// geneLength so the serial run tracks the shared configuration.
		double[] low = new double[geneLength];
		double[] high = new double[geneLength];
		for (int i = 0; i < geneLength; i++) {
			low[i] = -100;
			high[i] = 100;
		}

		long st = System.currentTimeMillis();
		// The static field rt is the run-time budget handed to the GA.
		FGA.ga(geneLength, low, high, peopleNum, p_cross, p_mut, i_num, new FF1(), rt);
		// Fix: the elapsed-time local was previously also named "rt",
		// shadowing the static budget field right after it was used.
		long elapsed = System.currentTimeMillis() - st;
		System.out.println("run time is " + elapsed);

		// Persist the best individual's fitness.
		FGene gene = FGA.best_people.getPersons()[FGA.best_people.getBest(new FF1())];
		RWF.wf(String.valueOf(gene.fitness), srp);
	}

	/**
	 * Entry point. Generates the initial population, writes it as map_number
	 * serialized segments in a single local file, uploads it to HDFS, then
	 * runs {@code ln} master-slave MapReduce rounds.
	 *
	 * @param args args[0] = HDFS input path, args[1] = HDFS output path
	 */
	public static void main(String[] args) throws Exception {
		LOG.info("f Enter main");

		// Remove stale serial results before a fresh run.
		File res = new File(srp);
		if (res.exists()) {
			res.delete();
		}

		// Remove the stale local population file.
		res = new File(mrp + "/0");
		if (res.exists()) {
			res.delete();
		}

		// Generate the initial population; every gene ranges over [-100, 100].
		double[] low = new double[geneLength];
		double[] high = new double[geneLength];
		for (int j = 0; j < geneLength; j++) {
			low[j] = -100;
			high[j] = 100;
		}

		// Split the population into map_number segments and serialize them
		// back-to-back into one file (file count = people_number/map_number).
		FPeopleWritable all_p = new FPeopleWritable(peopleNum, geneLength, low, high);
		FPeople[] ap = all_p.seg(map_number);
		DataOutputStream out = new DataOutputStream(new FileOutputStream(mrp + "/" + 0, true));
		try {
			for (int i = 0; i < map_number; i++) {
				new FPeopleWritable(ap[i]).write(out);
			}
		} finally {
			// Fix: the stream was never closed, leaking the descriptor and
			// risking an incomplete file when it is uploaded to HDFS below.
			out.close();
		}

		// Replace any existing HDFS input with the fresh population file.
		RWF.delf(args[0]);
		RWF.inf(mrp, args[0]);

		// Run the parallel GA for ln rounds.
		for (int i = 0; i < ln; i++) {
			RWF.delf(args[1]);
			master_slave(args[0], args[1]);
			System.out.println(i + " Game over");
		}

		LOG.info("f Leave main");
	}

}

