package org.myorg;
/*
 * Compare the democratic-model parallel genetic algorithm (PGA) with a serial GA.
 */

import java.io.*;
import java.util.*;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FSInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;

import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.FSDataInputStream;


/*import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.BlockLocation;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;

import org.apache.hadoop.mapred.FileInputFormat;*/
/*import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;*/
import org.myorg.GA;



public class M2 extends Para2 {

	// Log under this class's own name. Was LogFactory.getLog(PGAmain.class) —
	// a copy-paste from the other driver that mislabeled every log line of M2.
	private static final Log LOG = LogFactory.getLog(M2.class);





	/**
	 * Input format that feeds each map task one serialized sub-population.
	 * Every input file is cut into mmn fixed-size byte ranges; the record
	 * reader then deserializes exactly one FPeopleWritable2 starting at the
	 * split's offset, so each of the mmn mappers receives one segment written
	 * by the previous generation (see main / master_slave for the writer side).
	 */
	public static class NMPGAInputFormat extends 
	FileInputFormat<IntWritable,FPeopleWritable2>{

			/**
			 * Splits each input file into mmn byte ranges of length/mmn bytes.
			 * The framework-requested numSplits is ignored; mmn (inherited
			 * from Para2) drives the map-task count. Remainder bytes past
			 * mmn*(length/mmn) are not assigned to any split — presumably
			 * harmless because the reader only uses a split's start offset,
			 * not its length; TODO confirm.
			 */
			@Override
			public InputSplit[] getSplits(JobConf job, int numSplits)
					throws IOException {
				FileStatus[] files = listStatus(job);
			    // generate splits
			    // NOTE(review): files.length/mmn is an odd initial capacity
			    // (files.length*mmn splits are produced); functionally benign.
			    ArrayList<FileSplit> splits = new ArrayList<FileSplit>(files.length/mmn);
			    
			    for (FileStatus file: files) {
			        Path path = file.getPath();
			        FileSystem fs = path.getFileSystem(job);
			        long length = file.getLen();
			        LOG.info("file length is " + length);
			       
			        // One split per sub-population. The empty String[] means no
			        // host locality hints: splits may be scheduled anywhere.
			        for (int i = 0; i < mmn; i++) {
			        	//long length = file.getLen();
			        	LOG.info("put start is " + length * i/mmn);
			        	splits.add(new FileSplit(path, length * i/mmn, length/mmn, new String[0]));
			        }
			        
			      }
			      LOG.info("stanley Total # of splits: " + splits.size());
			      for (int t = 0; t < splits.size(); t++ ) {
			    	  LOG.info("start is " + splits.get(t).getStart());
			      }
			      
			      return splits.toArray(new FileSplit[splits.size()]);
		
			}

			/**
			 * Returns a reader that emits exactly one record per split: the
			 * key is the constant 1 and the value is the FPeopleWritable2
			 * deserialized from the split's start offset.
			 */
			@Override
			public RecordReader<IntWritable, FPeopleWritable2> getRecordReader(
					InputSplit split, JobConf job, Reporter reporter)
					throws IOException {
				FileSplit fsplit = (FileSplit)split;
				final Path file = fsplit.getPath();
			    // open the file and seek to the start of the split
			    FileSystem fs = file.getFileSystem(job);
			    final FSDataInputStream fileIn = fs.open(fsplit.getPath());
			    fileIn.seek(fsplit.getStart());
			    //final ObjectInputStream object = new ObjectInputStream(fileIn);
			   //FPeopleWritable2 fp = new FPeopleWritable2(peopleNum/mmn,geneLength);
			   //fp.readFields(fileIn);
			    LOG.info("XXX: " + fsplit.getStart());
			    
				// getInt defaults to 1, so this guard only trips if the job
				// explicitly sets a negative "PGA.job.map.count". map_c itself
				// is otherwise unused (leftover from an earlier record-count
				// based reader, see the commented-out count++ below).
				final int map_c = job.getInt("PGA.job.map.count", 1);
				if (map_c < 0) throw new IOException("Invalid map count: " + map_c);
/*				//for debug
				try {
					FPeople p = (FPeople)object.readObject();
					LOG.info("stanley test" + p.p_num);
				} catch (ClassNotFoundException e) {
					// TODO Auto-generated catch block
					e.printStackTrace();
				}*/
				
				return new RecordReader<IntWritable, FPeopleWritable2>() {
					// True until the split's single record has been consumed.
					private boolean is = true;
					
					// Emits the one (1, population) pair, then reports EOF.
					// `value` is the empty population from createValue();
					// readFields fills it from the stream in place.
					public boolean next(IntWritable key,  FPeopleWritable2 value)
					throws IOException {
						//key.set(Integer.parseInt(file.getName()));
						if (is) {
							key.set(1);
							value.readFields(fileIn);
							is = false;
							return true;						
						} else {							
							return false;
						}
							
						
						//value.set(fp);
						
						//return count++ < map_c;
						
					}
					
					public IntWritable createKey() { return new IntWritable(); }
					
					// Allocates an empty population sized for one segment
					// (mpn/mmn individuals of geneLength genes); next() fills it.
					public FPeopleWritable2 createValue() { 
/*						FPeopleWritable2 p = new FPeopleWritable2(peopleNum/mmn,geneLength);
						try {
							p.readFields(fileIn);
						} catch (IOException e) {
							// TODO Auto-generated catch block
							e.printStackTrace();
						} 
						return p;*/
						return new FPeopleWritable2(mpn/mmn,geneLength);
					}
					
					// Position/progress are not tracked: one record per split.
					public long getPos() throws IOException { return 0; }
					
					// NOTE(review): fileIn is never closed here — the split's
					// stream leaks until finalization; confirm before changing.
					public void close() throws IOException { }
					
					public float getProgress() throws IOException {
						return 1;
					}

				};
			}
		
	}
	
	/**
	 * Output format that writes each FPeopleWritable2 value as raw serialized
	 * bytes to the task output file. The NullWritable key is never written, so
	 * the resulting file is exactly the concatenation the NMPGAInputFormat
	 * reader expects on the next iteration.
	 */
	public static class NMPGAOutputFormat extends FileOutputFormat<NullWritable,FPeopleWritable2> {

		@Override
		public RecordWriter<NullWritable,FPeopleWritable2> getRecordWriter(FileSystem ignored,
				JobConf job, String name, Progressable progress)
				throws IOException {
			Path outputPath = FileOutputFormat.getTaskOutputPath(job, name);
			final FSDataOutputStream stream =
					outputPath.getFileSystem(job).create(outputPath, progress);

			return new RecordWriter<NullWritable, FPeopleWritable2>() {

				@Override
				public void write(NullWritable key,
						org.myorg.FPeopleWritable2 value) throws IOException {
					// Only the population payload is persisted; the key
					// carries no information.
					value.write(stream);
				}

				@Override
				public void close(Reporter reporter) throws IOException {
					stream.close();
				}

			};
		}

	}

	/**
	 * Fitness-evaluation stage of the master-slave PGA: scores every
	 * individual of the incoming sub-population against the shared objective
	 * function, then forwards the scored population to the single reducer
	 * under a NullWritable key so all segments meet in one reduce call.
	 */
	public static class NMMap extends MapReduceBase implements
	Mapper<IntWritable,FPeopleWritable2, NullWritable, FPeopleWritable2> {
		public void map(IntWritable key, FPeopleWritable2 value,
				OutputCollector<NullWritable, FPeopleWritable2> output, Reporter reporter)
		throws IOException {
			// `function` is the objective inherited from Para2; fitness is
			// written into `value` in place.
			value.caculateFitness(function);
			output.collect(NullWritable.get(), value);
		}
	}

	/**
	 * Evolution stage: merges every scored sub-population into one pool,
	 * runs one generation of the genetic algorithm (selection, crossover,
	 * mutation), then re-partitions the evolved pool into mmn segments —
	 * one per map task of the next iteration.
	 */
	@SuppressWarnings("deprecation")
	public static class NMReduce extends MapReduceBase implements
	Reducer<NullWritable, FPeopleWritable2, NullWritable, FPeopleWritable2> {
		private static final Log LOG = LogFactory.getLog(NMReduce.class);

		public void reduce(NullWritable key, Iterator<FPeopleWritable2> values,
				OutputCollector<NullWritable, FPeopleWritable2> output, Reporter reporter)
		throws IOException {
			// Merge all incoming sub-populations into a single pool.
			FPeopleWritable2 pool = new FPeopleWritable2();
			while (values.hasNext()) {
				pool.add(values.next());
			}

			// One full GA generation; the operators act on `pool` in place.
			FEvolution2 evolution = new FEvolution2(0.1, p_cross, p_mut, pool, function);
			evolution.select();
			evolution.crossOver();
			evolution.mutation();

			// Split the evolved pool back into mmn segments for the next round.
			FPeople2[] segments = pool.seg(mmn);
			for (int i = 0; i < mmn; i++) {
				output.collect(NullWritable.get(), new FPeopleWritable2(segments[i]));
			}
		}
	}
	
	/**
	 * Debug reducer: instead of evolving, emits one summary line describing
	 * every population that arrived. The output key is 2 when at least one
	 * value was received and 3 otherwise. Not wired into the job (see
	 * master_slave, where it is commented out); kept for troubleshooting.
	 */
	public static class RTest extends MapReduceBase implements
	Reducer<NullWritable, FPeopleWritable2, IntWritable, Text> {
		// Was LogFactory.getLog(NMReduce.class) — copy-paste bug that
		// attributed RTest's log lines to the wrong class.
		private static final Log LOG = LogFactory.getLog(RTest.class);

		public void reduce(NullWritable key, Iterator<FPeopleWritable2> values,
				OutputCollector<IntWritable, Text> output, Reporter reporter)
		throws IOException {
			IntWritable IntTest = new IntWritable(3);
			Text text = new Text("no input");
			int count = 0;
			// StringBuilder replaces repeated String concatenation in the loop.
			StringBuilder s = new StringBuilder("www");
			while (values.hasNext()) {
				FPeopleWritable2 res = new FPeopleWritable2(values.next());
				IntTest.set(2);
				s.append(" xxinput").append(res.getPersons()[0].fitness)
				 .append(count).append("people number is ").append(res.p_num);
				count++;
			}
			text.set(s.toString());
			output.collect(IntTest, text);
		}
	}

	/**
	 * Reads the reducer output file (mout + "/part-00000"), which holds mmn
	 * serialized populations, and returns the smallest best-fitness value
	 * found across all of them (this GA minimizes the objective).
	 *
	 * @return the best (lowest) fitness of the current generation, or
	 *         Double.MAX_VALUE if the output file could not be read
	 */
	public static double get_best_recorder() {
		double best = Double.MAX_VALUE;

		Configuration conf = new Configuration();
		Path part = new Path(mout + "/part-00000");

		FSDataInputStream in = null;
		try {
			FileSystem fs = FileSystem.get(conf);
			// File length from namenode metadata; the previous in.available()
			// call is not a reliable length for HDFS streams.
			LOG.info("file length is " + fs.getFileStatus(part).getLen());

			in = fs.open(part);
			for (int i = 0; i < mmn; i++) {
				FPeopleWritable2 population = new FPeopleWritable2();
				population.readFields(in);

				int bestIdx = population.getBest(function);
				FGeneS[] genes = population.getPersons();
				LOG.info("best " + bestIdx + " and fitness is " + genes[bestIdx].getFitness());
				if (genes[bestIdx].getFitness() < best) {
					best = genes[bestIdx].getFitness();
				}
			}
		} catch (IOException e) {
			// Previously each IOException was only printed and execution
			// continued, which crashed with an NPE on the half-initialized
			// stream. Log and return the sentinel value instead.
			LOG.error("failed to read best-fitness record from " + part, e);
		} finally {
			// The stream was never closed before: resource leak on every call.
			if (in != null) {
				try {
					in.close();
				} catch (IOException e) {
					LOG.warn("failed to close " + part, e);
				}
			}
		}
		return best;
	}
	
	/**
	 * Runs up to i_num MapReduce generations of the master-slave parallel GA.
	 * After every generation the current best fitness is read back; each time
	 * the global best first drops below the next threshold in mag[], a record
	 * "\t&lt;elapsed-ms&gt;\t&lt;evaluations&gt;\t&lt;best&gt;" is appended to the result.
	 * Iteration stops early once every threshold in mag[] has been reached.
	 *
	 * @return the concatenated tab-separated milestone records (possibly empty)
	 */
	public static String master_slave() {

		String cmd;

		long elapsed = 0;               // accumulated job wall-clock time (ms)
		double currentBest;             // best fitness of the latest generation
		double best = Double.MAX_VALUE; // global best fitness so far
		String re = "";

		// mark[k] records whether threshold mag[k] was already reported.
		// Java boolean arrays are zero-initialized, so no explicit fill is
		// needed — the old "for (boolean bm : mark) bm = false;" loop only
		// assigned the loop-local copy and was a no-op anyway.
		boolean[] mark = new boolean[mag.length];
		int count = 0;

		for (int i = 0; i < i_num; i++) {
			RWF.delf(mout);

			// Configure one generation as a Hadoop job.
			JobConf conf = new JobConf(M2.class);
			conf.setJobName("M2");

			// A single reducer merges and evolves the whole population.
			conf.setNumReduceTasks(1);

			conf.set("mapred.child.java.opts","-Xmx2000m");
			conf.set("mapred.reduce.max.attempts", "100");
			conf.set("mapred.task.timeout", "0");

			conf.setMapOutputKeyClass(NullWritable.class);
			conf.setMapOutputValueClass(FPeopleWritable2.class);

			conf.setOutputKeyClass(IntWritable.class);
			conf.setOutputValueClass(Text.class);

			conf.setMapperClass(NMMap.class);
			conf.setReducerClass(NMReduce.class);
			//conf.setReducerClass(RTest.class); // debug reducer, see RTest

			conf.setInputFormat(NMPGAInputFormat.class);
			conf.setOutputFormat(NMPGAOutputFormat.class);

			FileInputFormat.setInputPaths(conf, new Path(min));
			FileOutputFormat.setOutputPath(conf, new Path(mout));

			// Run the generation and record its running time.
			long t1 = System.currentTimeMillis();
			try {
				JobClient.runJob(conf);
			} catch (IOException e) {
				// Was e.printStackTrace(); keep going like before, but log
				// through the normal channel with the failing generation.
				LOG.error("generation " + i + " job failed", e);
			}
			long t2 = System.currentTimeMillis();

			LOG.info(t2 - t1);
			elapsed = elapsed + (t2 - t1);

			// currentBest: best fitness produced by this generation.
			currentBest = get_best_recorder();
			LOG.info(i + " the current best value is " + currentBest);

			if (currentBest < best) {
				best = currentBest;
				LOG.info(i + "the global best value is " + currentBest);

				// Report a milestone the first time `best` crosses mag[count].
				if (count < mag.length && !mark[count] && best < mag[count]) {
					re = re + "\t" + elapsed + "\t" + i * mpn + "\t" + best;
					mark[count] = true;
					count++;
					if (count == mag.length) {
						break; // all thresholds reached: stop iterating
					}
				}
			}

			// Delete the old input, then promote the reducer output to be the
			// next generation's input file.
			cmd = hp + "/bin/hadoop fs -rmr " + min + "/*";
			RWF.execmd(cmd);

			cmd = hp + "/bin/hadoop fs -mv " + mout + "/part-00000 " 
			+ min + "/0";
			RWF.execmd(cmd);

		}
		return re;
	}

	// Auto-generated stub: named like a copy constructor but actually an
	// instance method that always returns null. Only referenced from
	// commented-out lines in NMMap — dead code, candidate for removal.
	public FPeopleWritable2 FPeopleWritable2(FPeopleWritable2 value) {
		// TODO Auto-generated method stub
		return null;
	}


	/**
	 * Entry point. Assigns this run a unique numeric mark (persisted in
	 * ./res/mmark and incremented on every start) so concurrent program
	 * instances use disjoint HDFS input/output and local result paths, then
	 * runs ln independent PGA experiments.
	 *
	 * @param args unused
	 * @throws Exception on unrecoverable I/O failures
	 */
	public static void main(String[] args) throws Exception {
		LOG.info("f Enter main");

		// Local directory for result files.
		String cmd = "mkdir -p ./res";
		RWF.execmd(cmd);

		// Read and bump the per-run counter. try-with-resources replaces the
		// previously never-closed reader/writers (resource leaks).
		String mark = "0";
		File res = new File("./res/mmark");
		if (res.exists()) {
			String line;
			try (BufferedReader input = new BufferedReader(new FileReader(res))) {
				line = input.readLine();
			}
			int im = 1;
			if (line != null) {
				// Previously a null line left mark == null and the literal
				// string "null" was appended to every path below.
				mark = line;
				im = Integer.parseInt(line);
			}
			try (PrintWriter output = new PrintWriter(new FileWriter(res))) {
				output.print(im + 1);
			}
		} else {
			try (PrintWriter output = new PrintWriter(new FileWriter(res))) {
				output.print(1);
			}
		}

		// Give this run its own result / input / output locations.
		mrp = mrp + mark;
		cmd = "mkdir -p " + mrp;
		RWF.execmd(cmd);
		min = min + mark;
		mout = mout + mark;

		// Repeat the whole experiment ln times.
		for (int i = 0; i < ln; i++) {

			// Prepare the initial data of the master-slave mode: generate a
			// random population of mpn individuals in [-100, 100], split it
			// into mmn segments and write them all to one HDFS input file.
			float low = -100;
			float high = 100;

			FPeopleWritable2 all_p = new FPeopleWritable2(mpn, geneLength, low, high);
			FPeople2[] ap = all_p.seg(mmn);

			Configuration conf = new Configuration();
			FileSystem fs = FileSystem.get(conf);
			Path f = new Path(min + "/0");
			// NOTE(review): buffer size 0 looks odd — Hadoop normally expects
			// a positive io buffer size here; confirm it is intended.
			FSDataOutputStream out = fs.create(f, true, 0);
			try {
				for (int j = 0; j < mmn; j++) {
					new FPeopleWritable2(ap[j]).write(out);
				}
			} finally {
				// Close even if a write fails, so the stream never leaks.
				out.close();
			}

			// Run the parallel GA and persist its milestone records.
			String r = master_slave();
			RWF.wf(r, mrp + "/m_r");
			System.out.println(i + " Game over");
		}

		LOG.info("f Leave main");
	}

}

