package org.myorg.v3;

/*
 * Compare democratic model's PGA running time 
 *  
 */

import java.io.*;
import java.util.*;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;

import org.apache.hadoop.fs.FileSystem;

/*
 * Test serial and democratic standalone GA.
 *
 * Compare the running times, including wall-clock time and iteration count;
 * each mapper therefore outputs the solution quality, the wall-clock time,
 * and the iteration count.
 *
 * Each experiment is repeated 25 times; report the best, worst and mean times.
 *
 * The number of mappers ranges from 2 to 16.
 */


public class D3 extends Para3 {

	private static final Log LOG = LogFactory.getLog(D3.class);
	
	public static int ser = 0;

	/**
	 * Input format that turns each line of the "seg" input file into its own
	 * split, producing exactly {@code dmn} splits per input file (one per
	 * democratic mapper). Each split covers a single line whose content is the
	 * mapper index written by {@code main}.
	 */
	public static class DPGAInputFormat extends 
	FileInputFormat<IntWritable, IntWritable> {
		
		public int begin = 0;
		
		/**
		 * Builds one {@link FileSplit} per line of every input file, up to
		 * {@code dmn} lines per file. {@code numSplits} is ignored; the split
		 * count is driven entirely by {@code dmn}.
		 *
		 * @throws IOException if the input file cannot be opened or read
		 */
		@SuppressWarnings("deprecation")
		public InputSplit[] getSplits(JobConf conf, int numSplits) 
		throws IOException {
			
			FileStatus[] files = listStatus(conf);
		    // generate splits
		    ArrayList<FileSplit> splits = new ArrayList<FileSplit>(dmn);
		    
		    for (FileStatus file: files) {
		        Path path = file.getPath();
		        FileSystem fs = path.getFileSystem(conf);
		        long length = file.getLen();
		        long start = 0;
		        FSDataInputStream in = fs.open(path);
		        try {
		        	LOG.info("file length is " + length);
		       
		        	// One split per line: the split's offset/length point at
		        	// exactly that line so each mapper reads its own index.
		        	for (int i = 0; i < dmn; i++) {
		        		String line = in.readLine();
		        		int l = line.length();
		        		LOG.info("put start is " + start + " line is " + line);
		        		splits.add(new FileSplit(path, start, l, new String[0]));
		        		start = start + l + 1; // +1 skips the trailing '\n'
		        	}
		        } finally {
		        	// Fix: the input stream was previously never closed (leak).
		        	in.close();
		        }
		      }
		    
		      return splits.toArray(new FileSplit[splits.size()]);
		}
		
		/**
		 * Reads the single line covered by {@code split} (the mapper index)
		 * and hands it, together with the total mapper count {@code dmn}, to a
		 * {@link DRecordReader}.
		 *
		 * @throws IOException if the split's file cannot be opened or read
		 */
		public RecordReader<IntWritable,IntWritable> getRecordReader(
				InputSplit split, JobConf conf, Reporter reporter)
				throws IOException {
			FileSplit fsplit = (FileSplit)split;
			final Path file = fsplit.getPath();
		    // open the file and seek to the start of the split
		    FileSystem fs = file.getFileSystem(conf);
		    final FSDataInputStream fileIn = fs.open(fsplit.getPath());
		    String line;
		    try {
		    	fileIn.seek(fsplit.getStart());
		    	line = fileIn.readLine();
		    	LOG.info("XXX: " + fsplit.getStart() + "line is " + line);
		    } finally {
		    	// Fix: the input stream was previously never closed (leak);
		    	// the line is fully consumed above, so closing here is safe.
		    	fileIn.close();
		    }

			return new DRecordReader(Integer.parseInt(line),dmn);
		}
	}



	/**
	 * Mapper that runs one standalone GA instance per split. The split's key
	 * is this mapper's index and the value is the total mapper count; the GA
	 * result string is emitted under the mapper index as key.
	 */
	public static class DMap extends MapReduceBase implements
	Mapper<IntWritable, IntWritable, DoubleWritable, Text > {
		
		/**
		 * Runs the GA and collects its timing/quality record.
		 *
		 * @param key   this mapper's index within the democratic ensemble
		 * @param value the total number of mappers
		 * @throws IOException if the output collector fails
		 */
		public void map(IntWritable key, IntWritable value,
				OutputCollector<DoubleWritable, Text> output, Reporter reporter)
		throws IOException {

			String res = ga(geneLength, dpn, p_cross, p_mut, di_num,dfc, key.get(),value.get());
			// Fix: compare string content, not references — 'res == ""' only
			// tested object identity and worked purely by interning accident.
			if (res.isEmpty()) {
				res = "no value";
			}

			Text person = new Text(res);
			// The mapper index doubles as the output key so each mapper's
			// record stays distinguishable at the single reducer.
			DoubleWritable fitness = new DoubleWritable(key.get());
			output.collect(fitness, person);
			reporter.setStatus("key= " + key.get() + " " + value.get());
		}
	}

	/**
	 * Identity-style reducer: for each key it forwards only the first value
	 * and silently drops any duplicates (keys are unique mapper indices, so
	 * in practice there is exactly one value per key).
	 */
	public static class DReduce extends MapReduceBase implements
	Reducer<DoubleWritable, Text, DoubleWritable, Text> {
		private static final Log LOG = LogFactory.getLog(DReduce.class);

		public void reduce(DoubleWritable key, Iterator<Text> values,
				OutputCollector<DoubleWritable, Text> output, Reporter reporter)
		throws IOException {
			// Take the first (and expectedly only) value; copy it defensively
			// because Hadoop reuses the Text instance across iterations.
			Text first = values.next();
			output.collect(key, new Text(first));
		}
	}
	
	
	/**
	 * Runs one genetic-algorithm instance and records when the fitness
	 * function signals a result.
	 *
	 * @param geneLength length of each individual's gene string
	 * @param peopleNum  population size
	 * @param p_cross    crossover probability
	 * @param p_mut      mutation probability
	 * @param di_num     maximum number of GA iterations
	 * @param fc         fitness/termination oracle (calculate, is_r, org_value)
	 * @param start      this mapper's index in the democratic ensemble
	 * @param all        total number of mappers
	 * @return zero or more tab-separated records of the form
	 *         "\t&lt;elapsedMs&gt;\t&lt;iteration&gt;\t&lt;value&gt;", appended
	 *         whenever {@code fc.is_r} returns 0 or 1; empty when it never did
	 */
	public static String ga(int geneLength,
			int peopleNum, double p_cross,
			double p_mut, long di_num,Fc3 fc,
			int start, int all) {

		// Fix: removed unused local 'g_b' (Double.NEGATIVE_INFINITY) that was
		// declared but never read or written.
		String res = "";

		long start_time = System.currentTimeMillis();
		FPeople3 my_p = new FPeople3(peopleNum,geneLength);
		FEvolution3 my_e = new FEvolution3(0.1, p_cross, p_mut);
		fc.caculate(my_p.persons,start,all);

		for (int i=0; i < di_num; i++) {

			// Record elapsed time / iteration whenever the oracle flags the
			// current best individual; a result of 0 also ends the search.
			int is_r = fc.is_r(my_p.getBest(), start, all);
			if (1 == is_r || 0 == is_r) {
				res = res + "\t" + (System.currentTimeMillis() - start_time) 
				+"\t" + i + "\t" + fc.org_value(my_p.getBest(), start, all);

				if (0 == is_r ) {
					break;
				}
			} 

			// Standard GA generation: select, cross over, mutate, re-evaluate.
			my_e.select(my_p);
			my_e.crossOver(my_p);
			my_e.mutation(my_p);
			fc.caculate(my_p.persons, start, all);
		}

		return res;
	}

	/**
	 * Configures and launches one run of the democratic-GA Hadoop job, then
	 * logs its wall-clock duration in milliseconds.
	 */
	public static void democratic() {
		// Delete hadoop's previous output directory, otherwise hadoop
		// refuses to run the job.
		String cmd = hp + "/bin/hadoop fs -rmr " + dout;
		RWF3.execmd(cmd);

		// set hadoop task
		JobConf conf = new JobConf(D3.class);
		conf.setJobName("D3");
		
		conf.set("mapred.child.java.opts","-Xmx1000m");
		conf.set("mapred.reduce.max.attempts", "100");
		// GA runs can take arbitrarily long; disable the task timeout.
		conf.set("mapred.task.timeout", "0");

		// a single reducer collects every mapper's record
		conf.setNumReduceTasks(1);
		
		conf.setOutputKeyClass(DoubleWritable.class);
		conf.setOutputValueClass(Text.class);

		conf.setMapperClass(DMap.class);

		conf.setReducerClass(DReduce.class);

		conf.setInputFormat(DPGAInputFormat.class);
		conf.setOutputFormat(TextOutputFormat.class);

		FileOutputFormat.setOutputPath(conf, new Path(dout));
		FileInputFormat.setInputPaths(conf, new Path(din));

		long t1 = System.currentTimeMillis();
		// run the job and time it
		try {
			JobClient.runJob(conf);
		} catch (IOException e) {
			// Fix: log with context and cause instead of printStackTrace().
			LOG.error("democratic GA job failed", e);
		}
		long t2 = System.currentTimeMillis();

		LOG.info(t2-t1);
	}
	
	/**
	 * Intended to pick the best record out of the reducer output file; this
	 * is an unfinished stub — it reads the file and splits it into lines but
	 * never inspects them, and always returns the empty string.
	 *
	 * @return currently always "" (TODO: implement best-record selection)
	 */
	public String getBestRecord() {
		String br = "";
		//open file and read all line
		String all = new String(RWF3.rhf(dout + "/part-00000"));
		String[] lines = all.split("\n");
		
		//ArrayList<double> 
		//get value
		//for 
		//choose best record
		// NOTE(review): 'lines' is never used and 'br' is never reassigned —
		// the selection sketched in the comments above was never implemented.
		
		return br;
	}
	
	/**
	 * Entry point: prepares the result directories and the "seg" input file
	 * (one line per mapper index, 0..dmn-1), then runs the democratic GA job
	 * {@code ln} times, appending each run's reducer output to a per-program
	 * result file.
	 *
	 * @throws Exception propagated from the helper utilities
	 */
	public static void main(String[] args) throws Exception {
		LOG.info("f Enter main");

		//reserve results
		String cmd = "mkdir -p ./res";
		RWF3.execmd(cmd);
		
		//for running much programmers
		// presumably 'mark' uniquifies paths so concurrent program instances
		// do not clash — TODO confirm against RWF3.mkMark
		String mark = RWF3.mkMark("./res/dmark", dfc);

		//put different results of different program to different directory 
		cmd = "mkdir -p " + drp;
		RWF3.execmd(cmd);
		
		dout = dout + "/" + mark;
		din = din + "/" + mark;
		
	
		// Build the seg file content: one mapper index per line. Its line
		// offsets are what DPGAInputFormat.getSplits later walks.
		String seg = "";
		for (int i = 0; i < dmn; i++ ) {
			seg = seg + i + "\n";
		}
		System.out.print("length is " + seg.length() + seg);
		System.out.println("end");
		
		RWF3.whf(seg, din + "/seg");

		// repeat the experiment ln times
		for (int i = 0; i < ln; i++) {
			//dfc.init();
			// launch the parallel (democratic) run
			democratic();
			RWF3.wlf(RWF3.rhf(dout + "/part-00000"), drp + "/d_r" + mark);

			System.out.println(i + " Game over");
		}
		// TODO: compute the best, worst and mean solutions across runs
		LOG.info("f Leave main");
	}

}



