package mapreduce;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.TreeMap;
import java.util.Vector;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;


/**
 * Coordinates one map-reduce job: splits the input into per-map chunk
 * files, schedules the map and reduce tasks on thread pools, and merges
 * the reduce outputs into a single result file.
 *
 * @author Songbo Liao
 *
 * @param <MAPKEYIN>       input key type of the mapper
 * @param <MAPVALUEIN>     input value type of the mapper
 * @param <MAPKEYOUT>      output key type of the mapper
 * @param <MAPVALUEOUT>    output value type of the mapper
 * @param <REDUCEKEYOUT>   output key type of the reducer
 * @param <REDUCEVALUEOUT> output value type of the reducer
 */
public class Job<MAPKEYIN, MAPVALUEIN, MAPKEYOUT, MAPVALUEOUT, REDUCEKEYOUT, REDUCEVALUEOUT> {
	// Random id used to name this job's working directory.
	private int JobId;
	// Shared configuration; static so the static LineComparator can reach it.
	private static JobConf jobconf = null;
	// Working directory of this job: <runningDirectory>/job_<JobId>/
	private String jobdir;
	/**
	 * MiddleBuffer stores the intermediate map output, indexed
	 * [mapTask][reducePartition]; used only when the job is memory-based.
	 */
	private Vector<pair<MAPKEYOUT, MAPVALUEOUT>>[][] MiddleBuffer;
	/**
	 * resultmap stores the final reduce output, one vector per reduce
	 * task; used only when the job is memory-based.
	 */
	private Vector<pair<REDUCEKEYOUT, REDUCEVALUEOUT>>[] resultmap;
	// Number of map tasks, derived from the input volume in initialize().
	private int mapnum;

	/**
	 * Orders result lines in DESCENDING ("inverse") key order. The key is
	 * the text before the first tab, parsed according to the configured
	 * reduce output key class.
	 *
	 * @author Songbo Liao
	 */
	public static class LineComparator implements Comparator<String> {
		@Override
		public int compare(String element1, String element2) {
			if (getReduceOutputKeyClass() == String.class) {
				// BUGFIX: compare only the key field (text before the first
				// tab), consistent with the numeric branches below; the
				// original compared the whole line, letting the value field
				// leak into tie-breaking.
				int result = element1.split("\t")[0]
						.compareTo(element2.split("\t")[0]);
				if (result < 0)
					return 1;
				else if (result > 0)
					return -1;
				else
					return 0;
			} else if (getReduceOutputKeyClass() == Integer.class) {
				int num1 = Integer.parseInt(element1.split("\t")[0]);
				int num2 = Integer.parseInt(element2.split("\t")[0]);
				if (num1 < num2)
					return 1;
				else if (num1 > num2)
					return -1;
				else
					return 0;
			} else {
				double num1 = Double.parseDouble(element1.split("\t")[0]);
				double num2 = Double.parseDouble(element2.split("\t")[0]);
				if (num1 < num2)
					return 1;
				else if (num1 > num2)
					return -1;
				else
					return 0;
			}
		}
	}

	/**
	 * Creates a job: picks a random job id, creates the job working
	 * directory, and splits the input data into per-map chunk files.
	 *
	 * @param jobconf configuration shared by all tasks of this job
	 * @throws IOException if the input cannot be read or the chunks written
	 */
	public Job(JobConf jobconf) throws IOException {
		Job.jobconf = jobconf;
		Random random = new Random();
		// BUGFIX: bound nextInt so the id (and directory name) is never
		// negative; the original "nextInt() % 1000000" could be < 0.
		this.JobId = random.nextInt(1000000);
		this.jobdir = jobconf.getRuningDirectory() + "job_" + JobId + "/";
		// Create the job directory before initialize() writes beneath it.
		File jobDirectory = new File(jobdir);
		if (!jobDirectory.exists())
			jobDirectory.mkdir();
		this.mapnum = initialize();
	}

	/**
	 * Recursively collects the absolute paths of all regular files under dir.
	 *
	 * @param dir      directory to scan
	 * @param filelist receives the discovered file paths
	 */
	private void collectfiles(File dir, Vector<String> filelist) {
		for (File f : dir.listFiles()) {
			if (f.isFile())
				filelist.add(f.getAbsolutePath());
			else
				collectfiles(f, filelist);
		}
	}

	/**
	 * Reads every input file and re-partitions the lines into chunk files
	 * "0.data", "1.data", ... of roughly getAmountperMap()/4 characters
	 * each; every chunk feeds one map task.
	 *
	 * @return the number of map tasks (= number of chunk files written)
	 * @throws IOException if reading the input or writing a chunk fails
	 */
	private int initialize() throws IOException {
		Vector<String> filelist = new Vector<String>();
		int numofmap = 0;
		long volume = jobconf.getAmountperMap() / 4;
		collectfiles(new File(jobconf.getInputDir()), filelist);
		String datadir = this.jobdir + "map/input/";
		File datadirFile = new File(datadir);
		if (!datadirFile.exists())
			datadirFile.mkdirs();
		BufferedWriter bw = new BufferedWriter(new FileWriter(datadir + "0.data"));
		int vol = 0;
		String line;
		for (int i = 0; i < filelist.size(); i++) {
			BufferedReader br = new BufferedReader(new FileReader(filelist.elementAt(i)));
			try {
				while ((line = br.readLine()) != null) {
					bw.write(line);
					bw.newLine();
					vol += line.length();
					if (vol > volume) {
						// Current chunk is full: start the next one.
						vol = 0;
						bw.close();
						bw = new BufferedWriter(new FileWriter(datadir
								+ (++numofmap) + ".data"));
					}
				}
			} finally {
				// BUGFIX: the reader was never closed in the original.
				br.close();
			}
		}
		bw.close();
		return numofmap + 1;
	}

	/**
	 * Get the output key type of Reduce.
	 * @return the output key type of Reduce
	 */
	public static Class<?> getReduceOutputKeyClass() {
		return jobconf.getReduceOutputKeyClass();
	}

	/**
	 * Set Mapper Class.
	 * @param MapperClass the user's mapper implementation
	 */
	public void setMapperClass(Class<?> MapperClass) {
		jobconf.setMapperClass(MapperClass);
	}

	/**
	 * Set Reducer Class.
	 * @param ReducerClass the user's reducer implementation
	 */
	public void setReducerClass(Class<?> ReducerClass) {
		jobconf.setReducerClass(ReducerClass);
	}

	/**
	 * Set the number of reduce tasks.
	 * @param num reduce task count
	 */
	public void setNumofreduce(int num) {
		jobconf.setNumofreduce(num);
	}

	/**
	 * Get the running folder of this job.
	 * @return running folder of this job
	 */
	public String getrunningdir() {
		return jobconf.getRuningDirectory();
	}

	/**
	 * Get Mapper Class type.
	 * @return Mapper Class type
	 */
	public Class<?> getMapperClass() {
		return jobconf.getMapperClass();
	}

	/**
	 * Get the input key type of Map.
	 * @return the input key type of Map
	 */
	public Class<?> getMapInputKeyClass() {
		return jobconf.getMapInputKeyClass();
	}

	/**
	 * Get the input value type of Map.
	 * @return the input value type of Map
	 */
	public Class<?> getMapInputValueClass() {
		return jobconf.getMapInputValueClass();
	}

	/**
	 * Get the input key type of Reduce.
	 * @return the input key type of Reduce
	 */
	public Class<?> getReduceInputKeyClass() {
		return jobconf.getReduceInputKeyClass();
	}

	/**
	 * Get the input value type of Reduce.
	 * @return the input value type of Reduce
	 */
	public Class<?> getReduceInputValueClass() {
		return jobconf.getReduceInputValueClass();
	}

	/**
	 * Get buffer size of writing.
	 * @return buffer size of writing
	 */
	public long getBuffersize() {
		return jobconf.getBuffersize();
	}

	/**
	 * Set the flag indicating if the job needs key sorting.
	 * @param flag true to sort keys
	 */
	public void setsortflag(boolean flag) {
		jobconf.setsortflag(flag);
	}

	/**
	 * Get the flag indicating if the job needs key sorting.
	 * @return sort flag
	 */
	public boolean getsortflag() {
		return jobconf.getsortflag();
	}

	/**
	 * Set the flag indicating whether the job is memory-based or disk-based.
	 * @param flag true for memory-based
	 */
	public void setMemoryflag(boolean flag) {
		jobconf.setMemoryflag(flag);
	}

	/**
	 * Get the flag indicating whether the job is memory-based or disk-based.
	 * @return memory flag
	 */
	public boolean getMemoryflag() {
		return jobconf.getMemoryflag();
	}

	/**
	 * Tests whether line1 sorts at or before line2 in ASCENDING key order.
	 * The key is the text before the first tab, parsed according to the
	 * configured reduce output key class.
	 *
	 * @param line1 first tab-separated result line
	 * @param line2 second tab-separated result line
	 * @return true iff line1's key is less than or equal to line2's key
	 */
	public boolean compare(String line1, String line2) {
		if (jobconf.getReduceOutputKeyClass() == String.class) {
			// BUGFIX: compare only the key field, consistent with the
			// numeric branches (the original compared the whole line).
			return line1.split("\t")[0].compareTo(line2.split("\t")[0]) <= 0;
		} else if (jobconf.getReduceOutputKeyClass() == Integer.class) {
			int num1 = Integer.parseInt(line1.split("\t")[0]);
			int num2 = Integer.parseInt(line2.split("\t")[0]);
			return num1 <= num2;
		} else {
			double num1 = Double.parseDouble(line1.split("\t")[0]);
			double num2 = Double.parseDouble(line2.split("\t")[0]);
			return num1 <= num2;
		}
	}

	/**
	 * Used in external sorting: inserts line into the ordered vector
	 * (descending, per the compare() convention of scanning from the end)
	 * and evicts the last element so the vector keeps a constant size.
	 *
	 * @param lines ordered working set
	 * @param line  new element to place
	 * @return the element removed from the end of the vector
	 */
	public String insert(Vector<String> lines, String line) {
		boolean placed = false;
		// Scan from the tail for the first element whose key is >= line's key
		// and insert right after it.
		for (int i = lines.size() - 1; i >= 0; i--) {
			if (compare(line, lines.get(i))) {
				lines.add(i + 1, line);
				placed = true;
				break;
			}
		}
		if (!placed)
			lines.add(0, line);
		return lines.remove(lines.size() - 1);
	}

	/**
	 * Finds the row whose current element (array[i][point[i]]) is smallest
	 * by lexicographic comparison. Used by the external merge in
	 * GenerateResult(); exhausted rows hold a high sentinel so they lose
	 * to any live row.
	 * NOTE(review): the comparison is always lexicographic, even when the
	 * reduce key class is numeric — confirm outputs are ordered accordingly.
	 *
	 * @param array per-file line buffers
	 * @param point current cursor per file
	 * @return the index of the row holding the minimum
	 */
	public int findmin(String[][] array, int[] point) {
		int pos = 0;
		String min = array[0][point[0]];
		for (int i = 1; i < array.length; i++) {
			String candidate = array[i][point[i]];
			// ">=" keeps the original tie behavior: later rows win ties.
			if (min.compareTo(candidate) >= 0) {
				min = candidate;
				pos = i;
			}
		}
		return pos;
	}

	/**
	 * Merges the per-reduce-task outputs into the single file
	 * "&lt;jobdir&gt;/result.txt". Disk-based jobs read the files under
	 * reduce/output/; memory-based jobs read resultmap. When the sort
	 * flag is set, the partitions are merged in sorted order.
	 *
	 * @throws IOException if any output file cannot be read or written
	 */
	public void GenerateResult() throws IOException {
		BufferedWriter bw = new BufferedWriter(new FileWriter(jobdir + "result.txt"));
		try {
			if (getMemoryflag() == false) {
				File dir = new File(jobdir + "reduce/output/");
				if (getsortflag() == false)
					concatenateFiles(dir, bw);
				else
					mergeSortedFiles(bw);
			} else {
				if (getsortflag() == false)
					writeUnsortedMemoryResult(bw);
				else
					writeSortedMemoryResult(bw);
			}
		} finally {
			// BUGFIX: exactly one close on every path (the original closed
			// the writer twice on the disk-based path).
			bw.close();
		}
	}

	/** Copies every reduce output file verbatim into the result writer. */
	private void concatenateFiles(File dir, BufferedWriter bw) throws IOException {
		for (File f : dir.listFiles()) {
			BufferedReader br = new BufferedReader(new FileReader(f));
			try {
				String line;
				while ((line = br.readLine()) != null) {
					bw.write(line);
					bw.newLine();
				}
			} finally {
				br.close();
			}
		}
	}

	/**
	 * K-way merge of the sorted reduce output files "0.data" ..
	 * "(numofreduce-1).data". Each file is buffered up to one million
	 * lines; an exhausted buffer slot holds the sentinel so findmin()
	 * never selects it while live rows remain.
	 * NOTE(review): the sentinel "zzzzzzzzzz" only sorts above keys that
	 * are lexicographically smaller — confirm the key domain.
	 */
	private void mergeSortedFiles(BufferedWriter bw) throws IOException {
		// BUGFIX: size everything by the reduce count; the original mixed
		// jobconf.getNumofreduce() with dir.listFiles().length.
		final int filenum = jobconf.getNumofreduce();
		final int buffersize = 1024 * 1024;
		final String max = "zzzzzzzzzz";
		String[][] datas = new String[filenum][buffersize];
		int[] points = new int[filenum];
		BufferedReader[] brs = new BufferedReader[filenum];
		try {
			for (int i = 0; i < filenum; i++) {
				brs[i] = new BufferedReader(new FileReader(
						jobdir + "reduce/output/" + i + ".data"));
				fillBuffer(brs[i], datas[i], max);
			}
			while (true) {
				// BUGFIX: test for exhaustion BEFORE writing, so the
				// sentinel is never emitted when every input is drained
				// (the original wrote one bogus line in that case).
				boolean exhausted = true;
				for (int i = 0; i < filenum; i++) {
					if (!max.equals(datas[i][points[i]])) {
						exhausted = false;
						break;
					}
				}
				if (exhausted)
					break;
				int pos = findmin(datas, points);
				bw.write(datas[pos][points[pos]]);
				bw.newLine();
				points[pos]++;
				if (points[pos] == buffersize) {
					// Buffer consumed: refill this file and rewind its cursor.
					fillBuffer(brs[pos], datas[pos], max);
					points[pos] = 0;
				}
			}
		} finally {
			// BUGFIX: the readers were never closed in the original.
			for (int i = 0; i < brs.length; i++)
				if (brs[i] != null)
					brs[i].close();
		}
	}

	/** Reads up to buffer.length lines; marks end-of-file with the sentinel. */
	private void fillBuffer(BufferedReader br, String[] buffer, String sentinel)
			throws IOException {
		for (int j = 0; j < buffer.length; j++) {
			String line = br.readLine();
			if (line == null) {
				buffer[j] = sentinel;
				break;
			}
			buffer[j] = line;
		}
	}

	/** Writes the in-memory reduce results partition by partition, unsorted. */
	private void writeUnsortedMemoryResult(BufferedWriter bw) throws IOException {
		for (int i = 0; i < jobconf.getNumofreduce(); i++) {
			Iterator<pair<REDUCEKEYOUT, REDUCEVALUEOUT>> iterator = resultmap[i].iterator();
			while (iterator.hasNext()) {
				bw.write(iterator.next().toString());
				bw.newLine();
			}
		}
	}

	/**
	 * Writes the in-memory reduce results in ascending natural key order
	 * via a TreeMap.
	 * NOTE(review): duplicate keys across partitions are collapsed by the
	 * map; safe only if reduce keys are globally unique — confirm.
	 */
	private void writeSortedMemoryResult(BufferedWriter bw) throws IOException {
		TreeMap<REDUCEKEYOUT, REDUCEVALUEOUT> sortmap =
				new TreeMap<REDUCEKEYOUT, REDUCEVALUEOUT>();
		for (int i = 0; i < jobconf.getNumofreduce(); i++) {
			Iterator<pair<REDUCEKEYOUT, REDUCEVALUEOUT>> iterator = resultmap[i].iterator();
			while (iterator.hasNext()) {
				pair<REDUCEKEYOUT, REDUCEVALUEOUT> p = iterator.next();
				sortmap.put(p.getfirst(), p.getsecond());
			}
		}
		for (Map.Entry<REDUCEKEYOUT, REDUCEVALUEOUT> entry : sortmap.entrySet()) {
			bw.write(entry.getKey() + "\t" + entry.getValue());
			bw.newLine();
		}
	}

	/**
	 * The main function of Job. Firstly reads all input files, then
	 * launches all map tasks; when all map tasks finish, launches all
	 * reduce tasks; finally merges everything into the result file.
	 * Progress and timing are printed to stdout.
	 *
	 * @throws IOException if result generation fails
	 */
	@SuppressWarnings("unchecked")
	public void run() throws IOException {
		long starttime = System.currentTimeMillis();
		long time = starttime;
		// One input directory per reduce partition.
		for (int i = 0; i < jobconf.getNumofreduce(); i++) {
			File resultinputdir = new File(jobdir + "reduce/input/" + i + "/");
			if (!resultinputdir.exists())
				resultinputdir.mkdirs();
		}
		if (getMemoryflag() == true) {
			// Memory-based: allocate the intermediate and final buffers.
			this.MiddleBuffer = new Vector[mapnum][jobconf.getNumofreduce()];
			for (int i = 0; i < this.mapnum; ++i)
				for (int j = 0; j < jobconf.getNumofreduce(); j++)
					MiddleBuffer[i][j] = new Vector<pair<MAPKEYOUT, MAPVALUEOUT>>();
			System.out.println("Job map len=" + MiddleBuffer.length);
			resultmap = new Vector[jobconf.getNumofreduce()];
			for (int i = 0; i < jobconf.getNumofreduce(); i++)
				resultmap[i] = new Vector<pair<REDUCEKEYOUT, REDUCEVALUEOUT>>();
		}
		System.out.println("Initialize finish. Init cost: "
				+ (System.currentTimeMillis() - time) / 1000.0 + " s");
		CountDownLatch end = new CountDownLatch(this.mapnum);
		ExecutorService exec = Executors.newCachedThreadPool();
		System.out.println("Map start.");
		time = System.currentTimeMillis();
		for (int i = 0; i < this.mapnum; i++) {
			MapTaskConf<MAPKEYIN, MAPVALUEIN, MAPKEYOUT, MAPVALUEOUT> conf =
					new MapTaskConf<MAPKEYIN, MAPVALUEIN, MAPKEYOUT, MAPVALUEOUT>(
					getMapperClass(), getMapInputKeyClass(),
					getMapInputValueClass(), this.jobdir, i,
					jobconf.getNumofreduce(), getMemoryflag(), MiddleBuffer);
			exec.execute(new Maptask<MAPKEYIN, MAPVALUEIN, MAPKEYOUT, MAPVALUEOUT>(
					end, i, conf));
		}
		try {
			// Block until every map task counts the latch down.
			end.await();
		} catch (InterruptedException e) {
			// BUGFIX: restore the interrupt flag instead of swallowing it.
			Thread.currentThread().interrupt();
			e.printStackTrace();
		}
		System.out.println("Map finish. Map cost: "
				+ (System.currentTimeMillis() - time) / 1000.0 + " s");
		exec.shutdown();
		exec = Executors.newCachedThreadPool();
		int numofreduce = jobconf.getNumofreduce();
		CountDownLatch start = new CountDownLatch(numofreduce);
		System.out.println("Reduce start.");
		time = System.currentTimeMillis();
		for (int i = 0; i < numofreduce; i++) {
			ReduceTaskConf<MAPKEYOUT, MAPVALUEOUT, REDUCEKEYOUT, REDUCEVALUEOUT> conf =
					new ReduceTaskConf<MAPKEYOUT, MAPVALUEOUT, REDUCEKEYOUT, REDUCEVALUEOUT>(
					jobconf.getReduceClass(), getReduceInputKeyClass(),
					getReduceInputValueClass(), this.jobdir,
					jobconf.getBuffersize(), i, getMemoryflag(), MiddleBuffer, resultmap);
			exec.execute(new Reducetask<MAPKEYOUT, MAPVALUEOUT, REDUCEKEYOUT, REDUCEVALUEOUT>(
					start, end, i, conf, getsortflag(), MiddleBuffer));
		}
		try {
			// Block until every reduce task counts the latch down.
			start.await();
		} catch (InterruptedException e) {
			// BUGFIX: restore the interrupt flag instead of swallowing it.
			Thread.currentThread().interrupt();
			e.printStackTrace();
		}
		exec.shutdown();
		System.out.println("Reduce finish. Reduce cost: "
				+ (System.currentTimeMillis() - time) / 1000.0 + " s");
		time = System.currentTimeMillis();
		GenerateResult();
		System.out.println("Output finish. Output cost: "
				+ (System.currentTimeMillis() - time) / 1000.0 + " s");
		System.out.println("Total time: "
				+ (System.currentTimeMillis() - starttime) / 1000.0 + " s");
	}

	/**
	 * Getter of the property <tt>JobId</tt>.
	 *
	 * @return Returns the JobId.
	 */
	public int getJobId() {
		return JobId;
	}

}
