package edu.utexas.mapreduce;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

import org.apache.log4j.Logger;

/**
 * Main execution thread for job processing.  This thread determines
 * when to start or restart map/reduce worker threads.
 *
 * <p>One JobThread is created per job; it repeatedly calls
 * {@link #runJob(String, String, Logger, int)} until the job completes
 * without detecting a remote process failure.
 */
public class JobThread extends Thread {

	/** Connect/read timeout, in milliseconds, for the process health ping. */
	private static final int PROCESS_CHECK_TIMEOUT = 2000;

	private final JobState jobState;
	private final Mapper mapper;
	private final Reducer reducer;
	private final String basedir;
	private final String owner;   // "host:port" of this server

	JobThread(JobState state, Mapper m, Reducer r, String basedir, String owner) {
		this.jobState = state;
		this.mapper = m;
		this.reducer = r;
		this.basedir = basedir;
		this.owner = owner;
	}

	/**
	 * Runs the job to completion, restarting the map/reduce pass whenever
	 * a failure on another machine is detected.
	 */
	@Override
	public void run() {

		String jobDir = "job_" + Integer.toString(jobState.getJobID()) + "/";
		String mapOutputDir = basedir + "/map_output/" + jobDir;

		// create the output directory.  mkdirs() returns false both on real
		// failure and when the directory already exists, so only treat it as
		// an error when the directory is genuinely absent afterwards.
		File dir = new File(mapOutputDir);
		if (!dir.mkdirs() && !dir.isDirectory()) {
			throw new IllegalStateException(
					"Unable to create map output directory: " + mapOutputDir);
		}
		Logger log = Log.getLogger(jobState.getJobID(), mapOutputDir + "log", true);

		log.info("JOB_START " + jobState.getJobID());
		int mapSequence = 0;

		while (!runJob(jobDir, mapOutputDir, log, mapSequence)) {
			// runJob returns false when a failure in another machine is
			// detected.  Restart the job to reprocess the map/reduce
			// tasks owned by the bad machine.

			// Increment the sequence number to prevent new map files
			// from overwriting results from a previous run.
			mapSequence += 1;
		}

		log.info("JOB_END " + jobState.getJobID());
	}

	/**
	 * Executes one complete map/reduce pass for the job.
	 *
	 * @param jobDir       relative directory name for this job ("job_&lt;id&gt;/")
	 * @param mapOutputDir absolute directory where local map output is written
	 * @param log          job-scoped logger
	 * @param mapSequence  sequence number for this pass; incremented by the
	 *                     caller on restart so reruns never overwrite earlier
	 *                     map files
	 * @return true when the job finished (or hit an unrecoverable local error,
	 *         which is logged rather than retried); false when a remote
	 *         failure was detected and the job must be restarted
	 */
	public boolean runJob(String jobDir, String mapOutputDir, Logger log, int mapSequence) {

		try {

			//
			// ---- START MAP PROCESSING ----
			//
			log.info("MAP_START");

			// one writer will collect the results from all map workers
			MapOutputWriter mapOutput = new MapOutputWriter(mapOutputDir, mapSequence);
			Thread[] mapThreads = new Thread[Constants.NUMBER_OF_MAP_WORKERS];

			// create and start all map threads, each with its own Mapper
			// instance so workers never share mapper state
			for (int i = 0; i < mapThreads.length; i++) {
				Thread thread = new MapThread(jobState,
						                      mapper.getClass().newInstance(),
						                      mapOutput,
						                      owner,
						                      log);
				thread.setDaemon(true);
				thread.start();
				mapThreads[i] = thread;
			}

			// wait for all the map threads to complete.  No point in polling the
			// global structure while there is work here.
			for (Thread thread : mapThreads) {
				thread.join();
			}

			mapOutput.close();

			log.info("MAP_DONE_LOCAL_PROCESSING");

			final String host = owner.split(":")[0];
			final int port = Integer.parseInt(owner.split(":")[1]);

			// commit map results to the job state information.  Each local
			// output file is published as an HTTP URL served by this process.
			Map<Integer, URL> mapResults = new TreeMap<Integer, URL>();
			for (Map.Entry<Integer, String> entry : mapOutput.getOutputFiles().entrySet()) {
				URL url = new URL("http",
						   host,
						   port,
				           "/map_result/" + jobDir + entry.getValue());
				mapResults.put(entry.getKey(), url);

				log.info("MAP_RESULT " + url.toString());
			}
			jobState.commitMapResults(owner, mapResults);

			log.info("MAP_COMMITTED");

			Set<String> activeProcessSet = new TreeSet<String>();

			// All local map threads are done.  Wait for all tasks to complete,
			// pinging the owner of each in-progress task to ensure it is alive.
			while (!jobState.mapTasksComplete(activeProcessSet)) {
				if (!verifyActiveProcesses(activeProcessSet, log)) {
					return false; // not done.  restart.
				}
				// wait a bit before polling the table again.
				Thread.sleep(Constants.IN_PROGRESS_POLL_TIME);
				activeProcessSet.clear();
			}

			log.info("MAP_END");

			//
			// ---- START REDUCE PROCESSING ----
			//

			log.info("REDUCE_START");

			Thread[] reduceThreads = new Thread[Constants.NUMBER_OF_REDUCE_WORKERS];

			// create and start all reduce threads, each with its own Reducer
			// instance
			for (int i = 0; i < reduceThreads.length; i++) {
				Thread thread = new ReduceThread(jobState,
						                         reducer.getClass().newInstance(),
						                         owner,
						                         log);
				thread.setDaemon(true);
				thread.start();
				reduceThreads[i] = thread;
			}

			// wait for all the reduce threads to complete.
			for (Thread thread : reduceThreads) {
				thread.join();
			}

			//
			// If map tasks are not complete, then a failure was detected
			// during reduce processing that caused a rollback.  restart.
			//
			if (!jobState.mapTasksComplete(null)) {
				return false; // not done.  restart.
			}

			// wait for all reduce tasks to complete, again pinging the owner
			// of every in-progress task.
			activeProcessSet.clear();
			while (!jobState.reduceTasksComplete(activeProcessSet)) {
				if (!verifyActiveProcesses(activeProcessSet, log)) {
					return false; // not done.  restart.
				}
				// wait a bit before polling the table again.
				Thread.sleep(Constants.IN_PROGRESS_POLL_TIME);
				activeProcessSet.clear();
			}

			log.info("REDUCE_END");

			// DONE!!!
			jobState.sendNotification();
			jobState.close();

		} catch (IOException e) {
			// Unrecoverable local error: record it and let the caller treat the
			// job as finished rather than retrying a local fault forever.
			log.error("JOB_FAILED " + jobState.getJobID(), e);
		} catch (InterruptedException e) {
			// Preserve the interrupt status so the owning thread can observe it.
			Thread.currentThread().interrupt();
			log.error("JOB_INTERRUPTED " + jobState.getJobID(), e);
		} catch (InstantiationException e) {
			log.error("JOB_FAILED " + jobState.getJobID(), e);
		} catch (IllegalAccessException e) {
			log.error("JOB_FAILED " + jobState.getJobID(), e);
		}

		return true;
	}

	/**
	 * Pings every process in the given set.  When a dead process is found,
	 * its tasks are released for reprocessing and false is returned so the
	 * caller can restart the job.
	 *
	 * @param activeProcessSet "host:port" addresses of processes owning
	 *                         in-progress tasks
	 * @param log              job-scoped logger
	 * @return true when every process answered the ping; false otherwise
	 */
	private boolean verifyActiveProcesses(Set<String> activeProcessSet, Logger log) {
		for (String process : activeProcessSet) {
			log.info("PING_PROCESS " + process);
			if (!isProcessRunning(process)) {
				jobState.recoverFromProcessFailure(process);
				log.info("FAULT_DETECTED process: " + process);
				return false;
			}
		}
		return true;
	}

	/** @return the shared state for the job this thread is running. */
	JobState getJobState() {
		return jobState;
	}

	/**
	 * Sends an HTTP ping to the given server and reports whether it answered
	 * "OK" for this job within {@link #PROCESS_CHECK_TIMEOUT} milliseconds.
	 *
	 * @param server "host:port" address of the process to check
	 * @return true only when the server replied "OK"; any I/O failure or
	 *         other reply is treated as a dead process
	 */
	boolean isProcessRunning(String server) {
		BufferedReader in = null;
		try {
			URL pingURL = new URL("http://" + server + "/ping?" + jobState.getJobID());
			URLConnection urlconn = pingURL.openConnection();
			urlconn.setReadTimeout(PROCESS_CHECK_TIMEOUT);
			urlconn.setConnectTimeout(PROCESS_CHECK_TIMEOUT);
			urlconn.connect();
			// The reply is plain ASCII; request UTF-8 explicitly instead of
			// relying on the platform default charset.
			in = new BufferedReader(new InputStreamReader(urlconn.getInputStream(), "UTF-8"));
			String pingReply = in.readLine();
			return "OK".equals(pingReply);
		}
		catch (IOException e) {
			// An unreachable or unresponsive server counts as a failed process.
			return false;
		}
		finally {
			// Always release the connection's stream, even when readLine throws.
			if (in != null) {
				try {
					in.close();
				} catch (IOException ignored) {
					// nothing useful to do if closing the reader fails
				}
			}
		}
	}

}
