package edu.utexas.mapreduce;

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.util.List;
import java.util.Map.Entry;

import org.apache.log4j.Logger;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.XMLReaderFactory;

/**
 * Worker thread for reduce task processing.  The thread continuously requests
 * idle reduce tasks.  When an idle task is reserved, the thread reads the
 * map output using the URLs listed for the reduce task.  The map output is
 * sorted by keys using a SAX XML parser.  The thread terminates when no more
 * reduce tasks are available or when it fails to read the map output.  In the
 * failure case, the thread updates the job state to release the reserved
 * reduce task and revert all map/reduce tasks associated with the failed process.
 */
public class ReduceThread extends Thread {

	/** Connect and read timeout, in milliseconds, for fetching map output URLs. */
	private static final int MAP_URL_TIMEOUT = 10000;

	private final JobState jobState;
	private final Reducer reducer;
	private final String owner;
	private final Logger log;

	/**
	 * @param jobState shared job state used to reserve, release, and complete tasks
	 * @param reducer  user-supplied reduce function applied to each key group
	 * @param owner    identifier of this worker, passed to job-state operations
	 * @param log      logger for task lifecycle events
	 */
	ReduceThread(JobState jobState, Reducer reducer, String owner, Logger log) {
		this.jobState = jobState;
		this.reducer = reducer;
		this.owner = owner;
		this.log = log;
	}

	@Override
	public void run() {
		try {
			XMLReader parser = XMLReaderFactory.createXMLReader("org.apache.xerces.parsers.SAXParser");
			parser.setFeature("http://xml.org/sax/features/validation", true);
			// NOTE(review): external entity resolution is not disabled here.  Map-output
			// URLs appear to come from internal job state, but if they can ever carry
			// untrusted XML, the external-general/parameter-entities features should be
			// turned off to prevent XXE.

			ReduceTask reduceTask;
			while ((reduceTask = jobState.reserveIdleReduceTask(owner)) != null) {

				log.info("REDUCE_TASK_START [" + reduceTask.getID() + "]");

				// XML handler will sort all keys from all map results associated with this key
				XMLMapOutputHandler xmlMapOutput = new XMLMapOutputHandler();
				parser.setContentHandler(xmlMapOutput);

				// process one URL at a time
				for (URL url : reduceTask.getMapResults()) {
					try {
						log.info("REDUCE_TASK_READ_INPUT [" + reduceTask.getID() + "] " + url);
						URLConnection urlconn = url.openConnection();
						urlconn.setConnectTimeout(MAP_URL_TIMEOUT);
						urlconn.setReadTimeout(MAP_URL_TIMEOUT);
						urlconn.connect();
						// SAX does not close streams it did not open, so close explicitly
						// to avoid leaking a connection per map-output URL.
						InputStream in = urlconn.getInputStream();
						try {
							parser.parse(new InputSource(in));
						} finally {
							in.close();
						}
					} catch (IOException e) {
						log.error("REDUCE_TASK_READ_FAILED [" + reduceTask.getID() + "] " + url, e);

						// Release the reserved task and revert all map/reduce tasks
						// associated with the process that served this URL, then stop
						// this worker.
						final int port = url.getPort();
						final String failedProcess = url.getHost() + ((port != -1) ? (":" + port) : "");
						jobState.markReduceTaskIdle(reduceTask.getID(), owner);
						jobState.recoverFromProcessFailure(failedProcess);
						return;
					}
				}

				log.info("REDUCE_TASK_PROCESS [" + reduceTask.getID() + "]");

				ReduceOutputWriter reduceOutput = new ReduceOutputWriter(jobState.getOutputDir(), reduceTask.getID());
				try {
					// Keys arrive sorted from the handler; apply the user reduce
					// function to each key's value list.
					for (Entry<String, List<String>> entry : xmlMapOutput.entrySet()) {
						reducer.reduce(entry.getKey(), entry.getValue().iterator(), reduceOutput);

						Thread.sleep(1); // TODO !!! remove.  for demo only
					}
				} finally {
					// Always release the output file, even if reduce() throws.
					reduceOutput.close();
				}
				jobState.completeReduceTask(owner, reduceTask);

				log.info("REDUCE_TASK_DONE [" + reduceTask.getID() + "]");
			}
		} catch (SAXException e) {
			log.error("reduce thread terminating: XML parse failure", e);
		} catch (IOException e) {
			log.error("reduce thread terminating: I/O failure", e);
		} catch (InterruptedException e) {
			// Restore the interrupt flag so callers joining on this thread can see it.
			Thread.currentThread().interrupt();
			log.error("reduce thread interrupted", e);
		}
	}
}
