package edu.utexas.mapreduce;

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.jar.JarInputStream;
import org.apache.commons.codec.binary.Base64;
import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpHandler;

import edu.utexas.ipc.Connection;
import edu.utexas.ipc.IPC;
import edu.utexas.ipc.Mutex;


/**
 * HTTP handler for job submission ({@code /submit_job}).
 *
 * <p>Parses a {@link Job} from the request body, loads the user-supplied
 * map/reduce classes from the base64-encoded jar, and either (a) originates the
 * job — assigns a job ID, splits the input, and relays the request to every
 * other known server — or (b) accepts a relayed job and decodes the splits the
 * originator attached. In both cases a {@link JobThread} is started locally and
 * registered in the shared job table.
 */
public class HttpSubmitJobHandler implements HttpHandler {

	/** Root directory for job working files, passed through to each JobThread. */
	final String basedir;
	/** IPC facility used to open the per-job connection and mutex. */
	final IPC ipc;

	// SS: local view of sState (shared job table; started jobs are registered here)
	private SharedMapState sState = null;

	HttpSubmitJobHandler(String basedir, IPC ipc, SharedMapState sState) {
		this.basedir = basedir;
		this.ipc     = ipc;

		// SS: get the shared state object
		this.sState  = sState;
	}

	/**
	 * Handles one job-submission request. Validates the job, starts a local
	 * {@link JobThread}, and (for an originating request) relays the job to the
	 * other servers. Responds 200 "OK" on success, 500 with an error message if
	 * the submitted classes cannot be loaded or instantiated.
	 *
	 * @throws IOException if the job's classes or input/output paths are invalid,
	 *                     or the response cannot be written
	 */
	public void handle(HttpExchange httpEx) throws IOException {

		System.out.println("HTTP submit job request");

		InputStream is = httpEx.getRequestBody();

		Job newJob = new Job(is);

		//TODO: server should not need to validate, but I am leaving it here for now for an extra level of checking.
		// NOTE: assert is a no-op unless the JVM runs with -ea.
		assert(newJob.validate());

		System.out.println("  map    : " + newJob.map);
		System.out.println("  reduce : " + newJob.reduce);
		System.out.println("  input  : " + newJob.input);
		System.out.println("  output : " + newJob.output);
		System.out.println("  jar    : ");
		System.out.println(newJob.jar);

		Base64 base64 = new Base64();
		byte[] jarData = base64.decode(newJob.jar);

		// Remaining data in the stream as binary jar file.
		JarClassLoader loader = new JarClassLoader(new JarInputStream(new ByteArrayInputStream(jarData)));

		// Load map/reduce classes from the jar
		try {
			Class<?> mapClass = loader.loadClass(newJob.map);
			Class<?> reduceClass = loader.loadClass(newJob.reduce);

			// FIX: reject invalid classes outright (consistent with the path checks
			// below) instead of printing an error and then failing later with an
			// uncaught ClassCastException at the newInstance() cast.
			if (!Mapper.class.isAssignableFrom(mapClass)) {
				throw new IOException("Map class " + newJob.map + " does not implement " + Mapper.class.getName());
			}
			if (!Reducer.class.isAssignableFrom(reduceClass)) {
				throw new IOException("Reduce class " + newJob.reduce + " does not implement " + Reducer.class.getName());
			}
			Mapper mapper = (Mapper)mapClass.newInstance();
			Reducer reducer = (Reducer)reduceClass.newInstance();

			// input/output path validation
			File inputFile = new File(newJob.input);
			File outputFile = new File(newJob.output);
			if(!inputFile.canRead()) {
				throw new IOException("can't read from " + newJob.input);
			}
			// FIX: File.canWrite() returns false for a file that does not exist yet,
			// which is the normal case for a fresh job output. Accept a nonexistent
			// output whose parent directory is writable.
			if(outputFile.exists() ? !outputFile.canWrite() : !isParentWritable(outputFile)) {
				throw new IOException("can't write to " + newJob.output);
			}

			int jobID;
			List<InputBlock> inputList = null;

			if (newJob.jobIsRelay == null || !Boolean.parseBoolean(newJob.jobIsRelay))
			{
				// We are the first server to receive this request, so we alone
				// assign the job ID, split the input, and relay to the others.

				// Assign jobID.  Use the hash of the whole request string to get a number
				jobID = Math.abs((newJob.map + newJob.reduce + newJob.input + newJob.output + newJob.jar).hashCode());

				// Split the input
				InputSplitter fileSplitter = new InputSplitterLine(Constants.MAP_INPUT_BLOCK_SIZE);
				inputList = new InputSplitterRecursive(fileSplitter).splitInput(inputFile);
				for(InputBlock b: inputList) {
					System.out.println(b);
				}

				relayToOtherServers(newJob, base64, inputList, jobID);
			}
			else{
				// We are receiving a relayed job: reuse the originator's ID and
				// decode the splits it attached.
				jobID = Integer.parseInt(newJob.jobId);
				inputList = decodeRelayedSplits(newJob, base64);
			}

			// run the job!!!
			//
			Connection ipcConnection = ipc.openConnection("job_" + jobID);
			Mutex ipcMutex = ipc.openMutex("mutex_" + jobID);
			String owner = InetAddress.getLocalHost().getHostAddress() + ":" + httpEx.getLocalAddress().getPort();
			JobState job = new JobStateGlobal(new JobStateLocal(jobID, newJob.output, inputList, newJob.requesterEmail), ipcMutex, ipcConnection);
			JobThread jobThread = new JobThread(job, mapper, reducer, basedir, owner);
			jobThread.setDaemon(true);
			jobThread.start();

			//SS: add the job to the sharedState map
			sState.insertJob(jobID, jobThread);

		} catch (ClassNotFoundException e) {
			// FIX: previously these failures were swallowed and the client still
			// received 200 "OK" even though no job was started.
			e.printStackTrace();
			sendResponse(httpEx, 500, "ERROR: class not found: " + e.getMessage());
			return;
		} catch (InstantiationException e) {
			e.printStackTrace();
			sendResponse(httpEx, 500, "ERROR: cannot instantiate class: " + e.getMessage());
			return;
		} catch (IllegalAccessException e) {
			e.printStackTrace();
			sendResponse(httpEx, 500, "ERROR: cannot access class: " + e.getMessage());
			return;
		}

		sendResponse(httpEx, 200, "OK");
	}

	/**
	 * Relays an originating job request to every other known server so they can
	 * run their share of the work. Relaying is best-effort: unreachable servers
	 * are logged and skipped.
	 *
	 * @throws IOException if the input splits cannot be serialized
	 */
	private void relayToOtherServers(Job newJob, Base64 base64, List<InputBlock> inputList, int jobID) throws IOException {
		// Get list of other servers. Copy the shared list so removing self below
		// does not mutate it. (The original code also called Collections.copy here,
		// which was a no-op after the copy constructor and has been removed.)
		List<InetSocketAddress> otherServers = new ArrayList<InetSocketAddress>(ServerMapReduce.getServerList());
		// NOTE(review): if getPid() returns an int, List.remove(int) removes by
		// INDEX, not by value — confirm getPid() yields this server's address entry.
		otherServers.remove(ServerMapReduce.getPid()); //remove self

		if(otherServers.size()>0){

			// build job request with job ID, splits, and relay = true
			newJob.jobId = Integer.toString(jobID);
			newJob.jobIsRelay = Boolean.TRUE.toString();

			// serialize the splits and wrap them in the CDATA-style envelope the
			// receiving side strips off (see decodeRelayedSplits)
			ByteArrayOutputStream out = new ByteArrayOutputStream();
			ObjectOutputStream oOut;
			oOut = new ObjectOutputStream(out);
			oOut.writeObject(inputList);
			byte bytes[]= out.toByteArray();
			String encodedString = base64.encodeToString(bytes);
			newJob.jobSplits = "![CDATA[" + encodedString + "]]";
			System.out.println("job split data: " + newJob.jobSplits);

			//Open HTTP connection to each server, relay the request
			System.out.println("Sending job to other servers");
			for (InetSocketAddress server : otherServers){
				PrintWriter serverOut = null;
				BufferedReader serverIn = null;
				try {
					//TODO: find a better way to guess the HTTP port, other than subtracting 1000 from TCP port
					String serverAddress = server.getAddress().getHostAddress() + ":" + (server.getPort()-1000);
					System.out.println("Connecting with " + serverAddress);
					URL url = new URL("http://" + serverAddress + "/submit_job");
					System.out.println(url);
					URLConnection serverConnection = url.openConnection();
					serverConnection.setDoOutput(true);
					serverOut = new PrintWriter(serverConnection.getOutputStream());
					serverOut.println(newJob.toXMLString(true));
					serverOut.flush();

					// Drain the server response so the exchange completes.
					serverIn = new BufferedReader(new InputStreamReader(serverConnection.getInputStream()));
					String line;
					while ((line = serverIn.readLine()) != null) {
						System.out.println(line);
					}
				}
				catch(IOException e) {
					// Best-effort relay: an unreachable server is not fatal.
					System.out.println("Unable to connect with " + server + ". skip...");
				}
				finally {
					System.out.println("closing...");
					if (serverOut != null) serverOut.close();
					if (serverIn != null)  serverIn.close();
				}
			}
		}
	}

	/**
	 * Decodes the serialized input splits attached to a relayed job request.
	 * Returns {@code null} if decoding fails (preserving the original
	 * best-effort behavior of logging and continuing).
	 */
	@SuppressWarnings("unchecked")
	private static List<InputBlock> decodeRelayedSplits(Job newJob, Base64 base64) {
		List<InputBlock> inputList = null;
		try{
			System.out.println(newJob.jobSplits);
			// Strip the "![CDATA[ ... ]]" envelope added by the relaying server.
			String splitString = newJob.jobSplits.split("^!\\x5BCDATA\\x5B")[1];
			splitString = splitString.split("]]$")[0];

			byte[] splitData = base64.decode(splitString);
			ByteArrayInputStream in = new ByteArrayInputStream(splitData);

			// SECURITY NOTE(review): Java-native deserialization of data received
			// over HTTP is unsafe against malicious peers; acceptable only inside a
			// trusted cluster. Consider an ObjectInputFilter or a JSON encoding.
			ObjectInputStream oIn;
			oIn = new ObjectInputStream(in);
			Object obj = oIn.readObject();
			inputList = (List<InputBlock>)(obj);
		}
		catch (Exception e){
			e.printStackTrace();
		}
		return inputList;
	}

	/** True if the (possibly nonexistent) file's parent directory is writable. */
	private static boolean isParentWritable(File f) {
		File parent = f.getAbsoluteFile().getParentFile();
		return parent != null && parent.canWrite();
	}

	/** Writes a plain-text HTTP response and closes the body stream. */
	private static void sendResponse(HttpExchange httpEx, int status, String response) throws IOException {
		byte[] body = response.getBytes();
		// FIX: the content-length must be the byte count, not the char count.
		httpEx.sendResponseHeaders(status, body.length);
		OutputStream os = httpEx.getResponseBody();
		os.write(body);
		os.close();
	}

}
