package edu.indiana.d2i.job;

import java.io.File;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import edu.indiana.d2i.cluster.HadoopClusterManager;
import edu.indiana.d2i.job.HTRCJobStatus.STATUS;
import edu.indiana.d2i.vm.VMPoolManager;

/**
 * Manages the lifecycle of HTRC jobs: jobs are submitted into a waiting
 * queue, scheduled onto VMs by a {@link JobSchedule}, executed on a Hadoop
 * cluster by a {@link JobExecutor} worker thread, and finally moved to the
 * completed map (which also holds failed jobs).
 *
 * <p>Thread-safety: {@code waitingJobs} is insertion-ordered (the scheduler
 * sees jobs in submission order) and guarded by {@code synchronized
 * (waitingJobs)}; {@code runningJobs} and {@code completedJobs} are
 * concurrent maps because they are mutated both by the scheduling thread
 * and by JobExecutor worker threads.
 */
class JobQueueManager implements Runnable {
	// Hadoop job run-state codes as returned by getHadoopJobState()
	// (matches org.apache.hadoop.mapred.JobStatus: 2 = SUCCEEDED, 3 = FAILED).
	private static final int HADOOP_STATE_SUCCEEDED = 2;
	private static final int HADOOP_STATE_FAILED = 3;
	// Pause between scheduling passes; without it the run() loop busy-spins
	// on the waitingJobs lock and pegs a CPU core.
	private static final long SCHEDULE_INTERVAL_MS = 1000;
	// Pause between polls of a running cluster job.
	private static final long POLL_INTERVAL_MS = 5000;

	private VMPoolManager vmpool = null;
	private JobSchedule scheduler = null;
	// Jobs submitted but not yet scheduled; LinkedHashMap preserves
	// submission order for the scheduler. Guarded by synchronized (waitingJobs).
	private Map<String, HTRCJobInProgress> waitingJobs = new LinkedHashMap<String, HTRCJobInProgress>();
	// Jobs currently executing, keyed by job ID. Concurrent: removed by the
	// worker thread in JobExecutor.run()'s finally block.
	private Map<String, JobExecutor> runningJobs = new ConcurrentHashMap<String, JobExecutor>();
	// Finished jobs, including failed ones. TODO: persist to file/DB?
	private Map<String, HTRCJobInProgress> completedJobs = new ConcurrentHashMap<String, HTRCJobInProgress>();
	private ExecutorService executor = null;

	public static final Log LOG = LogFactory.getLog(JobQueueManager.class);

	/**
	 * Runs a single scheduled job on its own Hadoop cluster (one job per
	 * cluster), polling the cluster until the job succeeds or fails, then
	 * releases the job's VMs and moves the job to the completed map.
	 */
	class JobExecutor implements Runnable {
		private HTRCJobInProgress jobInProgress;
		private HadoopClusterManager cluster = null;

		/** Refreshes jobStatus.info from the cluster; failures are logged, not thrown. */
		private void updateJobStatusFromCluster() {
			try {
				jobInProgress.jobStatus.info = cluster.getJobStatusFromScreen(jobInProgress.clusterJobId);
				LOG.info("Update status of job " + jobInProgress.jobConf.jobID);
			} catch (Exception e) {
				// Best-effort refresh: keep the previous info on failure.
				LOG.warn("Can not update status of job " + jobInProgress.jobConf.jobID, e);
			}
		}

		public JobExecutor(HTRCJobInProgress job) {
			this.jobInProgress = job;
		}

		public HTRCJobStatus getJobStatus() {
			return getJobInProgress().jobStatus;
		}

		/** Returns the job record, refreshing its status if it has a cluster-side ID. */
		public HTRCJobInProgress getJobInProgress() {
			if (jobInProgress.clusterJobId != null) updateJobStatusFromCluster();
			LOG.debug(jobInProgress.clusterJobId);
			return jobInProgress;
		}

		@Override
		public void run() {
			LOG.debug("Run job " + jobInProgress.jobConf.jobID);
			try {
				switch (jobInProgress.jobConf.jobtype) {
				case MAPREDUCE:
					cluster = new HadoopClusterManager(jobInProgress.vms);
					cluster.startup();

					String propPath = jobInProgress.jobConf.propFilePath;
					String propFile = propPath.substring(propPath.lastIndexOf('/') + 1);
					String jobPath = jobInProgress.jobConf.jobFilePath;
					String jobFile = jobPath.substring(jobPath.lastIndexOf('/') + 1);

					// Open the configured full paths (the previous code opened
					// only the base names, i.e. relative to the working
					// directory) and close both streams once submission and
					// polling are done — they used to leak.
					FileInputStream propInput = new FileInputStream(new File(propPath));
					try {
						FileInputStream jobInput = new FileInputStream(new File(jobPath));
						try {
							// Asynchronous call: returns the cluster-side job
							// ID as soon as the job is submitted.
							String clusterJobId = cluster.submitJob(jobInput, jobFile,
									propInput, propFile, jobInProgress.jobConf.args);
							jobInProgress.clusterJobId = clusterJobId;
							LOG.info("Job " + jobInProgress.jobConf.jobID
									+ " is submitted. ID in cluster is "
									+ jobInProgress.clusterJobId);

							// Poll until the cluster reports a terminal state.
							while (!cluster.isJobFinishedOrFailed(clusterJobId)) {
								Thread.sleep(POLL_INTERVAL_MS);
							}
							updateJobStatusFromCluster();
							switch (cluster.getHadoopJobState(clusterJobId)) {
							case HADOOP_STATE_SUCCEEDED:
								jobInProgress.jobStatus.status = STATUS.SUCCEEDED;
								break;
							case HADOOP_STATE_FAILED:
								jobInProgress.jobStatus.status = STATUS.FAILED;
								break;
							}
						} finally {
							jobInput.close();
						}
					} finally {
						propInput.close();
					}
					cluster.stop(); // ??
					break;
				default:
					break;
				}
			} catch (InterruptedException e) {
				// Restore the interrupt flag so the executor service can shut
				// down cleanly; the job is recorded as failed.
				Thread.currentThread().interrupt();
				LOG.error("Job " + jobInProgress.jobConf.jobID + " is interrupted", e);
				jobInProgress.jobStatus.status = STATUS.FAILED;
			} catch (Exception e) {
				LOG.error("Job " + jobInProgress.jobConf.jobID + " fails", e);
				jobInProgress.jobStatus.status = STATUS.FAILED;
			} finally {
				LOG.info("Job " + jobInProgress.jobConf.jobID + " finishes. Clean up resources.");
				// Clean up: free the VMs and move the job (succeeded or
				// failed) from running to completed.
				vmpool.releaseVMs(jobInProgress.jobConf.jobID);
				runningJobs.remove(jobInProgress.jobConf.jobID);
				completedJobs.put(jobInProgress.jobConf.jobID, jobInProgress);
			}
		}
	}

	/**
	 * @param vmpool    pool of VMs jobs are scheduled onto
	 * @param scheduler policy that picks the next waiting job to run
	 */
	public JobQueueManager(VMPoolManager vmpool, JobSchedule scheduler) {
		this.vmpool = vmpool;
		this.scheduler = scheduler;
		// At most one concurrent job per VM in the pool.
		executor = Executors.newFixedThreadPool(vmpool.size());
	}

	/** Adds a job to the waiting queue with status WAITING. */
	public void submitJob(HTRCJobConfig jobConf) {
		synchronized (waitingJobs) {
			waitingJobs.put(jobConf.jobID, new HTRCJobInProgress(jobConf,
					new HTRCJobStatus(jobConf.jobID, STATUS.WAITING), null));
			LOG.info("Add job " + jobConf.jobID + " to waiting queue");
		}
	}

	/**
	 * Looks up a job's status in the waiting, running, then completed maps.
	 *
	 * @return the job's status, or {@code null} for an unknown job ID
	 *         (previously this threw a NullPointerException)
	 */
	public HTRCJobStatus getJobStatus(String jobID) {
		synchronized (waitingJobs) {
			HTRCJobInProgress job = waitingJobs.get(jobID);
			if (job != null)
				return job.jobStatus;
		}
		JobExecutor running = runningJobs.get(jobID);
		if (running != null)
			return running.getJobStatus();
		HTRCJobInProgress completed = completedJobs.get(jobID);
		return (completed != null) ? completed.jobStatus : null;
	}

	/**
	 * Looks up the full job record in the waiting, running, then completed
	 * maps; running jobs are refreshed from the cluster first.
	 *
	 * @return the job record, or {@code null} for an unknown job ID
	 */
	public HTRCJobInProgress getJobFullInfo(String jobID) {
		synchronized (waitingJobs) {
			HTRCJobInProgress job = waitingJobs.get(jobID);
			if (job != null)
				return job;
		}
		JobExecutor running = runningJobs.get(jobID);
		if (running != null)
			return running.getJobInProgress();
		return completedJobs.get(jobID);
	}

	/** Returns a snapshot of all running jobs, each refreshed from its cluster. */
	public List<HTRCJobInProgress> getRunningJobs() {
		List<HTRCJobInProgress> runningjobs = new ArrayList<HTRCJobInProgress>();
		for (JobExecutor jobExecutor : runningJobs.values()) {
			runningjobs.add(jobExecutor.getJobInProgress());
		}
		return runningjobs;
	}

	/** Returns a snapshot of the waiting queue, in submission order. */
	public List<HTRCJobInProgress> getWaitingJobs() {
		synchronized (waitingJobs) {
			return new ArrayList<HTRCJobInProgress>(waitingJobs.values());
		}
	}

	/** Returns a snapshot of completed jobs, including failed ones. */
	public List<HTRCJobInProgress> getCompletedJobs() {
		return new ArrayList<HTRCJobInProgress>(completedJobs.values());
	}

	/**
	 * Scheduling loop: repeatedly asks the scheduler for the next runnable
	 * waiting job, promotes it to the running map, and hands it to the
	 * thread pool. Exits when the thread is interrupted.
	 */
	@Override
	public void run() {
		LOG.info("JobQueueManager is running");
		while (!Thread.currentThread().isInterrupted()) {
			synchronized (waitingJobs) {
				if (!waitingJobs.isEmpty()) {
					HTRCJobInProgress job = scheduler.schedule(waitingJobs,
							vmpool);
					if (job != null) {
						JobExecutor runningJob = new JobExecutor(job);
						waitingJobs.remove(job.jobConf.jobID);
						runningJobs.put(job.jobConf.jobID, runningJob);
						executor.execute(runningJob);
						LOG.info("Job " + job.jobConf.jobID + " is scheduled to run");
					}
				}
			}
			try {
				Thread.sleep(SCHEDULE_INTERVAL_MS);
			} catch (InterruptedException e) {
				// Preserve the interrupt status and stop scheduling.
				Thread.currentThread().interrupt();
			}
		}
	}
}
