package edu.indiana.d2i.vmm.job;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.jcraft.jsch.JSchException;

import edu.indiana.d2i.vmm.SigiriHTRCConfiguration;
import edu.indiana.d2i.vmm.SigiriHTRCJobManager;
import edu.indiana.d2i.vmm.cloud.CloudInstanceInfo;
import edu.indiana.d2i.vmm.cluster.SSHSession;
import edu.indiana.d2i.vmm.vm.BasicHTRCVMController;
import edu.indiana.extreme.sigiri.JobInfoBean;
import edu.indiana.extreme.sigiri.util.ConnectionManager;
import edu.indiana.extreme.sigiri.util.Constants;

public class SigiriHTRCSingleVMJobMgr extends SigiriHTRCJobManager {
	public static class HTRCSingleVMController extends BasicHTRCVMController {
		// Remote working directory on the VM; normalized in the constructor
		// so that a non-empty path always ends with "/".
		private String WORK_HOME = null;
		// Lazily opened SSH session to the VM; created by initHeadNode().
		private SSHSession headNode = null;
		// The single cloud instance this controller manages (first entry of vmInfo).
		private final CloudInstanceInfo singleVM;
		// Remote path of the generated status-query script; set by runJob().
		private String jobQueryPath = null;
		// Number of additional SSH connection retries before giving up.
		private final int MAX_TRY = 5;

		private static final Log logger = LogFactory
				.getLog(HTRCSingleVMController.class);

		/**
		 * Lazily opens an SSH session to the VM's public IP. Sleeps 5 seconds
		 * before each attempt to give the freshly provisioned VM time to
		 * accept SSH logins, and retries a failed connection up to MAX_TRY
		 * more times before rethrowing the last {@link JSchException}.
		 *
		 * @throws JSchException if the connection still fails after all retries
		 * @throws InterruptedException if the pre-attempt sleep is interrupted
		 */
		private void initHeadNode() throws JSchException, InterruptedException {
			int failures = 0;
			while (true) {
				try {
					// Give the VM time to boot before each connection attempt.
					Thread.sleep(5000);
					headNode = new SSHSession(singleVM.publicIP, singleVM.user,
							singleVM.pwd);
					return;
				} catch (JSchException e) {
					logger.error("Failed to connect to VM " + singleVM.publicIP, e);
					if ((++failures) > MAX_TRY)
						throw e;
					logger.info("Retry to connect to VM "
							+ singleVM.publicIP + " ...");
				}
			}
		}

		/**
		 * Wraps an existing controller, targeting the first (and only) VM in
		 * its VM list. Normalizes the VM's working directory so that a
		 * non-empty path always ends with "/", making later string
		 * concatenation of remote paths safe.
		 *
		 * @param controller source of the VM policy, framework name and VM info
		 */
		public HTRCSingleVMController(BasicHTRCVMController controller) {
			super(controller.getVMPolicy(), controller
					.getSoftwareFrameworkName(), controller.getVMInfo());
			singleVM = vmInfo.get(0);
			WORK_HOME = singleVM.workDir;
			// Append a trailing slash unless the path already has one
			// (an empty workDir is left untouched, matching prior behavior).
			if (!WORK_HOME.isEmpty() && !WORK_HOME.endsWith("/"))
				WORK_HOME = WORK_HOME + "/";
		}

		/**
		 * Prepares the VM for job execution: connects to the head node if no
		 * session exists yet, then hands ownership of the working directory
		 * to the hadoop user.
		 */
		@Override
		public void setupVMs() throws Exception {
			if (headNode == null) {
				initHeadNode();
			}
			String chownCmd = "chown -R hduser:hadoop " + WORK_HOME;
			headNode.execCmdWithoutBlocking(chownCmd, true);
		}

		/**
		 * Uploads a file into the VM's working directory over SSH.
		 *
		 * @param file     content to upload; the caller owns the stream
		 * @param filename destination file name, placed under WORK_HOME
		 * @throws Exception on SSH connection or transfer failure
		 */
		@Override
		public void uploadFile(InputStream file, String filename)
				throws Exception {
			if (headNode == null)
				initHeadNode();
			headNode.writeFile(file, WORK_HOME + filename, false);
			logger.info("Upload " + filename);
		}

		/**
		 * Installs the generated job-execution container and status-query
		 * scripts on the VM, marks them (and the job executable) as
		 * executable, then launches the container in the background.
		 *
		 * @param cmdLine job command line, resolved relative to WORK_HOME
		 * @param sudo    whether remote commands are run with sudo
		 * @throws Exception on SSH connection or remote command failure
		 */
		@Override
		public void runJob(String cmdLine, boolean sudo) throws Exception {
			if (headNode == null)
				initHeadNode();

			// generate container script
			String containerPath = WORK_HOME + "container.sh";
			// Remembered so getJobStatusString() can poll the job later.
			jobQueryPath = WORK_HOME + "query.sh";
			headNode.writeSmallFile(
					jobExeContainer.generateJobExeContainerScript(WORK_HOME
							+ cmdLine, this.softwareFramework), containerPath,
							sudo);
			headNode.writeSmallFile(jobExeContainer.generateQueryJobScript(),
					jobQueryPath, sudo);
			headNode.execCmdWithoutBlocking("chmod u+x " + containerPath, sudo);
			headNode.execCmdWithoutBlocking("chmod u+x " + jobQueryPath, sudo);
			headNode.execCmdWithoutBlocking("chmod u+x " + WORK_HOME + cmdLine,
					sudo);
			logger.info("Upload scripts to " + containerPath + ", and "
					+ jobQueryPath);

			// The trailing "&" detaches the container so this call returns
			// immediately; progress is observed via the query script.
			headNode.execCmdWithoutBlocking(containerPath + " &", sudo);
		}

		/**
		 * Runs the status-query script on the VM and maps its trimmed output
		 * to a Sigiri job status. Returns NOT_AVAILABLE when no query script
		 * has been installed yet, when the output is unrecognized, or when
		 * the remote command fails.
		 */
		@Override
		public String getJobStatusString() {
			if (jobQueryPath == null) {
				logger.error("No job query point to access");
				return Constants.JobStatus.NOT_AVAILABLE;
			}

			try {
				String output = headNode
						.execCmdReturnScreenOutput(jobQueryPath, false)
						.toString().trim();
				logger.info("String returned from query: " + output);
				// Constant-first equals() tolerates odd/null-ish output.
				if (jobExeContainer.JOB_SUCCESS.equals(output)) {
					return Constants.JobStatus.JOB_COMPLETED;
				}
				if (jobExeContainer.JOB_FAIL.equals(output)) {
					return Constants.JobStatus.FAILED;
				}
				if (jobExeContainer.JOB_RUNNING.equals(output)) {
					return Constants.JobStatus.STATE_JOB_STARTED;
				}
			} catch (Exception e) {
				logger.error(e);
			}
			return Constants.JobStatus.NOT_AVAILABLE;
		}

		/**
		 * Downloads a file from the VM's working directory to a local path
		 * via SFTP.
		 *
		 * @param src file name on the VM, resolved relative to WORK_HOME
		 * @param dst local destination path
		 * @throws Exception on SSH connection or transfer failure
		 */
		@Override
		public void downloadFile(String src, String dst) throws Exception {
			// Connect lazily, consistent with the other remote operations;
			// previously this method could NPE if called before any of them.
			if (headNode == null)
				initHeadNode();
			// save to local directory by sftp
			logger.info("Download file " + WORK_HOME + src);
			headNode.downloadFile(WORK_HOME + src, dst);
		}
	}

	private static final Log logger = LogFactory
			.getLog(SigiriHTRCSingleVMJobMgr.class);
	// Parsed job request; populated by submitJob() and read by clearupJob().
	private SigiriHTRCJobRequest request;
	// Per-job local directory (with trailing "/") for downloaded output.
	private String localWorkDir;

	/**
	 * Creates a job manager for a single-VM job. Derives a per-job local
	 * working directory from the configured VMM work dir plus the job's
	 * internal id and creates it if missing; if creation fails, the VMs are
	 * released and a RuntimeException is thrown.
	 */
	public SigiriHTRCSingleVMJobMgr(ConnectionManager connectionManager,
			BasicHTRCVMController vmController, JobInfoBean jobInfoBean, 
			SigiriHTRCConfiguration configuration) {
		super(connectionManager, new HTRCSingleVMController(vmController),
				jobInfoBean);
		localWorkDir = configuration.getProperty(SigiriHTRCConstants.VMM_LOCAL_WORKDIR);
		// Normalize to a trailing slash before appending the job id.
		if (!localWorkDir.isEmpty() && !localWorkDir.endsWith("/"))
			localWorkDir = localWorkDir + "/";
		localWorkDir = localWorkDir + jobInfoBean.getInternalId();
		File localDir = new File(localWorkDir);
		if (!localDir.exists() && !localDir.mkdirs()) {
			logger.fatal("Cannot create local working directory " + localWorkDir);
			vmController.releaseVMs();
			throw new RuntimeException("Cannot create local working directory " + localWorkDir);
		}
		localWorkDir += "/";
	}

	/**
	 * Parses the job request, uploads the executable and property files to
	 * the VM and launches the job. On success, returns a bean carrying the
	 * job id with status SUBMITTED_TO_JOB_MANAGER; on any failure the error
	 * is logged and the (empty) bean is returned as-is.
	 */
	@Override
	protected JobInfoBean submitJob(JobInfoBean jobInfoBean) {
		JobInfoBean outJobInfoBean = new JobInfoBean();

		try {
			request = SigiriHTRCJobRequest
					.parseFromXML(jobInfoBean.getJobDescription());

			// download files from registry
			logger.info("Path for executable " + request.executablePath);
			logger.info("Path for property file " + request.propPath);

			// try-with-resources: the local streams were previously leaked.
			try (FileInputStream jobInput = new FileInputStream(new File(
					request.executablePath));
					FileInputStream jobProp = new FileInputStream(new File(
							request.propPath))) {
				// Give the VM a moment to finish booting before connecting.
				logger.info("Take a 5 seconds break to wait for the VM up.");
				Thread.sleep(5000);

				// send files to VM
				vmController.setupVMs();
				vmController.uploadFile(jobInput, request.executableName);
				vmController.uploadFile(jobProp, request.propName);
			}

			// execute job
			vmController.runJob(request.exeCmdLine, false);

			outJobInfoBean.setJobId(getJobID());
			outJobInfoBean
					.setStatus(Constants.JobStatus.SUBMITTED_TO_JOB_MANAGER);
			return outJobInfoBean;
		} catch (Exception e) {
			logger.error(e);
			return outJobInfoBean;
		}
	}

	/**
	 * Best-effort cleanup after the job finishes: downloads the job's output
	 * file into the local working directory, then always releases the VMs,
	 * even when the download fails.
	 */
	@Override
	protected void clearupJob() {
		try {
			String localTarget = localWorkDir + jobInfoBean.getJobId();
			vmController.downloadFile(request.outputFileName, localTarget);
		} catch (Exception e) {
			logger.error("Unable to download file because " + e);
		} finally {
			vmController.releaseVMs();
		}
	}
}
