package com.headcaselabs.queue;

import java.io.ByteArrayInputStream;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.headcaselabs.orchestra.Configuration;
import com.headcaselabs.queue.message.CancelledJobReport;
import com.headcaselabs.queue.message.FinishedJobReport;
import com.headcaselabs.queue.message.Report;
import com.headcaselabs.s3.DFSUtils;
import com.headcaselabs.work.TestUnitOfWork;
import com.headcaselabs.work.UnitOfWork;
import com.thoughtworks.xstream.XStream;
import com.xerox.amazonws.sqs.Message;
import com.xerox.amazonws.sqs.MessageQueue;
import com.xerox.amazonws.sqs.QueueService;
import com.xerox.amazonws.sqs.SQSException;

/**
 * Provides tools for sending and retrieving messages as well as organizing jobs
 * 
 * @author Florian Leibert
 * 
 */
public class SQSUtils {

	/** Parameters (queue handle, weight, timing statistics) for every configured queue. */
	protected List<QueueParameters> queueParameters;

	/** Serializer for jobs and reports; message bodies are XML produced/consumed by XStream. */
	private XStream xstream;

	/** Strategy that picks the next queue to poll (weighted round robin). */
	private QueueSelectionAlgorithm qsa;

	/** Queue on which job reports are exchanged. */
	private MessageQueue reportQueue;

	private Configuration conf;

	/** Lookup from queue name to its parameters; populated in the constructor. */
	private HashMap<String, QueueParameters> name_mapping = new HashMap<String, QueueParameters>();

	/** S3-backed helper used to track jobs in the job directory. */
	private DFSUtils dfsUtils;

	private final static Log log_ = LogFactory.getLog(SQSUtils.class.getName());

	/**
	 * Creates the report queue, all job queues named in the configuration, the
	 * queue-selection algorithm and the S3 helper.
	 *
	 * @param conf configuration holding AWS credentials and queue settings
	 */
	public SQSUtils(Configuration conf) {
		this.conf = conf;
		xstream = new XStream();
		xstream.alias("unitOfWork", UnitOfWork.class);
		// TODO: all classes that extend UnitOfWork and might be serialized have to be registered here!
		xstream.alias("testUnitOfWork", TestUnitOfWork.class);
		xstream.alias("report", Report.class);
		xstream.alias("cancelledJobReport", CancelledJobReport.class);
		xstream.alias("finishedReport", FinishedJobReport.class);
		final QueueService qs = new QueueService(conf.getAwsAccessKey(), conf.getAwsSecretKey());
		try {
			reportQueue = qs.getOrCreateMessageQueue(conf.getReportQueue());
			reportQueue.setVisibilityTimeout(conf.getReportQueueTimeout());
			queueParameters = new LinkedList<QueueParameters>();
			// The per-queue configuration lists (weights, cpu times, ...) are parallel arrays
			// indexed by queue position.
			for (int i = 0; i < conf.getQueues().size(); i++) {
				MessageQueue mq = qs.getOrCreateMessageQueue(conf.getQueues().get(i));
				QueueParameters qp = new QueueParameters(mq, conf.getQueues().get(i),
						conf.getQueueWeights().get(i), conf.getQueueCpuTimes().get(i),
						conf.getQueueExpectedProcessingTimes().get(i), conf.getTimeouts().get(i));
				queueParameters.add(qp);
				name_mapping.put(qp.getQueueName(), qp);
			}
			qsa = new WeightedRoundRobin(queueParameters, new Object());
			dfsUtils = new DFSUtils(conf.getAwsAccessKey(), conf.getAwsSecretKey());
		} catch (SQSException e) {
			log_.error(e);
		}
	}

	/**
	 * Retrieves an estimate for a visibility timeout for a specific queue based on
	 * the queue's configured timeout plus its expected processing time (ms → s).
	 *
	 * @param queueName name of a queue initialized in the configuration
	 * @return the estimated timeout in seconds
	 * @throws QueueNotFoundException if the queue was not initialized
	 */
	public int getTimeoutEstimateForQueue(String queueName) {
		QueueParameters mqp = name_mapping.get(queueName);
		if (mqp == null)
			throw new QueueNotFoundException("Queue with name:" + queueName
					+ " not found. Please make sure to initialize the queues in your configuration!");
		return mqp.getTimeout() + (int) (mqp.getExpectedTime() / 1000);
	}

	/**
	 * Deserializes a received message body into a {@link UnitOfWork} and tags it
	 * with the SQS message id. Returns null if the message or its body is null.
	 */
	private UnitOfWork toJob(Message msg) {
		if (msg == null) {
			return null;
		}
		final String xml = msg.getMessageBody();
		if (xml == null) {
			return null;
		}
		UnitOfWork job = (UnitOfWork) xstream.fromXML(xml);
		log_.debug("Got job:" + job.toString());
		job.setMessageId(msg.getMessageId());
		return job;
	}

	/**
	 * Sleeps for the configured back-off interval used when queues are empty.
	 * Restores the interrupt flag if the sleep is interrupted.
	 */
	private void sleepOnEmptyQueues() {
		try {
			log_.debug("About to go to sleep for:" + conf.getSleepOnEmptyQueues() + " ms");
			Thread.sleep(conf.getSleepOnEmptyQueues());
		} catch (InterruptedException ie) {
			log_.error(ie);
			// Preserve the interrupt status so callers can observe the interruption.
			Thread.currentThread().interrupt();
		}
	}

	/**
	 * Attempts to retrieve the next available job as selected by the
	 * QueueSelectionAlgorithm.
	 *
	 * @return the next job, or null if no queue yielded one
	 * @throws SQSException on SQS communication failure
	 */
	public UnitOfWork nextJob() throws SQSException {
		Message msg = null;
		int i = 0;
		while (msg == null) {
			log_.debug("Selecting the queue...");
			QueueParameters mqp = qsa.nextQueue();
			if (mqp != null) {
				int jobVisibilityTimeout = mqp.getTimeout() + (int) (mqp.getExpectedTime() / 1000);
				msg = mqp.getQueue().receiveMessage(jobVisibilityTimeout);
				mqp.setCheckCount(mqp.getCheckCount() + 1);
				log_.debug("Selected Queue:" + mqp.getQueueName() + " timeout is:"
						+ jobVisibilityTimeout + " seconds");
				break;
			} else if (i >= qsa.getQueueCount()) {
				// Every queue has been tried without success.
				return null;
			} else {
				sleepOnEmptyQueues();
			}
			i++;
		}
		return toJob(msg);
	}

	/**
	 * Attempts to retrieve the next job from the named queue, sleeping and retrying
	 * (up to the configured queue count) while the queue is unknown.
	 *
	 * @param queueName name of the queue to poll
	 * @return the next job, or null if none was retrieved
	 * @throws SQSException on SQS communication failure
	 */
	public UnitOfWork nextJobFromQueue(String queueName) throws SQSException {
		Message msg = null;
		int i = 0;
		while (msg == null) {
			log_.debug("Selecting the queue...");
			QueueParameters mqp = name_mapping.get(queueName);
			if (mqp != null) {
				int jobVisibilityTimeout = mqp.getTimeout() + (int) (mqp.getExpectedTime() / 1000);
				msg = mqp.getQueue().receiveMessage(jobVisibilityTimeout);
				log_.debug("Selected Queue:" + mqp.getQueueName() + " timeout is:"
						+ jobVisibilityTimeout + " seconds");
				break;
			} else if (i >= qsa.getQueueCount()) {
				return null;
			} else {
				sleepOnEmptyQueues();
			}
			i++;
		}
		return toJob(msg);
	}

	/**
	 * Deletes a given job: removes the SQS message and the corresponding marker
	 * object from the S3 job directory.
	 *
	 * @param messageId the SQS message id of the job
	 * @param taskId    the job's task id
	 * @param queueName the queue the job was received from
	 */
	public synchronized void deleteJob(String messageId, String taskId, String queueName) {
		try {
			QueueParameters qp = name_mapping.get(queueName);
			MessageQueue queue = qp.getQueue();
			log_.debug("Getting the job's queue...");
			queue.deleteMessage(messageId);
			try {
				log_.debug("Deleting in the job-directory...");
				String objectId = queueName + "." + taskId;
				com.headcaselabs.s3.Report r = dfsUtils.deleteObject(conf.getJobDirectory(), objectId);
				// Any non-2xx status is an error.
				if (!(r.getStatusCode() >= 200 && r.getStatusCode() < 300)) {
					log_.error("Error trying to delete job - got status:" + r.getStatusCode());
					log_.error(r.toString());
				} else {
					log_.debug("Deleted job from job-directory:" + objectId);
				}
			} catch (Exception e) {
				log_.error(e);
			}
		} catch (SQSException sqse) {
			log_.error(sqse);
		}
	}

	/**
	 * Deletes a message in a specific queue. The queue must have been initialized!
	 * This is useful for low-level access to the queues.
	 *
	 * @param queueName name of an initialized queue
	 * @param messageId id of the message to delete
	 * @throws QueueNotFoundException if the queue was not initialized
	 */
	public void deleteMessage(String queueName, String messageId) {
		QueueParameters qp = name_mapping.get(queueName);
		if (qp == null) {
			throw new QueueNotFoundException("Queue:" + queueName
					+ " not found. Check your configuration...");
		}
		try {
			qp.getQueue().deleteMessage(messageId);
		} catch (SQSException sqse) {
			log_.error(sqse);
		}
	}

	/**
	 * Checks whether a job marker exists in the S3 job directory.
	 *
	 * @return true if the marker exists, false otherwise or on any error
	 */
	public boolean peek(String taskId, String queueName) {
		try {
			String objectId = queueName + "." + taskId;
			return dfsUtils.hasObject(conf.getJobDirectory(), objectId);
		} catch (Exception e) {
			log_.error(e);
			return false;
		}
	}

	/**
	 * Clears out a queue. Attention: all visible messages in the respective queue
	 * will be deleted.
	 *
	 * @param queueName name of the queue to clear
	 * @return the number of messages deleted
	 * @throws QueueNotFoundException if the queue was not initialized
	 */
	public int clearQueue(String queueName) {
		int deleted = 0;
		QueueParameters qp = name_mapping.get(queueName);
		if (qp == null) {
			// Fixed: previously dereferenced a null local here, guaranteeing an NPE
			// instead of the intended QueueNotFoundException.
			throw new QueueNotFoundException("Queue:" + queueName
					+ " not found. Check your configuration...");
		}
		try {
			Message msg;
			while ((msg = qp.getQueue().receiveMessage(conf.getDefaultTimeout())) != null) {
				qp.getQueue().deleteMessage(msg);
				deleted++;
			}
		} catch (SQSException sqse) {
			log_.error(sqse);
		}
		return deleted;
	}

	/**
	 * Updates the cpu-time of the queue for the target task. (Will subtract the
	 * expected time from the QueueParameter's cpu-time and add the actual time it
	 * took to process the job.)
	 *
	 * NOTE(review): the update logic is currently commented out, so this method is
	 * effectively a no-op that only logs — confirm before relying on it.
	 *
	 * @param taskId        id of the finished task
	 * @param elapsed_ticks actual processing time
	 */
	public synchronized void updateJobTime(String taskId, final long elapsed_ticks) {
		log_.debug("About to retrieve the taskid ");
		log_.debug("About to retrieve the job parameters");
		log_.debug("About to update the job time ");
		log_.debug("Elapsed ticks:" + elapsed_ticks);
		log_.debug("Updated job time ");
	}

	/**
	 * Sends a job: serializes it to XML, puts it on its queue and writes a marker
	 * object into the S3 job directory so duplicates can be detected via peek().
	 *
	 * @param job the job to send; must have a queue name set
	 * @return the messageId of the job that was sent, or null if the job already
	 *         exists in the job directory or sending failed
	 * @throws RuntimeException       if the job has no queue associated with it
	 * @throws QueueNotFoundException if the job's queue was not initialized
	 */
	public String sendJob(UnitOfWork job) {
		if (job.getQueueName() == null || job.getQueueName().equals("")) {
			throw new RuntimeException("Job has no queue associated with it");
		}
		if (peek(job.getId(), job.getQueueName())) {
			// Job marker already present — treat as duplicate and do not resend.
			return null;
		}
		QueueParameters qp = name_mapping.get(job.getQueueName());
		if (qp == null) {
			throw new QueueNotFoundException("Queue:" + job.getQueueName()
					+ " not found. Check your configuration...");
		}
		// Fixed: this debug line used to run before the null check above and could NPE.
		log_.debug("QueueURL:" + qp.getQueue().getUrl());
		String xml = xstream.toXML(job);
		String id = null;
		try {
			id = qp.getQueue().sendMessage(xml);
		} catch (SQSException sqse) {
			log_.error(sqse);
		}
		ByteArrayInputStream bais = new ByteArrayInputStream(new byte[0]);
		String objectId = job.getQueueName() + "." + job.getId();
		com.headcaselabs.s3.Report r = dfsUtils.putStream(conf.getJobDirectory(), objectId, bais, 0);
		log_.debug("Response from trying to store job in job directory was:" + r.getStatusCode()
				+ " objectName:" + objectId + " ");
		return id;
	}

	/**
	 * Gets a job from a particular queue.
	 *
	 * @param timeout   visibility timeout for the receive
	 * @param queueName name of the queue to poll
	 * @return the retrieved job or null if the respective queue didn't contain one
	 * @throws QueueNotFoundException if the queue was not initialized
	 */
	public UnitOfWork getJob(final int timeout, String queueName) {
		QueueParameters qp = name_mapping.get(queueName);
		if (qp == null) {
			// Fixed: previously dereferenced a null local here, guaranteeing an NPE
			// instead of the intended QueueNotFoundException.
			throw new QueueNotFoundException("Queue:" + queueName
					+ " not found. Check your configuration...");
		}
		UnitOfWork job = null;
		try {
			Message msg = qp.getQueue().receiveMessage(timeout);
			String xml = null;
			if (msg != null)
				xml = msg.getMessageBody();
			if (xml != null) {
				job = (UnitOfWork) xstream.fromXML(xml);
				job.setMessageId(msg.getMessageId());
			}
		} catch (SQSException sqse) {
			log_.error(sqse);
		}
		return job;
	}

	/**
	 * Receives a raw message from the named queue.
	 *
	 * @param timeout   visibility timeout for the receive
	 * @param queueName name of the queue to poll
	 * @return the message, or null if none was available or the receive failed
	 * @throws QueueNotFoundException if the queue was not initialized
	 */
	public Message nextMessage(int timeout, String queueName) {
		QueueParameters qp = name_mapping.get(queueName);
		if (qp == null) {
			throw new QueueNotFoundException("Queue:" + queueName
					+ " not found. Check your configuration...");
		}
		Message msg = null;
		try {
			msg = qp.getQueue().receiveMessage(timeout);
		} catch (SQSException sqse) {
			log_.error(sqse);
		}
		return msg;
	}

	/**
	 * Writes a marker object into the cancelled-job directory for the given task.
	 */
	public synchronized void markJobCancelled(String taskId, String queueName) {
		ByteArrayInputStream bais = new ByteArrayInputStream(new byte[0]);
		String objectId = queueName + "." + taskId;
		com.headcaselabs.s3.Report r = dfsUtils.putStream(conf.getCancelledJobDirectory(), objectId, bais, 0);
		log_.info("Response from trying to store job in cancelled job directory was:" + r.getStatusCode()
				+ " objectName:" + objectId + " ");
	}

	/**
	 * @return the xstream
	 */
	public XStream getXstream() {
		return xstream;
	}

	/**
	 * @param xstream the xstream to set
	 */
	public void setXstream(XStream xstream) {
		this.xstream = xstream;
	}

	/**
	 * Allows to clean out the job directory.
	 */
	public void clearJobDirectory() {
		try {
			List<String> jobs = dfsUtils.listObjects(conf.getJobDirectory(), null, 5000, null);
			for (String s : jobs) {
				com.headcaselabs.s3.Report r = dfsUtils.deleteObject(conf.getJobDirectory(), s);
				// Fixed: the old check (<=200 && >=300) could never be true, so delete
				// failures were silently ignored. Any non-2xx status is an error.
				if (r.getStatusCode() < 200 || r.getStatusCode() >= 300) {
					log_.error("Error trying to delete - got status:" + r.getStatusCode());
					log_.error(r.toString());
				}
			}
		} catch (Exception e) {
			log_.error(e);
		}
	}

	/**
	 * Lists the jobs in the job directory (logged at info level).
	 */
	public void listJobDirectory() {
		try {
			List<String> jobs = dfsUtils.listObjects(conf.getJobDirectory(), null, 5000, null);
			for (String s : jobs) {
				log_.info(s);
			}
		} catch (Exception e) {
			log_.error(e);
		}
	}

	/**
	 * @return the object names in the job directory, or null on error
	 */
	public List<String> getJobDirectory() {
		List<String> jobs = null;
		try {
			jobs = dfsUtils.listObjects(conf.getJobDirectory(), null, 5000, null);
		} catch (Exception e) {
			log_.error(e);
		}
		return jobs;
	}

	/**
	 * Deletes every object in the job directory whose name starts with the given prefix.
	 *
	 * @param prefix object-name prefix to match
	 */
	public void clearDirectoryWithPrefix(String prefix) {
		try {
			List<String> jobs = dfsUtils.listObjects(conf.getJobDirectory(), null, 5000, null);
			for (String s : jobs) {
				if (s.startsWith(prefix)) {
					com.headcaselabs.s3.Report r = dfsUtils.deleteObject(conf.getJobDirectory(), s);
					// Fixed: the old check (<=200 && >=300) could never be true, so delete
					// failures were silently ignored. Any non-2xx status is an error.
					if (r.getStatusCode() < 200 || r.getStatusCode() >= 300) {
						log_.error("Error trying to delete - got status:" + r.getStatusCode());
						log_.error(r.toString());
					}
				}
			}
		} catch (Exception e) {
			log_.error(e);
		}
	}

	/**
	 * @return the queueParameters
	 */
	public List<QueueParameters> getQueueParameters() {
		return queueParameters;
	}

	/**
	 * @return the dfsUtils
	 */
	public DFSUtils getDfsUtils() {
		return dfsUtils;
	}

}
