package com.headcaselabs.orchestra;

import java.io.ByteArrayInputStream;
import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Date;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.headcaselabs.queue.QueueParameters;
import com.headcaselabs.queue.SQSUtils;
import com.headcaselabs.queue.message.CancelledJobReport;
import com.headcaselabs.queue.message.FinishedJobReport;
import com.headcaselabs.s3.DFSUtils;
import com.headcaselabs.work.UnitOfWork;
/**
 * FinishedJobReport Daemon - sends out reports about jobs executed.
 * @author Florian Leibert
 *
 */
public class ReportDaemon implements Runnable {

	/** Milliseconds to sleep between report iterations. */
	private final long SLEEP_INTERVAL;
	// volatile: written by stop() from another thread, read by the daemon loop;
	// without it the loop might never observe the stop request.
	private volatile boolean running = true;
	// NOTE(review): SimpleDateFormat is not thread-safe. This is only safe while
	// a single thread formats at a time — confirm before adding concurrent callers.
	private static final DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
	private final SQSUtils sqsUtils;
	/** Monitor shared with the owner; notified from stop(). */
	private final Object lock;
	/** Remaining loop iterations; a negative value means "run until stop() is called". */
	private int maxIterations;
	private final DFSUtils dfsUtils;

	private final static Log log_ =
		LogFactory.getLog(ReportDaemon.class.getName());

	/**
	 * Creates a daemon that reports every {@code sleep} ms until {@link #stop()} is called.
	 *
	 * @param sleep    sleep interval between iterations, in milliseconds
	 * @param sqsUtils queue utilities the reports are derived from; must not be null
	 * @param lock     monitor shared with the owner, notified on {@link #stop()}
	 */
	public ReportDaemon(final long sleep, SQSUtils sqsUtils, Object lock) {
		// -1 == unlimited iterations (see run()).
		this(sleep, sqsUtils, lock, -1);
	}

	/**
	 * Creates a daemon that reports every {@code sleep} ms for at most
	 * {@code maxIterations} iterations.
	 *
	 * @param sleep         sleep interval between iterations, in milliseconds
	 * @param sqsUtils      queue utilities the reports are derived from; must not be null
	 * @param lock          monitor shared with the owner, notified on {@link #stop()}
	 * @param maxIterations number of loop iterations; negative means unlimited
	 */
	public ReportDaemon(final long sleep, SQSUtils sqsUtils, Object lock, int maxIterations) {
		this.sqsUtils = sqsUtils;
		this.dfsUtils = sqsUtils.getDfsUtils();
		this.SLEEP_INTERVAL = sleep;
		this.lock = lock;
		this.maxIterations = maxIterations;
	}

	/**
	 * Builds a (currently empty) report of cancelled jobs, stamped with the
	 * current time.
	 *
	 * @return the cancelled-job report; its task list is empty for now
	 */
	public CancelledJobReport getUnfinishedJobReport() {
		CancelledJobReport cancelledTasks = new CancelledJobReport("Cancelled Job Summary");
		cancelledTasks.setTime(dateFormat.format(new Date(System.currentTimeMillis())));
		//TODO: implement the cancelled job reports
		return cancelledTasks;
	}

	/**
	 * Builds a report of every finished, non-interrupted unit of work, including
	 * per-task execution times, the completed count and per-queue access counts.
	 *
	 * @return the populated report (never null)
	 */
	public FinishedJobReport getFinishedJobReport() {
		FinishedJobReport finishedJobReport = new FinishedJobReport("Job Summary");
		finishedJobReport.setTime(dateFormat.format(new Date(System.currentTimeMillis())));
		int completed = 0;
		Collection<Object> objects = JobUtils.getUnitsOfWork();
		for (Object o : objects) {
			if (o instanceof UnitOfWork) {
				UnitOfWork uof = (UnitOfWork) o;
				if (uof.getIsFinished() && !uof.getWasInterrupted()) {
					finishedJobReport.getTasks().add(uof.getClass() + " : " + uof.getId());
					finishedJobReport.getExecutionTimes().add("" + uof.getElapsed_ticks());
					completed++;
				}
			}
		}
		// BUGFIX: the completed count was tallied but never stored on the report.
		finishedJobReport.setCompletedJobs(completed);
		for (QueueParameters qp : sqsUtils.getQueueParameters()) {
			finishedJobReport.getQueueAccessCounts().put(qp.getQueueName(), qp.getCheckCount());
		}
		return finishedJobReport;
	}

	/**
	 * Daemon loop: sleeps {@code SLEEP_INTERVAL} ms per iteration, builds a
	 * {@link FinishedJobReport} and stores its XML via DFS. Runs until
	 * {@link #stop()} is called or {@code maxIterations} (when non-negative)
	 * is exhausted.
	 *
	 * @throws RuntimeException if no SQSUtils was supplied
	 */
	public void run() {
		if (sqsUtils == null) {
			throw new RuntimeException("No SQS Utilities found. Cannot operate in daemon mode.");
		}
		log_.debug("Starting FinishedJobReport Daemon");
		java.net.InetAddress localMachine = null;
		try {
			localMachine = java.net.InetAddress.getLocalHost();
		} catch (UnknownHostException uhe) {
			// Non-fatal: reports are still produced, just without a "from" field.
			log_.error("Could not resolve local host", uhe);
		}
		int iteration = 0;
		// BUGFIX: the condition was "maxIterations > 0", which made the unlimited
		// mode (maxIterations == -1 from the 3-arg constructor) exit immediately.
		// A negative value now loops until stop() flips `running`.
		while (running && maxIterations != 0) {
			maxIterations--;
			// BUGFIX: the iteration counter was never incremented (always logged 0).
			log_.debug("FinishedJobReport iteration:" + iteration++);
			long t0 = System.currentTimeMillis();
			try {
				Thread.sleep(SLEEP_INTERVAL);
			} catch (InterruptedException ie) {
				// Restore the interrupt flag instead of swallowing the interrupt.
				Thread.currentThread().interrupt();
				log_.warn("Report daemon interrupted while sleeping", ie);
			}
			log_.debug("FinishedJobReport Daemon woke up...");
			log_.debug("Slept for: " + (System.currentTimeMillis() - t0) + " ms");

			// Delegate to getFinishedJobReport() instead of duplicating its body.
			FinishedJobReport finishedJobReport = getFinishedJobReport();
			if (localMachine != null) {
				// Guard against the UnknownHostException case above (was an NPE).
				finishedJobReport.setFrom(localMachine.getHostName() + " @ " + localMachine.getHostAddress());
			}
			log_.debug("Sending FinishedJobReport...\n" + finishedJobReport.toXml());
			log_.info("Report:" + finishedJobReport.toXml());
			// Explicit charset so the stored report bytes are platform-independent.
			byte[] xml = finishedJobReport.toXml().getBytes(StandardCharsets.UTF_8);
			ByteArrayInputStream bais = new ByteArrayInputStream(xml);
			String objectId = "report-" + System.currentTimeMillis();
			//TODO:refactor with config-object
			dfsUtils.putStream("reportDirectory", objectId, bais, xml.length);
			//sqsUtils.sendReport(finishedJobReport);
		}

	}

	/**
	 * Asks the daemon loop to terminate and wakes anyone waiting on the shared lock.
	 */
	public synchronized void stop() {
		synchronized (lock) {
			running = false;
			// BUGFIX: was this.notify(), which woke waiters on the daemon object
			// itself (there are none) instead of the shared lock.
			lock.notify();
		}
	}

}
