package cs5226.batchjob;

import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Date;
import org.quartz.*;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;


import cs5226.dao.DBUtil;


public class CronJob implements Job {

    /** Shared Commons Logging logger for this job class. */
    static Log log = LogFactory.getLog(CronJob.class);

    /**
     * Quartz entry point. Runs one metric-collection pass and logs (but never
     * propagates) any failure, so a bad pass does not kill the scheduler's
     * trigger for this job.
     *
     * @param jobexecutioncontext execution context supplied by the Quartz scheduler
     */
    public void execute(JobExecutionContext jobexecutioncontext)
    {
        try {
            // Was System.out.println; route through the logger so all job
            // output lands in one place.
            log.info("=== Cron @" + new Date());
            log.info("Executing cron job - " + jobexecutioncontext.getJobDetail().getFullName());
            log.info("Cron jobs to do put after this..." + new Date());
            collectData();
            log.info("Cron jobs done...");
        }
        catch (Exception exception)
        {
            log.error("Exception occurred in running cron job...", exception);
        }
    }

    /**
     * One collection pass: asks the database to create a snapshot (stored
     * procedure with a single INTEGER out-parameter), loads the normal and
     * redo metric rows for that snapshot, then bulk-inserts everything as a
     * single JDBC batch.
     *
     * <p>Failures are logged and swallowed here (the {@code throws} clause is
     * kept for interface stability) so one failed pass cannot abort the
     * scheduler thread.
     *
     * @throws SQLException declared for compatibility; not currently propagated
     */
    private void collectData() throws SQLException {
        Connection conn = null;
        CallableStatement proc = null;
        ArrayList<MetricBean> aList = new ArrayList<MetricBean>();
        try {
            conn = DBUtil.getConnection();

            // Each statement must commit on its own; the batch insert below
            // relies on auto-commit being enabled.
            if (!conn.getAutoCommit()) {
                log.info("Connection does not autoCommit. Set autoCommit true");
                conn.setAutoCommit(true);
            }

            proc = conn.prepareCall(QueryUtil.CREATE_SNAPSHOT);
            proc.registerOutParameter(1, Types.INTEGER);
            proc.execute();
            int snapshotID = proc.getInt(1);
            log.info ("Snapshot ID = " + snapshotID);

            // A non-positive ID means the procedure did not create a snapshot;
            // there is nothing to collect in that case.
            if (snapshotID > 0) {
                loadNormalMetrics(conn, snapshotID, aList);
                loadRedoMetrics(conn, snapshotID, aList);
                insertMetrics(conn, aList);
            }
        }
        catch (Exception e) {
            // Narrowed from Throwable: Errors (OutOfMemoryError etc.) should
            // propagate rather than be swallowed.
            log.error("Error when executing batch job", e);
        }
        finally {
            // BUG FIX: proc was previously never closed (statement leak).
            DBUtil.closeStatement(proc);
            DBUtil.closeConnection(conn);
        }
    }

    /**
     * Loads the regular metric rows for the given snapshot into {@code out}.
     * Column order must match {@code QueryUtil.GET_METRIC}:
     * id, name, unit, avg, max, min, end_time.
     */
    private void loadNormalMetrics(Connection conn, int snapshotID, ArrayList<MetricBean> out)
            throws SQLException {
        PreparedStatement st = null;
        ResultSet rs = null;
        try {
            st = conn.prepareStatement(QueryUtil.GET_METRIC);
            st.setInt(1, snapshotID);
            rs = st.executeQuery();
            while (rs.next()) {
                MetricBean bean = new MetricBean();
                bean.setMetric_id(rs.getString(1));
                bean.setMetric_name(rs.getString(2));
                bean.setMetric_unit(rs.getString(3));
                bean.setAvg_value(rs.getString(4));
                bean.setMax_value(rs.getString(5));
                bean.setMin_value(rs.getString(6));
                bean.setEnd_time(rs.getString(7));
                bean.setSnapshot_id(snapshotID + "");

                log.info ("Normal metric: " + bean.toString());
                out.add(bean);
            }
        } finally {
            DBUtil.closeResultSet(rs);
            DBUtil.closeStatement(st);
        }
    }

    /**
     * Loads the redo-log metric rows for the given snapshot into {@code out}.
     * The unit is hard-wired to "microseconds" because the query
     * ({@code QueryUtil.GET_REDOMETRIC}) does not return one.
     */
    private void loadRedoMetrics(Connection conn, int snapshotID, ArrayList<MetricBean> out)
            throws SQLException {
        PreparedStatement st = null;
        ResultSet rs = null;
        try {
            st = conn.prepareStatement(QueryUtil.GET_REDOMETRIC);
            st.setInt(1, snapshotID);
            rs = st.executeQuery();
            while (rs.next()) {
                MetricBean bean = new MetricBean();
                bean.setMetric_id(rs.getString(1));
                bean.setMetric_name(rs.getString(2));
                bean.setMetric_unit("microseconds");
                bean.setAvg_value(rs.getString(3));
                bean.setAvg_timeout(rs.getString(4));
                bean.setSnapshot_id(snapshotID + "");

                out.add(bean);
                log.info ("Redo metric:" + bean.toString());
            }
        } finally {
            DBUtil.closeResultSet(rs);
            DBUtil.closeStatement(st);
        }
    }

    /**
     * Batch-inserts all collected metrics via {@code QueryUtil.INSERT_METRIC}.
     * Parameter order: snapshot_id, metric_id, name, unit, avg, min, max,
     * avg_timeout, end_time.
     */
    private void insertMetrics(Connection conn, ArrayList<MetricBean> metrics)
            throws SQLException {
        PreparedStatement st = null;
        try {
            st = conn.prepareStatement(QueryUtil.INSERT_METRIC);
            for (MetricBean bean : metrics) {
                st.setString(1, bean.getSnapshot_id());
                st.setString(2, bean.getMetric_id());
                st.setString(3, bean.getMetric_name());
                st.setString(4, bean.getMetric_unit());
                st.setString(5, bean.getAvg_value());
                st.setString(6, bean.getMin_value());
                st.setString(7, bean.getMax_value());
                st.setString(8, bean.getAvg_timeout());
                st.setString(9, bean.getEnd_time());

                st.addBatch();
            }
            st.executeBatch();
        } finally {
            DBUtil.closeStatement(st);
        }
    }
}
