package predictors;


import java.io.BufferedWriter;
import java.io.FileWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.text.DateFormat;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import jsonobj.JobInfo;

import utils.Utils;
import weka.classifiers.functions.LinearRegression;
import weka.core.Instances;
import weka.experiment.InstanceQuery;

public class ResourcePredictorImpl {
	/**
	 * Formatter for log output of predicted time slots.
	 * NOTE(review): SimpleDateFormat is not thread-safe; this is fine for
	 * single-threaded use — confirm there are no concurrent callers.
	 */
	public static DateFormat O = new SimpleDateFormat("d/M/yyyy HH:mm:ss");

	/** One hour expressed in milliseconds. */
	final static long hoursInMillis = 60L * 60L * 1000L;

	/**
	 * Predicts, for every cluster whose ppn (processors per node) matches or
	 * exceeds the requested value, the per-hour probability that at least
	 * {@code reqnodes} nodes are free, for each hour in
	 * [{@code start}, {@code start + trange} hours].
	 *
	 * The probability for a given hour/weekday slot is the fraction of
	 * historical samples (table {@code clu_data}) in that slot whose
	 * {@code freenodes} count was &gt;= {@code reqnodes}. When the cluster is
	 * currently nearly full, a linear-regression trend check may delay the
	 * candidate start time by one hour (see {@link #CheckTrend(String)}).
	 *
	 * @param reqnodes number of nodes the job needs
	 * @param ppn      requested processors per node (exact match first, then
	 *                 closest larger value)
	 * @param start    first candidate start time
	 * @param trange   width of the prediction window, in hours
	 * @return map from cluster short name ("bc1"/"bc2") to the list of hourly
	 *         predictions, or {@code null} when no cluster offers the
	 *         requested ppn
	 * @throws Exception on JDBC/driver errors
	 */
	@SuppressWarnings("deprecation")
	public static Map<String, ArrayList<Tuple>> GetAvailableResourcesForTimeRange(int reqnodes, int ppn,
			Date start, int trange) throws Exception {

		Date end = new Date(start.getTime() + trange * hoursInMillis);
		Map<String, ArrayList<Tuple>> probabilities = new HashMap<String, ArrayList<Tuple>>();
		//String url = "jdbc:mysql://laptop.local/sire";
		String url = "jdbc:mysql://127.0.0.1/sire";
		Class.forName("com.mysql.jdbc.Driver");
		Connection conn = DriverManager.getConnection(url, "root", "");
		// fix: wrap all DB work in try/finally — the original leaked the
		// connection on the early "not found" return and on every exception.
		try {
			Utils.out("+ Searching for clusters with ppn = " + ppn);
			String query = "select * from clusters where ppn = ?";
			PreparedStatement stmt = conn.prepareStatement(query);
			stmt.setInt(1, ppn);
			ResultSet rs = stmt.executeQuery();
			int min = -1;
			if (!rs.next()) {
				Utils.out("+ Not found.");
				Utils.out("+ Searching for clusters with ppn > " + ppn);
				// fall back to the smallest ppn larger than the request
				query = "select * from clusters where ppn > ? order by ppn asc";
				stmt = conn.prepareStatement(query);
				stmt.setInt(1, ppn);
				rs = stmt.executeQuery();
				if (!rs.next()) {
					Utils.out("+ Not found, exiting predictor function");
					Utils.out(" --------------------------------------");
					return null;
				}
				min = rs.getInt("ppn");
				Utils.out("+ Closest match: " + rs.getString("lhost") + " ppn: " + rs.getInt("ppn"));
			} else {
				Utils.out("+ Matched system: " + rs.getString("lhost") + " ppn: " + rs.getInt("ppn"));
			}

			// rs is already positioned on the first matching cluster row
			do {
				if (!rs.isFirst()) {
					Utils.out("\n+ Next match: " + rs.getString("lhost") + " ppn: " + rs.getInt("ppn"));
				}
				double coeff = -9999; // sentinel: "no trend computed"
				// map the long hostname to the short cluster key used in clu_data
				String name = rs.getString("lhost").contains("crystal") ? "bc1" : "bc2";
				String host = rs.getString("host");
				probabilities.put(name, new ArrayList<Tuple>());
				Date tmp_start = start;

				// latest monitoring sample = current number of free nodes
				query = "select freenodes from clu_data where cluster = ? order by id desc limit 1";
				stmt = conn.prepareStatement(query);
				stmt.setString(1, name);
				ResultSet res = stmt.executeQuery();
				if (!res.first()) {
					// fix: the original called getInt() unconditionally and
					// threw when a cluster had no monitoring rows; skip it.
					Utils.out("+ No monitoring data for '" + name + "', skipping.");
					continue;
				}
				int fnodes = res.getInt("freenodes");
				Utils.out("+ Current free nodes on '" + name + "': " + fnodes);
				// cluster nearly full and window wide enough: consult the trend
				if (fnodes <= 2 * reqnodes && trange > 2) {
					Utils.out("+ Checking resource availability trend.");
					if ((coeff = CheckTrend(name)) < 0) {
						Utils.out("+ Found negative coefficient -> delaying start time.");
						tmp_start = new Date(tmp_start.getTime() + hoursInMillis);
					} else Utils.out("+ Found positive coefficient -> normal procedure.");
				}

				// one prediction per hour in [tmp_start, end]
				while (tmp_start.compareTo(end) <= 0) {
					double tot_instances = 0;
					double tot_instances_over = 0;
					int i = tmp_start.getHours(); // deprecated but preserved: hour of day, 0-23
					int day = tmp_start.getDay(); // deprecated: day of week, 0 = Sunday

					query = "select freenodes from clu_data where cluster = ? and  timenum = ? and datenum = ? order by freenodes asc";
					stmt = conn.prepareStatement(query);
					stmt.setString(1, name);
					stmt.setInt(2, i);
					stmt.setInt(3, day);
					ResultSet rss = stmt.executeQuery();
					int mean = 0;
					while (rss.next()) {
						int n = rss.getInt("freenodes");
						mean += n;
						tot_instances += 1;
						if (n >= reqnodes)
							tot_instances_over += 1;
					}
					if (mean != 0)
						mean /= tot_instances;
					// second pass for the standard deviation; beforeFirst()
					// relies on the MySQL driver's scrollable result sets
					double std_dev = 0;
					if (tot_instances > 0) { // fix: avoid 0/0 -> NaN with no samples
						rss.beforeFirst();
						while (rss.next()) {
							double val = rss.getInt("freenodes") - mean;
							std_dev += (val * val);
						}
						std_dev /= tot_instances;
						std_dev = Math.sqrt(std_dev);
					}
					double probability;
					if (tot_instances_over == 0)
						probability = 0;
					else probability = (tot_instances_over / tot_instances * 100);
					Utils.out("+ Predicted probability at '" + O.format(tmp_start) + "' = " + probability);
					probabilities.get(name).add(new Tuple(host, tmp_start, probability, mean, std_dev, coeff));
					tmp_start = new Date(tmp_start.getTime() + hoursInMillis);
				}

			} while (rs.next());
			return probabilities;
		} finally {
			conn.close(); // closing the connection also closes its statements
		}
	}

	/**
	 * Convenience overload: predicts from "now" over the job's own time range.
	 *
	 * @param ji job description (node/processor counts and default trange)
	 * @return best prediction, or {@code null} when no cluster matches
	 * @throws Exception on JDBC/driver errors
	 */
	public static ProbaInfo GetHighestProbability(JobInfo ji) throws Exception {
		return GetHighestProbability(ji, new Date(), ji.trange);
	}

	/**
	 * Convenience overload: predicts from {@code start} over the job's own
	 * time range.
	 *
	 * @param ji    job description
	 * @param start first candidate start time
	 * @return best prediction, or {@code null} when no cluster matches
	 * @throws Exception on JDBC/driver errors
	 */
	public static ProbaInfo GetHighestProbability(JobInfo ji, Date start)
	throws Exception {
		return GetHighestProbability(ji, start, ji.trange);
	}

	/**
	 * Runs the predictor and selects, across all candidate clusters and time
	 * slots, the tuple with the highest probability and the tuple with the
	 * highest mean free-node count.
	 *
	 * @param ji     job description (uses {@code ji.nodes} and {@code ji.procs})
	 * @param start  first candidate start time
	 * @param trange prediction window in hours
	 * @return the best prediction, or {@code null} when no cluster matches or
	 *         no prediction could be produced (the original threw a
	 *         NullPointerException in those cases)
	 * @throws Exception on JDBC/driver errors
	 */
	public static ProbaInfo GetHighestProbability(JobInfo ji, Date start, int trange)
			throws Exception {
		Utils.out(" -------- Resource Predictor --------");
		Utils.out("+ Job attributes: [nodes:" + ji.nodes + "] [procs: " + ji.procs + "] [subtime: " + start + "] [trange:" + trange + "]");
		Map<String, ArrayList<Tuple>> probabilities = GetAvailableResourcesForTimeRange(ji.nodes, ji.procs, start, trange);
		if (probabilities == null) {
			// fix: the predictor returns null when no cluster offers the
			// requested ppn; the original dereferenced it and crashed with NPE
			return null;
		}
		Iterator<String> it = probabilities.keySet().iterator();
		double highest_prob = -1;
		int highest_mean = -1;
		Tuple h_prob = null;
		Tuple h_mean = null;
		Utils.out(" ===========================");
		Utils.out("+ Searching highest probability.");
		while (it.hasNext()) {
			String s = it.next();
			ArrayList<Tuple> list = probabilities.get(s);
			for (Tuple t : list) {
				if (t.probability > highest_prob) {
					highest_prob = t.probability;
					h_prob = t;
				}
				if (t.mean > highest_mean) {
					highest_mean = t.mean;
					h_mean = t;
				}
			}
		}
		if (h_prob == null || h_mean == null) {
			// fix: all prediction lists were empty; the original threw NPE below
			Utils.out("+ No predictions available for any cluster.");
			Utils.out(" ===========================\n\n");
			return null;
		}
		Utils.out("+ Highest found: " + Utils.f.format(highest_prob) + " on system: '" + h_prob.cluster + "' at time: " + O.format(h_prob.time));
		Utils.out(" ===========================\n\n");
		return new ProbaInfo(h_prob.cluster, h_prob, h_mean);
	}

	/**
	 * Fits a linear regression over the last ~120 monitoring samples of the
	 * given cluster and returns the regression coefficient of the free-node
	 * series (negative = availability shrinking).
	 *
	 * @param cluster short cluster name as stored in {@code clu_data.cluster}
	 * @return the first regression coefficient
	 * @throws Exception on JDBC/Weka errors
	 */
	public static double CheckTrend(String cluster) throws Exception {
		// fix: escape backslashes and single quotes so an odd/malicious cluster
		// name cannot break out of the SQL string literal — InstanceQuery has
		// no parameter binding, so escaping is the only option here.
		String safeCluster = cluster.replace("\\", "\\\\").replace("'", "''");
		InstanceQuery query = new InstanceQuery();
		query.setDatabaseURL("jdbc:mysql://127.0.0.1/sire");
		query.setUsername("root");
		query.setPassword("");
		query.connectToDatabase();
		try {
			// @a numbers the samples 0..n so they can act as the x-axis
			query.execute("set @a=-1");
			query.setQuery("select @a:=@a+1,freenodes from clu_data where cluster='" + safeCluster + "' and id >= (select max(id) from clu_data) - 120");
			Instances data = query.retrieveInstances();
			data.setClassIndex(1); // predict freenodes from the sample index
			LinearRegression lr = new LinearRegression();
			lr.buildClassifier(data);

			double[] coeff = lr.coefficients();
			Utils.out("+ Linear regression coefficient: " + coeff[0]);
			return coeff[0];
		} finally {
			query.close(); // fix: previously leaked the DB connection on exception
		}
	}

	/**
	 * Not implemented yet — always returns 0.
	 *
	 * @param time candidate start time (currently ignored)
	 * @return 0
	 * @throws Exception never (declared for future implementations)
	 */
	public int GetProbabilityForTime(Date time) throws Exception {
		// TODO Auto-generated method stub
		return 0;
	}

}
