package cbbx_sm.endtoend;

import java.io.BufferedWriter;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.Hashtable;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import cbbx_sm.decision_maker.DecisionMakerFactory;
import cbbx_sm.decision_maker.IDecisionMaker;
import cbbx_sm.decision_maker.TreeLookAheadDecisionMaker;
import cbbx_sm.decision_maker.search.DynamicProgrammingLookahead;
import cbbx_sm.decision_maker.search.WorldStateTransition;
import cbbx_sm.evaluation.CompareAlternativeSchedulers.Decision;
import cbbx_sm.parser.CameraData;
import cbbx_sm.parser.Frame;
import cbbx_sm.parser.Parser;
import cbbx_sm.probabilistic_model.Cluster;
import cbbx_sm.probabilistic_model.Clustering;
import cbbx_sm.probabilistic_model.IPredictor;
import cbbx_sm.probabilistic_model.NoisyOrPredictor;
import cbbx_sm.probabilistic_model.ProbabilisticModel;
import cbbx_sm.probabilistic_model.SingleCameraNoisyOrPredictor;
import cbbx_sm.probabilistic_model.SystemShortTermMemory;
import cbbx_sm.simulation.Scheduler;
import cbbx_sm.utils.ExperimentManager;
import cbbx_sm.utils.LookaheadPredictorSerializator;
import edu.uci.ics.jung.graph.util.Pair;

/**
 * Static helpers shared by the end-to-end evaluation experiments: dumping
 * per-camera text data to disk, running a full train/test scheduling
 * evaluation, clustering camera data, and appending result rows to the
 * experiment output file.
 */
public class EndToEndTestUtils {

	/**
	 * Writes each camera's lines to its own file, one file per camera index.
	 *
	 * @param allCamData     one list of text lines per camera
	 * @param fileNameFormat format string with two placeholders; the first is
	 *                       filled with the empty string, the second with the
	 *                       camera index
	 * @throws IOException if a file cannot be created or written
	 */
	public static void print(ArrayList<ArrayList<String>> allCamData, String fileNameFormat) throws IOException {
		for (int i = 0; i < allCamData.size(); i++) {
			ArrayList<String> camData = allCamData.get(i);
			// try-with-resources: the original never closed the stream, leaking
			// one open file handle per camera.
			try (BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
					new FileOutputStream(new File(String.format(fileNameFormat, "", i)))))) {
				for (String line : camData) {
					out.write(line + "\n");
				}
			}
		}
	}

	/**
	 * Runs one full evaluation: trains a probabilistic model on {@code trainDay},
	 * builds the decision maker selected by {@code decision}, then replays
	 * {@code testDay} through the scheduler and appends the results to
	 * {@link ExperimentManager#outputFilename}.
	 *
	 * @param cameraIds                    ids of the cameras to evaluate
	 * @param FILE_NAME_FORMAT             format used to locate per-camera data files (day, camera id)
	 * @param k                            number of clusters per camera (k-means)
	 * @param numberOfTimeStampsLookAhead  lookahead horizon for lookahead-based decision makers
	 * @param numberOfSecondsCorrelation   correlation window for the probabilistic model and memory
	 * @param numberOfStates               state-space size for the DP lookahead table
	 * @param utilityZoom                  utility of a high-resolution (zoomed) capture
	 * @param utilityUP                    utility of a low-resolution (wide) capture
	 * @param discounting                  whether the DP lookahead discounts future utility
	 * @param error                        error parameter passed to the lookahead models
	 * @param regenerate                   force regeneration of serialized lookahead tables
	 * @param decision                     which decision-making strategy to evaluate
	 * @param useAprioriProb               whether predictors use the a-priori cluster probabilities
	 * @param timeInFuture                 prediction horizon (timestamps) for the predictors
	 * @param trainDay                     day identifier for training data
	 * @param testDay                      day identifier for test data
	 * @throws Exception propagated from parsing, model building, or the scheduler run
	 */
	public static void evaluateSchedule(
			ArrayList<String> cameraIds, 
			String FILE_NAME_FORMAT, 
			int k,
			int numberOfTimeStampsLookAhead,
			int numberOfSecondsCorrelation,
			int numberOfStates,
			double utilityZoom,
			double utilityUP,
			boolean discounting,
			double error,
			boolean regenerate,
			Decision decision,
			boolean useAprioriProb,
			int timeInFuture, 
			String trainDay,
			String testDay) throws Exception {
		
		// Load camera data used for training.
		LinkedHashMap<String, CameraData> camDataMap = loadCameraData(FILE_NAME_FORMAT, cameraIds, trainDay);
		List<CameraData> camDataList = new ArrayList<CameraData>(camDataMap.values());
		
		// Each camera has its own set of k clusters.
		Hashtable<String, List<Cluster>> camClusters = getCamClusters(cameraIds, camDataMap, k);
		
		// All the clusters together, in camera order.
		List<Cluster> fusedClusters = new ArrayList<Cluster>();
		for (String cam: cameraIds){
			fusedClusters.addAll(camClusters.get(cam));
		}
		
		// Build the cross-camera probabilistic model over the fused clusters.
		ProbabilisticModel probModel =
			new ProbabilisticModel(camDataList, fusedClusters, numberOfSecondsCorrelation);
		probModel.setThreshold(0.0);
		System.out.println("Probabilities computed.");
		System.out.println(probModel.toString());

		System.out.println("Computing Lookahead Model.");
		// Index clusters by unique id so decision makers can look them up.
		Hashtable<String, Cluster> clusterIndex = new Hashtable<String, Cluster>();
		for (Cluster c : probModel.getFusedClusters()) {
			clusterIndex.put(c.getUniqueId(), c);
		}
		IDecisionMaker decisionMaker = null;
		switch (decision){
		case mostlikelyAlways:
			decisionMaker = DecisionMakerFactory.getMostLikelyClusterDecisionMaker(0);
			break;
		case smartMostlikely:
			decisionMaker = DecisionMakerFactory.getSmartMostLikelyClusterDecisionMaker(utilityZoom, utilityUP);
			break;
		case mostlikelyWhenMotion:
			decisionMaker = DecisionMakerFactory.getMostLikelyClusterDecisionMaker(1);
			break;
		case oracle:
			// The oracle approach writes directly to the output file and thus we return after the call.
			Pair<Integer> results = EndToEndTestUtils.computeOracleResults(loadCameraData(FILE_NAME_FORMAT, cameraIds, testDay), utilityZoom, utilityUP);
			String decisionMakerName = 
				String.format("%s_%d_%.1f_%.1f_0_0_0",
						Decision.oracle.name(), camDataMap.size(), utilityZoom, utilityUP);
			EndToEndTestUtils.printResults(
					ExperimentManager.outputFilename, false, probModel.getThreshold(), probModel.getFusedClusters().size(),
					String.format("%s_0_0\t0", Decision.oracle.name()), decisionMakerName, results.getFirst(), results.getSecond(), 0, 0);
			return;
		case lookaheadLocalProb: // both cases are dealt the same once the probability model is set.
		case lookahead:
			// One DP lookahead table per camera, built from a single-camera model so
			// cluster indices match the ones used when that model was created.
			Hashtable<String,DynamicProgrammingLookahead> table = new Hashtable<String,DynamicProgrammingLookahead>();	
			for (String cam: cameraIds) {
				List<CameraData> singleCameraTrain = new ArrayList<CameraData>();
				singleCameraTrain.add(camDataMap.get(cam));
				ProbabilisticModel singleCamModel =
					new ProbabilisticModel(singleCameraTrain, camClusters.get(cam), numberOfSecondsCorrelation);
				DynamicProgrammingLookahead camTable = 
					LookaheadPredictorSerializator.generateLookaheadTable(
							singleCamModel, utilityZoom, utilityUP, numberOfTimeStampsLookAhead,
							discounting, error, numberOfStates, regenerate);
				table.put(cam, camTable);	
			}
			decisionMaker = DecisionMakerFactory.getLookAheadDecisionMaker(
					cameraIds, table, clusterIndex, utilityZoom, utilityUP, error, numberOfStates, numberOfTimeStampsLookAhead);
			break;
		case treeLookahead:
			// One world-state transition model per camera, again from single-camera
			// models so cluster indices line up.
			Hashtable<String,WorldStateTransition> wst = new Hashtable<String,WorldStateTransition>();
			for (String cam: cameraIds) {
				List<CameraData> singleCameraTrain = new ArrayList<CameraData>();
				singleCameraTrain.add(camDataMap.get(cam));
				ProbabilisticModel singleCamModel = new ProbabilisticModel(singleCameraTrain, camClusters.get(cam), numberOfSecondsCorrelation);
				WorldStateTransition worldStateTransition = new WorldStateTransition(singleCamModel);
				wst.put(cam, worldStateTransition);
			}
			decisionMaker = DecisionMakerFactory.getTreeLookAheadDecisionMaker(
					cameraIds, wst, clusterIndex, utilityZoom, utilityUP, numberOfTimeStampsLookAhead, ExperimentManager.treeLookaheadRecordsMisses);
			break;
		case rr:
			decisionMaker = DecisionMakerFactory.getRoundRobinDecisionMaker(probModel.getFusedClusters(), false);
			break;
		case rrGOUP:
			decisionMaker = DecisionMakerFactory.getRoundRobinDecisionMaker(probModel.getFusedClusters(), true);
			break;
		case up:
			// Will go up for sure with a pro. cut off of 1.1
			decisionMaker = DecisionMakerFactory.getMostLikelyClusterDecisionMaker(1.1); 
			break;
		}
		
		SystemShortTermMemory memory = new SystemShortTermMemory(numberOfSecondsCorrelation);
		
		// Load camera data used for testing and wire up the scheduler.
		camDataMap = loadCameraData(FILE_NAME_FORMAT, cameraIds, testDay);
		camDataList = new ArrayList<CameraData>(camDataMap.values());
		Scheduler scheduler = new Scheduler(camDataList, probModel, ExperimentManager.outputFilename, ExperimentManager.IMAGE_STREAM_DIRECTORY);
		scheduler.setDecisionMaker(decisionMaker);
		scheduler.setMemory(memory);
		IPredictor predictor;
		if (decision == Decision.lookaheadLocalProb){
			predictor = new SingleCameraNoisyOrPredictor(memory, probModel, probModel.getFusedClusters(), useAprioriProb, timeInFuture);
		} else if (decision == Decision.mostlikelyAlways 
				|| decision == Decision.smartMostlikely){
			// Always go to the most likely in the next timestamp.
			predictor = new NoisyOrPredictor(memory, probModel, probModel.getFusedClusters(), useAprioriProb, 1);
		} else {
			predictor = new NoisyOrPredictor(memory, probModel, probModel.getFusedClusters(), useAprioriProb, timeInFuture);
		}
		scheduler.setPredictor(predictor);
		scheduler.sequentialRun();
		
		// Print the initial grid if we used a tree lookahead.
		if (decision == Decision.treeLookahead && ExperimentManager.treeLookaheadRecordsMisses) {
			((TreeLookAheadDecisionMaker) decisionMaker).dumpNeededStates();
		}
	}
	
	/**
	 * Parses one data file per camera for the given day.
	 *
	 * @param FILE_NAME_FORMAT format with two placeholders (day, camera id)
	 * @param cameraIds        ids of the cameras to load
	 * @param trainDay         day identifier (also used for test-day loads; the
	 *                         log message below always says "training")
	 * @return camera id -> parsed data, in {@code cameraIds} iteration order
	 * @throws IOException if a data file cannot be read
	 */
	public static LinkedHashMap<String, CameraData> loadCameraData(
			String FILE_NAME_FORMAT, ArrayList<String> cameraIds, String trainDay) throws IOException {
		LinkedHashMap<String, CameraData> camDataMap = new LinkedHashMap<String, CameraData>();
		for (String camId : cameraIds) {
			System.out.println("Loading for training: "+camId+"...");
			camDataMap.put(camId, Parser.parseFile(String.format(FILE_NAME_FORMAT, trainDay, camId), camId));
		}
		return camDataMap;
	}

	/**
	 * Runs k-means on each camera's data and returns the resulting clusters,
	 * sorted by their x coordinate so cluster indices are stable per camera.
	 *
	 * @param cameraIds ids of the cameras to cluster
	 * @param data      camera id -> parsed data
	 * @param k         number of clusters per camera
	 * @return camera id -> x-sorted cluster list
	 */
	public static Hashtable<String, List<Cluster>> getCamClusters(
			ArrayList<String> cameraIds, Map<String, CameraData> data, int k) {
		Hashtable<String, List<Cluster>> camClusters = new Hashtable<String, List<Cluster>>();
		for (String cam: cameraIds){
			System.out.println("Clustering cam "+cam+"...");
			List<Cluster> clusters = Clustering.kMeansClustering(data.get(cam), k);
			// Equivalent to the hand-written Double.compare comparator.
			Collections.sort(clusters, Comparator.comparingDouble(Cluster::getX));
			camClusters.put(cam, clusters);
		}
		return camClusters;
	}

	/**
	 * Computes the best achievable outcome given the utility weights: for every
	 * frame that contains people, an oracle takes whichever action has the larger
	 * utility — one high-res capture (worth {@code alpha}) or a low-res capture
	 * of all people (worth {@code beta} per person).
	 *
	 * @param data  camera id -> parsed camera data
	 * @param alpha utility of a single high-resolution capture
	 * @param beta  utility per person captured at low resolution
	 * @return pair of (high-res capture count, low-res person count)
	 */
	public static Pair<Integer> computeOracleResults(
			Map<String, CameraData> data, double alpha, double beta) {
		int highRes = 0;
		int lowRes = 0;
		// For each camera in the dataset.
		for (CameraData camData : data.values()) {
			// For each frame in that dataset.
			for (Frame frame : camData.getFrames()) {
				int numberOfPeople = frame.getEntities().size();
				if (numberOfPeople > 0) {
					// At most one high-res capture per frame, so its utility is
					// simply alpha. (The original computed a ternary that was
					// always 1 inside this branch — dead code, removed.)
					if (alpha > (beta * numberOfPeople)) {
						highRes++;
					} else {
						lowRes += numberOfPeople;
					}	
				}
			}
		}
		return new Pair<Integer>(highRes, lowRes);
	}

	/**
	 * Appends one tab-separated result row (optionally preceded by a header row)
	 * to the given output file. Errors are logged, not propagated, so a failed
	 * write never aborts an experiment run.
	 *
	 * @param fileOutputName       file to append to
	 * @param printTitle           whether to emit the header row first
	 * @param modelThreshold       probability threshold of the model used
	 * @param numberOfClusters     total number of fused clusters
	 * @param predictorName        label of the probability-predicting technique
	 * @param decisionMaker        label of the decision maker
	 * @param highReshits          number of high-resolution hits
	 * @param lowResHits           number of low-resolution hits
	 * @param misses               number of high-resolution misses
	 * @param decisionMakingTimeMs average decision latency in milliseconds
	 */
	public static void printResults(String fileOutputName, boolean printTitle, double modelThreshold, int numberOfClusters, 
			String predictorName, String decisionMaker, int highReshits, int lowResHits, int misses, double decisionMakingTimeMs) {
		// try-with-resources: the original's close() was not in a finally, so an
		// IOException mid-write leaked the stream. close() also flushes.
		try (DataOutputStream out = new DataOutputStream(new FileOutputStream(fileOutputName, true))) {
			if (printTitle){
				out.writeBytes("Date"+"\t"+"Threshold"+"\t"+"Cluster Size"+"\tProbability Predicting Technique" +
						"\tNumber of Cameras"+"\tDecision maker"+"\tHighrest"+"\tLowRes"+"\tHighResMiss"+"\trate"+"\tDecision avg latency"+"\n");
			}
			out.writeBytes(new SimpleDateFormat("yyyy/MMM/dd HH:mm:ss").format(Calendar.getInstance().getTime()) +"\t");
			out.writeBytes(modelThreshold + "\t");
			out.writeBytes(numberOfClusters + "\t");
			out.writeBytes(predictorName + "\t");
			out.writeBytes(decisionMaker + "\t");
			out.writeBytes(highReshits + "\t");
			out.writeBytes(lowResHits + "\t");
			out.writeBytes(misses + "\t");
			out.writeBytes(getRate(highReshits, misses) + "\t");
			out.writeBytes(decisionMakingTimeMs + "\n");
		} catch (IOException e) {
			e.printStackTrace();
		}		
	}
	
	/**
	 * Return the ratio of hits.
	 *
	 * <p>The {@code +1} in the denominator avoids division by zero when both
	 * counts are 0 (and slightly biases the rate downward).
	 * 
	 * @param hits number of hits
	 * @param misses number of misses
	 * @return ratio of hits
	 */
	private static double getRate(long hits, long misses){
		return (double) hits / (hits + misses+1);
	}
}
