package cz.cuni.amis.episodic.bayes.experiment;

import static cz.cuni.amis.episodic.bayes.memories.MemoryCreationStrategy.outputResults;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import jsmile.utils.JSmileUtil;
import lombok.AllArgsConstructor;
import lombok.Setter;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;

import smile.Network;
import smile.Network.BayesianAlgorithmType;
import au.com.bytecode.opencsv.CSVReader;
import au.com.bytecode.opencsv.CSVWriter;
import cz.cuni.amis.episodic.bayes.memories.IntervalSurpriseMemoryCreator;
import cz.cuni.amis.episodic.bayes.memories.MemoryCreationStrategy;
import cz.cuni.amis.episodic.bayes.memories.MemoryCreator;
import cz.cuni.amis.episodic.bayes.memories.MinimizeKLMemoryCreator;
import cz.cuni.amis.episodic.bayes.memories.PointSurpriseMemoryCreator;
import cz.cuni.amis.episodic.bayes.memories.eval.SingleTestCasePerformance;
import cz.cuni.amis.episodic.bayes.utils.DyBaNeMUtil;
import cz.cuni.amis.episodic.bayes.utils.DynamicNodeProperty;
import cz.cuni.amis.episodic.bayes.utils.MemoryChartCreator;
import cz.cuni.amis.episodic.bayes.utils.chart.BeliefEvolutionChart;
import cz.cuni.amis.episodic.bayes.utils.chart.GraphPaintingDevice;
import cz.cuni.amis.episodic.bayes.utils.chart.PngGraphDevice;
import cz.cuni.amis.episodic.data.EpisodicHierarchyInstance;
import cz.cuni.amis.episodic.dybanem.DBN;
import cz.cuni.amis.episodic.lisp.DefaultLispTranslator;
import cz.cuni.amis.episodic.lisp.LispPlanToCsvData;
import cz.cuni.amis.episodic.lisp.behan.LispTree;
import cz.cuni.amis.episodic.lisp.visitor.TreeTraceVisitor;

/**
 * Abstract base class for DyBaNeM experiments. An experiment loads one or more
 * trained Bayesian network files, evaluates memory creation strategies on test
 * evidence and writes textual results, serialized statistics and optional
 * charts under a per-experiment directory.
 * 
 * @author ik
 */
@AllArgsConstructor
public abstract class Experiment {
	/**
	 * Type of algorithm to use for inference.
	 * 
	 * @see BayesianAlgorithmType
	 */
	@Setter
	int bayesianAlgorithmType = BayesianAlgorithmType.Lauritzen;

	/**
	 * Number of samples used in approximative algorithms.
	 */
	@Setter
	int numberOfSamplesInApproximation = 1000;

	/**
	 * Name of this experiment; also used as the name of the subdirectory
	 * created under the common experiments directory.
	 */
	String experimentName;

	// Number of mems created per test case; also the upper bound of the
	// "Mems used" loops in the result printouts.
	int memsCreatedNum = 3;
	
	/**
	 * Directory common to all experiments.
	 */
	File targetExperimentDir;

	// Filenames of the trained network files to evaluate. The hierarchy depth
	// is encoded in the name after a '^' character (parsed by findMaxLevel).
	String[] networkFilenames;

	/**
	 * Should graphs from memory creation phase be created?
	 */
	boolean drawMemoryCreationGraphs = false;

	/**
	 * Should the belief evolution example be performed? That is iteratively
	 * setting <0,i++> observations and inspecting how the predicted goals
	 * change. This isn't necessary for memory creation.
	 */
	boolean performEvolutionExample = true;

	// Upper bound of the surprise axis in belief evolution charts;
	// presumably -1 means "auto range" — TODO confirm in BeliefEvolutionChart.
	double surpriseMaxRangeInEvolutionGraphs = -1;
	
	/**
	 * Range of days used for training.
	 */
	@Setter
	int[] trainingDataRange = new int[] { 0, Integer.MAX_VALUE };

	/**
	 * Range of days used for testing the model.
	 */
	@Setter
	int[] testingDataRange = new int[] { 0, 25 };

	/**
	 * Amount of smoothing of transition probabilities in DBN. 0 - original
	 * probs, 1 - uniform.
	 */
	@Setter
	double probabilitiesSmoothingFactor = 0.001;

	// String[] targets;

	/**
	 * Creates an experiment rooted in a dedicated subdirectory.
	 * 
	 * @param experimentName name of the experiment; used as the subdirectory
	 *            name under {@code targetExperimentsDir}
	 * @param targetExperimentsDir directory common to all experiments; the
	 *            per-experiment subdirectory is created inside it
	 */
	public Experiment(String experimentName, File targetExperimentsDir) {
		this.experimentName = experimentName;
		targetExperimentDir = new File(targetExperimentsDir, experimentName);
		// mkdirs() result is ignored; a failure would surface later on first write
		targetExperimentDir.mkdirs();
	}

	/**
	 * Tests all configured network architectures on their evidence, prints a
	 * per-level/per-mems comparison table to {@code comp_mems_means.txt} and
	 * serializes the aggregated results to
	 * {@code architectureToStrategiesResult.ser}.
	 * 
	 * @return map from architecture name to the per-strategy performance
	 *         statistics returned by {@link #testArchitecture}
	 * @throws Exception propagated from architecture testing and I/O
	 */
	public Map<String, Map<String, DescriptiveStatistics[][]>> perform()
			throws Exception {

		Map<String, Map<String, DescriptiveStatistics[][]>> architectureToStrategiesResult = new HashMap<>();
		Arrays.sort(networkFilenames);

		for (String net : networkFilenames) {
			List<EpisodicHierarchyInstance> evidence = getTestCases(net);
			File netFull = new File(targetExperimentDir, net);
			Map<String, DescriptiveStatistics[][]> results = testArchitecture(
					targetExperimentDir, netFull.getAbsolutePath(), evidence);

			String architectureName = networkFilenameToExperimentName(net);
			architectureToStrategiesResult.put(architectureName, results);
		}

		// FIX: the PrintStream used to be leaked; close it deterministically.
		try (PrintStream out = new PrintStream(new File(targetExperimentDir,
				"comp_mems_means.txt"))) {
			// for all level hierarchies
			int levelsMax = findMaxLevel(networkFilenames) + 1;
			for (int level = 0; level < levelsMax; level++) {
				// for all mems
				out.println("Level of hierarchy: " + level);
				for (int memsNum = 1; memsNum <= memsCreatedNum; memsNum++) {
					out.println("Mems used: " + memsNum);
					printArchitectureXStrategyResults(memsNum, level,
							architectureToStrategiesResult, out);
				}
				out.println();
			}
			out.flush();
		}

		// serialize experiment data in a binary file
		// FIX: close the stream even when writeObject throws.
		try (ObjectOutputStream oos = new ObjectOutputStream(
				new FileOutputStream(new File(targetExperimentDir,
						"architectureToStrategiesResult.ser")))) {
			oos.writeObject(architectureToStrategiesResult);
		}

		return architectureToStrategiesResult;
	}

	/**
	 * Extracts the hierarchy depth encoded in each network filename (the
	 * number following the '^' character) and returns the largest one.
	 * 
	 * @param networkFilenames filenames of the form {@code ...^<levels>...}
	 * @return maximum encoded level over all filenames
	 */
	int findMaxLevel(String[] networkFilenames) {
		List<Integer> parsedLevels = new ArrayList<>();
		for (String filename : networkFilenames) {
			// token after '^', trimmed at the first '_' and the first '.'
			String afterCaret = filename.split("\\^")[1];
			String levelToken = afterCaret.split("_")[0].split("\\.")[0];
			parsedLevels.add(Integer.parseInt(levelToken));
		}
		return Collections.max(parsedLevels);
	}

	/**
	 * Creates a tab-separated text file ({@code comp_means_mems<memsNum>.txt})
	 * comparing the mean performance of each strategy (columns) for each
	 * architecture (rows, sorted by name).
	 * 
	 * @param memsNum number of mems used; selects index {@code memsNum - 1}
	 *            in each statistics array
	 * @param architectureToStrategiesResult architecture name -> (strategy
	 *            name -> per-mems statistics)
	 * @throws IOException on failure writing the output file
	 */
	public void printArchitectureXStrategyResults(
			int memsNum,
			Map<String, Map<String, DescriptiveStatistics[]>> architectureToStrategiesResult)
			throws IOException {
		// text based results
		File meansOut = new File(targetExperimentDir, "comp_means_mems"
				+ memsNum + ".txt");
		// FIX: close the writer even when an exception occurs mid-write.
		try (CSVWriter writer = new CSVWriter(new FileWriter(meansOut), '\t')) {
			// header: strategy names taken from an arbitrary architecture
			// (all architectures are assumed to share the same strategies)
			List<String> header = new ArrayList<>();
			header.add("A\\S"); // architecture versus strategy
			for (String strategy : architectureToStrategiesResult.entrySet()
					.iterator().next().getValue().keySet()) {
				header.add(strategy);
			}
			writer.writeNext(header.toArray(new String[0]));
			// strategy per architecture
			String[] line = new String[header.size()];
			List<String> archs = new ArrayList<>(
					architectureToStrategiesResult.keySet());
			Collections.sort(archs);
			for (String arch : archs) {
				line[0] = arch;
				for (int i = 1; i < line.length; i++) {
					DescriptiveStatistics[] stat = architectureToStrategiesResult
							.get(arch).get(header.get(i));
					line[i] = Double.toString(stat[memsNum - 1].getMean());
				}
				writer.writeNext(line);
			}
		}
	}

	/**
	 * Prints one architecture-versus-strategy table of mean performances to
	 * the given stream, for a fixed hierarchy level and number of mems.
	 * 
	 * @param memsNum number of mems used; selects index {@code memsNum - 1}
	 * @param hierarchyLevel level of abstraction to report
	 * @param architectureToStrategiesResult architecture name -> (strategy
	 *            name -> [level][mems] statistics)
	 * @param out stream the tab-separated table is printed to
	 * @throws IOException declared for callers; not thrown directly here
	 */
	public void printArchitectureXStrategyResults(
			int memsNum,
			int hierarchyLevel,
			Map<String, Map<String, DescriptiveStatistics[][]>> architectureToStrategiesResult,
			PrintStream out) throws IOException {

		// Header columns: strategy names taken from an arbitrary architecture.
		Map<String, DescriptiveStatistics[][]> anyArchitecture = architectureToStrategiesResult
				.values().iterator().next();
		List<String> columns = new ArrayList<>();
		columns.add("Architecture\\Strategy"); // architecture versus strategy
		columns.addAll(anyArchitecture.keySet());
		out.println(StringUtils.join(columns, '\t'));

		// One row per architecture, sorted by name.
		List<String> sortedArchitectures = new ArrayList<>(
				architectureToStrategiesResult.keySet());
		Collections.sort(sortedArchitectures);
		String[] row = new String[columns.size()];
		for (String architecture : sortedArchitectures) {
			row[0] = architecture;
			for (int col = 1; col < row.length; col++) {
				DescriptiveStatistics[] levelStats = architectureToStrategiesResult
						.get(architecture).get(columns.get(col))[hierarchyLevel];
				row[col] = Double.toString(levelStats[memsNum - 1].getMean());
			}
			out.println(StringUtils.join(row, '\t'));
		}
	}

	/**
	 * Hook for subclasses that supply test cases directly.
	 * 
	 * @throws UnsupportedOperationException always, unless overridden
	 */
	public List<List<String>> getTestCases() {
		throw new UnsupportedOperationException();
	}

	/**
	 * Reads test cases for the given network from its companion CSV file
	 * ({@code <netName>_smile_data.csv} in the experiment directory). Rows are
	 * grouped into batches by their first column; each batch becomes one
	 * {@link EpisodicHierarchyInstance} whose per-level sequences are filled
	 * from the columns recognized by
	 * {@link DyBaNeMUtil#getDepthForNodeName(String)}.
	 * 
	 * @param netFilename network filename; the part before the first '.' is
	 *            used as the CSV name prefix
	 * @return list of batches in file order
	 * @throws RuntimeException wrapping any {@link IOException}
	 */
	public List<EpisodicHierarchyInstance> getTestCases(String netFilename) {
		String netName = netFilename.split("\\.")[0];
		String csvDataFilename = netName + "_smile_data.csv";
		File smileDataFile = new File(targetExperimentDir, csvDataFilename);

		// FIX: the reader used to be leaked; close it deterministically.
		// NOTE(review): FileReader uses the platform default charset — confirm
		// the CSV files are written with the same charset.
		try (CSVReader reader = new CSVReader(new FileReader(smileDataFile),
				'\t', '#')) {
			List<EpisodicHierarchyInstance> batches = new ArrayList<>();

			String lastBatch = "NA";
			EpisodicHierarchyInstance episodes = null;
			List<String[]> lines = reader.readAll();

			String[] header = lines.get(0);
			// we have to be careful since some models like AHMEM do not store
			// only G* and O in the CSV
			int hierarchyLevels = 0;
			for (String label : header) {
				if (DyBaNeMUtil.getDepthForNodeName(label) != -1) {
					hierarchyLevels++;
				}
			}

			for (String[] line : lines.subList(1, lines.size())) {
				String batch = line[0];
				if (!batch.equals(lastBatch)) {
					// first row of a new batch starts a new instance
					episodes = new EpisodicHierarchyInstance(hierarchyLevels);
					batches.add(episodes);
					lastBatch = batch;
				}

				// columns 0 and 1 are metadata; data columns start at index 2
				for (int i = 2; i < line.length; i++) {
					int depth = DyBaNeMUtil.getDepthForNodeName(header[i]);
					if (depth != -1) {
						episodes.getHierarchyByTime().get(depth).add(line[i]);
					}

				}
			}

			return batches;
		} catch (IOException e) {
			throw new RuntimeException(e);
		}

	}

	/**
	 * Computes the array of target node ids — the nodes whose state can be
	 * remembered. The result always starts with "O" followed by "G1" .. "Gn"
	 * for as many consecutive Gi nodes as exist in the network.
	 * 
	 * @param net network whose node ids are inspected
	 * @return array {"O", "G1", ..., "Gn"}
	 */
	public String[] getTargetNodes(Network net) {
		List<String> targetNodes = new ArrayList<>();
		targetNodes.add("O");
		Set<String> allIds = new HashSet<>(Arrays.asList(net.getAllNodeIds()));
		// collect consecutive Gi nodes until the first missing index
		for (int gi = 1;; gi++) {
			String candidate = "G" + gi;
			if (!allIds.contains(candidate)) {
				break;
			}
			targetNodes.add(candidate);
		}
		return targetNodes.toArray(new String[0]);
	}

	/**
	 * Derives the experiment (architecture) name from a network filename:
	 * the base name of the file truncated at its first '.'.
	 * 
	 * @param networkFilename path or name of the network file
	 * @return base name without extension(s)
	 */
	public String networkFilenameToExperimentName(String networkFilename) {
		String baseName = new File(networkFilename).getName();
		int firstDot = baseName.indexOf('.');
		return firstDot < 0 ? baseName : baseName.substring(0, firstDot);
	}

	/**
	 * Factory for the device used to paint graphs; override to change the
	 * output format.
	 * 
	 * @param rootDir directory the device writes its images into
	 * @return PNG-producing device by default
	 */
	protected GraphPaintingDevice createGraphDevice(File rootDir) {
		return new PngGraphDevice(rootDir);
	}

	/**
	 * Factory for the set of memory creation strategies compared by
	 * {@link #testArchitecture}; override to change the strategy set.
	 * 
	 * @param device painting device passed to every creator (may be null when
	 *            memory-creation graphs are disabled)
	 * @return default set: KL minimization, point surprise, interval surprise
	 */
	protected MemoryCreator[] createMemoryCreators(GraphPaintingDevice device) {
		return new MemoryCreator[] { new MinimizeKLMemoryCreator(device),
				new PointSurpriseMemoryCreator(device),
				new IntervalSurpriseMemoryCreator(device) };
	}

	/**
	 * Tests a single architecture: loads the trained network, smooths its
	 * probabilities, optionally runs the belief-evolution example, then
	 * compares the memory creation strategies on the test evidence and writes
	 * plain, aggregated and raw statistics per strategy.
	 * 
	 * @param experimentRootDir root directory; a subdirectory per architecture
	 *            is created inside it
	 * @param networkFilename file with the trained network
	 * @param evidence test cases; trimmed to {@link #testingDataRange}
	 * @return map where key is strategy id and values are performances when
	 *         the memory contains 1, 2 .. n mems.
	 * @throws Exception propagated from inference, charting and I/O
	 */
	public Map<String, DescriptiveStatistics[][]> testArchitecture(
			File experimentRootDir, String networkFilename,
			List<EpisodicHierarchyInstance> evidence) throws Exception {
		Map<String, DescriptiveStatistics[][]> map = new HashMap<>();
		String experimentName = networkFilenameToExperimentName(networkFilename);

		File experimentSubDir = new File(experimentRootDir, experimentName);
		// create root dir for this subexperiment
		experimentSubDir.mkdirs();

		Network origNet = new Network();
		origNet.readFile(networkFilename);
		// Smoothing to avoid zero probabilities, SMILE does not smooth
		// transition probabilities during training, it has to be done
		// externally
		JSmileUtil.smoothProbabilities(origNet, probabilitiesSmoothingFactor);
		String[] targets = getTargetNodes(origNet);

		// use only a portion of the whole data for testing
		evidence = DyBaNeMUtil.subList(evidence, testingDataRange);

		// DBN unrolled to the length of the first test case's level-0 sequence
		DBN net = new DBN(origNet, evidence.get(0).getHierarchyByTime().get(0)
				.size());
		// set custom alg
		net.getNet().setBayesianAlgorithm(bayesianAlgorithmType);
		net.getNet().setSampleCount(numberOfSamplesInApproximation);

		File graphsDir = new File(experimentSubDir, "graphs");
		graphsDir.mkdirs();

		GraphPaintingDevice graphDevice = createGraphDevice(graphsDir);

		if (performEvolutionExample) {
			System.out.println("Performing belief evolution example...");
			List<DynamicNodeProperty[]> res = Experiment_2.evolveOverTime(net,
					"O", evidence.get(0).getHierarchyByTime().get(0), targets);

			// draw single graph per node
			BeliefEvolutionChart.createEvolutionCharts(res, net, graphsDir,
					"beliefEvolution", surpriseMaxRangeInEvolutionGraphs);

			// draw default graphs
			MemoryChartCreator.createDefaultCharts(res, net, graphDevice);
		}

		File statsDir = new File(experimentSubDir, "stats");

		System.out.println("**************** \n Network: " + networkFilename
				+ "\n****************");

		GraphPaintingDevice memoryCreationGraphDevice = drawMemoryCreationGraphs ? graphDevice
				: null;

		Map<String, List<SingleTestCasePerformance>> result = MemoryCreationStrategy
				.compareAndReturn(evidence, net, targets,
						createMemoryCreators(memoryCreationGraphDevice), memsCreatedNum,
						memoryCreationGraphDevice);
		for (Map.Entry<String, List<SingleTestCasePerformance>> perfs : result
				.entrySet()) {
			String strategy = perfs.getKey();

			System.out.println(strategy);

			// output performance in a directory for this strategy
			File strategySubdir = new File(statsDir, strategy);
			strategySubdir.mkdirs();

			// print results per each testcase and mem
			// FIX: the PrintStream used to be leaked; close it deterministically.
			File plainResultsFile = new File(strategySubdir,
					"results_plain.txt");
			try (PrintStream plainOut = new PrintStream(plainResultsFile)) {
				outputResults(plainOut, perfs.getValue());
			}

			// print aggregated stats
			DescriptiveStatistics[][] performanceStats = MemoryCreationStrategy
					.agregatePerformances(perfs.getValue());
			// FIX: loop-invariant — was recomputed once per level; the printed
			// output ("TIME" block per level) stays identical.
			DescriptiveStatistics timeStats = MemoryCreationStrategy
					.agregateComputationTime(perfs.getValue());

			File statsFile = new File(strategySubdir, "stats.txt");
			File rawPerformanceFile = new File(strategySubdir,
					"raw_performance.txt");

			// FIX: both streams used to be leaked.
			try (PrintStream statsOut = new PrintStream(statsFile);
					PrintStream rawOut = new PrintStream(rawPerformanceFile)) {
				rawOut.println("Level\tMems\tMean\tSD\tPerformance data, one entry per day");

				for (int level = 0; level < performanceStats.length; level++) {
					statsOut.println("***** Level of abstraction " + level
							+ "*****");
					printAggregatedStatsPerLevel(statsOut, performanceStats[level]);

					// raw data
					printRawDataPerLevel(rawOut, performanceStats[level], level);

					statsOut.println("TIME");
					statsOut.println(timeStats.toString());
				}
			}
			map.put(strategy, performanceStats);
		}

		return map;
	}

	/**
	 * Prints each per-mems statistics object, labelled with its index, to the
	 * given stream.
	 * 
	 * @param out target stream
	 * @param stats one statistics object per number of mems
	 */
	static void printAggregatedStatsPerLevel(PrintStream out,
			DescriptiveStatistics[] stats) {
		int ix = 0;
		for (DescriptiveStatistics memStats : stats) {
			out.println("MEMS ix " + ix);
			out.println(memStats.toString());
			ix++;
		}
	}

	/**
	 * Prints one tab-separated raw-data row: level, mems, mean, standard
	 * deviation, then the underlying samples joined with ';'.
	 * 
	 * @param out target stream
	 * @param stats statistics whose raw values are dumped
	 * @param level level of abstraction this row belongs to
	 * @param mems number of mems (zero-based index) this row belongs to
	 */
	static void printRawDataPerLevel(PrintStream out,
			DescriptiveStatistics stats, int level, int mems) {
		double[] samples = stats.getValues();
		StringBuilder joined = new StringBuilder();
		for (int i = 0; i < samples.length; i++) {
			if (i > 0) {
				joined.append(';');
			}
			joined.append(samples[i]);
		}
		out.println(level + "\t" + mems + "\t" + stats.getMean() + "\t"
				+ stats.getStandardDeviation() + "\t" + joined);
	}

	/**
	 * Prints one raw-data row per number of mems for the given level.
	 * 
	 * @param out target stream
	 * @param stats one statistics object per number of mems
	 * @param level level of abstraction these rows belong to
	 */
	static void printRawDataPerLevel(PrintStream out,
			DescriptiveStatistics[] stats, int level) {
		int memIx = 0;
		for (DescriptiveStatistics memStats : stats) {
			printRawDataPerLevel(out, memStats, level, memIx);
			memIx++;
		}
	}

	/**
	 * Walks the LISP plan forest and collects the translated outcome of the
	 * atomic action at the end of every trace.
	 * 
	 * @param inputLispPlan LISP plan source handed to {@link LispTree}
	 * @return evidence items in visitation order
	 */
	public static List<String> readEvidence(String inputLispPlan) {
		LispTree parser = new LispTree(inputLispPlan);
		// FIX: removed unused local LispPlanToCsvData.LispToCsvResult and
		// dead commented-out code.

		final List<String> evidence = new LinkedList<>();

		parser.walkInForest(new TreeTraceVisitor() {

			@Override
			public void traceVisited(List<List<String>> trace) {
				// the atomic action is the head of the last element of the trace
				String atomicAction = trace.get(trace.size() - 1).get(0);
				evidence.add(DefaultLispTranslator
						.translateOutcome(atomicAction));
			}
		});
		return evidence;
	}

	/**
	 * Loads experiment results serialized by {@link #perform()}.
	 *  optional: this can be also loaded from "raw_performance.txt"
	 * <p>
	 * NOTE(review): uses native Java deserialization; only load files this
	 * application produced itself.
	 * 
	 * @param resultsDir directory containing
	 *            {@code architectureToStrategiesResult.ser}
	 * @return architecture name -> (strategy name -> [level][mems] statistics)
	 * @throws FileNotFoundException when the serialized file is missing
	 * @throws IOException on read failure
	 * @throws ClassNotFoundException when a serialized class is unavailable
	 */
	// cast is unavoidable with ObjectInputStream.readObject
	@SuppressWarnings("unchecked")
	public static Map<String, Map<String, DescriptiveStatistics[][]>> loadResults(
			File resultsDir) throws FileNotFoundException, IOException,
			ClassNotFoundException {
		// load data serialized in binary file
		// FIX: the stream used to be leaked; close it deterministically.
		try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(
				new File(resultsDir, "architectureToStrategiesResult.ser")))) {
			return (Map<String, Map<String, DescriptiveStatistics[][]>>) ois
					.readObject();
		}
	}

	/**
	 * Convenience overload: loads results from this experiment's own
	 * directory.
	 * 
	 * @see #loadResults(File)
	 */
	public Map<String, Map<String, DescriptiveStatistics[][]>> loadResults()
			throws FileNotFoundException, IOException, ClassNotFoundException {
		return loadResults(targetExperimentDir);

	}

}
