package tul.cot.local;

import java.io.File;
import java.io.IOException;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.hadoop.conf.Configuration;

import tul.cot.job.Migrations;
import tul.cot.job.MyLabels;
import tul.cot.job.output.LineSaver;
import tul.cot.job.output.LinesGrouper;
import tul.cot.job.output.OutputGetter;
import tul.cot.job.output.OutputReader;
import tul.cot.util.Pairs;


/**
 * Collects raw Hadoop job output, orders it with {@link LabelComparator},
 * and writes it into local per-category CSV files via {@link OutLinesGrouper}.
 *
 * Can be driven either programmatically (public constructor) or from the
 * command line ({@link #main(String[])}).
 */
public class GetResults {

	/** Number of histogram buckets used when invoked from the command line. */
	private static final int DEFAULT_NUM_BUCK = 20;

	private Configuration conf;

	private double startTime;	// start time of the first segment
	private int numSeg;			// number of time segments
	private int numBuck;		// number of histogram buckets
	private File pairsFile;		// csv file with pairs (CLI mode only)

	private File rawOutDir;		// dir with the raw job output
	private File localDir;		// local dir to write grouped results to

	private Pairs pairs;

	/** CLI-only; fields are populated by {@link #run(String[])}. */
	private GetResults() {}

	/**
	 * @param conf      Hadoop configuration used to access the raw output
	 * @param pairs     mapping of migration labels to their ordering
	 * @param startTime start time of the first segment
	 * @param timeSeg   number of time segments
	 * @param numBuck   number of histogram buckets
	 * @param rawOutDir directory containing the raw job output
	 * @param localDir  local directory the grouped results are written to
	 */
	public GetResults(Configuration conf,
			Pairs pairs, double startTime, int timeSeg, int numBuck,
			File rawOutDir, File localDir) {

		this.conf = conf;

		this.startTime = startTime;
		this.numSeg = timeSeg;
		this.numBuck = numBuck;

		this.rawOutDir = rawOutDir;
		this.localDir = localDir;

		this.pairs = pairs;
	}

	/**
	 * Command line argument names and their help descriptions.
	 */
	interface MainArgs {

		String ARG_START_TIME	= "startTime";
		String DESC_START_TIME	= "Start time of the first segment";

		String ARG_NUM_SEG		= "numSeg";
		String DESC_NUM_SEG		= "Number of segments";

		String ARG_PAIRS		= "pairs";
		String DESC_PAIRS		= "Local path to csv file with pairs";

		String ARG_OUT_HDFS		= "outHdfs";
		String DESC_OUT_HDFS	= "HDFS path to the output dir";

		String ARG_OUT_LOCAL	= "outLocal";
		String DESC_OUT_LOCAL	= "Local path to the output dir";
	}

	private static final Options options;
	// all options are required and take a value
	static {
		options = new Options();

		Option optStartTime	= new Option(MainArgs.ARG_START_TIME, true, MainArgs.DESC_START_TIME);
		Option optNumSeg	= new Option(MainArgs.ARG_NUM_SEG,	  true, MainArgs.DESC_NUM_SEG);
		Option optPairs		= new Option(MainArgs.ARG_PAIRS,	true, MainArgs.DESC_PAIRS);
		Option optOutHdfs	= new Option(MainArgs.ARG_OUT_HDFS,	true, MainArgs.DESC_OUT_HDFS);
		Option optOutLocal	= new Option(MainArgs.ARG_OUT_LOCAL, true, MainArgs.DESC_OUT_LOCAL);

		optStartTime.setRequired(true);
		optNumSeg.setRequired(true);
		optPairs.setRequired(true);
		optOutHdfs.setRequired(true);
		optOutLocal.setRequired(true);

		options.addOption(optStartTime);
		options.addOption(optNumSeg);
		options.addOption(optPairs);
		options.addOption(optOutHdfs);
		options.addOption(optOutLocal);
	}

	/**
	 * Scans the raw output for histogram records and collects the migration
	 * label (second CSV field) of each one — the "dominant" migrations.
	 *
	 * @param rawOutDir directory with the raw job output
	 * @return set of dominant migration labels
	 * @throws IOException if the raw output cannot be read
	 */
	private Set<String> getDominants(File rawOutDir) throws IOException {
		OutputReader reader = OutputReader.create(conf, rawOutDir);

		Set<String> dominants = new HashSet<String>();
		for (String line: reader.getLines()) {
			// NOTE(review): literal "HISTOGRAM" is assumed to equal
			// MyLabels.HISTOGRAM used elsewhere in this file — confirm.
			if (line.startsWith("HISTOGRAM")) {
				int first = line.indexOf(Migrations.CSV_SEP);
				int snd   = line.indexOf(Migrations.CSV_SEP, first + 1);
				if (first < 0 || snd <= first)
					throw new RuntimeException("cannot parse histogram line: " + line);

				String mig = line.substring(first + 1, snd);
				dominants.add(mig);
			}
		}

		return dominants;
	}

	/**
	 * CLI entry: parses arguments, loads the pairs file, then runs.
	 */
	private void run(String[] args) throws IOException, InterruptedException {
		loadArgs(args);

		// BUGFIX: the CLI path never initialized conf, leaving it null for
		// OutputReader.create / OutputGetter.create; use the default config.
		if (conf == null)
			conf = new Configuration();

		pairs = new Pairs();
		pairs.addPairs(pairsFile);

		run();
	}

	/**
	 * Reads the raw output, sorts it by {@link LabelComparator} and writes
	 * the grouped CSV files into the local output directory.
	 */
	public void run() throws IOException, InterruptedException {
		validate();

		OutLinesGrouper linesGrouper = new OutLinesGrouper(startTime, numSeg, numBuck);
		Set<String> dominants = getDominants(rawOutDir);
		LabelComparator labelComparator = new LabelComparator(pairs.getMap(), dominants);

		LineSaver lineSaver = new LineSaver(linesGrouper);
		OutputGetter outGetter = OutputGetter.create(conf);

		outGetter.init(rawOutDir, localDir, labelComparator, lineSaver);
		outGetter.getOutput();
	}

	private void validate() {
		//TODO validate fields (non-null dirs, numSeg > 0, numBuck > 0, ...)
	}

	/**
	 * Parses command line arguments into fields; prints usage and exits
	 * with status 1 on a parse error.
	 */
	private void loadArgs(String[] args) {
		CommandLineParser parser = new PosixParser();
		CommandLine cmd = null;

		try {
			cmd = parser.parse(options, args);
		} catch (ParseException e) {
			System.err.println(e.getMessage());

			HelpFormatter formatter = new HelpFormatter();
			formatter.printHelp(getClass().getSimpleName(), options);

			System.exit(1);
		}

		startTime	= Double.parseDouble(cmd.getOptionValue(MainArgs.ARG_START_TIME));
		numSeg		= Integer.parseInt(cmd.getOptionValue(MainArgs.ARG_NUM_SEG));
		numBuck		= DEFAULT_NUM_BUCK;	// not configurable from the CLI
		pairsFile	= new File(cmd.getOptionValue(MainArgs.ARG_PAIRS));
		rawOutDir	= new File(cmd.getOptionValue(MainArgs.ARG_OUT_HDFS));
		localDir	= new File(cmd.getOptionValue(MainArgs.ARG_OUT_LOCAL));
	}

	public static void main(String[] args) throws IOException, InterruptedException {
		new GetResults().run(args);
	}
}

/**
 * Comparator for raw output lines that orders them for the local output
 * files: non-migration records first (by label, then by statistic name),
 * then migration records by pair order, dominants before non-dominants.
 *
 * Not thread-safe only insofar as the supplied maps are shared; the
 * comparator itself holds no mutable state after construction.
 */
class LabelComparator implements Comparator<String> {

	private Map<String, Integer> pairs;		// migration label -> pair order
	private Set<String> dominants;			// labels of dominant migrations
	private Map<String, Integer> names;		// statistic name -> sort order

	public LabelComparator(Map<String, Integer> pairs, Set<String> dominants) {
		if (pairs == null)
			throw new IllegalArgumentException("pairs == null");

		if (dominants == null)
			throw new IllegalArgumentException("dominants == null");

		this.pairs = pairs;
		this.dominants = dominants;

		// fixed ordering of the per-record statistics
		this.names = new HashMap<String, Integer>();
		names.put(MyLabels.HPD_MAX, 1);
		names.put(MyLabels.MEDIAN, 2);
		names.put(MyLabels.HPD_MIN, 3);

	}

	/**
	 * Looks up a key in a map and fails with a descriptive exception
	 * instead of a bare NullPointerException on unboxing a missing entry.
	 */
	private static int requireOrder(Map<String, Integer> map, String key, String line) {
		Integer order = map.get(key);
		if (order == null)
			throw new IllegalArgumentException(
					"no ordering for '" + key + "' in line: " + line);
		return order;
	}

	/**
	 * A parsed output line with the sort keys extracted from it.
	 */
	private static class RawLine implements Comparable<RawLine> {

		private static final int NOT_CMP = -1;	//value not to compare

		String line;		//raw line
		String label;		//first value of line
		String name;		//HPD_MAX, MEDIAN, HPD_MIN

		int pairN  = NOT_CMP;	//order of pairs (migrations)
		int dominN = NOT_CMP;	//order of dominant migration (0 or 1)
		int nameN  = NOT_CMP;	//order of HPD_MAX, MEDIAN, HPD_MIN

		public RawLine(String line) {
			if (line == null)
				throw new IllegalArgumentException("line == null");

			this.line = line;
		}

		/**
		 * Extracts label, statistic name and sort keys from the raw line.
		 *
		 * @throws IllegalArgumentException if the line has too few CSV
		 *         fields or references an unknown pair / statistic name
		 */
		private RawLine init(Map<String, Integer> pairs,
				Set<String> dominants,
				Map<String, Integer> names) {

			if (pairs == null)
				throw new IllegalArgumentException("pairs == null");
			if (dominants == null)
				throw new IllegalArgumentException("dominants == null");
			if (names == null)
				throw new IllegalArgumentException("names == null");


			int first = line.indexOf(Migrations.CSV_SEP);
			if (first == -1)
				throw new IllegalArgumentException("line: " + line);

			label = line.substring(0, first);
			// only migration labels (containing the pair separator) get a pair order
			pairN = label.contains(Migrations.PAIR_SEP)
					? requireOrder(pairs, label, line) : NOT_CMP;

			// set dominant number for migrations records (dominants sort first)
			if (pairN != NOT_CMP) {
				dominN = dominants.contains(label) ? 0 : 1;
			}

			if (!label.equals(MyLabels.HISTOGRAM)) {
				int second = line.indexOf(Migrations.CSV_SEP, first + 1);
				if (second == -1)
					throw new IllegalArgumentException("line: " + line);

				name = line.substring(first+1, second);
				nameN = requireOrder(names, name, line);
			} else {
				// !!! HACK !!! nameN for histograms is the number of pair
				int second = line.indexOf(Migrations.CSV_SEP, first + 1);
				if (second == -1)
					throw new IllegalArgumentException("line: " + line);

				String pairStr = line.substring(first+1, second);
				nameN = requireOrder(pairs, pairStr, line);
			}

			return this;
		}

		@Override
		public int compareTo(RawLine other) {
			// neither line is a migration record: order by label, then statistic
			if (this.pairN == NOT_CMP && other.pairN == NOT_CMP) {
				int cmpLabel = this.label.compareTo(other.label);
				if (cmpLabel != 0)
					return cmpLabel;

				return Integer.compare(this.nameN, other.nameN);
			}

			// non-migration records sort before migration records
			if (this.pairN == NOT_CMP && other.pairN != NOT_CMP)
				return -1;

			if (this.pairN != NOT_CMP && other.pairN == NOT_CMP)
				return 1;

			// migrations.csv records: pair order, dominance, label, statistic

			if (this.pairN != other.pairN)
				return Integer.compare(this.pairN, other.pairN);

			if (this.dominN != other.dominN)
				return Integer.compare(this.dominN, other.dominN);

			int labelCmp = this.label.compareTo(other.label);
			if (labelCmp != 0)
				return labelCmp;

			return Integer.compare(this.nameN, other.nameN);
		}
	}

	@Override
	public int compare(String line1, String line2) {
		// NOTE(review): both lines are re-parsed on every comparison; for
		// large outputs a decorate-sort-undecorate approach would avoid
		// the repeated parsing.
		RawLine v1 = new RawLine(line1).init(pairs, dominants, names);
		RawLine v2 = new RawLine(line2).init(pairs, dominants, names);

		return v1.compareTo(v2);
	}

}

/**
 * Determines the output file for each raw line, produces the CSV header of
 * each file, and transforms raw lines into their final CSV form.
 */
class OutLinesGrouper extends LinesGrouper {

	private static final String REALISED_FILE = "realised.csv";
	private static final String POTENTIAL_FILE = "potential.csv";
	private static final String REALISED_TO_POTENTIAL_FILE = "realised2potential.csv";
	private static final String LINEAGES_FILE = "lineages.csv";
	private static final String HISTOGRAM_FILE = "hist.csv";
	private static final String MIGRATIONS_FILE = "migrations.csv";

	private final double startTime;	// start time of the first segment
	private final int numSeg;		// number of time segments
	private final int numBuck;		// number of histogram buckets

	public OutLinesGrouper(double startTime, int numSeg, int numBuck) {
		this.startTime = startTime;
		this.numSeg = numSeg;
		this.numBuck = numBuck;
	}

	/**
	 * Maps a raw line to its output file name; anything without a known
	 * label is treated as a migrations record.
	 */
	@Override
	public String getFileName(String line) {
		String label = getLabel(line);

		if (label.equals(MyLabels.REALISED))
			return REALISED_FILE;
		if (label.equals(MyLabels.POTENTIAL))
			return POTENTIAL_FILE;
		if (label.equals(MyLabels.REALISED_TO_POTENTIAL))
			return REALISED_TO_POTENTIAL_FILE;
		if (label.equals(MyLabels.LINEAGES))
			return LINEAGES_FILE;
		if (label.equals(MyLabels.HISTOGRAM))
			return HISTOGRAM_FILE;

		return MIGRATIONS_FILE;
	}

	/**
	 * Returns the CSV header line (terminated by "\n") for a given file.
	 *
	 * @throws IllegalArgumentException for an unknown file name
	 */
	@Override
	public String getFileHeader(String fileName) {
		if (fileName.equals(REALISED_FILE))
			return header_TimeSeg();
		if (fileName.equals(POTENTIAL_FILE))
			return header_TimeSeg();
		if (fileName.equals(REALISED_TO_POTENTIAL_FILE))
			return header_TimeSeg();
		if (fileName.equals(LINEAGES_FILE))
			return header_TimeSeg();
		if (fileName.equals(HISTOGRAM_FILE))
			return header_Hist();
		if (fileName.equals(MIGRATIONS_FILE))
			return header_Migrations();

		throw new IllegalArgumentException("unknown file: " + fileName);
	}

	/**
	 * Transforms a raw line into its final CSV form for the output file.
	 */
	@Override
	public String transform(String line) {
		String label = getLabel(line);

		if (label.equals(MyLabels.REALISED))
			return transform_cutLabel_quoteParameter(line);
		if (label.equals(MyLabels.POTENTIAL))
			return transform_cutLabel_quoteParameter(line);
		if (label.equals(MyLabels.REALISED_TO_POTENTIAL))
			return transform_cutLabel_quoteParameter(line);
		if (label.equals(MyLabels.LINEAGES))
			return transform_cutLabel_quoteParameter(line);
		if (label.equals(MyLabels.HISTOGRAM))
			return transform_hist(line);

		return transform_migrations(line);
	}

	/** Extracts the first CSV field (the label) of a raw line. */
	private String getLabel(String line) {
		int first = line.indexOf(Migrations.CSV_SEP);
		if (first == -1)
			throw new RuntimeException("no CSV separator in line: " + line);

		String label = line.substring(0, first);

		return label;
	}

	/** Re-joins split[from..] with the CSV separator. */
	private static String joinFrom(String[] split, int from) {
		StringBuilder sb = new StringBuilder(split[from]);
		for (int i = from + 1; i < split.length; ++i) {
			sb.append(Migrations.CSV_SEP);
			sb.append(split[i]);
		}
		return sb.toString();
	}

	/** Drops the label (field 0) and quotes the parameter (field 1). */
	private String transform_cutLabel_quoteParameter(String line) {
		// NOTE(review): String.split interprets CSV_SEP as a regex —
		// assumed to contain no regex metacharacters; confirm.
		String[] split = line.split(Migrations.CSV_SEP);
		//quoteParameter
		split[1] = "\"" + split[1] + "\"";

		return joinFrom(split, 1);
	}

	/** Drops the label and quotes the pair, rewriting its separator. */
	private String transform_hist(String line) {
		String[] split = line.split(Migrations.CSV_SEP);

//		Europe-->SinoHimal  to   "Europe<->>SinoHimal"
		split[1] = "\"" + split[1].replace(Migrations.PAIR_SEP, Migrations.HIST_PAIR_SEP) + "\"";

		return joinFrom(split, 1);
	}

	/** Quotes the route (field 0) and parameter (field 1), keeps the rest. */
	private String transform_migrations(String line) {
		String[] split = line.split(Migrations.CSV_SEP);

		split[0] = "\"" + split[0] + "\"";
		split[1] = "\"" + split[1] + "\"";

		return joinFrom(split, 0);
	}

	/**
	 * Appends the numSeg segment-end times as quoted columns: the time axis
	 * runs from startTime down towards zero in equal steps.
	 */
	private void appendTimeColumns(StringBuilder sb) {
		double len = startTime / ((double)numSeg);
		double time = startTime;

		for (int i=0; i<numSeg; ++i) {
			time -= len;
			sb.append(Migrations.CSV_SEP);
			sb.append("\"" + time + "\"");
		}
	}

	/** Header for the per-parameter time-segment files. */
	private String header_TimeSeg() {
		StringBuilder sb = new StringBuilder("\"Parameter\"");
		appendTimeColumns(sb);
		sb.append("\n");
		return sb.toString();
	}

	/** Header for hist.csv: bucket centers over [-1, 1]. */
	private String header_Hist() {
		StringBuilder sb = new StringBuilder("\"Route\"");

		double len = 2.0 / ((double)numBuck);
		double curr = -1.0 - len/2;
		for (int i=0; i<numBuck; ++i) {
			curr += len;
			sb.append(Migrations.CSV_SEP);
			// Locale.US keeps the decimal point independent of the default locale
			sb.append(String.format(Locale.US, "\"%.2f\"", curr));
		}

		sb.append("\n");
		return sb.toString();
	}

	/** Header for migrations.csv: route, parameter, then the time columns. */
	private String header_Migrations() {
		StringBuilder sb = new StringBuilder("\"Route\"" + Migrations.CSV_SEP + "\"Parameter\"");
		appendTimeColumns(sb);
		sb.append("\n");
		return sb.toString();
	}

}