package tmhprediction.main;

import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.management.ManagementFactory;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;

import tmhprediction.eval.SolTmhEvaluator;
import tmhprediction.eval.TMHFinalEvaluator;
import tmhprediction.eval.TMHHelixEvaluator;
import tmhprediction.eval.TMHResidueEvaluator;
import tmhprediction.eval.WriteOutput;
import util.CopyInternalFiles;
import util.arff.FastaArffGenerator;
import util.arff.WrapperCaller;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class TMHProgram2 {

	/** Identifies which of the two stacked SVM levels an ARFF file is generated for. */
	public enum SVM_Number {SVM1, SVM2};
	
	// When true, logTime() prints timestamped progress messages to the console.
	private static boolean showTimeInConsole = true;
	
	// NOTE(review): appears unused in this class — TODO confirm before removing.
	private static final String configPathInternal = "/data/";
	private static final String separator = System.getProperty("file.separator");
	private static final String configLvl1 = "features_manualglob21.conf";
	private static final String configLvl2 = "features_manual.conf";	

	// Filled by createArffForSVM(): internal protein number -> FASTA header name.
	public static HashMap<Integer, String> NumberToNameMap;
	// Filled by createArffForSVM(): internal protein number -> amino-acid sequence.
	public static HashMap<Integer, String> NumberToSequenceMap;

	// True when pre-generated ARFFs were supplied via -a (observed classes available).
	private static boolean optionASet = false;
	private static String logFolder = "";
	
	/**
	 * This is 'Program2' from the exercise sheet.
	 * The model path (-m) must contain the sub-directories svm1 and svm2, each
	 * holding 0_model.svm, 1_model.svm and 2_model.svm; svm1 must additionally
	 * contain 0_cutoff, 1_cutoff and 2_cutoff. The test file path (-t) must point
	 * to the input for the ARFF generator (a FASTA file).
	 * @param args command line arguments, see the Options built below
	 */
	@SuppressWarnings("static-access")
	public static void main(String[] args)
	{
		try {
			Options options = new Options();

		    Option optionModel = OptionBuilder
			    .withArgName("m")
			    .hasArg()
			    .withDescription("Path to the svm-models")
			    .create("m");
		    optionModel.setRequired(true);
		    Option optionOutput = OptionBuilder
			    .withArgName("o")
			    .hasArg()
			    .withDescription("Path to the output folder")
			    .create("o");
		    optionOutput.setRequired(true);
		    Option optionTestInput = OptionBuilder
				    .withArgName("t")
				    .hasArg()
				    .withDescription("Location of the test set file")
				    .create("t");
		    optionTestInput.setRequired(true);
		    Option optionPythonScript = OptionBuilder
				    .withArgName("p")
				    .hasArg()
				    .withDescription("Location of pp2features.py")
				    .create("p");
		    optionPythonScript.setRequired(true);
		    Option optionArffPath = OptionBuilder
				    .withArgName("a")
				    .hasArg()
				    .withDescription("(optional) Location of arffs generated by pp2features.py")
				    .create("a");
		    optionArffPath.setRequired(false);

		    options.addOption(optionModel);
		    options.addOption(optionOutput);
		    options.addOption(optionTestInput);
		    options.addOption(optionPythonScript);
		    options.addOption(optionArffPath);
		    
		    CommandLineParser parser = new PosixParser();
		    CommandLine cmd = null;
		    try {
			cmd = parser.parse(options, args);
		    } catch (ParseException e) {
			HelpFormatter helpFormatter = new HelpFormatter( );
		        helpFormatter.setWidth( 80 );
		        helpFormatter.printHelp("java -jar -Xmx3G ./program2.jar -o /mnt/opt/data/pp1_12_exercise/groups/77/program2/sampleOutput -t /mnt/opt/data/pp1_12_exercise/groups/77/program2/sampleInput/test.fasta -m /mnt/opt/data/pp1_12_exercise/groups/77/program2/sampleInput/ -p /mnt/opt/data/pp1_12_exercise/scripts/pp2features/pp2features.py", options);
		        // Exit with a non-zero status so callers can detect the usage error
		        // (the original exited 0 here and had an unreachable exit(1) after it).
		        System.exit(1);
		    }
		    String outputPath = cmd.getOptionValue("o");
		    String testFilePath = cmd.getOptionValue("t");
		    String modelPath = cmd.getOptionValue("m");
		    String pp2featuresPath = cmd.getOptionValue("p");
		    		    
		    // Use the JVM process id (pid@host) to make the log folder unique per run.
		    String processId = ManagementFactory.getRuntimeMXBean().getName();
		    
		    Date now = new Date();
		    SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMdd_HHmmss_");
			logFolder = outputPath+"/logs/"+formatter.format(now) + processId+"/";
			

		    logTime("Start");
		    logTime("outputPath: "+outputPath);
		    logTime("testFilePath: "+testFilePath);
		    logTime("modelPath: "+modelPath);
		    logTime("log: "+logFolder);
		    
		    // Best-effort creation of the log and output directories; a failure here
		    // is reported but not fatal (later file writes will fail loudly instead).
		    try{
			    File f = new File(logFolder);
			    f.mkdirs();
			    f = new File(outputPath);
			    f.mkdirs();
		    } catch(Exception ex) {
		    	System.err.print("Couldn't create all necessary Directories: "+ex.getMessage());
		    	ex.printStackTrace();
		    }
		    
		    // Either reuse pre-generated ARFFs (-a) or generate them from the FASTA input.
		    String svm1testArffPath;
		    String svm2testArffPath;
		    if(cmd.hasOption("a")) {
		    	optionASet = true;
		    	svm1testArffPath = cmd.getOptionValue("a") + "/1_testset.arff";
		    	svm2testArffPath = cmd.getOptionValue("a") + "/2_testset.arff";
		    } else {
		    	svm1testArffPath = createArffForSVM(testFilePath,outputPath,pp2featuresPath,SVM_Number.SVM1);
		    	svm2testArffPath = createArffForSVM(testFilePath,outputPath,pp2featuresPath,SVM_Number.SVM2);
		    }

		    String firstLevelSvmModels[] = new String[3];
			firstLevelSvmModels[0] = modelPath+"/svm1/0_model.svm";
			firstLevelSvmModels[1] = modelPath+"/svm1/1_model.svm";
			firstLevelSvmModels[2] = modelPath+"/svm1/2_model.svm";

			String secondLevelSvmModels[] = new String[3];
			secondLevelSvmModels[0] = modelPath+"/svm2/0_model.svm";
			secondLevelSvmModels[1] = modelPath+"/svm2/1_model.svm";
			secondLevelSvmModels[2] = modelPath+"/svm2/2_model.svm";
			
			double cutOffs[] = new double[3];
			cutOffs[0] = readCutoffFromFile(modelPath+"/svm1/0_cutoff");
			cutOffs[1] = readCutoffFromFile(modelPath+"/svm1/1_cutoff");
			cutOffs[2] = readCutoffFromFile(modelPath+"/svm1/2_cutoff");
		    	
			// Fail early if any required model or ARFF file is missing.
			for(int i = 0; i < 3; i++) {
				if(!checkFileExists(firstLevelSvmModels[i])) {
					throw new Exception("File doesn't exist: "+firstLevelSvmModels[i]);
				}
				if(!checkFileExists(secondLevelSvmModels[i])) {
					throw new Exception("File doesn't exist: "+secondLevelSvmModels[i]);
				}
			}
			if(!checkFileExists(svm1testArffPath)) {
				throw new Exception("File doesn't exist: "+svm1testArffPath);
			}
			if(!checkFileExists(svm2testArffPath)) {
				// Bug fix: the original message reported svm1testArffPath here.
				throw new Exception("File doesn't exist: "+svm2testArffPath);
			}
			
		    runToolchain(svm1testArffPath,svm2testArffPath,firstLevelSvmModels,secondLevelSvmModels,cutOffs );
		    
		} catch(Exception ex) {
			ex.printStackTrace();
		}
	}
	
	
	/**
	 * Checks whether a file (or directory) with the given name exists.
	 * @param filename path to check
	 * @return true if the path exists
	 */
	private static boolean checkFileExists(String filename) {
		return (new File(filename)).exists();
	}
		
		
	/***
	 * Reads the first line of a file and converts it to a double number.
	 * @param cutoffFileName path of the cutoff file
	 * @return value in file
	 * @throws Exception if the file cannot be read, is empty, or does not contain a number
	 */
	private static double readCutoffFromFile(String cutoffFileName) throws Exception {
		BufferedReader br = new BufferedReader(
				new InputStreamReader(new FileInputStream(cutoffFileName)));
		try {
			String strLine = br.readLine();
			if (strLine == null) {
				throw new IOException("Cutoff file is empty: " + cutoffFileName);
			}
			return Double.parseDouble(strLine.trim());
		} finally {
			// Close in finally so the stream is not leaked when parsing fails
			// (the original closed only on the success path).
			br.close();
		}
	}

	/***
	 * Generates an ARFF-file from the FASTA input by running the external
	 * pp2features.py wrapper. Also populates the public NumberToNameMap and
	 * NumberToSequenceMap fields as a side effect.
	 * @param testFilePath path to the FASTA input file
	 * @param outputPath output folder (ARFFs are written to outputPath/arffs/)
	 * @param pp2featuresPath location of pp2features.py
	 * @param svmNumber SVM1 or SVM2 (selects feature config and output name)
	 * @return path to generated ARFF-file
	 * @throws Exception if generation fails
	 */
	private static String createArffForSVM(String testFilePath, String outputPath, String pp2featuresPath, SVM_Number svmNumber) throws Exception {
		boolean isSVM1 = true;
		String outputArffName = "1_testset";
		if(svmNumber != SVM_Number.SVM1) {
			isSVM1 = false;
			outputArffName = "2_testset";
		}
		String configPath = outputPath + "/configs/";
		// Copy the bundled feature configuration files next to the output.
		CopyInternalFiles.copyConfigFilesToDirectory(configPath +separator + configLvl1,configPath+separator  +configLvl2);
		
	    FastaArffGenerator generator = new FastaArffGenerator(testFilePath, outputPath, isSVM1);
	    generator.processFastaFile();
	    generator.generateArff();
	    generator.generatePPOutput();
	    
	    NumberToNameMap = generator.getNumberToNameMap();
	    NumberToSequenceMap = generator.getNumberToSequenceMap();
	    
	    System.out.println("Number to Seq map:");
	    for(Integer id : NumberToSequenceMap.keySet())
	    {
	    	System.out.println(id+ ":" + NumberToSequenceMap.get(id));
	    }
	    
	    System.out.println("Number to Name Map:");
	    for(Integer num : NumberToNameMap.keySet())
	    {
	    	System.out.println(num + ":" + NumberToNameMap.get(num));
	    }
	    
	    // Run the external feature-generation wrapper for this SVM level.
	    WrapperCaller.callWrapper(outputArffName, outputPath+"/arffs/", outputPath+"/out/", configPath,pp2featuresPath, isSVM1);
	    
	    return outputPath+"/arffs/"+outputArffName+".arff";
	}
	
	
	/**
	 * Runs the full two-level SVM prediction toolchain and writes the final output.
	 * There are distinct arff files for the 2 separate tasks: soluble or not &amp;
	 * find tmh helices. The arff files contain the same proteins but are composed
	 * of different features.
	 * @param arffToTest0 ARFF for the first-level (soluble vs. TMH) SVMs
	 * @param arffToTest1 ARFF for the second-level (helix residue) SVMs
	 * @param svm1Models the three first-level model files
	 * @param svm2Models the three second-level model files
	 * @param cutOffs per-model decision cutoffs for the first level
	 * @throws Exception if reading an ARFF or running an SVM fails
	 */
	private static void runToolchain(String arffToTest0, String arffToTest1, String svm1Models[], String svm2Models[], double cutOffs[]) throws Exception {
		// read arff into Instances; the class attribute is the last one
		logTime("build Instances");
		Instances testSet0 = DataSource.read(arffToTest0);
		testSet0.setClass(testSet0.attribute(testSet0.numAttributes() - 1));
		
		// load models into the level-1 SVMs and run the jury decision
		logTime("start with first SVM");
		TMHSVM1 level1svm = new TMHSVM1(svm1Models[0],svm1Models[1],svm1Models[2],testSet0,"1",cutOffs[0],cutOffs[1],cutOffs[2]);
		TMHResultMap svm1result = level1svm.getJuryResult();
		
		// doStatistics("after_svm1", ...) would not be correct here: the jury
		// decision contains only proteins, not single residues.

		// load models into the level-2 SVMs and run the jury decision
		logTime("start with second SVM");
		Instances testSet1 = DataSource.read(arffToTest1);
		TMHSVM2 level2svm = new TMHSVM2(svm2Models[0],svm2Models[1],svm2Models[2],testSet1,"2",5000);
		TMHResultMap svm2result = level2svm.getJuryResult();
		
		// With -a the input ARFFs carry observed classes; without, print using the
		// name/sequence maps produced by the ARFF generator.
		if(optionASet)
		{
			TMHFinalEvaluator.printFinalOutput(svm1result,svm2result,logFolder+"/finalOutput.txt");
		}
		else
		{
			TMHFinalEvaluator.printFinalOutputNoObs(svm1result,svm2result,logFolder+"/finalOutput.txt", NumberToNameMap, NumberToSequenceMap);
		}
		System.out.println("The final result can be found in: "+logFolder+"/finalOutput.txt");
		
		// Merge both levels into a single per-residue decision.
		// NOTE(review): finalResult is currently unused beyond the merge itself;
		// post-processing ideas (e.g. dropping length-1 helices, a double
		// threshold) were planned but never implemented.
		TMHMergeSVMs merger = new TMHMergeSVMs(svm1result, svm2result);
		TMHResultMap finalResult = merger.getFinalDecision();
		
		logTime("end");
	}
	
	/**
	 * Writes a timestamped log-message to the console (progress indicator).
	 * Output is suppressed when showTimeInConsole is false.
	 * @param msg the message to print
	 */
	public static void logTime(String msg) {
		if(showTimeInConsole) {
			Date now = new Date();
			SimpleDateFormat formatter = new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss");
			System.out.println(formatter.format(now)+": "+msg);
		}
	}

	/**
	 * Calculates and writes per-residue, per-segment and soluble-distinction
	 * statistics for a prediction result. Output files are written into the
	 * log folder with a timestamp prefix.
	 * @param name tag included in the output file names (e.g. "after_svm1")
	 * @param resM the prediction result to evaluate
	 */
	public static void doStatistics(String name, TMHResultMap resM) {
		Date now = new Date();
		SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMdd_HHmmss_");
		String prefix = formatter.format(now);
		
		File logDir = new File(logFolder);

		// if the directory does not exist, create it
		if (!logDir.exists())
		{
		    logDir.mkdirs(); 
		}
		
		logTime("Calculate per residue scores");
		TMHResidueEvaluator.calcPerResidueScores(resM, logFolder+prefix+name+"_scoresPerRes");
		
		logTime("Writing out predictions in human readable format");
		WriteOutput.writeObservedAndPredicted(resM, logFolder+prefix+name+"_readablePrediction");
		
		logTime("Calculating and writing out per segment scores");
		TMHHelixEvaluator.berechneFormeln(TMHHelixEvaluator.finalHelixEvaluation(resM), logFolder+"/"+prefix+name+"_scoresPerSeg");
		
		/*
		 * This only makes sense for soluble proteins and is only temporary anyway.
		 */
		logTime("Calculating distinction to soluble scores");
		HashMap<String, double[]> result0 = TMHResidueEvaluator.evaluateProteins(resM);
		SolTmhEvaluator.printStatistics(result0);
	}


}
