package tum.in.rost.pp;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.lang3.StringUtils;

import weka.classifiers.functions.SMO;
import weka.core.Instances;

/**
 * Builds windowed Weka ARFF inputs from a transmembrane-protein feature dataset,
 * trains/loads an SMO (SVM) model, and prints per-residue predictions
 * ({@code +} / {@code -}) grouped under FASTA-style {@code >protein} headers.
 *
 * <p>All state is held in public static fields that are populated in a fixed order by
 * {@link #main(String[])}: {@code init} → {@code createNewTestArffFile} (test, train,
 * cross-train) → {@code SVM.trainSVM()} → {@code predict()}. The class is therefore
 * NOT thread-safe and not reusable within one JVM run without resetting the statics.
 */
public class ProteinPrediction {

	/** Raw input dataset (full feature matrix, one residue per row). */
	public static final String DATASET_FILE_PATH = "tmps.arff";

	public static final String TEST_FILE_PATH = "test.arff";
	public static final String TRAIN_FILE_PATH = "train.arff";

	public static final String CROSS_TRAIN_FILE_PATH = "crossTrain.arff";

	// ARFF markers. Note: "ATRIBUTE" is a typo but is part of the public API; the
	// trailing space is significant for the startsWith/substringBetween parsing below.
	public static final String ATRIBUTE_KEY = "@ATTRIBUTE ";

	public static final String HEADER_FILE_NAME_LINE = "@RELATION 'tmps.arff'";

	public static final String DATA_KEY = "@DATA";

	/** Separator between attribute name and type in generated @ATTRIBUTE lines. */
	public static final String FIVE_TABS = "\t\t\t\t\t";

	public static String NEW_LINE = "\n";

	/** Total number of data rows seen across ALL files parsed so far. */
	public static int counter = 0;

	/** Attribute name → zero-based column index in the raw dataset. */
	public static Hashtable<String, Integer> attributeValueHashTable = new Hashtable<>();

	/** Selected attribute key → ordered list of expanded column names to emit. */
	public static LinkedHashMap<String, List<String>> attributeToIndexList = new LinkedHashMap<>();

	public static final List<String> attributeList = new ArrayList<String>();

	/** Attributes expanded per window position AND per amino acid (windowSize * 20 columns). */
	public static final List<String> attributesWithCompleteWindowReferences = Arrays.asList(new String[] { "pssm", "perc", "psic" });

	public static final String aminoAcidList[] = { "A", "R", "N", "D", "C", "Q", "E", "G", "H", "I", "L", "K", "M", "F", "P", "S", "T", "W", "Y", "V" };

	public static final int aminoAcidCount = aminoAcidList.length;

	public static final String TRAIN_WEKA_INPUT_FILE_PATH = "trainWekaInput.arff";

	public static final String CROSSTRAIN_WEKA_INPUT_FILE_PATH = "crossTrainWekaInput.arff";

	public static final String TEST_WEKA_INPUT_FILE_PATH = "testInput.arff";

	public static final String INPUT_HEADER_FILE_NAME_LINE = "@RELATION 'WekaInput.arff'";

	public static final String SMO_MODEL_SERIALIZED_FILE = "smo_model.txt";

	/** Feature keys to project into the Weka input files; overwritten in main(). */
	public static Collection<String> specifiedAttrributes = Arrays.asList(new String[] {"pssm", "chemprop_hyd", "ri_acc", "psic_numSeq", "class"});

	/** Sliding-window size; must be odd and in (0, 25]. */
	public static int windowSize = 25;

	// Per-protein bookkeeping, filled while parsing data rows (row ids look like
	// "<proteinName>_<residueIndex>"). Parallel lists: name at i has count at i.
	public static List<String> ProteinName_Instance = new ArrayList<>();
	public static List<Integer> ProteinName_InstanceCount = new ArrayList<>();

	public static String ProteinName = "";

	public static int instanceCounter = 0;

	/** Attribute name → ARFF type token (last whitespace-separated field of its @ATTRIBUTE line). */
	public static Hashtable<String, String> attributeTypeHashTable = new Hashtable<>();

	// Attributes that occur exactly once per residue (no window expansion).
	// NOTE(review): "Hcomposition" (vs the H_composition pattern of its siblings) and the
	// leading space in " loop_composition1" look like data typos — they must match the
	// attribute names in the ARFF header exactly; verify against tmps.arff before changing.
	public static List<String> atrributesWithoutCompleteWindowReferences = Arrays.asList(new String[] { "A_composition", "R_composition", "N_composition", "D_composition", "C_composition",
			"Q_composition", "E_composition", "G_composition", "Hcomposition", "I_composition", "L_composition", "K_composition", "M_composition", "F_composition", "P_composition", "S_composition",
			"T_composition", "W_composition", "Y_composition", "V_composition", "length_category1", "length_category2", "length_category3", "length_category4", "helix_composition1",
			"helix_composition2", "helix_composition3", "strand_composition1", "strand_composition2", "strand_composition3", " loop_composition1", "loop_composition2", "loop_composition3",
			"exposed_composition1", "exposed_composition2", "exposed_composition3", "intermediate_composition1", "intermediate_composition2", "intermediate_composition3", "buried_composition1",
			"buried_composition2", "buried_composition3", "class" });

	/**
	 * Reads the ARFF header of {@code filename} and fills
	 * {@link #attributeValueHashTable} (name → column index) and
	 * {@link #attributeTypeHashTable} (name → type token). Stops at the @DATA line.
	 *
	 * @param filename path to an ARFF file
	 * @throws IOException if the file cannot be read
	 */
	public static final void parseAttributeFile(String filename) throws IOException {
		int columnIndex = 0;
		// try-with-resources: the original leaked the reader on any exception.
		try (BufferedReader reader = new BufferedReader(new FileReader(new File(filename)))) {
			String line;
			while ((line = reader.readLine()) != null) {
				if (StringUtils.startsWith(line, ATRIBUTE_KEY)) {
					// Attribute name is the token between "@ATTRIBUTE " and the next space.
					String attribute = StringUtils.substringBetween(line, ATRIBUTE_KEY, " ");
					attributeValueHashTable.put(attribute, columnIndex);
					attributeTypeHashTable.put(attribute, StringUtils.substringAfterLast(line, " "));
					columnIndex++;
				}
				if (StringUtils.startsWith(line, DATA_KEY)) {
					break; // header finished; do not scan the data section
				}
			}
		}
	}

	/**
	 * Projects one raw data row onto the selected columns (in
	 * {@link #attributeToIndexList} order) and writes it as a CSV line.
	 * Also maintains the per-protein instance bookkeeping used by {@link #predict()}:
	 * a protein's count is flushed when the FIRST row of the NEXT protein arrives.
	 * NOTE(review): the final protein of the final parsed file is therefore never
	 * flushed; in the current call order main() happens to parse the test file first,
	 * so predict() still sees all test proteins — confirm before reordering calls.
	 *
	 * @param features raw row split on commas; features[0] is "<protein>_<residue>"
	 * @param bisWri   destination writer (one CSV line appended)
	 * @throws IOException if writing fails
	 */
	public static void parseDataSet(String features[], BufferedWriter bisWri) throws IOException {
		if (counter == 0) {
			ProteinName = StringUtils.substringBeforeLast(features[0], "_");
		}
		counter++;
		if (StringUtils.substringBeforeLast(features[0], "_").equals(ProteinName)) {
			instanceCounter++;
		} else {
			// Protein changed: record the finished protein, start counting the new one.
			ProteinName_InstanceCount.add(instanceCounter);
			ProteinName_Instance.add(ProteinName);
			ProteinName = StringUtils.substringBeforeLast(features[0], "_");
			instanceCounter = 1;
		}
		// StringBuilder + typed iteration replaces the raw-typed Iterator and the
		// O(n^2) String concatenation (plus removeEndIgnoreCase) of the original.
		StringBuilder featureSet = new StringBuilder();
		for (Map.Entry<String, List<String>> entry : attributeToIndexList.entrySet()) {
			for (String attributeName : entry.getValue()) {
				int columnKey = attributeValueHashTable.get(attributeName).intValue();
				if (featureSet.length() > 0) {
					featureSet.append(",");
				}
				featureSet.append(features[columnKey]);
			}
		}
		bisWri.write(featureSet.toString() + NEW_LINE);
	}

	/**
	 * Validates the shared window-size precondition (odd, in (0, 25]) and returns
	 * the half-window radius. Extracted: previously duplicated in two methods.
	 *
	 * @throws IllegalStateException if {@link #windowSize} is invalid
	 */
	private static int halfWindowOrThrow() {
		if (windowSize % 2 == 1 && windowSize > 0 && windowSize <= 25) {
			return windowSize / 2;
		}
		throw new IllegalStateException("windowSize must be odd and in (0, 25], was " + windowSize);
	}

	/**
	 * Expands {@code key} into windowSize * 20 column names of the form
	 * {@code <offset>_<aminoAcid>_<key>} (offset from -w/2 to +w/2), writing a
	 * matching @ATTRIBUTE header line for each.
	 *
	 * @return the expanded column names in emission order
	 * @throws IllegalStateException if {@link #windowSize} is invalid
	 */
	public static LinkedList<String> getindexesForWindowSizeTotalSet(String key, BufferedWriter bisWriteTrain) throws IOException {
		LinkedList<String> indexes = new LinkedList<String>();
		int half = halfWindowOrThrow();
		// Plain nested loops replace the original while-on-size loop; since windowSize
		// is odd the inner loop never needs truncating, and the name is built once.
		for (int offset = -half; offset <= half; offset++) {
			for (int k = 0; k < aminoAcidCount; k++) {
				String name = offset + "_" + aminoAcidList[k] + "_" + key;
				indexes.add(name);
				bisWriteTrain.write(ATRIBUTE_KEY + name + FIVE_TABS + attributeTypeHashTable.get(name) + NEW_LINE);
			}
		}
		return indexes;
	}

	/**
	 * Emits a single non-windowed attribute: one column name and one @ATTRIBUTE line.
	 */
	public static LinkedList<String> getindexesForSingleSetAttributes(String key, BufferedWriter bisWriteTrain) throws IOException {
		LinkedList<String> indexes = new LinkedList<String>();
		indexes.add(key);
		bisWriteTrain.write(ATRIBUTE_KEY + key + FIVE_TABS + attributeTypeHashTable.get(key) + NEW_LINE);
		return indexes;
	}

	/**
	 * Expands {@code key} into windowSize column names of the form
	 * {@code <offset>_<key>}, writing a matching @ATTRIBUTE line for each.
	 *
	 * @throws IllegalStateException if {@link #windowSize} is invalid
	 */
	public static LinkedList<String> getindexesForWindowSizeSingleSet(String key, BufferedWriter bisWriteTrain) throws IOException {
		LinkedList<String> indexes = new LinkedList<String>();
		int half = halfWindowOrThrow();
		for (int offset = -half; offset <= half; offset++) {
			String name = offset + "_" + key;
			indexes.add(name);
			bisWriteTrain.write(ATRIBUTE_KEY + name + FIVE_TABS + attributeTypeHashTable.get(name) + NEW_LINE);
		}
		return indexes;
	}

	/**
	 * Writes a complete Weka input ARFF file: relation line, generated @ATTRIBUTE
	 * header, @DATA marker, then the projected rows of {@code fileIn}.
	 */
	public static void createNewTestArffFile(String fileIn, String fileOut) throws IOException {
		try (BufferedWriter bisWriteTrain = new BufferedWriter(new FileWriter(new File(fileOut)))) {
			bisWriteTrain.write(INPUT_HEADER_FILE_NAME_LINE + NEW_LINE);
			createHeadersForArffFile(bisWriteTrain);
			bisWriteTrain.write(DATA_KEY + NEW_LINE);
			parseInstanceFile(fileIn, bisWriteTrain);
		}
	}

	/**
	 * Like {@link #createNewTestArffFile} but without the generated @ATTRIBUTE block.
	 * NOTE(review): despite the name it still writes the @RELATION and @DATA lines —
	 * only the attribute declarations are omitted.
	 */
	public static void createNewTestArffFileWithoutHeader(String fileIn, String fileOut) throws IOException {
		try (BufferedWriter bisWriteTrain = new BufferedWriter(new FileWriter(new File(fileOut)))) {
			bisWriteTrain.write(INPUT_HEADER_FILE_NAME_LINE + NEW_LINE);
			bisWriteTrain.write(DATA_KEY + NEW_LINE);
			parseInstanceFile(fileIn, bisWriteTrain);
		}
	}

	/**
	 * Writes the @ATTRIBUTE block for every key in {@link #specifiedAttrributes},
	 * choosing the expansion strategy per key, and records the expanded column
	 * names in {@link #attributeToIndexList} for later row projection.
	 */
	public static void createHeadersForArffFile(BufferedWriter bisWriteTrain) throws IOException {
		for (String key : specifiedAttrributes) {
			LinkedList<String> indexArray;
			if (attributesWithCompleteWindowReferences.contains(key)) {
				indexArray = getindexesForWindowSizeTotalSet(key, bisWriteTrain);
			} else if (atrributesWithoutCompleteWindowReferences.contains(key)) {
				indexArray = getindexesForSingleSetAttributes(key, bisWriteTrain);
			} else {
				indexArray = getindexesForWindowSizeSingleSet(key, bisWriteTrain);
			}
			attributeToIndexList.put(key, indexArray);
		}
	}

	/**
	 * Streams the data rows of {@code fileName} through {@link #parseDataSet}.
	 * Header/blank lines before the first data row are skipped; everything after
	 * the first data row is treated as data (matching the original two-loop logic,
	 * now collapsed into one loop with a flag). Every data row must have exactly
	 * 2886 comma-separated columns.
	 *
	 * @throws IllegalStateException on a row with the wrong column count
	 */
	public static void parseInstanceFile(String fileName, BufferedWriter bisWri) throws IOException {
		try (BufferedReader reader = new BufferedReader(new FileReader(new File(fileName)))) {
			String line;
			boolean inDataSection = false;
			while ((line = reader.readLine()) != null) {
				if (!inDataSection) {
					// Still in the header: skip markers and blank lines.
					if (line.startsWith("@ATTRIBUTE") || line.startsWith("@DATA") || line.startsWith("@RELATION") || line.length() == 0) {
						continue;
					}
					inDataSection = true;
				}
				String features[] = line.split(",");
				if (features.length != 2886) {
					throw new IllegalStateException("Expected 2886 columns in " + fileName + " but found " + features.length);
				}
				parseDataSet(features, bisWri);
			}
		}
	}

	/**
	 * Splits the raw dataset into train/test/cross-train files and parses its header.
	 */
	public static void init(String file) throws IOException {
		DataSplit split = new DataSplit();
		split.splitdata(file);
		parseAttributeFile(file);
	}

	public SMO BESTSMOMODEL = new SMO();
	public static String DATALOCATION = "";

	/**
	 * Deserializes the trained SMO model from "svm.model".
	 * NOTE(review): name keeps its original (misspelled) form — it is public API.
	 */
	public static SMO retrievModel() throws FileNotFoundException, ClassNotFoundException, IOException {
		try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream("svm.model"))) {
			return (SMO) ois.readObject();
		}
	}

	/**
	 * Classifies every instance of the test Weka input with the loaded SMO model and
	 * prints FASTA-style output: a {@code >protein} header followed by one
	 * {@code +} (class 0) or {@code -} (class 1) per residue.
	 *
	 * <p>BUG FIX: the original advanced to the next protein AFTER the loop header
	 * check but printed the new {@code >protein} line only on the FOLLOWING
	 * iteration, so each protein's first residue symbol was appended to the previous
	 * protein's line. The protein advance/header print now happens before the
	 * instance is classified.
	 */
	public void predict() {
		try {
			Instances testData = new Instances(new FileReader(TEST_WEKA_INPUT_FILE_PATH));
			testData.setClassIndex(testData.numAttributes() - 1);
			BESTSMOMODEL = retrievModel();
			String protein = ProteinName_Instance.get(0);
			int instanceCount = ProteinName_InstanceCount.get(0);
			int numberCount = 0;
			int proteinCount = 0;
			for (int j = 0; j < testData.numInstances(); j++) {
				if (numberCount >= instanceCount && numberCount > 0) {
					// Previous protein complete: advance to the next one before
					// classifying this instance (which belongs to it).
					proteinCount++;
					if (proteinCount < ProteinName_Instance.size()) {
						protein = ProteinName_Instance.get(proteinCount);
						instanceCount = ProteinName_InstanceCount.get(proteinCount);
					}
					System.out.println("\n");
					numberCount = 0;
				}
				if (numberCount == 0) {
					System.out.print(">" + protein + "\n");
				}
				numberCount++;
				double predictedClass = BESTSMOMODEL.classifyInstance(testData.instance(j));
				if ((int) predictedClass == 0) {
					System.out.print("+");
				} else if ((int) predictedClass == 1) {
					System.out.print("-");
				}
			}
		} catch (Exception e) {
			// Boundary catch: predict() is best-effort; surface the failure and return.
			e.printStackTrace();
		}
	}

	/**
	 * End-to-end pipeline: configure features, split/parse the dataset, generate the
	 * three Weka input files, train the SVM, and print test-set predictions.
	 */
	public static void main(String args[]) throws IOException, ClassNotFoundException {
		specifiedAttrributes = new ArrayList<>(Arrays.asList(new String[] {"pssm", "chemprop_hyd","i", "strand", "infPP", "prosite", "md_plus", "class"}));
		windowSize = 25;
		long startMillis = System.currentTimeMillis(); // primitive long: no accidental boxing
		init(DATASET_FILE_PATH);
		// Order matters: the test file must be parsed first so ProteinName_Instance
		// starts with the test proteins that predict() iterates over.
		createNewTestArffFile(TEST_FILE_PATH, TEST_WEKA_INPUT_FILE_PATH);
		createNewTestArffFile(TRAIN_FILE_PATH, TRAIN_WEKA_INPUT_FILE_PATH);
		createNewTestArffFile(CROSS_TRAIN_FILE_PATH, CROSSTRAIN_WEKA_INPUT_FILE_PATH);
		SVM.trainSVM();
		ProteinPrediction pp = new ProteinPrediction();
		pp.predict();
		System.out.println("Counter: " + counter);
		System.out.println("Time taken for code to run: " + (System.currentTimeMillis() - startMillis));
	}
}