package tools.hhsearch;

import java.io.BufferedReader;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;

// NOTE(review): unused JDK-internal import (IDE auto-import accident); it will not
// resolve on modern JDKs — remove once confirmed nothing references it.
import com.sun.xml.internal.bind.v2.runtime.unmarshaller.XsiNilLoader.Array;

import tools.DistanceMatrix.DistanceMatrix;
import tools.DistanceMatrix.DistanceObject;

public class hhsUtils {

	/**
	 * Command-line front end for a collection of utilities that operate on
	 * hhsearch result files (.hhs). The first argument selects the
	 * sub-command, the remaining arguments are its inputs; see
	 * {@link #printHelp()} for the full list.
	 *
	 * @param args sub-command followed by its inputs
	 * @throws Exception if an input file cannot be read or parsed
	 */
	public static void main(String[] args) throws Exception{
		if(args.length>0){
			if(args[0].equals("tabulated")&&args.length>1){
				//print the header only for the first file so several files
				//concatenate into one table
				tabulated(args[1], true);
				for(int i=2;i<args.length;i++){
					tabulated(args[i], false);
				}
			}else if(args[0].equals("tabulatedTopOnly")&&args.length==2){
				tabulatedTopOnly(args[1]);
			}else if(args[0].equals("tabulatedTop")&&args.length==3){
				tabulatedTop(args[1],Integer.parseInt(args[2]));
			}else if(args[0].equals("toPhylip")&&args.length==2){
				toPhylip(args[1],new ArrayList<String>(),false,true);
			}else if(args[0].equals("toPhylipEuc")&&args.length==2){
				toPhylip(args[1],new ArrayList<String>(),true,true);
			}else if(args[0].equals("toIsodataEuc")&&args.length==2){
				toPhylip(args[1],new ArrayList<String>(),true,false);
			}else if(args[0].equals("toPhylipEucExclude")&&args.length==3){
				toPhylipExclude(args[1],args[2],true);
			}else if(args[0].equals("modelCluster")&&args.length==2){
				modelCluster(args[1]);
			}else if(args[0].equals("toTripletsEuc")&&args.length==4){
				//argument order on the command line is <hhsFile> <cutoff> <outfile>
				toTriplets(args[1],new ArrayList<String>(),true,args[3],Double.parseDouble(args[2]),true);
			}else{
				System.err.println(printHelp());
				System.exit(616);
			}
		}else{
			System.err.println(printHelp());
			System.exit(616);
		}
	}

	/**
	 * Builds the usage message listing every sub-command.
	 *
	 * @return the multi-line usage text
	 */
	private static String printHelp(){
		StringBuilder help= new StringBuilder();
		help.append("Usage: hhsUtils <cmd> <input>\n");
		help.append("where <cmd> is:\n");
		help.append("tabulated - Prints the hhsFile(s) as a csv-table\n");
		help.append("\t<input> = <hhsFile> ... <hhsFile>\n");
		help.append("tabulatedTopOnly - Prints the hhsFile(s) as a csv-table. Removes hits with worse e-value if they overlap a better hit. If two hits have the same e-value, all are kept. Overlap is defined to be occurring if at least 50% of the length of the shorter hit overlaps the other hit.\n");
		help.append("\t<input> = <hhsFile>\n");
		help.append("tabulatedTop - Prints the top (score) n hits of the hhsFile(s) as a csv-table. Excludes self hit \n");
		help.append("\t<input> = <hhsFile> <n>\n");
		help.append("toPhylip - creates a symmetric distance matrix on phylip format from the scores. The distance between two hits is the weighted average (with respect to the maxscore for each model) normalized with respect to the average of the two maxima. Negative values are counted as zero. \n");
		help.append("\t<input> = <hhsFile> \n");
		help.append("toPhylipEuc - creates a symmetric distance matrix on phylip format from the scores. The distance between two hits is the euclidean distance between the score vectors \n");
		help.append("\t<input> = <hhsFile> \n");
		help.append("toIsodataEuc - creates a symmetric distance matrix on isodata format from the scores (hhsMatrix_matrix.csv and hhsMatrix_names.csv). The distance between two hits is the euclidean distance between the score vectors \n");
		help.append("\t<input> = <hhsFile> \n");
		help.append("toPhylipEucExclude - creates a symmetric distance matrix on phylip format from the scores. The distance between two hits is the euclidean distance between the score vectors \n");
		help.append("\t<input> = <hhsFile> <exclude file>\n");
		help.append("modelCluster - takes a tabulated hhsFile and prints a list of the models together with a cluster number. Clusters are linked together by proteins containing several models\n");
		help.append("\t<input> = <hhsFile>\n");
		help.append("toTripletsEuc - creates a euclidean matrix from the scores and prints all triplets for which the two best scores differ more than the cutoff\n");
		help.append("\t<input> = <hhsFile> <cutoff> <outfile>\n");
		return help.toString();
	}

	/**
	 * Prints, as a csv-table, the (at most) N highest-scoring hits for every
	 * query in the file, excluding self hits (query == hit).
	 *
	 * Per query a list sorted by descending score is maintained; to avoid
	 * shrinking it on every insert it is only trimmed back to N entries once
	 * it grows past N+50.
	 *
	 * @param hhsFile path to the hhs result file
	 * @param N       number of top hits to keep per query
	 * @throws Exception if the file cannot be read or parsed
	 */
	private static void tabulatedTop(String hhsFile, int N)throws Exception{
		HashMap<String, ArrayList<hhsAlignment>> top= new HashMap<String, ArrayList<hhsAlignment>>();
		BufferedReader reader= new BufferedReader(new FileReader(hhsFile));
		try{
			hhsParser hp= new hhsParser(reader);
			while(hp.hasMore()){
				hhsAlignment ha=hp.nextAlignment();
				if(ha.getQuery().equals(ha.getHit())){
					continue; //skip self hits
				}
				ArrayList<hhsAlignment> tmp=top.get(ha.getQuery());
				if(tmp==null){
					tmp= new ArrayList<hhsAlignment>();
					tmp.add(ha);
					top.put(ha.getQuery(), tmp);
				}else{
					//find the insertion point that keeps the list sorted by
					//descending score; only the first N positions matter
					int n=0;
					for(;n<N&&n<tmp.size();n++){
						if(ha.getScore()>tmp.get(n).getScore()){
							break;
						}
					}
					if(n<N){
						tmp.add(n, ha);
					}
					//lazy trim: cut back to N once the list overshoots by 50
					if(tmp.size()>N+50){
						while(tmp.size()>N){
							tmp.remove(N);
						}
					}
				}
			}
		}finally{
			reader.close(); //the parser only wraps the reader; close it here
		}
		//print the top N (or fewer) hits per query
		for(String key : top.keySet()){
			ArrayList<hhsAlignment> tmp=top.get(key);
			for(int i=0;i<N&&i<tmp.size();i++){
				System.out.println(tmp.get(i).toString());
			}
		}
	}

	/**
	 * Reads a tabulated hhs csv-file and prints every model together with a
	 * cluster number. Two models end up in the same cluster when they are
	 * linked (transitively) through queries that hit both of them.
	 *
	 * @param hhsCSVfile path to a tab-separated file as written by tabulated;
	 *                   column 0 is the query, column 2 the model
	 * @throws Exception if the file cannot be read
	 */
	private static void modelCluster(String hhsCSVfile)throws Exception{
		HashMap<String, ArrayList<String>> query2model = new HashMap<String, ArrayList<String>>();
		HashMap<String, ArrayList<String>> model2query = new HashMap<String, ArrayList<String>>();
		//LinkedHashSet keeps the first-seen (file) order of the models while
		//giving O(1) membership tests and removals instead of the O(n)
		//ArrayList.contains/remove scans
		LinkedHashSet<String> models= new LinkedHashSet<String>();
		BufferedReader in= new BufferedReader(new FileReader(hhsCSVfile));
		try{
			in.readLine();//discard the header
			String[] l;
			for(String s=in.readLine();s!=null;s=in.readLine()){
				l=s.split("\t");
				if(l.length==14){ //only well-formed rows; l[0]=query, l[2]=model
					if(!query2model.containsKey(l[0])){
						query2model.put(l[0], new ArrayList<String>());
					}
					if(!model2query.containsKey(l[2])){
						model2query.put(l[2], new ArrayList<String>());
					}
					if(!query2model.get(l[0]).contains(l[2])){
						query2model.get(l[0]).add(l[2]);
					}
					if(!model2query.get(l[2]).contains(l[0])){
						model2query.get(l[2]).add(l[0]);
					}
					models.add(l[2]); //set semantics replace the contains() check
				}
			}
		}finally{
			in.close();
		}
		System.out.println("model\tcluster");
		for(int i=0;!models.isEmpty();i++){
			//grow cluster i from the first still-unassigned model by
			//breadth-first expansion over the query<->model links
			ArrayList<String> inCluster= new ArrayList<String>();
			HashSet<String> seen= new HashSet<String>();
			String seed=models.iterator().next();
			inCluster.add(seed);
			seen.add(seed);
			for(int j=0;j<inCluster.size();j++){
				for(String query:model2query.get(inCluster.get(j))){
					for(String model:query2model.get(query)){
						if(seen.add(model)){
							inCluster.add(model);
						}
					}
				}
			}
			//remove and print
			for(String remove:inCluster){
				System.out.println(remove+"\t"+i);
				models.remove(remove);
			}
		}
	}

	/**
	 * Prints the hhs file as a csv-table keeping, per query, only hits that
	 * are not overlapped (>=50% of the shorter hit) by a hit with a strictly
	 * better e-value. Overlapping hits with equal e-values are all kept.
	 *
	 * @param hhsFile path to the hhs result file
	 * @throws Exception if the file cannot be read or parsed
	 */
	private static void tabulatedTopOnly(String hhsFile)throws Exception{
		HashMap<String, ArrayList<hhsAlignment>> hits= new HashMap<String, ArrayList<hhsAlignment>>();
		BufferedReader reader= new BufferedReader(new FileReader(hhsFile));
		try{
			hhsParser hp= new hhsParser(reader);
			while(hp.hasMore()){
				hhsAlignment ha=hp.nextAlignment();
				ArrayList<hhsAlignment> curList=hits.get(ha.getQuery());
				if(curList==null){
					//first hit for this query is always kept
					curList= new ArrayList<hhsAlignment>();
					curList.add(ha);
					hits.put(ha.getQuery(), curList);
					continue;
				}
				//collect stored hits the new one strictly beats, and detect
				//whether an overlapping stored hit strictly beats the new one
				ArrayList<hhsAlignment> overlapBetter= new ArrayList<hhsAlignment>();
				boolean worse=false;
				for (hhsAlignment ta : curList) {
					if(ha.overlap50(ta)){
						if(ha.getEvalue()<ta.getEvalue()){
							overlapBetter.add(ta);
						}else if(ha.getEvalue()>ta.getEvalue()){
							worse=true;
							break;
						}
						//equal e-values: keep both
					}
				}
				if(!worse){
					for (hhsAlignment remove : overlapBetter) {
						curList.remove(remove);
					}
					curList.add(ha);
				}
			}
		}finally{
			reader.close();
		}
		System.out.println(new hhsAlignment().header());
		for (String key : hits.keySet()) {
			for (hhsAlignment h : hits.get(key)) {
				System.out.println(h.toString());
			}
		}
	}

	/**
	 * Builds a distance matrix from the scores and writes all triplets for
	 * which the two best scores differ by more than the cutoff.
	 *
	 * @param hhsFile      path to the hhs result file
	 * @param exclude      queries/hits to leave out of the matrix
	 * @param euc          true: euclidean distance between score vectors,
	 *                     false: weighted-average transform
	 * @param outfile      file the triplets are written to
	 * @param cutoff       minimum difference between the two best scores
	 * @param biggerBetter true if larger scores are better
	 * @throws Exception if a file cannot be read, parsed or written
	 */
	private static void toTriplets(String hhsFile, ArrayList<String> exclude, boolean euc,String outfile,double cutoff,boolean biggerBetter)throws Exception{
		ArrayList<DistanceObject> list=new ArrayList<DistanceObject>();
		BufferedReader reader= new BufferedReader(new FileReader(hhsFile));
		try{
			hhsParser hp= new hhsParser(reader);
			while(hp.hasMore()){
				hhsAlignment ha=hp.nextAlignment();
				if(!exclude.contains(ha.getQuery())&&!exclude.contains(ha.getHit())){
					list.add(ha.toDistanceObject());
				}
			}
		}finally{
			reader.close();
		}
		DistanceMatrix dm= new DistanceMatrix(list);
		if(euc){
			dm.transformEucDistSym();
		}else{
			dm.transformAndSymetricate();
		}
		dm.toTriplets(outfile, cutoff, biggerBetter);
	}

	/**
	 * Reads an exclude list (one name per line, empty lines ignored) and
	 * delegates to {@link #toPhylip(String, ArrayList, boolean, boolean)}
	 * with phylip output.
	 *
	 * @param hhsFile     path to the hhs result file
	 * @param excludeFile file listing queries/hits to leave out
	 * @param euc         true: euclidean distance between score vectors
	 * @throws Exception if a file cannot be read or parsed
	 */
	private static void toPhylipExclude(String hhsFile,String excludeFile,boolean euc)throws Exception{
		ArrayList<String> exclude= new ArrayList<String>();
		BufferedReader in = new BufferedReader(new FileReader(excludeFile));
		try{
			for(String s=in.readLine();s!=null;s=in.readLine()){
				if(s.length()>0){ //ignore empty lines
					exclude.add(s);
				}
			}
		}finally{
			in.close();
		}
		toPhylip(hhsFile, exclude, euc,true);
	}

	/**
	 * Builds a symmetric distance matrix from the scores and prints it.
	 *
	 * @param hhsFile  path to the hhs result file
	 * @param exclude  queries/hits to leave out of the matrix
	 * @param euc      true: euclidean distance between score vectors,
	 *                 false: weighted-average transform
	 * @param phyTisoF true: phylip format, false: isodata format
	 * @throws Exception if the file cannot be read or parsed
	 */
	private static void toPhylip(String hhsFile,ArrayList<String> exclude,boolean euc,boolean phyTisoF)throws Exception{
		ArrayList<DistanceObject> list=new ArrayList<DistanceObject>();
		BufferedReader reader= new BufferedReader(new FileReader(hhsFile));
		try{
			hhsParser hp= new hhsParser(reader);
			while(hp.hasMore()){
				hhsAlignment ha=hp.nextAlignment();
				if(!exclude.contains(ha.getQuery())&&!exclude.contains(ha.getHit())){
					list.add(ha.toDistanceObject());
				}
			}
		}finally{
			reader.close();
		}
		DistanceMatrix dm= new DistanceMatrix(list);
		if(euc){
			dm.transformEucDistSym();
		}else{
			dm.transformAndSymetricate();
		}
		if(phyTisoF){
			dm.toPhylip();
		}else{
			dm.toIsodata();
		}
	}

	/**
	 * Prints every alignment in the hhs file as one csv-table row.
	 *
	 * @param hhsFile     path to the hhs result file
	 * @param printHeader true to print the column header first (used so that
	 *                    only the first of several files emits it)
	 * @throws Exception if the file cannot be read or parsed
	 */
	private static void tabulated(String hhsFile,boolean printHeader)throws Exception{
		if(printHeader){
			System.out.println(new hhsAlignment().header());
		}
		BufferedReader reader= new BufferedReader(new FileReader(hhsFile));
		try{
			hhsParser in= new hhsParser(reader);
			while(in.hasMore()){
				System.out.println(in.nextAlignment().toString());
			}
		}finally{
			reader.close();
		}
	}
}
