/*    Copyright 2010 Tobias Marschall
 *
 *    This file is part of MoSDi.
 *
 *    MoSDi is free software: you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation, either version 3 of the License, or
 *    (at your option) any later version.
 *
 *    MoSDi is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with MoSDi.  If not, see <http://www.gnu.org/licenses/>.
 */

package mosdi.subcommands;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

import mosdi.discovery.EvaluatedPattern;
import mosdi.discovery.MatchCountSearch;
import mosdi.discovery.MotifFinder;
import mosdi.distributions.PoissonDistribution;
import mosdi.fa.Alphabet;
import mosdi.fa.CDFA;
import mosdi.fa.DFAFactory;
import mosdi.fa.GeneralizedString;
import mosdi.fa.IIDTextModel;
import mosdi.index.SuffixTree;
import mosdi.paa.ClumpSizeCalculator;
import mosdi.util.BitArray;
import mosdi.util.Combinatorics;
import mosdi.util.FileUtils;
import mosdi.util.Iupac;
import mosdi.util.Log;
import mosdi.util.LogSpace;
import mosdi.util.SequenceUtils;

/**
 * Subcommand that discovers IUPAC motifs constrained by an "abelian pattern"
 * (a fixed multiset of IUPAC characters per motif) and reports those whose
 * compound-Poisson p-value falls below a user-given threshold.
 *
 * <p>Deprecated according to its own description; kept for backward
 * compatibility of the command-line interface.
 */
public class DiscoverySubcommand extends Subcommand {

	@Override
	public String usage() {
		return
		super.usage()+" [options] <sequence(s)-file> <abelian_pattern>\n" +
		"\n" +
		"Options:\n" +
		"  -F: read patterns from file\n" +
		"  -r: simultaneously consider reverse complementary motif\n" +
		"  -t <pvalue-threshold>: search for patterns whose p-value is below threshold\n" +
		"                         (default: 1.0)\n" +
		"  -E <max_expectation>: discard patterns whose expectation is above given value\n" +
		"  -P: calculate and print p-value table\n" +
		"  -a: disable use of maxMatchCountAnnotations (slower)";
	}

	@Override
	public String description() {
		return "(DEPRECATED) Discovers motifs (restricted by abelian patterns) with low p-values.";
	}

	@Override
	public String name() {
		return "abelian-discovery";
	}

	/**
	 * Runs the discovery pipeline: parse options, read sequences, build a
	 * suffix tree, then for each abelian pattern enumerate matching motifs
	 * and evaluate their significance via a compound Poisson model.
	 *
	 * @param args raw command-line arguments (options plus two mandatory
	 *             positional arguments: sequence file and pattern/pattern-file)
	 * @return 0 on success; exits the JVM with status 1 on an invalid pattern
	 */
	@Override
	public int run(String[] args) {
		parseOptions(args, 2, "Frt:E:Pa");

		// Option dependencies
		// -- none --

		// Mandatory arguments
		String sequenceFile = getStringArgument(0);
		String pattern = getStringArgument(1);

		// Options
		boolean considerReverse = getBooleanOption("r", false);
		double pvalueThreshold = getRangedDoubleOption("t", 0.0, 1.0, 1.0);
		// NOTE(review): Double.MIN_VALUE is the smallest positive double, so the
		// lower bound effectively requires a strictly positive -E value.
		double maxExpectation = getRangedDoubleOption("E", Double.MIN_VALUE, Double.POSITIVE_INFINITY, Double.MAX_VALUE);
		boolean pValueTable = getBooleanOption("P", false);
		boolean useMaxMatchCountAnnotation = getBooleanOption("a", true);
		boolean readPatternsFromFile = getBooleanOption("F", false);

		Alphabet iupacAlphabet = Alphabet.getIupacAlphabet();
		Alphabet dnaAlphabet = Alphabet.getDnaAlphabet();
		List<String> sequences = null;
		double[] charDist = null;

		// Create the list of abelian patterns to process: either the contents
		// of a pattern file (-F) or the single pattern given on the command line.
		List<String> patternList = null;
		if (readPatternsFromFile) {
			// read patterns from file
			patternList = FileUtils.readPatternFile(pattern);
		} else {
			patternList = new ArrayList<String>(1);
			patternList.add(pattern);
		}

		sequences = SequenceUtils.readSequences(sequenceFile);
		charDist = new IIDTextModel(dnaAlphabet, sequences).getCharacterDistribution();
		// total sequence length
		int sequenceLength = 0;
		for (String s : sequences) sequenceLength+=s.length();

		SuffixTree suffixTree = SuffixTree.buildSuffixTree(dnaAlphabet, sequences, considerReverse);
		int[] occurrenceCountAnnotation = suffixTree.calcOccurrenceCountAnnotation();

		Log.startTimer();
		// All 15 IUPAC wildcard characters expanded to position bit masks;
		// this forms the generalized alphabet over which motifs are enumerated.
		BitArray[] generalizedAlphabet = Iupac.toGeneralizedString("ABCDGHKMNRSTVWY").getPositions();
		MotifFinder motifFinder = new MotifFinder(suffixTree, generalizedAlphabet, considerReverse);
		// key: patternLength, value: annotation of suffix tree nodes giving the maximal possible number of matches
		HashMap<Integer,int[]> suffixTreeAnnotations = new HashMap<Integer,int[]>();
		// total number of candidate motif strings found over all patterns
		long n = 0;
		for (String s : patternList) {
			// parse abelian pattern
			int[] abelianPattern = Iupac.parseAbelianPattern(s);
			if (abelianPattern==null) {
				Log.errorln(String.format("Invalid pattern: %s",s));
				System.exit(1);
			}
			// calculate length (sum of the per-character multiplicities)
			int length = 0;
			for (int i : abelianPattern) length+=i;
			// Build one representative instance of the pattern (each IUPAC
			// character repeated according to its multiplicity) to compute
			// the expected number of occurrences under the IID model.
			StringBuilder sb = new StringBuilder();
			for (int c=0; c<abelianPattern.length; ++c) {
				for (int i=0; i<abelianPattern[c]; ++i) sb.append(iupacAlphabet.get(c));
			}
			double expectation = 0.0;
			GeneralizedString p = Iupac.toGeneralizedString(sb.toString());
			expectation+=p.getProbability(charDist);
			if (considerReverse) {
				p = Iupac.toGeneralizedString(Iupac.reverseComplementary(sb.toString()));
				expectation+=p.getProbability(charDist);
			}
			// Number of possible match positions: p.length() is the same for
			// forward and reverse-complementary strings, so either is valid here.
			expectation*=sequenceLength-sequences.size()*(p.length()-1);

			// threshold based poisson tail probability
			// NOTE(review): the divisor 3.0 looks like a tuned safety factor
			// to loosen the count threshold — no derivation visible here.
			PoissonDistribution pd = new PoissonDistribution(expectation/3.0);
			int threshold = pd.getQuantileByPValue(pvalueThreshold);

			Log.printf(Log.Level.DEBUG, "Abelian Pattern: %s,  Expectation: %e,  Threshold: %d%n", s, expectation, threshold);

			if (expectation>maxExpectation) {
				Log.println(Log.Level.DEBUG, "Expectation too high, skipping");
				continue;
			}

			// calculate number of strings in expanded abelian pattern
			double logNumber = Combinatorics.logMultinomial(p.length(), abelianPattern);

			// Cache the (expensive) max-match-count annotation per pattern length.
			int[] annotations = null;
			if (useMaxMatchCountAnnotation) {
				if (suffixTreeAnnotations.containsKey(length)) {
					annotations = suffixTreeAnnotations.get(length);
				} else {
					Log.printf(Log.Level.DEBUG, "Generating suffix tree annotations for length %d%n",length);
					Log.startTimer();
					annotations = suffixTree.calcMaxMatchCountAnnotation(length);
					Log.stopTimer("Calculation of suffix tree annotations");
					suffixTreeAnnotations.put(length, annotations);
				}
			}

			Log.startTimer();
			// Search only for motifs reaching at least max(threshold, expectation) matches.
			MatchCountSearch search = new MatchCountSearch(Math.max(threshold,(int)expectation), occurrenceCountAnnotation, annotations);
			motifFinder.findAbelianPatternInstances(abelianPattern, search);
			List<EvaluatedPattern> l = search.getResults();
			Log.stopTimer("Search for instances of abelian pattern in suffix tree");
			double timeInstanceSearch = Log.getLastPeriodCpu();

			Log.printf(Log.Level.DEBUG, "Identified %d/%d strings%n", l.size(),(long)Math.round(Math.exp(logNumber)));
			// "%t" is a custom conversion handled by the project's Log class (see Log.format usage below).
			Log.printf(Log.Level.VERBOSE, ">>> %s %e %d %d %d %t %n", s, expectation, threshold,l.size(), (long)Math.round(Math.exp(logNumber)), timeInstanceSearch);

			// Evaluate each candidate motif exactly via its clump size
			// distribution and a compound Poisson model.
			for (EvaluatedPattern m : l) {
				Log.startTimer();
				List<GeneralizedString> genStringList = new ArrayList<GeneralizedString>(1);
				String forwardPattern = iupacAlphabet.buildString(m.getPattern());
				genStringList.add(Iupac.toGeneralizedString(forwardPattern));
				if (considerReverse) {
					String reversePattern = Iupac.reverseComplementary(forwardPattern);
					// Process each forward/reverse pair only once (canonical order).
					if (forwardPattern.compareTo(reversePattern)>0) {
						Log.println(Log.Level.DEBUG, "forward pattern > reverse pattern --> skipping");
						Log.stopTimer("Total time for this motif");
						continue;
					}
					genStringList.add(Iupac.toGeneralizedString(reversePattern));
				}
				// create markov additive chain
				Log.startTimer();
				CDFA cdfa = DFAFactory.build(dnaAlphabet, genStringList, 50000);
				int states = cdfa.getStateCount();
				cdfa = cdfa.minimizeHopcroft();
				int statesMinimal = cdfa.getStateCount();
				// calculate clump size distribution
				ClumpSizeCalculator csc = new ClumpSizeCalculator(new IIDTextModel(dnaAlphabet.size(), charDist), cdfa, forwardPattern.length());
				double[] clumpSizeDist = csc.clumpSizeDistribution(8, 1e-30);
				Log.restartTimer("calculate clump size distribution");
				double timeClumpSizeDist = Log.getLastPeriodCpu();
				// calculate expected clump size
				double expectedClumpSize = 0.0;
				for (int i=1; i<clumpSizeDist.length; ++i) {
					expectedClumpSize+=clumpSizeDist[i]*i;
				}
				// Poisson rate of clump starts = expected matches / expected clump size.
				double lambda = expectation/expectedClumpSize;
				PoissonDistribution poissonDist = new PoissonDistribution(lambda);
				double[] dist = null;
				double pvalue = -1.0;
				// if p-value table is requested, calculate whole distribution
				if (pValueTable) {
					dist = poissonDist.compoundPoissonDistribution(clumpSizeDist, m.getScore());
					pvalue = dist[m.getScore()];
				} else {
					pvalue = poissonDist.compoundPoissonPValue(clumpSizeDist, m.getScore());
				}
				Log.stopTimer("distribution/p-value calculation by convolution");
				double timeConvolution = Log.getLastPeriodCpu();
				// If the p-value underflowed to 0.0, redo the computation in
				// the logarithmic domain with a longer, more precise clump
				// size distribution.
				boolean logarithmic = false;
				if (pvalue==0.0) {
					logarithmic = true;
					Log.println(Log.Level.DEBUG, "pvalue=0.0, recomputing in logarithmic domain");
					Log.startTimer();
					clumpSizeDist = csc.clumpSizeDistribution(16, 1e-300);
					Log.stopTimer("calculate longer clump size distribution");
					timeClumpSizeDist = Log.getLastPeriodCpu();
					if (pValueTable) {
						dist = poissonDist.logCompoundPoissonDistribution(clumpSizeDist, m.getScore());
						pvalue = dist[m.getScore()];
					} else {
						pvalue = poissonDist.logCompoundPoissonPValue(clumpSizeDist, m.getScore());
					}
					timeConvolution = Log.getLastPeriodCpu();
					Log.printf(Log.Level.DEBUG, "log p-value: %e (%s)%n", pvalue, LogSpace.toString(pvalue));
				}
				// Transform the point distribution into a tail-probability
				// (p-value) table by accumulating from the right.
				if (pValueTable) {
					if (logarithmic) {
						for (int i=dist.length-2; i>=0; --i) dist[i]=LogSpace.logAdd(dist[i],dist[i+1]);
					} else {
						for (int i=dist.length-2; i>=0; --i) dist[i]+=dist[i+1];
					}
				}
				Log.stopTimer("Total time for this motif");
				double timeMotif = Log.getLastPeriodCpu();
				if (pvalue>pvalueThreshold) {
					Log.printf(Log.Level.DEBUG, "pvalue too high (%e, threshold is: %e) --> skipping motif%n",pvalue, pvalueThreshold);
				} else {
					// Assemble the one-line machine-readable result record.
					sb = new StringBuilder();
					if (logarithmic) {
						sb.append(String.format(">>p_value>> %s LOG ", LogSpace.toString(pvalue)));
					} else {
						sb.append(String.format(">>p_value>> %e LIN ", pvalue));
					}
					sb.append(String.format(">>stats>> %s %d %d %d %d %e ", forwardPattern, genStringList.size(), states, statesMinimal, m.getScore(), expectation));
					sb.append(String.format(">>poisson>> %e %e ", lambda, expectedClumpSize));
					sb.append(Log.format(">>runtimes>> %t %t %t ", timeClumpSizeDist, timeConvolution, timeMotif));
					if (pValueTable) {
						sb.append(">>p_value_table>> ");
						for (double d : dist) sb.append(String.format("%e ", d));
					}
					if (sb.length()>0) Log.println(Log.Level.STANDARD, sb.toString());
				}
			}
			n+=l.size();
		}
		Log.stopTimer("Total time");
		double timeTotal = Log.getLastPeriodCpu();
		Log.printf(Log.Level.VERBOSE, ">>!>total_time>>: %t %n", timeTotal);
		Log.printf(Log.Level.DEBUG, "Total strings found: %d%n", n);
		return 0;
	}
}
