/*
 * Saphre - Suffix Arrays for Phrase Extraction
 * Copyright (C) 2013 
 * Dale Gerdemann Tübingen, Germany 
 * Niko Schenk Frankfurt am Main, Germany
 * All rights reserved.
 *
 * This program is free software: you can redistribute it and/or modify it under
 * the terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
 * details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program. If not, see <http://www.gnu.org/licenses/>.
 *
 */
package collectors.impl;

import java.io.PrintWriter;
import util.sorting.Multiset;
import collectors.api.Collector1;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import util.Interval;
import saphre.core.Store;
import saphre.core.SuffixArray;

/**
 * Class which simulates lcp-interval tree traversal to collect maximal repeats
 * from a corpus.
 *
 * A maximal repeat is a repeat which - extended in one direction to either the
 * left or right - loses at least one of its occurrences. So, for example,
 * assume that "York" occurs 5 times in a corpus. Now, if "New York" also occurs
 * 5 times in the corpus, we say that "York" is NOT (left-)maximal, because it
 * can be extended to the left by one token without losing any of the original
 * number of occurrences (5). Assume further that "York" again appeared 5
 * times, but "New York" only twice in the whole corpus; then "York" would be
 * maximal, because extending it to the left results in a term frequency loss
 * of 3.
 *
 * Due to the nature of its construction, the lcp-interval tree gives us right
 * maximality for free, i.e. each non-trivial interval (repeat) is
 * right-maximal.
 *
 * Note, that we extended the notion of "maximality" to consider different
 * degrees of maximality. For example, a phrase which occurs 6 times in total
 * and which extends 3 times to the left to form the SAME extended n-gram, is
 * said to be 50% maximal. (cf. Schenk 2013)
 *
 * Run with: show:maximalsByThreshold minHowmaxL=0.95
 * 
 *
 * It implements the Collector interface.
 *
 * @author Dale Gerdemann, Niko Schenk
 */
public class MaximalsByThresholdCollector implements Collector1 {

    Store store;
    SuffixArray sa;
    // Number of documents in the corpus, excluding the sentinel documents.
    double D;
    PrintWriter pw = null;
    private int minLengthOfPhrase;
    private int maxLengthOfPhrase;
    private int minTermFrequency;
    private String startsWith;
    private double minHowmaxL;
    private double minHowmaxR;
    // NOTE(review): static counters are shared across all instances and are
    // not thread-safe; fine for a single-threaded batch run.
    private static int maximalcount = 0;
    private static int nonmaximalcount = 0;

    /**
     * Creates a collector which reports maximal phrases to {@code pw}.
     *
     * @param store token store backing the suffix array
     * @param sa suffix array over the corpus
     * @param pw writer receiving one tab-separated line per reported phrase
     * @param aMinLengthOfPhrase minimum phrase length (tokens) to report
     * @param aMaxLengthOfPhrase maximum phrase length (tokens) to report
     * @param aMinTermFrequency minimum term frequency to report
     * @param aStartsWith only phrases starting with this string are reported
     * @param aMinHowmaxL minimum degree of left-maximality (0.0 .. 1.0)
     * @param aMinHowmaxR minimum degree of right-maximality (0.0 .. 1.0)
     */
    public MaximalsByThresholdCollector(Store store, SuffixArray sa, PrintWriter pw, int aMinLengthOfPhrase,
            int aMaxLengthOfPhrase, int aMinTermFrequency, String aStartsWith, double aMinHowmaxL, double aMinHowmaxR) {
        this.store = store;
        this.sa = sa;
        // Subtract 2: the program uses two initial and one final sentinel.
        D = (double) store.numDocs() - 2;
        this.pw = pw;

        this.minLengthOfPhrase = aMinLengthOfPhrase;
        this.maxLengthOfPhrase = aMaxLengthOfPhrase;
        this.minTermFrequency = aMinTermFrequency;
        this.startsWith = aStartsWith;
        this.minHowmaxL = aMinHowmaxL;
        this.minHowmaxR = aMinHowmaxR;
    }

    /** No-op: this collector does not use a prefix array. */
    public void addPrefixArray(SuffixArray pa) {
    }

    /**
     * No-op: nothing to do before an interval is added.
     *
     * @param inter
     * @param parentLcp
     * @param lcp
     * @param depth
     */
    @Override
    public void preAdd(Interval inter, int parentLcp, int lcp, int depth) {
    }

    /**
     * Processes one non-trivial lcp interval (a right-maximal repeat):
     * computes its degree of left/right maximality and, if all configured
     * thresholds are met, prints a tab-separated report line.
     *
     * @param inter the lcp interval (its suffixes share a prefix of length lcp)
     * @param lcp length (in tokens) of the repeated phrase
     * @param depth
     * @param docDist
     * @param leftContext
     */
    @Override
    public void add(Interval inter, int lcp, int depth, Multiset docDist,
            Multiset leftContext) {

        Multiset dd = inter.docDist(sa, store);
        int tf = inter.tf();
        int df = dd.size();

        // Filter by phrase length and term frequency.
        if (lcp < minLengthOfPhrase || lcp > maxLengthOfPhrase
                || tf < minTermFrequency) {
            return;
        }

        int loc = sa.getSuftab()[inter.lb()];
        String ngram = store.toString(loc, (loc + lcp));
        int ngramsize = lcp;

        // Frequency of each left/right context token over all occurrences.
        HashMap<Integer, Integer> mLv = new HashMap<Integer, Integer>();
        HashMap<Integer, Integer> mRv = new HashMap<Integer, Integer>();

        // A phrase is (strictly) left-maximal iff at least two different
        // tokens precede it somewhere in the corpus.
        // NOTE(review): reading suftab[i] - 1 assumes an initial sentinel
        // guards position 0 (see the "two initial sentinels" comment above).
        boolean leftVariable = false;
        int b = store.text()[sa.getSuftab()[inter.lb()] - 1];
        for (int i = inter.lb(); i <= inter.rb(); i++) {
            int oneBefore = store.text()[sa.getSuftab()[i] - 1];
            addToMap(mLv, oneBefore);
            if (oneBefore != b) {
                leftVariable = true;
            }
        }

        // Degree of left-maximality. Say an ngram occurs 5 times but extends
        // to the left 4 times by the same token: it is 1 - 4/5 = 20% maximal.
        // A non-left-maximal ngram is always 0% maximal (1 - x/x = 0.0).
        double howMaximalL =
                1.0 - ((double) mostFrequentCount(mLv) / (double) tf);

        // Right contexts. Right-maximality itself is guaranteed by the
        // lcp-interval tree; we only grade its degree here.
        for (int i = inter.lb(); i <= inter.rb(); i++) {
            addToMap(mRv, store.text()[sa.getSuftab()[i] + lcp]);
        }
        double howMaximalR =
                1.0 - ((double) mostFrequentCount(mRv) / (double) tf);

        boolean isMaximal;
        if (leftVariable) {
            isMaximal = true;
            maximalcount++;
        } else {
            isMaximal = false;
            nonmaximalcount++;
        }

        // Most frequent left/right context words. Ties are broken in favor of
        // the smallest token id, matching the former sort-based lookup.
        String wordL = store.toString(mostFrequentKey(mLv));
        String wordR = store.toString(mostFrequentKey(mRv));

        if (isMaximal && ngram.startsWith(startsWith)
                && howMaximalL >= minHowmaxL && howMaximalR >= minHowmaxR) {
            String rval = ngram + "\t" + isMaximal + "\t"
                    + String.format("%.4g", howMaximalL) + "\t"
                    + String.format("%.4g", howMaximalR) + "\t"
                    + wordL + "\t" + wordR + "\t" + ngramsize
                    + "\t" + tf + "\t" + df + "\n";

            pw.print(rval);
            System.out.print(rval);
        }
    }

    /**
     * No-op: trivial intervals (single suffixes) carry no repeat information.
     *
     * @param inter
     * @param lcp
     * @param depth
     * @param docDist
     * @param leftContext
     */
    @Override
    public void addTrivial(Interval inter, int lcp, int depth,
            Multiset docDist, Multiset leftContext) {
    }

    /**
     * @return number of maximal repeats seen so far (across all instances)
     */
    public static int getMaximalcount() {
        return maximalcount;
    }

    /**
     * @return number of non-maximal repeats seen so far (across all instances)
     */
    public static int getNonMaximalcount() {
        return nonmaximalcount;
    }

    /** Increments the frequency count of {@code aWord} in {@code m}. */
    private void addToMap(HashMap<Integer, Integer> m, int aWord) {
        Integer freq = m.get(aWord);
        m.put(aWord, freq == null ? 1 : freq + 1);
    }

    /**
     * @return the largest frequency value in the map, or 0 if it is empty
     */
    private static int mostFrequentCount(HashMap<Integer, Integer> m) {
        int max = 0;
        for (int freq : m.values()) {
            if (freq > max) {
                max = freq;
            }
        }
        return max;
    }

    /**
     * Returns the key with the highest frequency value. Ties are broken in
     * favor of the smallest key, which reproduces the order produced by the
     * previous implementation (values sorted descending, keys ascending).
     * Unlike that implementation, this is O(n log n) and does not mutate
     * the passed map.
     */
    private static int mostFrequentKey(HashMap<Integer, Integer> m) {
        List<Integer> keys = new ArrayList<Integer>(m.keySet());
        Collections.sort(keys);
        int bestKey = keys.get(0);
        int bestFreq = -1;
        for (int key : keys) {
            int freq = m.get(key);
            if (freq > bestFreq) {
                bestFreq = freq;
                bestKey = key;
            }
        }
        return bestKey;
    }
}