#include "PerWordSpellChecker.h"
#include "direct_product.h"
#include "wordDistortionGenerator.h"

#include <algorithm>
#include <cmath>
#include <limits>

using std::vector;
using std::string;

#include <iostream>
namespace NSpellCheck {

// Constructs the checker, taking ownership of the supplied unigram trie.
PerWordSpellChecker::PerWordSpellChecker(Trie<int>* unigrams)
    : unigrams(unigrams) {
}

// Empty destructor defined out of line — presumably so the smart-pointer
// member's deleter is instantiated here, where Trie<int> is a complete
// type (NOTE(review): confirm against the header).
PerWordSpellChecker::~PerWordSpellChecker() {
}

double PerWordSpellChecker::languageModel(const Phrase& phrase) const {
    double res = 0.0;
    for (Phrase::const_iterator i = phrase.begin(); i != phrase.end(); ++i) {
        const int* freq = (*unigrams)[*i];
        if (freq) {
            res += log((double)*freq);
        }
    }
    res = exp(res / phrase.size());
    return res;
}

void PerWordSpellChecker::process(const Phrase& phrase, vector<Variation>* suggestions) const {
    vector<vector<Variation> > wordVariations(phrase.size());
    size_t maxWordVariations = (size_t)exp(log(5e7) / phrase.size());
    for (size_t i = 0; i < phrase.size(); ++i) {
        findWordVariations(phrase[i], &wordVariations[i]);
        if (wordVariations[i].size() > maxWordVariations) {
            wordVariations[i].resize(maxWordVariations);
        }
    }
    directProduct(wordVariations, suggestions);
    // At this point variations contain correction weight (error model)
    // Multiply the weight by language model
    for (vector<Variation>::iterator i = suggestions->begin(); i != suggestions->end(); ++i) {
        i->weight *= languageModel(i->phrase);
    }
    size_t maxCount = 3;
    if (suggestions->size() > maxCount) {
        std::nth_element(suggestions->begin(), suggestions->begin() + maxCount, suggestions->end(),
                ReverseVariationComparator());
        suggestions->resize(maxCount);
    }
}

void PerWordSpellChecker::findWordVariations(const string& word, vector<Variation>* variations) const {
    variations->clear();
    vector<string> edits;
    vector<double> weights;
    wordDistortionGenerator distortion;
    distortion.generateWeightedOneEditOperatonDistortion(word, edits, weights, true);
    //distortion.generateWeightedTwoEditOperatonLocalityDistortion(word, edits, weights, false);
    if (std::find(edits.begin(), edits.end(), word) == edits.end()) {
        edits.push_back(word);
        weights.push_back(0);
    }
    for (size_t i = 0; i < edits.size(); ++i) {
        const int* freq = (*unigrams)[edits[i]];
        if (freq && *freq >= 100) {
            Phrase phrase;
            phrase.push_back(edits[i]);
            double correctionWeight = exp(-20.0 * weights[i] / word.length()); // must be less than 1
            variations->push_back(Variation(correctionWeight, phrase));
        }
    }
    if (variations->empty()) {
      // fall back to baseline solution
      Phrase phrase;
      phrase.push_back(word);
      variations->push_back(Variation(1.0, phrase));
    }

    std::sort(variations->begin(), variations->end(), ReverseVariationComparator());
    size_t variationsToKeep = 1;
    // cut off when there is a huge drop of probability
    while (variationsToKeep < variations->size() && variationsToKeep < 5 &&
           (*variations)[variationsToKeep].weight * 10 >= (*variations)[variationsToKeep-1].weight)
    {
        ++variationsToKeep;
    }
    if (variations->size() > variationsToKeep) {
        variations->resize(variationsToKeep);
    }
}

}
