package algos;

import utils.Common;

import java.io.*;
import java.math.BigInteger;
import java.util.*;

import static utils.Common.*;

/**
 * An n-gram frequency model: stores observed counts of n-grams over an {@link IVocabulary}
 * and the N_r histogram (number of bins seen exactly r times) used for smoothing.
 */
public class NGramModel {

    // TODO: some other data structure to minimize memory cost
    /** (n-1)-word history (as vocabulary indices) -> { index of last word -> observed frequency }. */
    private final Map<List<Integer>, Map<Integer, Long>> ngrams;

    private final IVocabulary vocabulary;

    /** Length n of the n-grams stored in this model. */
    private final int nGramLength;

    /**
     * Number of bins that have r training instances in them.
     */
    private final Map<Long, Integer> nr;

    /**
     * Number of training instances
     */
    private long nTrains;

    /**
     * As i understood it, it is just all possible Ngrams in our Vocabulary = size of Vocabulary ^ nGramLength.
     */
    private BigInteger bins;

    /** Per-thread prediction cursor: the inner map selected by startPredict(), or null. */
    private final ThreadLocal<Map<Integer, Long>> current = new ThreadLocal<Map<Integer, Long>>();

    private NGramModel(IVocabulary vocabulary, int nGramLength, Map<List<Integer>, Map<Integer, Long>> ngrams) {
        this.vocabulary = vocabulary;
        this.nGramLength = nGramLength;
        this.ngrams = ngrams;
        nr = newHashMap();
        calculateStats();
        BigInteger vocSize = BigInteger.valueOf(vocabulary.size());
        bins = vocSize.pow(nGramLength);
    }

    public int getnGramLength() {
        return nGramLength;
    }

    public long getNTrains() {
        return nTrains;
    }

    public BigInteger getBins() {
        return bins;
    }

    /**
     * Returns the number of bins holding exactly {@code r} training instances.
     * Returns 0 when no bin has that count (the previous version unboxed a null
     * lookup result and threw NullPointerException for unseen counts).
     */
    public int getNr(long r) {
        Integer count = nr.get(r);
        return count == null ? 0 : count;
    }

    /** Recomputes nTrains and the N_r histogram from the current contents of {@code ngrams}. */
    private void calculateStats() {
        for (Map.Entry<List<Integer>, Map<Integer, Long>> entry : ngrams.entrySet()) {
            for (Map.Entry<Integer, Long> inner : entry.getValue().entrySet()) {
                long freq = inner.getValue();
                nTrains += freq;
                Integer cur = nr.get(freq);
                nr.put(freq, cur == null ? 1 : cur + 1);
            }
        }
    }

    /**
     * In our book this function was named C(w1...wn): the observed frequency of the
     * n-gram formed by the last nGramLength words of {@code ngram}.
     *
     * <p>Fixes over the previous version:
     * <ul>
     *   <li>the last word is now mapped to its vocabulary index before the lookup; the old
     *       code queried a {@code Map<Integer, Long>} with a String key, which always
     *       returned null, so every n-gram reported frequency 0;</li>
     *   <li>the method no longer appends the last word back onto the caller's list
     *       (the old {@code ngram.add(last)} grew the argument on every call).</li>
     * </ul>
     *
     * @param ngram word sequence; only the trailing nGramLength words are used
     * @return observed count, or 0 when the list is null/too short or the n-gram is unseen
     */
    public long frequency(List<String> ngram) {
        if (ngram == null || ngram.size() < nGramLength) {
            return 0L;
        }
        List<Integer> history = newArrayList(nGramLength - 1);
        Iterator<String> iter = ngram.listIterator(ngram.size() - nGramLength);
        for (int i = 0; i < nGramLength - 1; i++) {
            history.add(vocabulary.getIndexFor(iter.next()));
        }
        int last = vocabulary.getIndexFor(iter.next());
        Map<Integer, Long> map = ngrams.get(history);
        if (map == null) {
            return 0L;
        }
        Long freq = map.get(last);
        return freq == null ? 0L : freq;
    }

    /**
     * Selects the prediction context: the inner map for the trailing (nGramLength - 1)
     * words of {@code history}. Sets a null context (so predict() returns 0) when the
     * history is too short or contains an out-of-vocabulary word.
     */
    public void startPredict(List<String> history) {
        if (history.size() < nGramLength - 1) {
            current.set(null);
            return;
        }
        List<Integer> intHistory = newArrayList(nGramLength - 1);
        for (Iterator<String> iterator = history.listIterator(history.size() - nGramLength + 1); iterator.hasNext();) {
            int i = vocabulary.getIndexFor(iterator.next());
            if (i == IVocabulary.NOT_FOUND) {
                current.set(null);
                return;
            }
            intHistory.add(i);
        }
        current.set(ngrams.get(intHistory));
    }

    /**
     * Frequency of the word at {@code wordIndex} following the history given to
     * startPredict(); 0 when no context is set or the word was never observed there.
     */
    public long predict(int wordIndex) {
        Map<Integer, Long> context = current.get();
        if (context == null) {
            return 0L;
        }
        Long result = context.get(wordIndex);
        return result == null ? 0L : result;
    }

    /** Clears this thread's prediction context set by startPredict(). */
    public void stopPredict() {
        current.remove();
    }

    /**
     * Adds {@code freq} observations of the n-gram ({@code ngram} + {@code lastWord}) and
     * keeps the N_r histogram consistent: the bin leaves its old slot nr[prevFreq] (when it
     * existed) and enters nr[prevFreq + freq]. The previous version decremented nr[freq]
     * instead of nr[prevFreq] and reused that stale value to "increment" the new slot,
     * corrupting the histogram whenever an existing bin was updated.
     */
    public void addNgram(List<Integer> ngram, int lastWord, long freq) {
        Map<Integer, Long> map = ngrams.get(ngram);
        if (map == null) {
            map = newHashMap(1, 3.0f);
            ngrams.put(ngram, map);
        }
        Long prevFreq = map.get(lastWord);
        if (prevFreq == null) {
            prevFreq = 0L;
        }
        long newFreq = prevFreq + freq;
        map.put(lastWord, newFreq);
        // Move the bin out of its previous histogram slot (it was only counted if seen before).
        if (prevFreq > 0) {
            Integer oldCount = nr.get(prevFreq);
            if (oldCount != null) {
                nr.put(prevFreq, oldCount - 1);
            }
        }
        Integer newCount = nr.get(newFreq);
        nr.put(newFreq, newCount == null ? 1 : newCount + 1);
        nTrains += freq;
        // The vocabulary may have grown since construction, so recompute the bin count.
        BigInteger vocSize = BigInteger.valueOf(vocabulary.size());
        bins = vocSize.pow(nGramLength);
    }

    /**
     * Loads a model from a file with one n-gram per line: "freq w1 ... wn".
     * The n-gram length is inferred from the token count of the first line.
     *
     * <p>Fixes: the reader is closed even when parsing fails (it previously leaked),
     * an empty file raises a descriptive IOException instead of a NullPointerException,
     * and the deprecated {@code new Long(String)} constructor is replaced with
     * {@code Long.parseLong}.
     */
    public static NGramModel createFrom(File file, IVocabulary vocabulary) throws IOException {
        BufferedReader reader = new BufferedReader(new FileReader(file));
        try {
            String line = reader.readLine();
            if (line == null) {
                throw new IOException("Empty n-gram model file: " + file);
            }
            int nGramLength = new StringTokenizer(line).countTokens() - 1;
            Map<List<Integer>, Map<Integer, Long>> ngrams = newHashMap();

            while (line != null) {
                StringTokenizer tokenizer = new StringTokenizer(line);
                long freq = Long.parseLong(tokenizer.nextToken());
                List<Integer> ngram = newArrayList(nGramLength - 1);
                for (int i = 0; i < nGramLength - 1; i++) {
                    ngram.add(vocabulary.getIndexFor(tokenizer.nextToken()));
                }
                int lastWord = vocabulary.getIndexFor(tokenizer.nextToken());
                Map<Integer, Long> map = ngrams.get(ngram);
                if (map == null) {
                    map = newHashMap(1, 3.0f);
                    ngrams.put(ngram, map);
                }
                map.put(lastWord, freq);
                line = reader.readLine();
            }
            return new NGramModel(vocabulary, nGramLength, ngrams);
        } finally {
            reader.close();
        }
    }

    public static NGramModel createFrom(String fileName, IVocabulary vocabulary) throws IOException {
        return createFrom(new File(fileName), vocabulary);
    }

    /** Creates a model with no observations for the given vocabulary and n-gram length. */
    public static NGramModel createEmptyModel(IVocabulary vocabulary, int length) {
        return new NGramModel(vocabulary, length, Common.<List<Integer>, Map<Integer, Long>>newHashMap());
    }

    /**
     * Writes the model in the same "freq w1 ... wn" line format read by createFrom.
     * The writer is now closed even if an I/O error occurs mid-write (it previously
     * leaked and left a partially-flushed file on failure).
     */
    public void writeToFile(File file) throws IOException {
        BufferedWriter writer = new BufferedWriter(new FileWriter(file));
        try {
            for (Map.Entry<List<Integer>, Map<Integer, Long>> entry : ngrams.entrySet()) {
                StringBuilder builder = new StringBuilder(" ");
                for (Integer w : entry.getKey()) {
                    builder.append(vocabulary.getWordAt(w)).append(" ");
                }
                String common = builder.toString();
                for (Map.Entry<Integer, Long> inner : entry.getValue().entrySet()) {
                    writer.write(inner.getValue().toString());
                    writer.write(common);
                    writer.write(vocabulary.getWordAt(inner.getKey()));
                    writer.newLine();
                }
            }
        } finally {
            writer.close();
        }
    }


}
