package users;

import algos.*;
import utils.Common;

import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.util.*;

import static utils.Common.*;

/**
 * Per-user language-model state: a user-specific vocabulary layered over a
 * shared base vocabulary, plus one {@link NGramModel} per supported n-gram
 * length. Models are persisted as files named "1".."maxLen" inside the
 * user's directory under {@code Users.USER_FOLDER}.
 */
public class UserStatistic {

    private final int userId;

    /** Directory "USER_FOLDER/userId" holding this user's model files. */
    private final File userDir;

    /** User vocabulary seeded from the shared base vocabulary. */
    private final EditableVoc userVoc;

    /** n-gram length -> model; fully populated by {@link #load()}. */
    private Map<Integer, NGramModel> nGramModels;

    /** Epoch millis of the most recent model access (or load time). */
    private long timeStamp;

    /** Defensive copy of the last applied history, to skip duplicate updates. */
    private List<String> lastHistory;

    UserStatistic(int userId, Vocabulary baseVoc) {
        this.userId = userId;
        userDir = new File(Users.USER_FOLDER + "/" + userId);
        // FIX: the old create() helper shadowed the userDir field with a
        // local of the same name and ignored the mkdirs() result; inlined
        // here, and a failure is at least reported instead of lost.
        if (!userDir.exists() && !userDir.mkdirs()) {
            System.err.println("Failed to create user directory " + userDir);
        }
        userVoc = new EditableVoc(baseVoc);
        load();
        // FIX: previously timeStamp stayed 0 until the first model lookup,
        // so a freshly loaded user reported epoch-0 as its last usage and
        // looked infinitely stale to any unload/eviction logic.
        timeStamp = System.currentTimeMillis();
    }

    /** @return the user's vocabulary (base vocabulary plus learned words). */
    public IVocabulary getVocabulary() {
        return userVoc;
    }

    /** @return all loaded n-gram models, one per supported length. */
    public Iterable<NGramModel> getAllNGramModels() {
        return nGramModels.values();
    }

    /** @return epoch millis of the most recent model access (or load time). */
    public long lastUsageTimeStamp() {
        return timeStamp;
    }

    /**
     * Returns the model for the given n-gram length and records the access
     * time, so model lookups double as usage tracking for unloading.
     *
     * @param length the n-gram length whose model is requested
     * @return the model for {@code length}, or {@code null} if out of range
     */
    public NGramModel getNGramModelForLength(int length) {
        timeStamp = System.currentTimeMillis();
        return nGramModels.get(length);
    }

    /**
     * Loads every persisted model file from {@link #userDir}: first feeds
     * the files' words into the vocabulary, then builds the models; lengths
     * with no file on disk get a fresh empty model.
     */
    private void load() {
        List<File> existingFiles = newLinkedList();
        Set<Integer> emptyModels = newHashSet();
        for (int i = 1; i <= Users.userMaxNGramLength(); i++) {
            File file = new File(userDir, Integer.toString(i));
            if (file.exists()) {
                existingFiles.add(file);
            } else {
                emptyModels.add(i);
            }
        }

        // The vocabulary must contain all words before any model is built,
        // because models resolve word indices against it.
        for (File f : existingFiles) {
            userVoc.addWordsFromFile(f);
        }

        nGramModels = newHashMap();
        for (File f : existingFiles) {
            try {
                NGramModel model = NGramModel.createFrom(f, userVoc);
                nGramModels.put(model.getnGramLength(), model);
            } catch (IOException e) {
                // Best effort: an unreadable file loses only that one model;
                // the length will simply be absent from the map.
                e.printStackTrace();
            }
        }
        for (int i : emptyModels) {
            nGramModels.put(i, NGramModel.createEmptyModel(userVoc, i));
        }
    }

    /**
     * Learns from a typing history: adds every word to the vocabulary and,
     * for each supported n-gram length, counts the n-gram that ends in the
     * history's last word. Empty or duplicate (same as the previous call)
     * histories are ignored.
     *
     * @param history the words typed so far, oldest first; not modified
     */
    public void update(List<String> history) {
        if (history.isEmpty() || history.equals(lastHistory)) {
            return;
        }
        LinkedList<Integer> indexList = newLinkedList();
        for (String word : history) {
            userVoc.addWord(word);
            indexList.add(userVoc.getIndexFor(word));
        }
        int lastWord = indexList.removeLast();
        int size = indexList.size();
        for (int i = 1; i <= Users.userMaxNGramLength(); i++) {
            // An n-gram of length i needs i-1 context words before lastWord.
            if (size >= i - 1) {
                getNGramModelForLength(i).addNgram(indexList.subList(size - i + 1, size), lastWord, 1);
            }
        }
        // FIX: store a defensive copy; the old code kept the caller's live
        // list, so a later mutation by the caller silently broke the
        // duplicate-update check above.
        lastHistory = new ArrayList<String>(history);
    }

    /**
     * Persists every model that received training since it was loaded;
     * untouched models are skipped to avoid rewriting identical files.
     */
    public void unload() {
        System.out.println("unload for userId " + userId);
        for (NGramModel model : nGramModels.values()) {
            if (model.getNTrains() != 0) {
                try {
                    model.writeToFile(new File(userDir, Integer.toString(model.getnGramLength())));
                } catch (IOException e) {
                    // Best effort: failing to save one model must not block the rest.
                    e.printStackTrace();
                }
            }
        }
    }

}
