package engine;

import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import treetagger.FrenchTokenizer;
import treetagger.Normalizer;

/**
 * A single document of a corpus, identified by its file path.
 *
 * <p>Holds the document's term-frequency map (filled by {@link #fillTfMap()})
 * and its tf-idf weight vector (filled by {@link #fillWeigths(Corpus)}):
 * weight(t) = tf(t) * log10(N / df(t)), with N = number of documents.
 */
public class Document {

    private String filePath;
    // Term frequency: token -> number of occurrences in this document.
    private HashMap<String, Integer> tfMap;
    // tf-idf weights: token -> tf * log10(N / df), normalized to sum to 1.
    // (Field kept accessible through the historical getter name getWieghts().)
    private HashMap<String, Double> weights;

    public Document(String filePath) {
        this.filePath = filePath;
        this.tfMap = null;
    }

    public String getFilePath() {
        return filePath;
    }

    public HashMap<String, Double> getWieghts() {
        return weights;
    }

    public HashMap<String, Integer> getTfMap() {
        return tfMap;
    }

    /**
     * Prints the document's weight vector as {@code {w1, w2, ...}}.
     * Returns {@code {}} when the weights have not been computed yet.
     */
    @Override
    public String toString() {
        DecimalFormat df = new DecimalFormat("0.##");
        DecimalFormatSymbols dfs = new DecimalFormatSymbols(Locale.FRANCE);
        dfs.setDecimalSeparator('.');
        df.setDecimalFormatSymbols(dfs);

        if (weights == null) {
            return "{}";
        }

        // Separator placed BEFORE every element except the first: avoids the
        // original bug where only one of the two trailing chars ", " was stripped,
        // leaving "{..., }" -> "{...,}".
        StringBuilder out = new StringBuilder("{");
        boolean first = true;
        for (Map.Entry<String, Double> entry : weights.entrySet()) {
            if (!first) {
                out.append(", ");
            }
            out.append(df.format(entry.getValue()));
            first = false;
        }
        return out.append("}").toString();
    }

    /**
     * Tokenizes the file at {@code filePath} and stores the term-frequency map
     * in this instance (replacing any previous one).
     */
    public void fillTfMap() {
        Normalizer ft = new FrenchTokenizer();

        this.tfMap = new HashMap<String, Integer>();

        for (String token : ft.normalize(this.filePath)) {
            // Single lookup instead of containsKey + get.
            Integer count = tfMap.get(token);
            tfMap.put(token, (count == null) ? 1 : count + 1);
        }
    }

    /**
     * Computes the tf-idf weight of every corpus term for this document and
     * normalizes the resulting vector so its components sum to 1.
     *
     * <p>Preconditions: {@link #fillTfMap()} must have been called on this
     * document, and the corpus df map must be filled; otherwise this is a no-op.
     *
     * @param corpus the corpus providing the global tf and df maps
     */
    public void fillWeigths(Corpus corpus) {
        if (this.tfMap == null || corpus.getDfMap() == null) {
            return;
        }

        this.weights = new HashMap<String, Double>();

        // NOTE(review): dfMap.size() is the number of DISTINCT TERMS, not the
        // number of documents. tf-idf needs N = document count; verify whether
        // Corpus exposes one and use it here instead.
        int N = corpus.getDfMap().size();

        double sum = 0;
        for (Map.Entry<String, Integer> entry : corpus.getTfMap().entrySet()) {
            String term = entry.getKey();

            Integer tfCount = this.tfMap.get(term);
            double tf = (tfCount == null) ? 0.0 : tfCount.doubleValue();

            Integer dfCount = corpus.getDfMap().get(term);
            if (dfCount == null || dfCount.intValue() == 0) {
                // Term missing from the df map: idf is undefined. The original
                // code would NPE (unboxing null) or divide by zero here.
                this.weights.put(term, 0.0);
                continue;
            }

            double tfIdf = tf * Math.log10((double) N / dfCount.doubleValue());
            this.weights.put(term, tfIdf);
            sum += tfIdf;
        }

        // Normalize so the weights sum to 1; skip when the total is 0 to avoid
        // producing NaN for every entry.
        if (sum != 0) {
            for (Map.Entry<String, Double> entry : this.weights.entrySet()) {
                entry.setValue(entry.getValue() / sum);
            }
        }
    }
}