/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package entityopinions;

import com.aliasi.chunk.Chunk;
import com.aliasi.chunk.ChunkFactory;
import com.aliasi.chunk.Chunker;
import com.aliasi.chunk.ChunkerEvaluator;
import com.aliasi.chunk.Chunking;
import com.aliasi.chunk.ChunkingEvaluation;
import com.aliasi.chunk.ChunkingImpl;
import com.aliasi.classify.JointClassification;
import com.aliasi.classify.LMClassifier;
import com.aliasi.classify.PrecisionRecallEvaluation;
import com.aliasi.lm.NGramProcessLM;
import com.aliasi.stats.MultivariateDistribution;
import com.aliasi.util.Strings;
import com.sleepycat.je.DatabaseException;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 *
 * @author Diogo
 */
public class EntityOpinionSupervisedBaseline implements Chunker, Serializable {

    // Tags that EntityOpinion.recognizeEntities() wraps around each recognized
    // entity in the returned sentence (see entities() below).
    private static final String OPEN_TAG = "<chunk>";
    private static final String CLOSE_TAG = "</chunk>";

    // Entity cache handed to EntityOpinion; transient so the open cache handle is
    // not written out when this chunker is serialized via ModelSerialization.
    transient public Cache cache;
    // Hard-coded model/data locations.
    // NOTE(review): these point at several different dropbox versions
    // (v2 / v4 / unversioned) — confirm they are all meant to coexist.
    static final String PO_MODEL_PATH = "/home/dsantos/entityopinionsdropboxv4/modelos/polModel.pol";
    static final String SUBJ_MODEL_PATH = "/home/dsantos/entityopinionsdropboxv4/modelos/subjModel.subj";
    static final String NGRAMS_PATH = "/home/dsantos/entityopinionsdropboxv2/ngrams_sort";
    static final String CACHE_PATH = "/home/dsantos/entityopinionsdropbox/entityTesting/cache";
    static final String PROTOTYPE_PATH = "/home/dsantos/entityopinionsdropboxv4/modelos/lexwithsupervision.lexsuper";
    // Language-model classifiers, reloaded in the constructor (hence transient):
    // poModel decides pos/neg polarity, subjModel filters out quoted material.
    transient LMClassifier<NGramProcessLM, MultivariateDistribution> poModel;
    transient LMClassifier<NGramProcessLM, MultivariateDistribution> subjModel;

    /**
     * Opens the entity cache and deserializes the polarity and subjectivity
     * classifiers from their model files.
     *
     * @throws DatabaseException if the cache cannot be opened
     * @throws IOException if a model file cannot be read
     * @throws ClassNotFoundException if a model file contains an unknown class
     */
    public EntityOpinionSupervisedBaseline() throws DatabaseException, IOException, ClassNotFoundException{
        File f = new File(CACHE_PATH);
        cache = new Cache(f, 100);
        poModel = readClassifier(PO_MODEL_PATH);
        subjModel = readClassifier(SUBJ_MODEL_PATH);
    }

    /**
     * Deserializes one language-model classifier, closing the stream even on
     * failure (the previous version leaked both ObjectInputStreams).
     */
    @SuppressWarnings("unchecked")
    private static LMClassifier<NGramProcessLM, MultivariateDistribution> readClassifier(String path)
            throws IOException, ClassNotFoundException {
        ObjectInputStream ois = new ObjectInputStream(new FileInputStream(new File(path)));
        try {
            return (LMClassifier<NGramProcessLM, MultivariateDistribution>) ois.readObject();
        } finally {
            ois.close();
        }
    }

    /**
     * Chunks an entire character sequence (delegates to the array variant).
     */
    @Override
    public Chunking chunk(CharSequence cSeq) {
        char[] cs = Strings.toCharArray(cSeq);
        return chunk(cs, 0, cs.length);
    }

    /**
     * Recognizes entity mentions in {@code chars[start..end)} and labels every
     * one of them with the sentence-level sentiment category.
     *
     * @return a chunking whose chunk offsets are relative to the given slice
     */
    @Override
    public Chunking chunk(char[] chars, int start, int end) {
        ChunkingImpl chunkingImpl = new ChunkingImpl(chars, start, end);
        int startChunk = 0, endChunk = 0, startIndex = 0, endIndex = 0;

        // Fix: only look at the requested slice. The previous version used
        // new String(chars), which is wrong whenever start != 0; chunk offsets
        // must be relative to the slice wrapped by chunkingImpl.
        String sentence = new String(chars, start, end - start);
        String sentenceEntities = "";

        // One sentiment label for the whole sentence, applied to every entity.
        String sentiment = sentenceSentiment(poModel, subjModel, sentence);
        try {
            sentenceEntities = entities(NGRAMS_PATH, sentence);
        } catch (Exception ex) {
            // Fix: println(ex.getStackTrace()) only printed the array reference;
            // print the actual trace instead.
            ex.printStackTrace();
        }

        // Walk the tagged sentence, converting each <chunk>…</chunk> span into a
        // Chunk over the original (untagged) sentence. Between iterations,
        // endChunk holds the original-sentence offset corresponding to the start
        // of the not-yet-consumed tail of sentenceEntities, so
        //   original start = startIndex + endChunk, and
        //   new end       = endChunk + (endIndex - OPEN_TAG.length())
        // (content length is endIndex - startIndex - OPEN_TAG.length()).
        while ((startIndex = sentenceEntities.indexOf(OPEN_TAG)) != -1 && (endIndex = sentenceEntities.indexOf(CLOSE_TAG)) != -1) {
            startChunk = startIndex + endChunk;
            endChunk += endIndex - OPEN_TAG.length();

            System.out.println(sentence.substring(startChunk, endChunk)); // debug output

            // Drop everything up to and including the closing tag.
            sentenceEntities = sentenceEntities.substring(endIndex + CLOSE_TAG.length());
            Chunk chunk = ChunkFactory.createChunk(startChunk, endChunk, sentiment);
            chunkingImpl.add(chunk);
        }
        System.out.println(chunkingImpl); // debug output
        return chunkingImpl;
    }

    /**
     * Classifies a sentence's sentiment: quotes are neutralized, otherwise the
     * polarity model picks positive or negative.
     *
     * @return "ENTITY_NEU" for quotes, "ENTITY_POS"/"ENTITY_NEG" by polarity,
     *         or "" when the polarity model returns neither "pos" nor "neg"
     */
    public String sentenceSentiment(LMClassifier poModel, LMClassifier subjModel, String sentence) {
        JointClassification subjClassification = subjModel.classify(sentence);
        // Quoted material is treated as neutral regardless of polarity.
        if (subjClassification.category(0).equals("quote")) {
            return "ENTITY_NEU";
        }
        JointClassification poClassification = poModel.classify(sentence);
        String best = poClassification.bestCategory();
        if (best.equals("neg")) {
            return "ENTITY_NEG";
        }
        if (best.equals("pos")) {
            return "ENTITY_POS";
        }
        // NOTE(review): falls through to "" for any other category — confirm
        // downstream code tolerates an empty chunk type.
        return "";
    }

    /**
     * Runs entity recognition over the sentence, returning it with each entity
     * wrapped in &lt;chunk&gt;…&lt;/chunk&gt; tags.
     *
     * @throws Exception propagated from EntityOpinion.recognizeEntities
     */
    public String entities(String ngramPath, String sentence) throws Exception {
        EntityOpinion e = new EntityOpinion(cache);
        return e.recognizeEntities(ngramPath, sentence);
    }

    /**
     * Evaluates the serialized chunker against a test corpus and writes one
     * tab-separated line of overall and per-polarity precision / recall /
     * F1-score / accuracy to a results file.
     */
    public static void main(String args[]) throws DatabaseException, IOException, ClassNotFoundException, FileNotFoundException, Exception{
        // One-time model construction / sanity checks, kept for reference:
        //EntityOpinionSupervisedBaseline eosb = new EntityOpinionSupervisedBaseline();
        //ModelSerialization.writeModel(eosb, PROTOTYPE_PATH);

        //EntityOpinionSupervisedBaseline e = ModelSerialization.readLexSupervised(PROTOTYPE_PATH);
        //Chunking chunk = e.chunk("John and Matt ran.");

        File corpus = new File("c:/users/diogo/desktop/corpus/teste.xml");

        BufferedWriter out = new BufferedWriter(new FileWriter(new File("c:/users/diogo/desktop/lexsupervised.txt")));
        try {
            out.write("Precision\t\tRecall\t\tF1-score\t\tAccuracy\t\tPrecision_Pos\t\t"+
                    "Recall_Pos\t\tF1-score_Pos\t\tAccuracy_Pos\t\tPrecision_Neg\t\tRecall_Neg"
                    + "\t\tF1-score_Neg\t\tAccuracy_Neg\t\tPrecision_Neu"+
                    "\t\tRecall_Neu\t\tF1-score_Neu\t\tAccuracy_Neu");
            out.newLine();

            EntityOpinionSupervisedBaseline e = ModelSerialization.readLexSupervised(PROTOTYPE_PATH);
            ChunkerEvaluator evaluator = new ChunkerEvaluator(e);
            evaluator.setVerbose(true);
            OpinionChunkParser parser = new OpinionChunkParser();
            parser.setHandler(evaluator);
            parser.parse(corpus);

            System.out.println("Results");
            ChunkingEvaluation ce = evaluator.evaluation();
            PrecisionRecallEvaluation preva = ce.precisionRecallEvaluation();
            PrecisionRecallEvaluation preva_pos = ce.perTypeEvaluation("ENTITY_POS").precisionRecallEvaluation();
            PrecisionRecallEvaluation preva_neg = ce.perTypeEvaluation("ENTITY_NEG").precisionRecallEvaluation();
            PrecisionRecallEvaluation preva_neu = ce.perTypeEvaluation("ENTITY_NEU").precisionRecallEvaluation();

            out.write(preva.precision() + "\t\t" + preva.recall() + "\t\t" +
                      preva.fMeasure() + "\t\t" + preva.accuracy() + "\t\t" +
                      preva_pos.precision() + "\t\t" + preva_pos.recall() + "\t\t" +
                      preva_pos.fMeasure() + "\t\t" + preva_pos.accuracy() + "\t\t" +
                      preva_neg.precision() + "\t\t" + preva_neg.recall() + "\t\t" +
                      preva_neg.fMeasure() + "\t\t" + preva_neg.accuracy() + "\t\t" +
                      preva_neu.precision() + "\t\t" + preva_neu.recall() + "\t\t" +
                      preva_neu.fMeasure() + "\t\t" +preva_neu.accuracy());
            out.newLine();
        } finally {
            // Fix: close the writer even if parsing/evaluation throws
            // (the previous version leaked it on any exception).
            out.close();
        }
    }
}
