/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package entityopinions;

import com.aliasi.classify.JointClassification;
import com.aliasi.classify.LMClassifier;
import com.aliasi.crf.ChainCrfFeatureExtractor;
import com.aliasi.crf.ChainCrfFeatures;
import com.aliasi.hmm.HiddenMarkovModel;
import com.aliasi.hmm.HmmDecoder;
import com.aliasi.lm.NGramProcessLM;
import com.aliasi.stats.MultivariateDistribution;
import com.aliasi.tag.Tagger;
import com.aliasi.tag.Tagging;
import com.aliasi.tokenizer.IndoEuropeanTokenCategorizer;
import com.aliasi.util.AbstractExternalizable;
import com.aliasi.util.ObjectToDoubleMap;
import com.sleepycat.je.DatabaseException;
import entityopinions.ComputePMI.Kernel;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 *
 * @author Diogo
 */
/**
 * Feature extractor plugged into a LingPipe chain CRF for entity-opinion tagging.
 *
 * <p>For each sentence (token list) it produces node features built from: the token
 * itself, its Brown-corpus POS tag, a LEX flag carried by {@link TokenWithLEX} tokens,
 * sentence-level polarity/subjectivity classifications, per-token PMI scores against
 * the anchor words "excellent" and "poor", and a polarity-lexicon lookup.
 *
 * <p>NOTE(review): all model/resource locations are hard-coded absolute paths —
 * consider externalizing them to configuration. The heavyweight resources
 * (POS tagger, cache, LM classifiers) are {@code transient} and are NOT restored
 * on deserialization; only the POS tagger has a reload fallback (see
 * {@link ChunkerFeatures}).
 *
 * @author Diogo
 */
public class CrfFeatureExtractor implements ChainCrfFeatureExtractor<String>, Serializable{

    public String POS_HMM_PATH = "/home/dsantos/entityopinionsdropboxv5/modelos/pos-en-general-brown.HiddenMarkovModel";
    public String PO_MODEL_PATH = "/home/dsantos/entityopinionsdropboxv5/modelos/polModel.pol";
    public String SUBJ_MODEL_PATH = "/home/dsantos/entityopinionsdropboxv5/modelos/subjModel.subj";
    public String CACHE_PATH = "/home/dsantos/entityopinionsdropbox/entityTesting/cache";
    /** Location of the serialized polarity lexicon loaded into {@link #poLexicon}. */
    private static final String LEXICON_PATH = "/home/dsantos/entityopinionsdropboxv5/Lexico/lexicon.hash";
    transient public Tagger<String> mPosTagger;
    transient public Cache mCache;
    transient LMClassifier<NGramProcessLM,MultivariateDistribution> mPoModel;
    transient LMClassifier<NGramProcessLM,MultivariateDistribution> mSubjModel;
    // Polarity lexicon shared by all instances: token -> polarity label. Lazily
    // loaded once, on first ChunkerFeatures construction.
    static HashMap<String, String> poLexicon = null;

    /**
     * Loads the POS HMM, the disk-backed PMI cache and the two language-model
     * classifiers (polarity and subjectivity) from their hard-coded paths.
     *
     * @throws IOException            if any model file cannot be read
     * @throws ClassNotFoundException if a serialized model class is unavailable
     * @throws DatabaseException      if the Berkeley DB cache cannot be opened
     */
    @SuppressWarnings("unchecked") // required for the LMClassifier deserialization casts
    public CrfFeatureExtractor() throws IOException, ClassNotFoundException, DatabaseException{
        HiddenMarkovModel posHmm = (HiddenMarkovModel) AbstractExternalizable.readObject(new File(POS_HMM_PATH));
        mPosTagger = new HmmDecoder(posHmm);
        mCache = new Cache(new File(CACHE_PATH), 100);
        ObjectInputStream ois_po = new ObjectInputStream(new FileInputStream(new File(PO_MODEL_PATH)));
        mPoModel = (LMClassifier<NGramProcessLM,MultivariateDistribution>) ois_po.readObject();
        ObjectInputStream ois_subj = new ObjectInputStream(new FileInputStream(new File(SUBJ_MODEL_PATH)));
        mSubjModel = (LMClassifier<NGramProcessLM,MultivariateDistribution>) ois_subj.readObject();
    }

    /**
     * {@inheritDoc}
     *
     * <p>Returns {@code null} if feature construction fails (the original
     * best-effort contract is preserved); the failure is logged with a full
     * stack trace so the cause is not lost.
     */
    public ChainCrfFeatures<String> extract(List<String> tokens, List<String> tags){
        try {
            return new ChunkerFeatures(tokens, tags);
        } catch (Exception ex) {
            System.out.println("Exception: " + ex.getMessage());
            ex.printStackTrace();
            return null;
        }
    }

    // NOTE(review): returning `this` makes this serialization hook a no-op, and no
    // readResolve/readObject reinitializes the transient fields — a deserialized
    // extractor has a null cache and null classifiers. Kept as-is for byte
    // compatibility with existing serialized models; verify before relying on
    // deserialized instances.
    Object writeReplace() {
        return this;
    }

    /**
     * Per-sentence feature container. Precomputes the POS tagging and the
     * sentence-level polarity/subjectivity categories at construction time;
     * node/edge features are then assembled on demand.
     */
    class ChunkerFeatures extends ChainCrfFeatures<String> {
        {
            try{
                if (poLexicon == null) {
                    // Unchecked: the lexicon was serialized as a raw HashMap.
                    @SuppressWarnings("unchecked")
                    HashMap<String, String> lex =
                            (HashMap<String, String>) AbstractExternalizable.readObject(new File(LEXICON_PATH));
                    poLexicon = lex;
                }
            } catch(Exception e){
                System.out.println("Exception: " + e.getMessage());
                e.printStackTrace();
            }
        }
        private final Tagging<String> mPosTagging;
        private final String mSentence;
        private String mSentencePO;
        private String mSentenceSubj;

        public ChunkerFeatures(List<String> tokens, List<String> tags) throws FileNotFoundException, IOException, ClassNotFoundException, DatabaseException, Exception {
            super(tokens,tags);
            List<String> tokensWithoutLEX = tokensWithLEXtoTokens(tokens);
            // Reuse the already-loaded tagger instead of deserializing the HMM
            // model from disk for every sentence (the original hot-path bug).
            // Fall back to a file load only when the transient tagger is gone,
            // i.e. after this extractor itself was deserialized.
            Tagger<String> tagger = mPosTagger;
            if (tagger == null) {
                tagger = new HmmDecoder((HiddenMarkovModel) AbstractExternalizable.readObject(new File(POS_HMM_PATH)));
            }
            mPosTagging = tagger.tag(tokensWithoutLEX);
            mSentence = createSentence(tokensWithoutLEX);
            mSentencePO = sentencePO(mSentence);
            mSentenceSubj = sentenceSubj(mSentence);
        }

        /**
         * Builds the node feature map for position {@code n}: token/POS/LEX/PMI/
         * lexicon features for the current, previous and next positions, plus
         * sentence-level polarity and subjectivity features.
         */
        @Override
        public Map<String, ? extends Number> nodeFeatures(int n) {
            ObjectToDoubleMap<String> feats = new ObjectToDoubleMap<String>();

            boolean bos = n == 0;
            boolean eos = (n + 1) >= numTokens();

            String token = normedToken(n);
            String prevToken = bos ? null : normedToken(n-1);
            String nextToken = eos ? null : normedToken(n+1);

            String posTag = mPosTagging.tag(n);
            String prevPosTag = bos ? null : mPosTagging.tag(n-1);
            String nextPosTag = eos ? null : mPosTagging.tag(n+1);

            String tokenPoLexicon = poLexicon(n);
            String prevTokenPoLexicon = bos ? null : poLexicon(n-1);
            String nextTokenPoLexicon = eos ? null : poLexicon(n+1);

            int lex = lex(n);
            int prevLex = bos ? 0 : lex(n-1);
            int nextLex = eos ? 0 : lex(n+1);

            // PMI lookups hit the disk cache and may fail; on failure the scores
            // default to 0 and extraction continues (best-effort, as before).
            double pmiExcellent = 0, prevPmiExcellent = 0, nextPmiExcellent = 0, pmiPoor = 0,
                    prevPmiPoor = 0, nextPmiPoor = 0;
            try {
                pmiExcellent = pmiExcellent(n);
                prevPmiExcellent = bos ? 0 : pmiExcellent(n-1);
                nextPmiExcellent = eos ? 0 : pmiExcellent(n+1);
                pmiPoor = pmiPoor(n);
                prevPmiPoor = bos ? 0 : pmiPoor(n-1);
                nextPmiPoor = eos ? 0 : pmiPoor(n+1);
            } catch (Exception ex) {
                System.out.println("Exception: " + ex.getMessage());
                ex.printStackTrace();
            }

            if (bos) feats.set("BOS", 1.0);
            if (eos) feats.set("EOS", 1.0);
            if (!bos && !eos) feats.set("!BOS!EOS", 1.0);

            feats.set("TOK_" + token, 1.0);
            if (!bos) feats.set("TOK_PREV_" + prevToken, 1.0);
            if (!eos) feats.set("TOK_NEXT_" + nextToken, 1.0);

            feats.set("TOK_POS_" + posTag, 1.0);
            if (!bos) feats.set("TOK_POS_PREV_" + prevPosTag, 1.0);
            if (!eos) feats.set("TOK_POS_NEXT_" + nextPosTag, 1.0);

            feats.set("TOK_LEX_" + lex, 1.0);
            if (!bos) feats.set("TOK_LEX_PREV_" + prevLex, 1.0);
            if (!eos) feats.set("TOK_LEX_NEXT_" + nextLex, 1.0);

            feats.set("SENTENCE_PO_" + mSentencePO, 1.0);

            feats.set("SENTENCE_SUBJ_" + mSentenceSubj, 1.0);

            feats.set("TOK_PMI_EXCELLENT_" + pmiExcellent, 1.0);
            if(!bos) feats.set("TOK_PMI_EXCELLENT_PREV_" + prevPmiExcellent, 1.0);
            if(!eos) feats.set("TOK_PMI_EXCELLENT_NEXT_" + nextPmiExcellent, 1.0);

            feats.set("TOK_PMI_POOR_" + pmiPoor, 1.0);
            if(!bos) feats.set("TOK_PMI_POOR_PREV_" + prevPmiPoor, 1.0);
            if(!eos) feats.set("TOK_PMI_POOR_NEXT_" + nextPmiPoor, 1.0);

            // May emit "TOK_PO_LEXICON_null" when the token is absent from the
            // lexicon — preserved, since trained models expect that feature name.
            feats.set("TOK_PO_LEXICON_" + tokenPoLexicon, 1.0);
            if(!bos) feats.set("TOK_PO_LEXICON_PREV_" + prevTokenPoLexicon, 1.0);
            if(!eos) feats.set("TOK_PO_LEXICON_NEXT_" + nextTokenPoLexicon, 1.0);
            return feats;
        }

        /** Edge features: previous output tag, alone and paired with the previous token's category. */
        @Override
        public Map<String, ? extends Number> edgeFeatures(int n, int k) {
            ObjectToDoubleMap<String> feats = new ObjectToDoubleMap<String>();
            feats.set("PREV_TAG_" + tag(k), 1.0);
            feats.set("PREV_TAG_TOKEN_CAT_"  + tag(k) + "_" + tokenCat(n-1), 1.0);
            return feats;
        }

        /**
         * Unwraps the token at position {@code n} to its plain-string form.
         *
         * <p>Although {@code token(n)} is declared to return {@code String}, raw
         * lists may smuggle {@link TokenWithLEX} instances through — hence the
         * {@code CharSequence} bridge cast before the {@code instanceof} check.
         */
        private String rawToken(int n) {
            CharSequence tok = (CharSequence) token(n);
            return (tok instanceof TokenWithLEX)
                    ? ((TokenWithLEX) tok).toString()
                    : token(n);
        }

        /** Token with each digit run wrapped in '*' markers and digits masked as 'D'. */
        public String normedToken(int n) {
            return rawToken(n).replaceAll("\\d+","*$0*").replaceAll("\\d","D");
        }

        /** IndoEuropean surface-form category of the token (e.g. capitalized, numeric). */
        public String tokenCat(int n) {
            return IndoEuropeanTokenCategorizer.CATEGORIZER.categorize(rawToken(n));
        }

        /** Joins the tokens into a single space-separated sentence string. */
        public String createSentence(List<String> tokens){
            return String.join(" ", tokens);
        }

        /**
         * Converts a list of {@link TokenWithLEX}-bearing tokens to plain strings.
         * NOTE(review): throws ClassCastException if an element is a plain String;
         * callers are expected to pass LEX-annotated tokens only.
         */
        public List<String> tokensWithLEXtoTokens(List<String> tokensWithLEX){
            List<String> tokens = new ArrayList<String>(tokensWithLEX.size());
            for (String t : tokensWithLEX) {
                tokens.add(((TokenWithLEX)((CharSequence) t)).toString());
            }
            return tokens;
        }

        /** Best subjectivity category for the sentence, per the subjectivity LM classifier. */
        public String sentenceSubj(String sentence) throws FileNotFoundException, IOException, ClassNotFoundException{
            JointClassification jc = mSubjModel.classify(sentence);
            return jc.bestCategory();
        }

        /** Best polarity category for the sentence, per the polarity LM classifier. */
        public String sentencePO(String sentence) throws FileNotFoundException, IOException, ClassNotFoundException{
            JointClassification jc = mPoModel.classify(sentence);
            return jc.bestCategory();
        }

        // Shared implementation for the two PMI anchors below.
        private double normalizedPmi(int n, String anchorWord) throws IOException, DatabaseException {
            ComputePMI cpmi = new ComputePMI(mCache, Kernel.Gaussian);
            return cpmi.normalizedPMI(rawToken(n), anchorWord);
        }

        /** Gaussian-kernel normalized PMI of token {@code n} with the anchor "excellent". */
        public double pmiExcellent(int n) throws IOException, DatabaseException{
            return normalizedPmi(n, "excellent");
        }

        /** Gaussian-kernel normalized PMI of token {@code n} with the anchor "poor". */
        public double pmiPoor(int n) throws IOException, DatabaseException{
            return normalizedPmi(n, "poor");
        }

        /** LEX flag of the token, or 0 when the token carries no LEX annotation. */
        public int lex(int n) {
            if ((CharSequence)token(n) instanceof TokenWithLEX) {
                return ((TokenWithLEX)((CharSequence)token(n))).getLEXResult();
            }
            return 0;
        }

        /** Polarity-lexicon label for the token, or {@code null} when absent. */
        public String poLexicon(int n){
            return poLexicon.get(rawToken(n));
        }

    }

    /** Ad-hoc smoke test: extracts features for a toy sentence and prints one lexicon lookup. */
    public static void main(String args[]) throws IOException, ClassNotFoundException, DatabaseException{
        List<String> tokens = new ArrayList<String>();
        tokens.add("I");
        tokens.add("abandon");
        tokens.add("soup");
        tokens.add(".");
        CrfFeatureExtractor cfe = new CrfFeatureExtractor();
        ChunkerFeatures ccf = (ChunkerFeatures) cfe.extract(tokens, null);
        String xpto = ccf.poLexicon(1);
        System.out.println(xpto);
    }
}







