package xdoc;

import com.aliasi.chunk.Chunk;
import com.aliasi.chunk.Chunker;
import com.aliasi.chunk.ConfidenceChunker;
import com.aliasi.chunk.ChunkFactory;
import com.aliasi.chunk.Chunking;
import com.aliasi.chunk.ChunkingImpl;
import com.aliasi.chunk.CharLmHmmChunker;
import com.aliasi.chunk.HmmChunker;
import com.aliasi.util.Strings;
import java.util.Iterator;
import com.aliasi.hmm.HiddenMarkovModel;
import com.aliasi.hmm.HmmDecoder;


import com.aliasi.tokenizer.TokenizerFactory;
import com.aliasi.tokenizer.Tokenizer;
import com.aliasi.dict.MapDictionary;
import com.aliasi.dict.DictionaryEntry;
import com.aliasi.dict.ExactDictionaryChunker;
import com.aliasi.tracker.EntityPhraseChunker;

import java.util.Arrays;
import java.util.Set;
import java.util.HashSet;
import java.util.HashMap;
import java.util.Map;

import org.xml.sax.InputSource;
import org.xml.sax.SAXException;

import java.io.IOException;

/**
 * An entity-phrase chunker for ACE-style named-entity extraction that
 * combines pronoun chunks, dictionary chunks (with POS / phrase-count
 * filtering from the superclass), and speculative named-entity chunks.
 *
 * <p>When constructed with a non-null confidence cutoff, the speculative
 * chunker is treated as a {@link ConfidenceChunker} and only chunks at or
 * above the cutoff probability are kept; with a null cutoff, first-best
 * chunks are used and filtered by stop phrases, stop substrings, and
 * overlap with already-accepted chunks.
 */
public class AceEntityPhraseChunker extends EntityPhraseChunker {

    /** Maximum number of n-best chunks requested from the confidence chunker. */
    private static final int MAX_N_BEST_CHUNKS = 100;

    // Confidence cutoff (a probability) for speculative chunks, or null to
    // use first-best speculative chunking instead.
    Double mCutoff;

    /**
     * Construct an ACE entity-phrase chunker.
     *
     * @param neSpeculativeChunker speculative named-entity chunker; must
     *     implement {@link ConfidenceChunker} when {@code cutoff} is non-null,
     *     or a {@code ClassCastException} will arise at chunking time.
     * @param posTagger HMM decoder used by the superclass for POS tagging.
     * @param dictionary exact-match dictionary chunker.
     * @param stopPhrases exact phrases to reject.
     * @param stopSubstrings substrings whose presence rejects a phrase.
     * @param phraseCounts map from phrase to observed count (superclass filter).
     * @param cutoff minimum confidence (probability) for speculative chunks,
     *     or {@code null} to accept filtered first-best chunks.
     */
    public AceEntityPhraseChunker (Chunker neSpeculativeChunker,
                                   HmmDecoder posTagger,
                                   ExactDictionaryChunker dictionary,
                                   String[] stopPhrases,
                                   String[] stopSubstrings,
                                   Map<String,Integer> phraseCounts,
                                   Double cutoff) {

        super(neSpeculativeChunker,posTagger,dictionary,stopPhrases,stopSubstrings,phraseCounts);
        mCutoff = cutoff;
    }

    /**
     * Return a chunking over the specified slice of the character array.
     *
     * @param characters underlying characters.
     * @param start index of the first character in the slice.
     * @param end index one past the last character in the slice.
     * @return chunking of the slice, with chunk offsets relative to the slice.
     */
    public Chunking chunk( char[] characters, int start, int end) {
        // BUG FIX: the original ignored start/end and always chunked the
        // entire array; honor the requested slice per the Chunker contract.
        String text = new String(characters, start, end - start);
        ChunkingImpl chunking = new ChunkingImpl(characters, start, end);
        Set<Chunk> entityChunkSet = new HashSet<Chunk>();

        add(entityChunkSet,mNePronounChunker,text);
        addDictionaryWithPosPhraseCountFilter(entityChunkSet,text);

        if (mCutoff == null) {
            addSpeculative(entityChunkSet,(Chunker) mNeSpeculativeChunker,text);
        }
        else {
            addSpeculativeConfidence(entityChunkSet,(ConfidenceChunker) mNeSpeculativeChunker,text, mCutoff.doubleValue());
        }
        for (Chunk chunk : entityChunkSet)
            chunking.add(chunk);
        return chunking;
    }

    /**
     * Add first-best speculative chunks to the chunk set, skipping chunks
     * shorter than two characters, stop phrases, chunks overlapping an
     * already-accepted chunk, and chunks containing a stop substring.
     *
     * @param chunkSet set to which accepted chunks are added.
     * @param chunker speculative chunker; a no-op if {@code null}.
     * @param input text being chunked.
     */
    protected void addSpeculative(Set<Chunk> chunkSet, Chunker chunker, String input) {
        if (chunker == null) return;
        Set<Chunk> nextChunkSet = chunker.chunk(input).chunkSet();
        if (nextChunkSet == null) return; // defensive; chunkSet() should not return null
        for (Chunk chunk : nextChunkSet) {
            if (chunk.end() - chunk.start() < 2)
                continue; // single-character chunks are never entities here
            String text = input.substring(chunk.start(),chunk.end());
            if (mStopPhraseSet.contains(text)) continue;
            if (overlap(chunk,chunkSet)) continue;
            boolean pass = true;
            for (int i = 0; i < mStopSubstringList.length; ++i) {
                if (text.contains(mStopSubstringList[i])) {
                    pass = false;
                    break; // one stop substring suffices to reject
                }
            }
            if (pass) {
                // re-create the chunk to normalize away the chunker-specific score
                Chunk normChunk = ChunkFactory.createChunk(chunk.start(),chunk.end(),chunk.type());
                chunkSet.add(normChunk);
            }
        }
    }

    /**
     * Add n-best speculative chunks whose confidence meets the cutoff.
     * Chunk scores are log (base 2) probabilities, so confidence is
     * {@code 2^score}.  Iteration stops at the first chunk below the
     * cutoff, relying on {@link ConfidenceChunker#nBestChunks} returning
     * chunks in decreasing order of confidence.
     *
     * <p>NOTE(review): unlike {@link #addSpeculative}, this method applies
     * no stop-phrase, stop-substring, or overlap filtering — the original
     * had that filtering commented out, so it is presumably intentional.
     *
     * @param chunkSet set to which accepted chunks are added.
     * @param chunker confidence-ranked chunker; a no-op if {@code null}.
     * @param input text being chunked.
     * @param confidenceCutoff minimum probability for an accepted chunk.
     */
    protected void addSpeculativeConfidence(Set<Chunk> chunkSet, ConfidenceChunker chunker, String input, double confidenceCutoff) {
        if (chunker == null) return;
        char[] cs = input.toCharArray();
        Iterator it = chunker.nBestChunks(cs,0,cs.length,MAX_N_BEST_CHUNKS);
        while (it.hasNext()) {
            Chunk chunk = (Chunk) it.next();
            if (chunk.end() - chunk.start() < 2)
                continue; // single-character chunks are never entities here

            double conf = Math.pow(2.0,chunk.score()); // score is log2 probability
            if (conf < confidenceCutoff) {
                break; // chunks arrive in decreasing confidence; rest are below cutoff
            }
            // re-create the chunk to normalize away the chunker-specific score
            Chunk normChunk = ChunkFactory.createChunk(chunk.start(),chunk.end(),chunk.type());
            chunkSet.add(normChunk);
        }
    }
}
