/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package com.aliasi.chunk;

import com.aliasi.chunk.BioTagChunkCodec.NBestIterator;
import com.aliasi.tag.StringTagging;
import com.aliasi.tag.TagLattice;
import com.aliasi.tag.Tagging;
import com.aliasi.tokenizer.Tokenizer;
import com.aliasi.tokenizer.TokenizerFactory;
import com.aliasi.util.AbstractExternalizable;
import com.aliasi.util.Iterators;
import com.aliasi.util.Strings;
import entityopinions.TokenWithLEX;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

/**
 * A BIO tag/chunk codec variant that, when converting a chunking to a
 * tagging, wraps each token in a {@link entityopinions.TokenWithLEX}
 * carrying a lexicon flag: 1 for tokens inside a chunk (begin/in tags),
 * 0 for tokens outside all chunks. Adapted from LingPipe's
 * {@code BioTagChunkCodec}.
 *
 * @author Diogo
 */
public class BioTagChunkCodecWithLEX extends AbstractTagChunkCodec implements Serializable {

    // Factory used to tokenize chunk yields; may be null, in which case
    // chunking-to-tagging conversion is unsupported.
    private TokenizerFactory mTokenizerFactory;
    // Tag assigned to tokens outside every chunk.
    private String mOutTag = "O";
    // Prefix for tags continuing a chunk of a given type (e.g. "I_PER").
    private String mInTagPrefix = "I_";
    // Prefix for tags beginning a chunk of a given type (e.g. "B_PER").
    private String mBeginTagPrefix = "B_";

    /**
     * Constructs a codec with no tokenizer factory and the default tag
     * scheme ({@code O} / {@code B_} / {@code I_}). A codec built this
     * way cannot convert chunkings to taggings.
     */
    public BioTagChunkCodecWithLEX() {
        super();
    }

    /**
     * Constructs a codec with the specified tokenizer factory and
     * consistency enforcement, using the default tag scheme.
     *
     * @param tokenizerFactory factory used to tokenize chunk yields.
     * @param enforceConsistency whether tagging/chunking round-trip
     * consistency is enforced.
     */
    public BioTagChunkCodecWithLEX(TokenizerFactory tokenizerFactory, boolean enforceConsistency) {
        super(tokenizerFactory, enforceConsistency);
        mTokenizerFactory = tokenizerFactory;
    }

    /**
     * Constructs a codec with the specified tokenizer factory,
     * consistency enforcement, and tag scheme.
     *
     * @param tokenizerFactory factory used to tokenize chunk yields.
     * @param enforceConsistency whether tagging/chunking round-trip
     * consistency is enforced.
     * @param beginTagPrefix prefix for chunk-initial tags.
     * @param inTagPrefix prefix for chunk-continuation tags.
     * @param outTag tag for tokens outside all chunks.
     */
    public BioTagChunkCodecWithLEX(TokenizerFactory tokenizerFactory,
            boolean enforceConsistency,
            String beginTagPrefix,
            String inTagPrefix,
            String outTag) {
        super(tokenizerFactory, enforceConsistency);
        // Bug fix: this constructor previously left mTokenizerFactory null,
        // so codecs built here (including every codec returned by
        // Serializer.read) rejected toTagging()/toStringTagging() even when
        // a factory had been supplied.
        mTokenizerFactory = tokenizerFactory;
        mOutTag = outTag;
        mBeginTagPrefix = beginTagPrefix;
        mInTagPrefix = inTagPrefix;
    }

    /**
     * Converts the specified chunking to a tagging whose tokens are
     * {@link TokenWithLEX} instances: the lexicon flag is 0 for tokens
     * carrying the out tag and 1 for tokens inside a chunk.
     *
     * <p>Every token yields exactly one {@code TokenWithLEX}, keeping the
     * token and tag lists aligned. (The previous implementation compared
     * hard-coded first characters rather than the configured tag scheme,
     * and silently dropped tokens with unrecognized tags, which could
     * misalign tokens and tags.)
     *
     * @param chunking chunking to convert.
     * @return BIO tagging over lexicon-flagged tokens.
     * @throws UnsupportedOperationException if no tokenizer factory was
     * supplied at construction.
     */
    public Tagging toTagging(Chunking chunking) {
        if (mTokenizerFactory == null) {
            String msg = "Tokenizer factory must be non-null to convert chunking to tagging.";
            throw new UnsupportedOperationException(msg);
        }
        enforceConsistency(chunking);
        List<String> tokens = new ArrayList<String>();
        List<String> tags = new ArrayList<String>();
        toTagging(chunking, tokens, tags, null, null);

        List<TokenWithLEX> tokenWithLEXList = new ArrayList<TokenWithLEX>(tokens.size());
        for (int i = 0; i < tokens.size(); ++i) {
            // 0 = outside all chunks; 1 = inside a chunk (begin or in tag).
            int lexFlag = mOutTag.equals(tags.get(i)) ? 0 : 1;
            tokenWithLEXList.add(new TokenWithLEX(tokens.get(i), lexFlag));
        }
        // Raw Tagging kept for source compatibility with existing callers
        // and the inherited (raw-erased) codec signature.
        return new Tagging(tokenWithLEXList, tags);
    }

    /**
     * Walks the chunking in text order, emitting out-tagged tokens for the
     * gaps between chunks and begin/in-tagged tokens for each chunk,
     * appending results to the supplied lists.
     *
     * @param chunking chunking to convert.
     * @param tokenList output list of tokens.
     * @param tagList output list of tags, parallel to {@code tokenList}.
     * @param tokenStartList output list of token start offsets, or null to skip.
     * @param tokenEndList output list of token end offsets, or null to skip.
     */
    public void toTagging(Chunking chunking,
            List<String> tokenList,
            List<String> tagList,
            List<Integer> tokenStartList,
            List<Integer> tokenEndList) {
        char[] cs = Strings.toCharArray(chunking.charSequence());
        Set<Chunk> chunkSet = chunking.chunkSet();
        Chunk[] chunks = chunkSet.toArray(new Chunk[chunkSet.size()]);
        Arrays.sort(chunks, Chunk.TEXT_ORDER_COMPARATOR);
        int pos = 0;
        for (Chunk chunk : chunks) {
            String type = chunk.type();
            int start = chunk.start();
            int end = chunk.end();
            // Material between the previous chunk and this one is out-tagged.
            outBioTag(cs, pos, start, tokenList, tagList, tokenStartList, tokenEndList);
            chunkBioTag(cs, type, start, end, tokenList, tagList, tokenStartList, tokenEndList);
            pos = end;
        }
        // Trailing material after the last chunk.
        outBioTag(cs, pos, cs.length, tokenList, tagList, tokenStartList, tokenEndList);
    }

    /**
     * Tokenizes the slice {@code [start,end)} of {@code cs} and tags every
     * token with the out tag.
     */
    void outBioTag(char[] cs, int start, int end,
            List<String> tokenList, List<String> tagList,
            List<Integer> tokenStartList, List<Integer> tokenEndList) {
        int length = end - start;
        Tokenizer tokenizer = mTokenizerFactory.tokenizer(cs, start, length);
        String token;
        while ((token = tokenizer.nextToken()) != null) {
            tokenList.add(token);
            addOffsets(tokenizer, start, tokenStartList, tokenEndList);
            tagList.add(mOutTag);
        }
    }

    /**
     * Tokenizes the yield of one chunk, tagging the first token with the
     * begin prefix and all following tokens with the in prefix.
     *
     * @throws IllegalArgumentException if the chunk yield contains no tokens.
     */
    void chunkBioTag(char[] cs, String type, int start, int end,
            List<String> tokenList, List<String> tagList,
            List<Integer> tokenStartList, List<Integer> tokenEndList) {
        int length = end - start;
        Tokenizer tokenizer = mTokenizerFactory.tokenizer(cs, start, length);
        String firstToken = tokenizer.nextToken();
        if (firstToken == null) {
            String msg = "Chunks must contain at least one token."
                    + " Found chunk with yield=|" + new String(cs, start, length) + "|" + type + " " + start + " " + end;
            throw new IllegalArgumentException(msg);
        }
        tokenList.add(firstToken);
        addOffsets(tokenizer, start, tokenStartList, tokenEndList);
        String beginTag = mBeginTagPrefix + type;
        tagList.add(beginTag);
        String inTag = mInTagPrefix + type;
        String token;
        while ((token = tokenizer.nextToken()) != null) {
            tokenList.add(token);
            addOffsets(tokenizer, start, tokenStartList, tokenEndList);
            tagList.add(inTag);
        }
    }

    /**
     * Records the last token's character offsets (shifted by {@code offset}
     * back into the full text) into the offset lists; no-op when offset
     * tracking is disabled (null start list).
     */
    void addOffsets(Tokenizer tokenizer,
            int offset,
            List<Integer> tokenStartList, List<Integer> tokenEndList) {
        if (tokenStartList == null) {
            return;
        }
        // Tokenizer positions are relative to the slice, hence the offset.
        int start = tokenizer.lastTokenStartPosition() + offset;
        int end = tokenizer.lastTokenEndPosition() + offset;
        tokenStartList.add(start);
        tokenEndList.add(end);
    }

    /**
     * Returns an iterator over the n-best chunks derivable from the
     * specified tag lattice, with token positions mapped to character
     * offsets through the parallel start/end arrays.
     *
     * @param lattice tag lattice over the token sequence.
     * @param tokenStarts character start offset per token, non-decreasing.
     * @param tokenEnds character end offset per token, non-decreasing.
     * @param maxResults maximum number of chunks returned, non-negative.
     * @return iterator over at most {@code maxResults} chunks.
     * @throws IllegalArgumentException if any precondition above is violated.
     */
    public Iterator<Chunk> nBestChunks(TagLattice<String> lattice,
            int[] tokenStarts,
            int[] tokenEnds,
            int maxResults) {

        if (maxResults < 0) {
            String msg = "Require non-negative number of results.";
            throw new IllegalArgumentException(msg);
        }
        if (tokenStarts.length != lattice.numTokens()) {
            String msg = "Token starts must line up with num tokens."
                    + " Found tokenStarts.length=" + tokenStarts.length
                    + " lattice.numTokens()=" + lattice.numTokens();
            throw new IllegalArgumentException(msg);
        }
        if (tokenEnds.length != lattice.numTokens()) {
            String msg = "Token ends must line up with num tokens."
                    + " Found tokenEnds.length=" + tokenEnds.length
                    + " lattice.numTokens()=" + lattice.numTokens();
            throw new IllegalArgumentException(msg);
        }
        for (int i = 1; i < tokenStarts.length; ++i) {
            if (tokenStarts[i - 1] > tokenStarts[i]) {
                String msg = "Token starts must be in order."
                        + " Found tokenStarts[" + (i - 1) + "]=" + tokenStarts[i - 1]
                        + " tokenStarts[" + i + "]=" + tokenStarts[i];
                throw new IllegalArgumentException(msg);
            }
            if (tokenEnds[i - 1] > tokenEnds[i]) {
                String msg = "Token ends must be in order."
                        + " Found tokenEnds[" + (i - 1) + "]=" + tokenEnds[i - 1]
                        + " tokenEnds[" + i + "]=" + tokenEnds[i];
                throw new IllegalArgumentException(msg);
            }
        }
        if (lattice.numTags() == 0) {
            return Iterators.<Chunk>empty();
        }
        for (int i = 0; i < tokenStarts.length; ++i) {
            if (tokenStarts[i] > tokenEnds[i]) {
                String msg = "Token ends must not precede starts."
                        + " Found tokenStarts[" + i + "]=" + tokenStarts[i]
                        + " tokenEnds[" + i + "]=" + tokenEnds[i];
                throw new IllegalArgumentException(msg);
            }
        }
        return new NBestIterator(lattice, tokenStarts, tokenEnds, maxResults,
                mBeginTagPrefix, mInTagPrefix, mOutTag);
    }

    /**
     * Returns true if the tag is the out tag or carries the begin or in
     * prefix of this codec's tag scheme.
     */
    boolean legalTagSingle(String tag) {
        return mOutTag.equals(tag)
                || tag.startsWith(mBeginTagPrefix)
                || tag.startsWith(mInTagPrefix);
    }

    /**
     * Returns true if {@code tag2} may legally follow {@code tag1}:
     * an in tag may only continue a begin/in tag of the same type;
     * begin and out tags may follow anything legal.
     */
    boolean legalTagPair(String tag1, String tag2) {
        // B_X, I_X -> I_X, B_Y, O
        // O -> B_Y, O
        if (!legalTagSingle(tag1)) {
            return false;
        }
        if (!legalTagSingle(tag2)) {
            return false;
        }
        if (tag2.startsWith(mInTagPrefix)) {
            // NOTE(review): endsWith is a loose type match inherited from the
            // original code (e.g. "B_XY" would accept "I_Y"); kept as-is to
            // preserve behavior — confirm type names never suffix each other.
            return tag1.endsWith(tag2.substring(mInTagPrefix.length()));
        }
        return true;
    }

    /**
     * Returns true if each adjacent pair in the tag sequence is legal;
     * the empty sequence is trivially legal.
     */
    public boolean legalTagSubSequence(String... tags) {
        if (tags.length == 0) {
            return true;
        }
        if (tags.length == 1) {
            return legalTagSingle(tags[0]);
        }
        for (int i = 1; i < tags.length; ++i) {
            if (!legalTagPair(tags[i - 1], tags[i])) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns true if the tags form a legal complete sequence: a legal
     * subsequence that does not start with an in tag.
     */
    public boolean legalTags(String... tags) {
        return legalTagSubSequence(tags)
                && (tags.length == 0 || !tags[0].startsWith(mInTagPrefix));
    }

    /**
     * Returns the full tag set for the specified chunk types: the out tag
     * plus a begin and in tag per type.
     *
     * @param chunkTypes chunk types to encode.
     * @return set of {@code 2 * chunkTypes.size() + 1} tags.
     */
    public Set<String> tagSet(Set<String> chunkTypes) {
        Set<String> tagSet = new HashSet<String>();
        tagSet.add(mOutTag);
        for (String chunkType : chunkTypes) {
            tagSet.add(mBeginTagPrefix + chunkType);
            tagSet.add(mInTagPrefix + chunkType);
        }
        return tagSet;
    }

    /**
     * Converts the specified chunking to a string tagging that retains
     * per-token character offsets into the underlying text.
     *
     * @param chunking chunking to convert.
     * @return string tagging with token offsets.
     * @throws UnsupportedOperationException if no tokenizer factory was
     * supplied at construction.
     */
    public StringTagging toStringTagging(Chunking chunking) {
        if (mTokenizerFactory == null) {
            String msg = "Tokenizer factory must be non-null to convert chunking to tagging.";
            throw new UnsupportedOperationException(msg);
        }
        enforceConsistency(chunking);
        List<String> tokenList = new ArrayList<String>();
        List<String> tagList = new ArrayList<String>();
        List<Integer> tokenStartList = new ArrayList<Integer>();
        List<Integer> tokenEndList = new ArrayList<Integer>();
        toTagging(chunking, tokenList, tagList,
                tokenStartList, tokenEndList);
        StringTagging tagging = new StringTagging(tokenList,
                tagList,
                chunking.charSequence(),
                tokenStartList,
                tokenEndList);
        return tagging;
    }

    /**
     * Decodes the specified string tagging back into a chunking: each begin
     * tag opens a chunk of the embedded type, extended over any immediately
     * following in tags of the same type.
     *
     * @param tagging tagging to decode.
     * @return the decoded chunking.
     * @throws IllegalArgumentException if the tag sequence is illegal (an in
     * tag with no matching preceding begin/in tag).
     */
    public Chunking toChunking(StringTagging tagging) {
        enforceConsistency(tagging);
        ChunkingImpl chunking = new ChunkingImpl(tagging.characters());
        for (int n = 0; n < tagging.size(); ++n) {
            String tag = tagging.tag(n);
            if (mOutTag.equals(tag)) {
                continue;
            }
            if (!tag.startsWith(mBeginTagPrefix)) {
                if (n == 0) {
                    String msg = "First tag must be out or begin."
                            + " Found tagging.tag(0)=" + tagging.tag(0);
                    throw new IllegalArgumentException(msg);
                }
                String msg = "Illegal tag sequence."
                        + " tagging.tag(" + (n - 1) + ")=" + tagging.tag(n - 1)
                        + " tagging.tag(" + n + ")=" + tagging.tag(n);
                throw new IllegalArgumentException(msg);
            }
            // Bug fix: previously substring(2), which only worked for the
            // default two-character prefix; now honors configured prefixes.
            String type = tag.substring(mBeginTagPrefix.length());
            int start = tagging.tokenStart(n);
            String inTag = mInTagPrefix + type;
            // Consume the run of continuation tags for this chunk.
            while ((n + 1) < tagging.size() && inTag.equals(tagging.tag(n + 1))) {
                ++n;
            }
            int end = tagging.tokenEnd(n);
            chunking.add(ChunkFactory.createChunk(start, end, type));
        }
        return chunking;
    }

    /**
     * Serializes this codec via the {@link Serializer} proxy, the standard
     * pattern for this codebase. Previously missing, so the nested
     * Serializer was never used on the write path and default field
     * serialization was attempted instead.
     */
    Object writeReplace() {
        return new Serializer(this);
    }

    /**
     * Serialization proxy for {@link BioTagChunkCodecWithLEX}, supporting
     * both the 3.9.2 stream format and the earlier format on read.
     */
    static class Serializer extends AbstractExternalizable {

        static final long serialVersionUID = -2473387657606045149L;
        private final BioTagChunkCodecWithLEX mCodec;

        /** No-arg constructor required for deserialization. */
        public Serializer() {
            this(null);
        }

        /** Constructs a serializer proxying the specified codec. */
        public Serializer(BioTagChunkCodecWithLEX codec) {
            mCodec = codec;
        }

        public void writeExternal(ObjectOutput out)
                throws IOException {

            out.writeBoolean(mCodec.mEnforceConsistency);
            out.writeObject(Boolean.TRUE); // signals 3.9.2 encoding for bw compatibility
            out.writeObject(mCodec.mTokenizerFactory != null
                    ? mCodec.mTokenizerFactory
                    : Boolean.FALSE); // false just a dummy object
            out.writeUTF(mCodec.mBeginTagPrefix);
            out.writeUTF(mCodec.mInTagPrefix);
            out.writeUTF(mCodec.mOutTag);
        }

        public Object read(ObjectInput in) throws IOException, ClassNotFoundException {

            boolean enforceConsistency = in.readBoolean();
            Object obj = in.readObject();
            if (Boolean.TRUE.equals(obj)) {
                // 3.9.2 serialization: marker, factory-or-dummy, then tag scheme.
                Object obj2 = in.readObject();
                TokenizerFactory tf = Boolean.FALSE.equals(obj2)
                        ? null
                        : (TokenizerFactory) obj2;
                String beginTagPrefix = in.readUTF();
                String inTagPrefix = in.readUTF();
                String outTag = in.readUTF();
                return new BioTagChunkCodecWithLEX(tf, enforceConsistency,
                        beginTagPrefix, inTagPrefix, outTag);
            }
            // pre 3.9.2 serialization for backward compatibility
            TokenizerFactory tf = Boolean.FALSE.equals(obj)
                    ? null
                    : (TokenizerFactory) obj;
            return new BioTagChunkCodecWithLEX(tf, enforceConsistency);
        }
    }
}



