package com.biboheart.adapter.compiler.lexical;

import com.biboheart.adapter.compiler.enums.CharType;
import com.biboheart.adapter.compiler.enums.TokenState;
import com.biboheart.adapter.compiler.enums.TokenType;
import com.biboheart.adapter.compiler.token.Token;
import com.biboheart.adapter.compiler.utils.TokenUtils;
import com.biboheart.adapter.support.enums.LogicalEnum;
import com.biboheart.adapter.support.utils.ObjectTypeUtils;

/**
 * Tokenizer for logical / comparison operator tokens — whichever one- and
 * multi-character sequences ({@code &&}, {@code ||}, {@code !}, {@code >=},
 * {@code <=}, {@code ==}, ...) {@link LogicalEnum} declares.
 *
 * <p>State machine: {@link #entry} starts a token on an operator lead
 * character; {@link #analyse} extends it while the buffered text plus the
 * current character still forms a known operator; {@link #generateToken}
 * materializes the token once an end state has been reached.
 */
final class TokenizerLogical implements Tokenizer {
    // Accumulates the characters of the operator currently being scanned.
    private TokenBuffer tokenBuffer;
    // Current state of this tokenizer's state machine.
    private TokenState state;

    /**
     * Attempts to start a logical-operator token at {@code ch}.
     *
     * @param ch      current character
     * @param offset  position of {@code ch} within {@code content}
     * @param content full input being tokenized (used for lookahead)
     * @return {@code STATE_FAIL} if {@code ch} cannot start an operator,
     *         {@code STATE_END} if the token is already complete,
     *         otherwise {@code STATE_CONTINUE}
     */
    @Override
    public TokenState entry(char ch, int offset, String content) {
        // Only operator lead characters can start a logical token.
        if ('&' != ch && '|' != ch && '!' != ch && '>' != ch && '<' != ch && '=' != ch) {
            state = TokenState.STATE_FAIL;
            return TokenState.STATE_FAIL;
        }
        state = TokenState.STATE_CONTINUE;
        tokenBuffer = new TokenBuffer(offset);
        tokenBuffer.push(ch);
        endIfLookaheadBreaks(offset, content);
        return state;
    }

    @Override
    public TokenState getState() {
        return state;
    }

    /**
     * Feeds the next character into the running token.
     *
     * @param ch      current character
     * @param offset  position of {@code ch} within {@code content}
     * @param content full input being tokenized (used for lookahead)
     * @return the resulting state; {@code STATE_END_BACK} means the token is
     *         complete and {@code ch} must be re-read by the caller
     */
    @Override
    public TokenState analyse(char ch, int offset, String content) {
        if (TokenState.STATE_END.equals(state)) {
            // Token already complete; tell the caller to re-read this character.
            return TokenState.STATE_END_BACK;
        }
        if (!TokenState.STATE_CONTINUE.equals(state)) {
            return state;
        }
        CharType charType = TokenUtils.getCharType(ch);
        String buffer = tokenBuffer.buffer();
        // Terminate when appending ch would no longer form a known operator.
        if (!CharType.CHAR_CHARACTER.equals(charType) || !LogicalEnum.contains(buffer + ch)) {
            state = LogicalEnum.contains(buffer)
                    ? TokenState.STATE_END_BACK // buffer alone is valid: finish, push ch back
                    : TokenState.STATE_FAIL;    // buffer alone is invalid: not a logical token
            return state;
        }
        tokenBuffer.push(ch);
        endIfLookaheadBreaks(offset, content);
        return state;
    }

    /**
     * Moves to {@code STATE_END} when the lookahead character cannot extend
     * the buffered operator (end of input, or buffer + lookahead is not a
     * known operator).
     *
     * <p>Fix: the original evaluated {@code getCharType} on the character the
     * callers had just validated — making the {@code CHAR_EOF} check dead —
     * instead of on the lookahead character.
     */
    private void endIfLookaheadBreaks(int offset, String content) {
        char next = TokenUtils.getNext(content, offset);
        CharType nextType = TokenUtils.getCharType(next);
        String pre = tokenBuffer.buffer() + next;
        if (CharType.CHAR_EOF.equals(nextType) || !LogicalEnum.contains(pre)) {
            state = TokenState.STATE_END;
        }
    }

    /**
     * Builds the operator token, or returns {@code null} when the state
     * machine has not reached an end state.
     */
    @Override
    public Token generateToken() {
        if (!TokenState.STATE_END.equals(state) && !TokenState.STATE_END_BACK.equals(state)) {
            return null;
        }
        String value = ObjectTypeUtils.convertType(tokenBuffer.value(TokenType.TOKEN_OPERATOR), String.class);
        return new Token(tokenBuffer.getStart(),
                tokenBuffer.getEnd(),
                TokenType.TOKEN_OPERATOR,
                value,
                value,
                tokenBuffer.buffer());
    }
}
