package com.biboheart.sc.lexer;

import com.biboheart.sc.lexer.tokenizer.TokenizerLocator;
import com.biboheart.sc.support.definition.CharType;
import com.biboheart.sc.support.definition.TokenState;
import com.biboheart.sc.support.definition.TokenType;
import com.biboheart.sc.support.exception.LexerException;
import com.biboheart.sc.support.utils.LetterUtils;
import com.biboheart.sc.token.Token;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Default {@link Lexer} implementation.
 *
 * <p>Single-character operators and brackets are tokenized directly from the
 * character class; any other character is handed to the registered
 * {@link Tokenizer}s located via {@link TokenizerLocator}, which compete until
 * exactly one candidate remains and has reached its END state.
 *
 * <p>Not thread-safe: position state ({@code ch}, {@code pos}, {@code nextPos})
 * and {@code tokenizerEndMap} are mutable instance fields.
 */
public class LexerDefault implements Lexer {
    private final String content;
    private char ch = 0;     // current character; 0 signals end of input
    private int pos = 0;     // offset of the current character
    private int nextPos = 0; // offset of the next character to read
    // Per candidate tokenizer: the offset at which it reported TOKEN_STATE_END.
    // Populated by updateTokenizer() and cleared at the end of every read().
    private final Map<Tokenizer, Integer> tokenizerEndMap = new HashMap<>();

    public LexerDefault(String content) {
        this.content = content;
    }

    /**
     * Returns the next token from the input, or {@code null} when the input
     * is {@code null} or empty.
     *
     * @throws LexerException if an unrecognizable character sequence is met
     */
    @Override
    public Token nextToken() {
        if (null == content || content.isEmpty()) {
            return null;
        }
        readChar();
        skipWhiteSpace();
        CharType charType = LetterUtils.getCharType(ch);
        switch (charType) {
            case CHAR_TYPE_EOF:
                return newToken(TokenType.TOKEN_EOF, "", pos, nextPos);
            case CHAR_TYPE_DOT:
                return newToken(TokenType.TOKEN_DOT, String.valueOf(ch), pos, nextPos);
            case CHAR_TYPE_PLUS:
                return newToken(TokenType.TOKEN_PLUS, String.valueOf(ch), pos, nextPos);
            case CHAR_TYPE_MINUS:
                return newToken(TokenType.TOKEN_MINUS, String.valueOf(ch), pos, nextPos);
            case CHAR_TYPE_ASTERISK:
                return newToken(TokenType.TOKEN_ASTERISK, String.valueOf(ch), pos, nextPos);
            case CHAR_TYPE_SLASH:
                return newToken(TokenType.TOKEN_SLASH, String.valueOf(ch), pos, nextPos);
            case CHAR_TYPE_LB:
                return newToken(TokenType.TOKEN_LB, String.valueOf(ch), pos, nextPos);
            case CHAR_TYPE_RB:
                return newToken(TokenType.TOKEN_RB, String.valueOf(ch), pos, nextPos);
            case CHAR_TYPE_LSB:
                return newToken(TokenType.TOKEN_LSB, String.valueOf(ch), pos, nextPos);
            case CHAR_TYPE_RSB:
                return newToken(TokenType.TOKEN_RSB, String.valueOf(ch), pos, nextPos);
            default:
                int start = pos;
                String literal = read();
                int end = pos;
                // The terminating character is not part of the literal; step
                // back so the next call re-reads it.
                unReadChar();
                // NOTE(review): every tokenizer-produced literal is labeled
                // TOKEN_INTEGER here — presumably the only multi-char token
                // kind so far; confirm when new tokenizers are added.
                return newToken(TokenType.TOKEN_INTEGER, literal, start, end);
        }
    }

    /** Advances past consecutive whitespace characters. */
    private void skipWhiteSpace() {
        while (CharType.CHAR_TYPE_SPACE.equals(LetterUtils.getCharType(ch))) {
            readChar();
        }
    }

    /** Reads one character, setting {@code ch} to 0 at end of input. */
    private void readChar() {
        if (nextPos >= content.length()) {
            ch = 0;
        } else {
            ch = content.charAt(nextPos);
        }
        pos = nextPos;
        nextPos++;
    }

    /**
     * Steps back one position so the current character is read again.
     * Only safe after at least one {@link #readChar()} (pos would go to -1
     * otherwise); callers in this class satisfy that.
     */
    private void unReadChar() {
        nextPos = pos;
        pos--;
    }

    // Currently unused helper kept for upcoming number tokenization;
    // consider removing if it stays dead.
    private boolean isDigit(char ch) {
        return CharType.CHAR_TYPE_NUMBER.equals(LetterUtils.getCharType(ch));
    }

    // Currently unused helper; see isDigit above.
    private String readNumber() {
        int p = pos;
        while (isDigit(ch)) {
            readChar();
        }
        return content.substring(p, pos);
    }

    /**
     * Consumes a multi-character literal starting at the current position by
     * letting candidate tokenizers compete until a single one remains with a
     * recorded END offset. On return, {@code pos} points at the character
     * that terminated the literal.
     *
     * @throws LexerException if no tokenizer accepts the sequence
     */
    private String read() {
        int p = pos;
        List<Tokenizer> tokenizerList = TokenizerLocator.getSharedInstance().locate(ch);
        if (null == tokenizerList || tokenizerList.isEmpty()) {
            throw new LexerException("分词出错,不可识别的符号" + ch + "[" + pos + "]");
        }
        while (tokenizerList.size() > 1 || tokenizerEndMap.isEmpty()) {
            readChar();
            updateTokenizer(tokenizerList);
            if (tokenizerList.isEmpty()) {
                // Fix: clear recorded end offsets before bailing out, otherwise
                // stale entries make the next read() exit its loop prematurely.
                tokenizerEndMap.clear();
                throw new LexerException("分词出错,不可识别的符号" + ch + "[" + pos + "]");
            }
        }
        Tokenizer tokenizer = tokenizerList.get(0);
        Integer end = tokenizerEndMap.get(tokenizer);
        tokenizerEndMap.clear();
        if (null == end) {
            // Fix: the surviving tokenizer never reached END — the recorded
            // offset belonged to an already-discarded candidate. Previously
            // this unboxed null and threw a bare NullPointerException.
            throw new LexerException("分词出错,不可识别的符号" + ch + "[" + pos + "]");
        }
        pos = end;
        nextPos = pos + 1;
        return content.substring(p, pos);
    }

    /** Builds a token of the given type spanning [start, end). */
    private Token newToken(TokenType type, String literal, int start, int end) {
        Token token = new Token();
        token.setType(type);
        token.setName(type.getName());
        token.setLiteral(literal);
        token.setOffset(start);
        token.setEndOffset(end);
        return token;
    }

    /**
     * Feeds the current character to every candidate tokenizer: removes those
     * reporting ERROR, records the current offset for those reporting END.
     * Iterates backwards so removal by index is safe.
     */
    private void updateTokenizer(List<Tokenizer> tokenizerList) {
        int i = tokenizerList.size() - 1;
        while (i >= 0) {
            Tokenizer tokenizer = tokenizerList.get(i);
            TokenState state = tokenizer.getState(ch);
            if (TokenState.TOKEN_STATE_ERROR.equals(state)) {
                tokenizerList.remove(i);
            } else if (TokenState.TOKEN_STATE_END.equals(state)) {
                tokenizerEndMap.put(tokenizer, pos);
            }
            i--;
        }
    }
}
