package com.biboheart.adapter.compiler.lexical;

import com.biboheart.adapter.compiler.Lexical;
import com.biboheart.adapter.compiler.enums.CharType;
import com.biboheart.adapter.compiler.enums.TokenState;
import com.biboheart.adapter.compiler.token.Token;
import com.biboheart.adapter.compiler.utils.TokenUtils;
import java.util.*;
import java.util.function.Function;
import lombok.Getter;

/**
 * Default {@link Lexical} implementation: tokenizes source text by running a
 * fixed set of candidate tokenizers in parallel and letting the
 * highest-priority survivor emit each token (a longest-match, priority-tiered
 * scanner).
 */
public class LexicalDefault implements Lexical {
    /**
     * Performs lexical analysis on the given source text.
     *
     * @param content source text to tokenize; may be empty
     * @return tokens in source order (empty list for empty input)
     */
    @Override
    public List<Token> analysis(String content) {
        Scanner scanner = new Scanner(content);
        scanner.run();
        return scanner.getTokenList();
    }

    /**
     * Single-pass scanner over {@link #content}. Between tokens the active
     * tokenizer map is empty; on the first character of a token every known
     * tokenizer is started, and tokenizers are pruned as they fail. When the
     * highest-priority surviving tokenizer reaches an end state, its token is
     * emitted and the map is cleared for the next token.
     */
    private static class Scanner {
        @Getter
        private final List<Token> tokenList = new ArrayList<>();
        // Currently active tokenizers, keyed by name; empty between tokens.
        private final Map<String, Tokenizer> tokenizerMap = new HashMap<>();
        // Tokenizer names in priority order: earlier entries win ties
        // (see topTokenizer).
        private final List<String> tokenizerNameList = Arrays.asList(
                "tokenizerDelimiters", "tokenizerNull", "tokenizerKeyword",
                "tokenizerVariable", "tokenizerOperator", "tokenizerLogical",
                "tokenizerNumber", "tokenizerString");
        private final String content;

        Scanner(String content) {
            this.content = content;
        }

        /** Scans the whole input, appending tokens to {@code tokenList}. */
        public void run() {
            if (content.isEmpty()) {
                return;
            }
            int offset = 0;
            while (offset < content.length()) {
                char ch = content.charAt(offset);
                CharType charType = TokenUtils.getCharType(ch);
                if (tokenizerMap.isEmpty() && CharType.CHAR_SPACE.equals(charType)) {
                    // Between tokens: skip whitespace.
                    offset += 1;
                    continue;
                }
                // Advance by the number of characters consumed: 0 when the
                // winning tokenizer ended with STATE_END_BACK and this
                // character must be re-examined, 1 otherwise.
                offset += updateTokenizer(offset, ch);
            }
        }

        /**
         * Feeds one character to the tokenizers, starting a fresh set when
         * none are active, and harvests a finished token if the winning
         * tokenizer has reached an end state.
         *
         * @param offset index of {@code ch} within {@code content}
         * @param ch     character being examined
         * @return 1 when the character was consumed, 0 when it must be
         *         re-processed (STATE_END_BACK)
         */
        private int updateTokenizer(int offset, char ch) {
            if (tokenizerMap.isEmpty()) {
                createTokenizers();
                // Seed every tokenizer with the first character; drop those
                // that reject it outright.
                for (String key : tokenizerNameList) {
                    if (TokenState.STATE_FAIL.equals(tokenizerMap.get(key).entry(ch, offset, this.content))) {
                        tokenizerMap.remove(key);
                    }
                }
                // entry() already ran above, so harvest from the stored state.
                return harvest(Tokenizer::getState);
            }
            // Ongoing token: step each survivor and harvest from the state
            // that analyse() reports.
            return harvest(tokenizer -> tokenizer.analyse(ch, offset, this.content));
        }

        /** Instantiates the full tokenizer set into {@code tokenizerMap}. */
        private void createTokenizers() {
            tokenizerMap.put("tokenizerDelimiters", new TokenizerDelimiters());
            tokenizerMap.put("tokenizerKeyword", new TokenizerKeyword());
            tokenizerMap.put("tokenizerLogical", new TokenizerLogical());
            tokenizerMap.put("tokenizerNull", new TokenizerNull());
            tokenizerMap.put("tokenizerNumber", new TokenizerNumber());
            tokenizerMap.put("tokenizerOperator", new TokenizerOperator());
            tokenizerMap.put("tokenizerString", new TokenizerString());
            tokenizerMap.put("tokenizerVariable", new TokenizerVariable());
        }

        /**
         * Shared end/fail handling, previously duplicated across both
         * branches of {@code updateTokenizer}. Applies {@code stepper} to
         * each active tokenizer; when one reaches an end state and is the
         * sole or highest-priority survivor, its token is emitted and the
         * active set is cleared (remaining tokenizers are not stepped —
         * matches the original early-break). Failed tokenizers are pruned.
         *
         * @param stepper obtains the current {@link TokenState} of a
         *                tokenizer, optionally advancing it first
         * @return 1 when the character was consumed, 0 for STATE_END_BACK
         */
        private int harvest(Function<Tokenizer, TokenState> stepper) {
            int res = 1;
            // Copy the key set: we remove/clear entries while iterating.
            List<String> keyList = new ArrayList<>(tokenizerMap.keySet());
            for (String key : keyList) {
                Tokenizer tokenizer = tokenizerMap.get(key);
                TokenState tokenState = stepper.apply(tokenizer);
                if (TokenState.STATE_END_BACK.equals(tokenState) || TokenState.STATE_END.equals(tokenState)) {
                    if (tokenizerMap.size() == 1 || topTokenizer(key)) {
                        tokenList.add(tokenizer.generateToken());
                        tokenizerMap.clear();
                        res = TokenState.STATE_END_BACK.equals(tokenState) ? 0 : 1;
                        break;
                    }
                } else if (TokenState.STATE_FAIL.equals(tokenState)) {
                    tokenizerMap.remove(key);
                }
            }
            return res;
        }

        /**
         * Returns {@code true} when {@code key} is the highest-priority
         * tokenizer still active (first match in {@code tokenizerNameList}),
         * or when no listed tokenizer remains active.
         */
        private boolean topTokenizer(String key) {
            for (String s : tokenizerNameList) {
                if (tokenizerMap.containsKey(s)) {
                    return s.equals(key);
                }
            }
            return true;
        }
    }
}
