package tools;

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class LexicalAnalyzer {
    /**
     * Token categories for a small C-like language.
     *
     * <p>Declaration order is significant: {@link #analyze} joins these
     * patterns into a single alternation that is tried in order, so keywords
     * must precede {@link #ID}, and two-character operators must precede
     * their one-character prefixes ({@code ==} before {@code =},
     * {@code <=} before {@code <}, {@code ++} before {@code +}, ...).
     */
    public enum TokenType {
        // Keywords (before ID, or they would be lexed as identifiers)
        INT("\\bint\\b"),
        PRINTF("\\bprintf\\b"),
        WHILE("\\bwhile\\b"),
        IF("\\bif\\b"),
        ELSE("\\belse\\b"),

        // Identifiers
        ID("\\b[a-zA-Z_]\\w*\\b"),

        // Comparison operators: two-char forms first.
        // ASSIGN must come AFTER EQ — otherwise "==" lexes as two ASSIGNs.
        LE("<="), GE(">="), EQ("=="), NE("!="),
        ASSIGN("="),
        LT("<"), GT(">"),

        // Arithmetic: "++"/"--" before "+"/"-" gives maximal munch, so the
        // former lookbehind guards on PLUS/MINUS are unnecessary (and made
        // "a+++b" emit UNKNOWN for the trailing '+' instead of PLUS).
        INCREMENT("\\+\\+"), DECREMENT("--"),
        PLUS("\\+"),
        MINUS("-"),
        MULTIPLY("\\*"),
        DIVIDE("/"),
        MOD("%"),

        // Integer literals
        NUMBER("\\b\\d+\\b"),

        // Delimiters
        LPAREN("\\("), RPAREN("\\)"),
        LBRACE("\\{"), RBRACE("\\}"),
        SEMI(";"), COMMA(","),

        // Whitespace (matched so the scanner advances, then discarded)
        WHITESPACE("\\s+"),

        // Catch-all: any single character nothing above matched
        UNKNOWN(".+?");

        /** Regex fragment for this token type (no capturing groups of its own). */
        public final String pattern;

        TokenType(String pattern) {
            this.pattern = pattern;
        }
    }

    /** One lexed token: its category, lexeme, and start offset in the source. */
    public static class Token {
        public final TokenType type;
        public final String value;
        public final int position; // 0-based char offset of the lexeme's first char

        public Token(TokenType type, String value, int position) {
            this.type = type;
            this.value = value;
            this.position = position;
        }

        @Override
        public String toString() {
            return String.format("(%s, %s)@%d", type.name(), value, position);
        }

        /** The matched source text (alias for {@link #value}). */
        public String lexeme() {
            return value;
        }
    }

    /**
     * Combined alternation {@code (?<INT>...)|(?<PRINTF>...)|...} over all
     * token types. Compiled once and reused by every {@link #analyze} call
     * instead of being rebuilt per invocation.
     */
    private static final Pattern TOKEN_PATTERN = buildPattern();

    /**
     * Builds the combined token regex, validating each fragment individually
     * first so a broken pattern is reported by token name rather than as an
     * opaque failure of the whole expression.
     */
    private static Pattern buildPattern() {
        StringBuilder buf = new StringBuilder();
        for (TokenType tokenType : TokenType.values()) {
            String group = String.format("(?<%s>%s)", tokenType.name(), tokenType.pattern);
            try {
                Pattern.compile(group);
            } catch (Exception e) {
                throw new RuntimeException("模式校验失败: " + tokenType.name() + " -> " + group, e);
            }
            if (buf.length() > 0) {
                buf.append('|');
            }
            buf.append(group);
        }
        try {
            return Pattern.compile(buf.toString());
        } catch (Exception e) {
            throw new RuntimeException("正则表达式编译失败: " + buf, e);
        }
    }

    /**
     * Tokenizes {@code sourceCode}.
     *
     * <p>Whitespace is skipped. Any character no pattern recognizes is
     * emitted as a {@link TokenType#UNKNOWN} token (the {@code UNKNOWN}
     * alternative is part of the pattern, so every match sets exactly one
     * named group — no separate fallback path is needed).
     *
     * @param sourceCode the text to tokenize; must not be {@code null}
     * @return the tokens in source order, whitespace excluded
     */
    public static List<Token> analyze(String sourceCode) {
        List<Token> tokens = new ArrayList<>();
        Matcher matcher = TOKEN_PATTERN.matcher(sourceCode);

        while (matcher.find()) {
            if (matcher.group(TokenType.WHITESPACE.name()) != null) {
                continue; // ignore whitespace
            }
            // Exactly one named group is non-null per match; find it.
            for (TokenType tokenType : TokenType.values()) {
                if (tokenType == TokenType.WHITESPACE) {
                    continue;
                }
                if (matcher.group(tokenType.name()) != null) {
                    tokens.add(new Token(
                            tokenType,
                            matcher.group(tokenType.name()),
                            matcher.start()));
                    break;
                }
            }
        }

        return tokens;
    }
}