# sql_compiler.py
from lexer import Lexer
from my_parser import Parser
from semantic_analyzer import SemanticAnalyzer

# Module-level SemanticAnalyzer instance: reused by every run() call so the
# analyzer's internal state persists across statements in the same session.
analyzer = SemanticAnalyzer()

def run(text, filename="<stdin>"):
    """Compile *text* through the full pipeline: lex -> parse -> semantic analysis.

    Progress and results are printed to stdout at every stage.

    Args:
        text: The SQL source to compile.
        filename: Label used in error reporting (defaults to "<stdin>").

    Returns:
        A ``(node, error)`` tuple — ``(ast_node, None)`` on success, or
        ``(None, error)`` if lexing or parsing failed.
    """
    # Stage 1: tokenize the input.
    token_list, lex_error = Lexer(filename, text).make_tokens()
    if lex_error:
        print("词法错误:", lex_error.as_string())
        return None, lex_error

    print("词法分析结果:")
    for token in token_list:
        if token.type == 'EOF':
            continue
        print(f"{token.type:<15} {token.value}")

    # Stage 2: build the AST.
    parse_result = Parser(token_list).parse()
    if parse_result.error:
        print("\n语法错误:", parse_result.error.as_string())
        return None, parse_result.error

    print("\n语法分析成功，AST:")
    print(parse_result.node)

    # Stage 3: semantic analysis. The module-level analyzer is reused on
    # purpose so its state carries over between calls.
    semantic_result = analyzer.analyze(parse_result.node)
    print("\n语义分析结果:")
    print(semantic_result)

    return parse_result.node, None

if __name__ == "__main__":
    # Interactive REPL: read a SQL statement, compile it, repeat.
    print("SQL 编译器启动！输入 SQL 语句测试（输入 'quit' 退出）:")
    while True:
        try:
            text = input("\nsql > ").strip()
            if text == 'quit':
                break
            if not text:
                # Ignore empty lines instead of feeding them to the compiler.
                continue
            run(text)
        except EOFError:
            # Ctrl-D / end of piped input: exit quietly.
            break
        except KeyboardInterrupt:
            # Ctrl-C: say goodbye and exit.
            print("\n再见！")
            break
        except Exception as exc:
            # Top-level REPL boundary: a bug inside the compiler (lexer/parser/
            # analyzer) must not kill the whole session — report and keep going.
            print(f"\n内部错误: {exc}")