from typing import List, Optional, Any, Dict
from enum import Enum
from .lexer import Token, TokenType, Lexer

class ASTNodeType(Enum):
    """
    Enumeration of AST node types covering every supported SQL construct.

    Design note: new SQL statements and expression kinds can be added
    here without touching existing members.
    """
    CREATE_TABLE = "CREATE_TABLE"        # CREATE TABLE statement
    INSERT = "INSERT"                    # INSERT INTO statement
    SELECT = "SELECT"                    # SELECT statement
    DELETE = "DELETE"                    # DELETE FROM statement
    COLUMN_DEF = "COLUMN_DEF"            # column definition inside CREATE TABLE
    COLUMN_REF = "COLUMN_REF"            # reference to a column by name
    VALUE = "VALUE"                      # literal value (number or string)
    BINARY_OP = "BINARY_OP"              # binary comparison expression
    TABLE_REF = "TABLE_REF"              # reference to a table by name
    USE_DATABASE = "USE_DATABASE"        # USE statement
    CREATE_DATABASE = "CREATE_DATABASE"  # CREATE DATABASE statement
    STAR = "STAR"                        # the '*' projection in SELECT

class ASTNode:
    """
    A node in the abstract syntax tree.

    Each node carries a type, an optional value and a list of child
    nodes.  The tree structure makes recursive traversal and later
    optimisation / execution-plan generation straightforward.
    """

    def __init__(self, node_type: "ASTNodeType", value: Any = None,
                 children: Optional[List["ASTNode"]] = None):
        # BUGFIX (annotation): the default is None, so the parameter must
        # be Optional[List[...]], not List[...].  Forward-reference strings
        # avoid evaluating the names at definition time.
        self.node_type = node_type  # kind of syntax element (ASTNodeType)
        self.value = value          # payload, e.g. a table name or a literal
        # ``children or []`` gives every node its own fresh list; never
        # share a mutable default between instances.
        self.children = children or []

    def __str__(self, level: int = 0) -> str:
        """Return an indented multi-line dump of this subtree.

        ``level`` sets the indentation depth; it is used internally by
        the recursive calls on children.
        """
        ret = "  " * level + f"{self.node_type}: {self.value}\n"
        for child in self.children:
            ret += child.__str__(level + 1)
        return ret

    def __repr__(self) -> str:
        return self.__str__()

class ParserError(Exception):
    """
    Exception raised for SQL syntax errors.

    Carries the position of the offending token for easier debugging.
    Error report format: [error type, position, reason].
    """

    def __init__(self, message, token=None, error_type="SYNTAX_ERROR"):
        self.message = message
        self.token = token
        self.error_type = error_type
        # Position defaults to (0, 0) when no token is available.
        self.line = token.line if token else 0
        self.column = token.column if token else 0
        if token is None:
            super().__init__(message)
        else:
            super().__init__(f"{message} at line {self.line}, column {self.column}")

    def to_dict(self):
        """Render the error in the standard reporting format."""
        return {
            "error_type": self.error_type,
            "position": f"line {self.line}, column {self.column}",
            "reason": self.message,
            "expected": getattr(self, 'expected', None),
        }

class Parser:
    """
    Recursive-descent SQL parser.

    Converts the token list produced by the lexer into AST nodes.
    Supported statements: CREATE DATABASE, CREATE TABLE, USE, INSERT,
    SELECT and DELETE.

    Design decisions:
      - Single entry point ``parse`` accepting several
        semicolon-separated statements.
      - Recursive descent, which keeps the grammar easy to extend and
        maintain.
      - Explicit ``ParserError`` exceptions carrying the position of the
        offending token, so syntax errors are easy to locate.
      - Extension points reserved (parameters, richer expressions,
        sub-queries, ...).
    """

    def __init__(self, tokens: List["Token"]):
        self.tokens = tokens
        self.current_pos = 0
        # ``current_token`` becomes None once the stream is exhausted.
        self.current_token = self.tokens[0] if tokens else None

    def parse(self) -> List["ASTNode"]:
        """Parse the whole token list into a list of statement AST nodes.

        Multiple statements separated by semicolons are supported.
        """
        statements = []
        while self.current_token and self.current_token.token_type != TokenType.EOF:
            statements.append(self.parse_statement())
            # BUGFIX: a statement may consume the final token, leaving
            # current_token as None; guard before dereferencing it.
            if self.current_token and self.current_token.token_type == TokenType.SEMICOLON:
                self.advance()
        return statements

    def parse_statement(self) -> "ASTNode":
        """Parse one SQL statement, dispatching on its leading token.

        Raises:
            ParserError: if the statement does not start with a known
                keyword, or CREATE is not followed by DATABASE/TABLE.
        """
        token_type = self.current_token.token_type
        if token_type == TokenType.CREATE:
            # CREATE DATABASE or CREATE TABLE
            next_token = self.peek()
            # BUGFIX: peek() returns None at end of input; the old code
            # dereferenced it unconditionally and raised AttributeError.
            if next_token is None:
                raise ParserError("Unexpected end of input after CREATE", self.current_token)
            if next_token.token_type == TokenType.DATABASE:
                return self.parse_create_database()
            elif next_token.token_type == TokenType.TABLE:
                return self.parse_create_table()
            else:
                raise ParserError(f"Expected DATABASE or TABLE, got {next_token.token_type}", next_token)
        elif token_type == TokenType.USE:
            return self.parse_use_database()
        elif token_type == TokenType.INSERT:
            return self.parse_insert()
        elif token_type == TokenType.SELECT:
            return self.parse_select()
        elif token_type == TokenType.DELETE:
            return self.parse_delete()
        else:
            raise ParserError(f"Unexpected token: {self.current_token.lexeme}", self.current_token)

    def parse_create_database(self) -> "ASTNode":
        """Parse a CREATE DATABASE statement."""
        self.expect(TokenType.CREATE)
        self.expect(TokenType.DATABASE)
        db_name = self.expect(TokenType.IDENTIFIER).lexeme
        return ASTNode(ASTNodeType.CREATE_DATABASE, value=db_name)

    def parse_use_database(self) -> "ASTNode":
        """Parse a USE statement."""
        self.expect(TokenType.USE)
        db_name = self.expect(TokenType.IDENTIFIER).lexeme
        return ASTNode(ASTNodeType.USE_DATABASE, value=db_name)

    def parse_create_table(self) -> "ASTNode":
        """Parse a CREATE TABLE statement with its column definitions.

        Supports the ``name VARCHAR(50)`` length syntax; the length is
        stored under the ``length`` key of the column definition value.
        """
        self.expect(TokenType.CREATE)
        self.expect(TokenType.TABLE)
        table_name = self.expect(TokenType.IDENTIFIER).lexeme
        self.expect(TokenType.LEFT_PAREN)
        columns = []
        # BUGFIX: _require_token raises ParserError (not AttributeError)
        # when the input ends before the closing ')'.
        while self._require_token("column definitions").token_type != TokenType.RIGHT_PAREN:
            col_name = self.expect(TokenType.IDENTIFIER).lexeme
            col_type_token = self.expect([TokenType.INT, TokenType.VARCHAR])
            col_type = col_type_token.token_type
            col_type_extra = None
            # Optional VARCHAR(n) length specifier.
            if (col_type == TokenType.VARCHAR and self.current_token
                    and self.current_token.token_type == TokenType.LEFT_PAREN):
                self.advance()
                length_token = self.expect(TokenType.NUMBER)
                col_type_extra = int(length_token.lexeme)
                self.expect(TokenType.RIGHT_PAREN)
            columns.append(ASTNode(
                ASTNodeType.COLUMN_DEF,
                value={"name": col_name, "type": col_type, "length": col_type_extra},
            ))
            if self.current_token and self.current_token.token_type == TokenType.COMMA:
                self.advance()
        self.expect(TokenType.RIGHT_PAREN)
        return ASTNode(ASTNodeType.CREATE_TABLE, value=table_name, children=columns)

    def parse_insert(self) -> "ASTNode":
        """Parse an INSERT INTO statement.

        The column-name list is optional; values may be numbers or
        strings.  Column references and values are stored together as
        children of the INSERT node, columns first.
        """
        self.expect(TokenType.INSERT)
        self.expect(TokenType.INTO)
        table_name = self.expect(TokenType.IDENTIFIER).lexeme
        # Optional column-name list.
        columns = []
        if self.current_token and self.current_token.token_type == TokenType.LEFT_PAREN:
            self.advance()
            while self._require_token("column list").token_type != TokenType.RIGHT_PAREN:
                col_name = self.expect(TokenType.IDENTIFIER).lexeme
                columns.append(ASTNode(ASTNodeType.COLUMN_REF, value=col_name))
                if self.current_token and self.current_token.token_type == TokenType.COMMA:
                    self.advance()
            self.expect(TokenType.RIGHT_PAREN)
        self.expect(TokenType.VALUES)
        self.expect(TokenType.LEFT_PAREN)
        values = []
        while self._require_token("value list").token_type != TokenType.RIGHT_PAREN:
            token = self.current_token
            if token.token_type == TokenType.NUMBER:
                values.append(ASTNode(ASTNodeType.VALUE, value=int(token.lexeme)))
                self.advance()
            elif token.token_type == TokenType.STRING:
                values.append(ASTNode(ASTNodeType.VALUE, value=token.lexeme))
                self.advance()
            else:
                raise ParserError("Expected number or string value", token)
            if self.current_token and self.current_token.token_type == TokenType.COMMA:
                self.advance()
        self.expect(TokenType.RIGHT_PAREN)
        return ASTNode(ASTNodeType.INSERT, value=table_name, children=[*columns, *values])

    def parse_select(self) -> "ASTNode":
        """Parse a SELECT statement: column list, FROM, optional WHERE.

        Supports ``SELECT *`` as well as a comma-separated column list.
        Children are ordered: table reference, columns, then the WHERE
        expression (if present).
        """
        self.expect(TokenType.SELECT)
        columns = []
        # Either SELECT * or SELECT col1, col2, ...
        if self._require_token("select list").token_type == TokenType.STAR:
            columns.append(ASTNode(ASTNodeType.STAR, value="*"))
            self.advance()
        else:
            while self._require_token("select list").token_type != TokenType.FROM:
                if self.current_token.token_type == TokenType.IDENTIFIER:
                    columns.append(ASTNode(ASTNodeType.COLUMN_REF, value=self.current_token.lexeme))
                    self.advance()
                elif self.current_token.token_type == TokenType.COMMA:
                    self.advance()
                else:
                    raise ParserError("Expected column name, comma, or *", self.current_token)
        self.expect(TokenType.FROM)
        table_name = self.expect(TokenType.IDENTIFIER).lexeme
        table_ref = ASTNode(ASTNodeType.TABLE_REF, value=table_name)
        # Optional WHERE clause.
        where_clause = None
        if self.current_token and self.current_token.token_type == TokenType.WHERE:
            self.advance()
            where_clause = self.parse_expression()
        children = [table_ref, *columns]
        if where_clause is not None:
            children.append(where_clause)
        return ASTNode(ASTNodeType.SELECT, value=None, children=children)

    def parse_delete(self) -> "ASTNode":
        """Parse a DELETE FROM statement with an optional WHERE clause."""
        self.expect(TokenType.DELETE)
        self.expect(TokenType.FROM)
        table_name = self.expect(TokenType.IDENTIFIER).lexeme
        table_ref = ASTNode(ASTNodeType.TABLE_REF, value=table_name)
        # Optional WHERE clause.
        where_clause = None
        if self.current_token and self.current_token.token_type == TokenType.WHERE:
            self.advance()
            where_clause = self.parse_expression()
        children = [table_ref]
        if where_clause is not None:
            children.append(where_clause)
        return ASTNode(ASTNodeType.DELETE, value=None, children=children)

    def parse_expression(self) -> "ASTNode":
        """Parse a simple expression (a single optional binary comparison).

        Design note: this is the hook to grow into a full expression
        tree later.
        """
        left = self.parse_primary()
        comparison_ops = (
            TokenType.EQUALS, TokenType.GREATER_THAN, TokenType.LESS_THAN,
            TokenType.GREATER_EQUALS, TokenType.LESS_EQUALS, TokenType.NOT_EQUALS,
        )
        if self.current_token and self.current_token.token_type in comparison_ops:
            op_token = self.current_token
            self.advance()
            right = self.parse_primary()
            return ASTNode(ASTNodeType.BINARY_OP, value=op_token.token_type,
                           children=[left, right])
        return left

    def parse_primary(self) -> "ASTNode":
        """Parse a primary expression: a column reference or a literal.

        Raises:
            ParserError: if the current token is not an identifier,
                number or string (including at end of input).
        """
        # BUGFIX: raise ParserError, not AttributeError, at end of input.
        token = self._require_token("expression")
        if token.token_type == TokenType.IDENTIFIER:
            node = ASTNode(ASTNodeType.COLUMN_REF, value=token.lexeme)
        elif token.token_type == TokenType.NUMBER:
            node = ASTNode(ASTNodeType.VALUE, value=int(token.lexeme))
        elif token.token_type == TokenType.STRING:
            node = ASTNode(ASTNodeType.VALUE, value=token.lexeme)
        else:
            raise ParserError("Expected identifier, number or string", token)
        self.advance()
        return node

    def advance(self) -> None:
        """Move to the next token; ``current_token`` becomes None at the end."""
        self.current_pos += 1
        if self.current_pos < len(self.tokens):
            self.current_token = self.tokens[self.current_pos]
        else:
            self.current_token = None

    def peek(self) -> Optional["Token"]:
        """Return the next token without consuming it, or None at the end."""
        if self.current_pos + 1 < len(self.tokens):
            return self.tokens[self.current_pos + 1]
        return None

    def expect(self, expected_types) -> "Token":
        """Consume and return the current token if it matches.

        Args:
            expected_types: a single TokenType or a list of TokenTypes.

        Raises:
            ParserError: if the current token is missing or of a
                different type.
        """
        if isinstance(expected_types, TokenType):
            expected_types = [expected_types]
        if not self.current_token or self.current_token.token_type not in expected_types:
            expected_str = " or ".join(t.value for t in expected_types)
            raise ParserError(f"Expected {expected_str}, got {self.current_token.token_type if self.current_token else 'EOF'}", self.current_token)
        token = self.current_token
        self.advance()
        return token

    def _require_token(self, context: str) -> "Token":
        """Return the current token, or raise ParserError if input ended.

        Used by the statement loops so a truncated statement produces a
        clear syntax error instead of an AttributeError on None.
        """
        if self.current_token is None:
            raise ParserError(f"Unexpected end of input while parsing {context}")
        return self.current_token
