from new_rmasm.asts import *


class AstParser:
    """Recursive-descent parser that turns a LexToken stream into AST nodes.

    The lexer is fully drained into ``self.stack`` up front so the parser
    can peek arbitrarily far ahead; ``self.ind`` is the cursor into that
    token list.  Each ``*_ast`` method parses one grammar production and
    returns the corresponding AST node, raising ``RGrammarError`` (built
    via :meth:`error`) on a syntax violation.
    """

    def __init__(self, lexer: Lexer):
        self.lexer = lexer
        # Materialize the whole token stream so peek()/current() are O(1).
        self.stack: List[LexToken] = list(lexer)
        self.ind = 0  # index of the current token in self.stack

    def error(self, token: LexToken, message: str) -> RGrammarError:
        """Build (but do not raise) a grammar error anchored at *token*."""
        return RGrammarError(token.source, token.line_num, message)

    def peek(self, offset: int = 1) -> Union[None, LexToken]:
        """Return the token *offset* positions from the cursor, or None
        when that position is outside the token list."""
        ind = self.ind + offset
        if 0 <= ind < len(self.stack):
            return self.stack[ind]
        return None

    def current(self) -> Union[None, LexToken]:
        """Return the token at the cursor (None once input is exhausted)."""
        return self.peek(0)

    def eatt(self, token: LexToken):
        """Consume the current token, requiring it to match *token*'s
        type and code (convenience wrapper over :meth:`eat`)."""
        self.eat(token.ttype, token.code)

    def eat(self, ttype: LexTokenType, code: Union[str, None] = None):
        """Advance past the current token if it matches, else raise.

        When *code* is falsy only the token type is checked.

        NOTE(review): current() returns None at end of input, which would
        surface here as an AttributeError rather than a grammar error —
        presumably the lexer always emits a terminating token; confirm.
        """
        cur = self.current()
        if ttype == cur.ttype:
            if code:
                if code == cur.code:
                    self.ind += 1
                    return
            else:
                self.ind += 1
                return
        raise self.error(cur, f"无法匹配到目标：<{ttype.name}> 类型的 <{code}>")

    def id_ast(self) -> IdAST:
        """Parse a dotted identifier chain (``a.b.c``).

        Returns the root IdAST; each segment after a ``.`` is parsed
        recursively and linked to its parent via ``set_parent``.
        """
        cur = self.current()
        if cur.ttype != LexTokenType.id:
            raise self.error(cur, f"<{cur.code}> 不是有效的名称。")
        res = IdAST(cur)
        root = res
        self.eatt(cur)
        while self.current().match(LexTokenType.symbol, '.'):
            self.eatt(self.current())
            subordinate = self.id_ast()
            subordinate.set_parent(res)
            res = subordinate
        return root

    def name_id_ast(self) -> IdAST:
        """Parse a single (non-dotted) identifier token into an IdAST."""
        cur = self.current()
        if cur.ttype != LexTokenType.id:
            raise self.error(cur, f"<{cur.code}> 不是有效的名称。")
        res = IdAST(cur)
        self.eatt(cur)
        return res

    def raw_factor_ast(self) -> RawFactorAST:
        """Parse a bracketed raw-token group: ``[ tok tok ... ]``.

        Every token between the brackets is collected verbatim.
        """
        cur = self.current()
        if not cur.match(LexTokenType.bracket, '['):
            # BUGFIX: error() requires the offending token as its first
            # argument; the original passed only the message string.
            raise self.error(cur, f"不是 raw factor 的开始符号：{cur.code}")
        self.eatt(cur)
        res = []
        while True:
            cur = self.current()
            if cur.match(LexTokenType.bracket, ']'):
                self.eatt(cur)
                break
            res.append(cur)
            self.eatt(cur)
        # NOTE(review): `cur` is the closing ']' token here — presumably an
        # intentional AST anchor; confirm it shouldn't be the opening '['.
        return RawFactorAST(cur, res)

    def factor_ast(self) -> Union[FactorAST, FuncAST]:
        """Parse one factor: string, number, id / function call, or a
        raw ``[...]`` group."""
        cur = self.current()
        res: FactorAST

        if cur.ttype == LexTokenType.string:
            self.eatt(cur)
            return StringAST(cur)
        elif cur.ttype == LexTokenType.number:
            self.eatt(cur)
            return NumberAST(cur)
        elif cur.ttype == LexTokenType.id:
            id_ast = self.id_ast()
            # An id immediately followed by '(' is a function call.
            if self.current().match(LexTokenType.bracket, '('):
                args = self.args_ast()
                return FuncAST(id_ast.token, id_ast, args)
            return id_ast
        elif cur.match(LexTokenType.bracket, '['):
            return self.raw_factor_ast()
        else:
            raise self.error(cur, f"在 <{cur.code}> 附近匹配不到 数字、字符串 或 单词。")

    def expr_ast(self) -> Union[BinaryAST, UnaryAST, FactorAST]:
        """Parse an expression: optional unary prefix operator, a factor,
        then an optional right-recursive binary tail.

        NOTE(review): right recursion makes binary operators effectively
        right-associative with no precedence levels — confirm intended.
        """
        if kw.is_operator(self.current().code):
            cur = self.current()
            if cur.code == '-':
                # '-' is reserved for subtraction, never a unary prefix.
                raise self.error(cur, "负号不能作为运算符！它只可以做减号。")
            self.eatt(cur)
            res: UnaryAST = UnaryAST(cur, self.factor_ast())
        else:
            res: FactorAST = self.factor_ast()
        if kw.is_operator(self.current().code) or self.current().code in kw.set_operators:
            cur = self.current()
            self.eatt(cur)
            res: BinaryAST = BinaryAST(cur, res, self.expr_ast())
        return res

    def tag_ast(self) -> TagAST:
        """Parse a label tag: ``name:`` or ``name?:``."""
        name = self.name_id_ast()
        cur = self.current()
        if cur.match(LexTokenType.symbol, ':'):
            self.eatt(cur)
            return TagAST(cur, name.str())
        if cur.match(LexTokenType.symbol, '?:'):
            self.eatt(cur)
            return TagAST(cur, name.str())
        raise self.error(cur, f'错误的标签结束符号：{cur.code}')

    def args_ast(self) -> List[AST]:
        """Parse a parenthesized argument list ``( a, b, ... )``.

        Each argument may be a ``{...}`` segment, an anonymous macro
        ``(args) {body}``, or an expression.  Commas between arguments
        are effectively optional: after an item the loop simply continues
        until the closing ')' is seen.
        """
        cur = self.current()
        if not (cur.ttype == LexTokenType.bracket and cur.code == '('):
            raise self.error(cur, f"错误的参数开始符号：<{cur.code}>")
        self.eatt(cur)

        res: List[AST] = []

        while True:
            if self.current().match(LexTokenType.bracket, ')'):
                self.eatt(self.current())
                return res
            if self.current().match(LexTokenType.bracket, '{'):
                res.append(self.segment_ast())
            elif self.current().match(LexTokenType.bracket, '('):
                # Anonymous macro literal: nested arg list plus a body.
                tok = self.current()
                args = self.args_ast()
                seg = self.segment_ast()
                res.append(DefineMacroAST(tok, '', args, seg))
            else:
                res.append(self.expr_ast())
            if self.current().match(LexTokenType.symbol, ','):
                self.eatt(self.current())
                continue
        # (Dead `raise` removed: the loop above only exits via `return`,
        # so code after it was unreachable.)

    def define_macro_ast(self) -> DefineMacroAST:
        """Parse ``define name [(args)] {body}`` into a DefineMacroAST."""
        token = self.current()
        self.eat(LexTokenType.keyword, 'define')
        name = self.name_id_ast()
        # Defensive: name_id_ast always returns an IdAST (or raises),
        # so this branch should never fire.
        if not isinstance(name, IdAST):
            raise self.error(name.token, f"宏的名字不应该出现 <{name.token.code}>。")
        if self.current().match(LexTokenType.bracket, '('):
            args = self.args_ast()
        else:
            args = []
        body = self.segment_ast()
        return DefineMacroAST(token, name.str(), args, body)

    def define_var_list_ast(self) -> List[Union[IdAST, BinaryAST]]:
        """Parse a comma-separated declarator list: ``a, b = expr, c``.

        Bare names yield IdAST; ``name = expr`` yields a BinaryAST.
        """
        res: List[Union[IdAST, BinaryAST]] = []
        while True:
            name = self.name_id_ast()
            if self.current().match(LexTokenType.symbol, '='):
                op = self.current()
                self.eatt(op)
                value = self.expr_ast()
                res.append(BinaryAST(op, name, value))
            else:
                res.append(name)
            if not self.current().match(LexTokenType.symbol, ','):
                break
            self.eat(LexTokenType.symbol, ',')
        return res

    def define_temp_var_ast(self) -> DefineTempVarAST:
        """Parse a ``let`` declaration: ``let a, b = expr, ...``."""
        cur = self.current()
        self.eat(LexTokenType.keyword, 'let')
        ls = self.define_var_list_ast()
        return DefineTempVarAST(cur, ls)

    def while_ast(self) -> WhileAST:
        """Parse ``while condition {body}``."""
        cur = self.current()
        self.eat(LexTokenType.keyword, 'while')
        condition = self.expr_ast()
        body = self.segment_ast()
        return WhileAST(cur, condition, body)

    def for_ast(self) -> ForAST:
        """Parse ``for init; condition; end {body}``."""
        cur = self.current()
        self.eat(LexTokenType.keyword, 'for')
        init = self.primary_ast()
        self.eat(LexTokenType.symbol, ';')
        condition = self.expr_ast()
        self.eat(LexTokenType.symbol, ';')
        end = self.primary_ast()
        body = self.segment_ast()
        return ForAST(cur, init, condition, end, body)

    def jump_ast(self) -> JumpAST:
        """Parse ``jump [+|-] target [if condition]``.

        The optional '+'/'-' selects the jump direction; any other symbol
        in that position is rejected.
        """
        cur = self.current()
        self.eat(LexTokenType.keyword, 'jump')
        direction: str = ''
        if self.current().ttype == LexTokenType.symbol:
            if self.current().code in ['+', '-']:
                direction = self.current().code
                self.eatt(self.current())
            else:
                raise self.error(self.current(),
                                 f"{self.current().code} 是错误的跳跃方向符号。")

        target = self.name_id_ast()
        if self.current().match(LexTokenType.keyword, 'if'):
            self.eatt(self.current())
            condition = self.expr_ast()
        else:
            condition = None
        return JumpAST(cur, direction, target.str(), condition)

    def end_ast(self) -> EndAST:
        """Parse a bare ``end`` statement."""
        cur = self.current()
        if not cur.match(LexTokenType.keyword, 'end'):
            raise self.error(cur, "end 语句只写一个 end 就好了。")
        self.eatt(cur)
        return EndAST(cur)

    def if_ast(self) -> IfAST:
        """Parse ``if condition {then} [else (if ... | {else})]``.

        ``else if`` chains recurse into if_ast; a plain ``else`` takes a
        ``{...}`` segment; a missing else yields else_body = None.
        """
        token = self.current()
        self.eat(LexTokenType.keyword, 'if')
        condition = self.expr_ast()
        then_body = self.segment_ast()
        if self.current().match(LexTokenType.keyword, 'else'):
            self.eatt(self.current())
            if self.current().match(LexTokenType.keyword, 'if'):
                else_body = self.if_ast()
            elif self.current().match(LexTokenType.bracket, '{'):
                else_body = self.segment_ast()
            else:
                raise self.error(self.current(), "else 语句格式错误。")
        else:
            else_body = None
        return IfAST(token, condition, then_body, else_body)

    def import_ast(self) -> ImportAST:
        """Parse ``import "path"`` — the operand must be a string literal."""
        cur = self.current()
        self.eat(LexTokenType.keyword, 'import')
        factor = self.factor_ast()
        if not isinstance(factor, StringAST):
            raise self.error(factor.token, "import 语句需要用字符串指定路径。")
        return ImportAST(cur, factor)

    def primary_ast(self) -> AST:
        """Parse one statement: dispatch on the leading keyword, a ``{``
        segment, a ``name:``/``name?:`` tag, or fall back to an expression.

        NOTE(review): peek() may return None at end of input, making the
        .match() calls below fail with AttributeError — confirm the token
        stream always has a trailing token.
        """
        cur = self.current()
        if cur.ttype == LexTokenType.keyword:
            if cur.code == 'if':
                return self.if_ast()
            if cur.code == 'let':
                return self.define_temp_var_ast()
            if cur.code == 'jump':
                return self.jump_ast()
            if cur.code == 'while':
                return self.while_ast()
            if cur.code == 'for':
                return self.for_ast()
            if cur.code == 'define':
                return self.define_macro_ast()
            if cur.code == 'import':
                return self.import_ast()
            if cur.code == 'end':
                return self.end_ast()
            raise self.error(cur, f"未能实现的关键字：<{cur.code}>")
        if cur.match(LexTokenType.bracket, '{'):
            return self.segment_ast()
        peek = self.peek()
        if peek.match(LexTokenType.symbol, ':') or peek.match(LexTokenType.symbol, '?:'):
            return self.tag_ast()

        return self.expr_ast()

    def segment_ast(self) -> SegmentAST:
        """Parse a ``{ ... }`` block of statements into a SegmentAST."""
        cur = self.current()
        if not (cur.ttype == LexTokenType.bracket and cur.code == '{'):
            raise self.error(cur, f"这是干什么的符号？<{cur.code}>")
        self.eatt(cur)

        ast = SegmentAST(cur)
        while True:
            cur = self.current()
            if cur.ttype == LexTokenType.bracket and cur.code == '}':
                self.eatt(cur)
                return ast
            ast.asts.append(self.primary_ast())