
from token import tok_name
from typing import Union, List

from .actions import (
    DirectorAction, Success, Fail, Original,
    SyntaxErr,
)
from .tokenizer import AbstractTokenizer, Token


# When true, _syntax_error appends the current token to its message
# to aid debugging of the parser itself.
TOK_DEBUG = True


class BaseParser:
    """Base class for PEG-style parsers driven by an AbstractTokenizer.

    All matching helpers follow the same convention: ``None`` signals a
    failed match, and any other value is a successful parse result.  On
    failure the helpers restore the tokenizer position so alternatives
    can be tried (backtracking).
    """

    def __init__(self, tokenizer: AbstractTokenizer, filename: str):
        # Memoization cache for packrat parsing; filled in by subclasses.
        self._memo = {}
        self._tokenizer = tokenizer

        # Used for error reporting only.
        self.filename = filename

    def __now_tok(self):
        # Current token, without consuming it.
        return self._tokenizer.peek()

    def _pos(self):
        # Current tokenizer position (opaque int used with _reset).
        return self._tokenizer.pos()

    def _reset(self, pos: int):
        # Backtrack the tokenizer to a previously saved position.
        self._tokenizer.set_pos(pos)

    def expect(self, tok: Union[int, str], must=False) -> Union[Token, None]:
        """Consume and return the current token if it matches ``tok``.

        ``tok`` is compared against the token *type* when it is an int and
        against the token *string* otherwise.  On a mismatch, return
        ``None`` — or raise a SyntaxError when ``must`` is true.
        """
        if isinstance(tok, int):
            if (now_tok := self.__now_tok()).type == tok:
                self._tokenizer.next_token()
                return now_tok
            if must:
                # BUGFIX: message previously said 'except'; also report the
                # expected token by name rather than its raw numeric type,
                # matching how the actual token is reported.
                self._syntax_error(
                    f'expect {tok_name.get(tok, str(tok))}'
                    f' but got {tok_name[now_tok.type]}')
            return None
        if (now_tok := self.__now_tok()).string == tok:
            self._tokenizer.next_token()
            return now_tok
        if must:
            # BUGFIX: ``must`` was silently ignored for string tokens,
            # inconsistent with the int-type branch above.
            self._syntax_error(
                f'expect {tok!r} but got {now_tok.string!r}')
        return None

    def lookahead(self, func, args, negative: bool) -> Union[bool, None]:
        """Run ``func(*args)`` without consuming input.

        Returns ``True`` when the (possibly negated) match succeeds and
        ``None`` otherwise, so the result composes with other rules.
        (Annotation fixed: the failure path returns ``None``, not False.)
        """
        pos = self._pos()
        res = func(*args)
        self._reset(pos)  # lookahead never consumes, even on success
        return True if (res is not None) != negative else None

    def loop(self, func, args, min_len: int) -> Union[List, None]:
        """Match ``func(*args)`` repeatedly until it fails.

        Returns the collected results when at least ``min_len`` matched;
        otherwise restores the saved position and returns ``None``.
        (Annotation fixed: the failure path returns ``None``.)
        """
        pos = self._pos()
        results = []
        while (result := func(*args)) is not None:
            results.append(result)

        if len(results) >= min_len:
            return results

        self._reset(pos)
        return None

    def peek(self) -> Token:
        """Return the current token without consuming it."""
        return self._tokenizer.peek()

    def gather(self, sep_func_info, node_func_info):
        """Match a separator-delimited sequence: ``node (sep node)*``.

        Each *_info argument is a ``(func, args)`` pair.  Returns ``None``
        when the first node fails; a trailing separator without a
        following node ends the sequence (the separator stays consumed).
        """
        sep_func, sep_args = sep_func_info
        node_func, node_args = node_func_info

        if (n := node_func(*node_args)) is None:
            return None

        res = [n]

        while sep_func(*sep_args) is not None:
            if (n := node_func(*node_args)) is None:
                return res
            res.append(n)
        return res

    def forced(self, result, expected: str):
        """Pass ``result`` through, but raise a SyntaxError when it is None."""
        if result is None:
            self.raise_syntax_error_from_last_token(
                'expect %s' % expected
            )
        return result

    def _director(self, result, func):
        """Post-process ``result`` through a director callback.

        ``func(result, ok)`` may return a DirectorAction to override the
        parse result; any non-action return value leaves ``result`` as-is.
        """
        action = func(result, result is not None)

        if not isinstance(action, DirectorAction):
            return result

        if isinstance(action, Success):
            # ``...`` (Ellipsis) means "keep the original result".
            if action.value is not ...:
                return action.value
            return result

        if isinstance(action, Fail):
            return action.value

        if isinstance(action, Original):
            return result

        if isinstance(action, SyntaxErr):
            return self.raise_syntax_error_from_last_token(
                action.msg
            )
        # NOTE(review): an unrecognized DirectorAction subclass falls
        # through to an implicit None (i.e. parse failure).

    def _syntax_error(self, msg: str):
        """Raise a SyntaxError at the current token's line."""
        err = SyntaxError()
        err.msg = msg + (
            f'  (cur token = {self.__now_tok()})' if TOK_DEBUG else '')
        err.filename = self.filename
        err.lineno = self.__now_tok().line

        raise err

    def raise_syntax_error_from_last_token(self, msg=None):
        """Raise a SyntaxError located at the most recently consumed token.

        Falls back to the peeked token when nothing was consumed yet.
        """
        if len(self._tokenizer._tokens) == 0:
            info = self._tokenizer.peek()
        else:
            info = self._tokenizer._tokens[-1]
        err = SyntaxError('invalid syntax' if msg is None else str(msg))
        err.lineno, err.offset = info.start
        # BUGFIX: attribute was misspelled 'end_office', so the error's
        # end position was never picked up by the interpreter's
        # SyntaxError rendering.
        err.end_lineno, err.end_offset = info.end
        err.text = info.line
        err.filename = self.filename
        raise err

