
from abc import ABCMeta
from io import StringIO
from tokenize import TokenInfo, COMMENT, NL, ERRORTOKEN, generate_tokens
from typing import Generator, List

from .token import Token


class AbstractTokenizer(metaclass=ABCMeta):
    """Interface for a token stream with a movable cursor and one-token lookahead.

    Positions returned by ``pos`` are opaque cursors: they are only meaningful
    when passed back to ``set_pos`` on the same tokenizer instance.
    """

    def pos(self) -> int:
        """Return the current cursor position in the token stream."""

    def set_pos(self, pos: int):
        """Move the cursor to *pos*, a value previously returned by ``pos``."""

    def peek(self) -> Token:
        """Return the current token without advancing the cursor."""

    def next_token(self) -> Token:
        """Return the current token and advance the cursor past it."""

    def init_by_source(self, source: str):
        """Initialise this tokenizer from raw *source* text."""

    @staticmethod
    def new_from_source(source: str) -> 'AbstractTokenizer':
        """Build and return a fresh tokenizer over *source*."""


class Tokenizer(AbstractTokenizer):
    """Concrete tokenizer backed by ``tokenize.generate_tokens``.

    Tokens are pulled lazily from the underlying generator and cached in a
    list, so the cursor can be saved (``pos``) and restored (``set_pos``)
    without re-tokenizing the source.
    """

    def __init__(self, token_generator: Generator):
        self._token_generator = token_generator
        # Cursor into the cached token list; entries past the cursor may not
        # have been pulled from the generator yet (see _peek).
        self._pos = 0
        self._tokens: List[TokenInfo] = []

    @staticmethod
    def new_from_source(source: str) -> 'Tokenizer':
        """Create a tokenizer over *source*.

        ``generate_tokens`` expects a ``readline`` callable, not a file-like
        object; passing the ``StringIO`` itself (as the previous code did)
        raises ``TypeError`` the first time a token is requested.
        """
        return Tokenizer(generate_tokens(StringIO(source).readline))

    def pos(self) -> int:
        """Return the current cursor position (usable with ``set_pos``)."""
        return self._pos

    def set_pos(self, pos: int):
        """Rewind or advance the cursor to a position returned by ``pos``."""
        self._pos = pos

    def peek(self) -> TokenInfo:
        """Return the next significant token without consuming it.

        Comments, non-logical newlines (``NL``) and whitespace-only
        ``ERRORTOKEN`` entries are skipped; the cursor is advanced past them
        as a side effect.
        """
        while (tok := self._peek()).type in (COMMENT, NL) or \
                tok.type == ERRORTOKEN and tok.string.isspace():
            self._pos += 1
        return tok

    def _peek(self) -> TokenInfo:
        """Return the raw token at the cursor, pulling from the generator on demand."""
        if len(self._tokens) == self._pos:
            self._tokens.append(next(self._token_generator))
        return self._tokens[self._pos]

    def next_token(self) -> TokenInfo:
        """Return the current significant token and move the cursor to the next one.

        :return: the token ``peek`` would have returned before the call.
        """
        tok = self.peek()
        self._pos += 1
        return tok

