import string
from typing import *
from enum import Enum
from new_rmasm.exceptions import *
from new_rmasm import utils
from new_rmasm import keywords as kw


class LexTokenType(Enum):
    """Categories a lexed token can belong to (see Lexer.make_token)."""
    keyword = 0  # reserved word listed in kw.keywords
    number = 1   # numeric literal (decimal int, float, hex, or binary form)
    string = 2   # quoted string literal; token text keeps the quotes
    id = 3       # identifier matched by utils.match_id
    symbol = 4   # operator symbol listed in kw.symbols
    bracket = 5  # bracket character listed in kw.brackets


class LexToken:
    """One lexical token: its raw text, category, and source position."""

    def __init__(self, code: str, line_num: int, source: str, ttype: LexTokenType):
        self.source: str = source          # source (file) name, used in error messages
        self.ttype: LexTokenType = ttype   # token category
        self.line_num: int = line_num      # 1-based line number the token was lexed on
        self.code: str = code              # raw token text (string tokens keep their quotes)

    def get_value(self) -> Union[int, float, str]:
        """Return the Python value of a string or number token.

        A string token yields its contents with the surrounding quotes
        stripped.  A number token yields an ``int`` (decimal, ``0x...`` hex,
        or a binary form carrying a one-character base suffix) or a ``float``.

        Raises:
            RLexerError: for any other token type (and for a number token
                that matches none of the utils predicates).
        """
        if self.ttype == LexTokenType.string:
            # Drop the opening and closing quote characters.
            return self.code[1:-1]
        if self.ttype == LexTokenType.number:
            if utils.is_integer(self.code):
                return int(self.code)
            if utils.is_float(self.code):
                return float(self.code)
            if utils.is_hex(self.code):
                # Skip the "0x" prefix.
                return int(self.code[2:], 16)
            if utils.is_binary(self.code):
                # Drop the trailing base suffix and ignore "_" digit separators.
                return int(self.code[:-1].replace('_', ''), 2)
        raise RLexerError(self.source, self.line_num, f"{self.ttype.name} 类型 Token 不支持 get_value()")

    def is_factory(self) -> bool:
        """True for string, number, and id tokens.

        NOTE(review): the name suggests a grammar "factor" — confirm against
        the parser that consumes this.
        """
        return self.ttype in [LexTokenType.string, LexTokenType.number, LexTokenType.id]

    def match(self, t: LexTokenType, c: str) -> bool:
        """True when both the token type and the raw text match."""
        return self.ttype == t and self.code == c

    def __str__(self):
        return f'<LexToken {self.ttype.name} "{self.code}" ({self.source}:{self.line_num})>'

    # Same readable form in containers and the debugger.
    __repr__ = __str__


class Lexer:
    """Hand-written lexer turning source text into ``LexToken`` objects.

    Tokens are produced lazily through :meth:`next`; the object is also
    iterable and yields tokens until the input is exhausted.
    """

    def __init__(self, code: str, source: str = ''):
        self.source = source             # source (file) name for error reporting
        self.code = code                 # full input text
        self.ind: int = 0                # index of the current character in ``code``
        self.line_num: int = 1           # 1-based line counter
        self.stack: List[LexToken] = []  # tokens lexed ahead, served FIFO by next()

    def error(self, message: str) -> RLexerError:
        """Build (but do not raise) a lexer error at the current position."""
        return RLexerError(self.source, self.line_num, message)

    def make_token(self, code: str) -> LexToken:
        """Classify ``code`` and wrap it in a ``LexToken``.

        Raises:
            RLexerError: if ``code`` matches no known token category.
        """
        ttype: LexTokenType
        if utils.is_string(code):
            ttype = LexTokenType.string
        elif utils.is_number(code):
            ttype = LexTokenType.number
        elif code in kw.keywords:
            ttype = LexTokenType.keyword
        elif code in kw.symbols:
            ttype = LexTokenType.symbol
        elif code in kw.brackets:
            ttype = LexTokenType.bracket
        elif utils.match_id(code):
            ttype = LexTokenType.id
        else:
            raise self.error(f"不支持的东西：{code}")

        return LexToken(code, self.line_num, self.source, ttype)

    def eat(self, chars: str) -> None:
        """Consume exactly ``chars`` from the input, tracking line numbers.

        Raises:
            RLexerError: if the input at the current position differs.
        """
        for c in chars:
            if self.current() == c:
                self.ind += 1
                if c == '\n':
                    self.line_num += 1
            else:
                raise self.error(f"没有匹配到目标字符串：{chars}")

    def peek(self, offset: int = 1) -> Union[None, str]:
        """Return the character ``offset`` positions ahead, or None past EOF."""
        ind = self.ind + offset
        if 0 <= ind < len(self.code):
            return self.code[ind]
        return None

    def current(self) -> Union[None, str]:
        """Return the character at the current position, or None at EOF."""
        return self.peek(offset=0)

    def next(self) -> Union[LexToken, None]:
        """Return the next token, or None when the input is exhausted."""
        buf: str = ''
        if self.stack:
            return self.stack.pop(0)
        while True:
            cur = self.current()
            if cur is None:
                if buf:
                    return self.make_token(buf)
                return None
            # Single-line comment: "#" or "//" up to and including the newline.
            if cur == '#' or cur == '/' and self.peek() == '/':
                if buf:
                    self.stack.append(self.make_token(buf))
                    buf = ''
                while True:
                    cur = self.current()
                    if cur is None:
                        break
                    self.eat(cur)
                    if cur == '\n':
                        break
                if self.stack:
                    break
                continue
            # Multi-line comment: "/* ... */".
            # NOTE(review): an unterminated block comment is silently accepted
            # at EOF rather than reported as an error.
            if cur == '/' and self.peek() == '*':
                if buf:
                    self.stack.append(self.make_token(buf))
                    buf = ''
                while True:
                    cur = self.current()
                    if cur is None:
                        break
                    if cur == '*' and self.peek() == '/':
                        self.eat(cur + self.peek())
                        break
                    self.eat(cur)
                if self.stack:
                    break
                continue
            # String literal (double quotes only); quotes stay in the token text.
            if cur in '"':
                if buf:
                    raise self.error(f"字符串前面不应该出现 {buf}。")
                start = cur
                buf += cur
                self.eat(cur)
                while True:
                    cur = self.current()
                    if cur == '\\':
                        # Keep the escape sequence verbatim; a backslash must be
                        # followed by another character before EOF.
                        nxt = self.peek()
                        if nxt is None:
                            raise self.error("字符串没有结束。")
                        buf += cur + nxt
                        self.eat(cur + nxt)
                        continue
                    if cur == '\n':
                        raise self.error("字符串中不能换行！使用 \\n 代表回车。")
                    if cur is None:
                        raise self.error("字符串没有结束。")
                    buf += cur
                    self.eat(cur)
                    if cur == start:
                        break
                self.stack.append(self.make_token(buf))
                break
            # Whitespace terminates the token being accumulated (if any).
            if cur in ' \t\n':
                if buf:
                    self.stack.append(self.make_token(buf))
                self.eat(cur)
                if self.stack:
                    break
                continue
            # "." may be a decimal point or a stand-alone symbol.
            if cur == '.':
                if utils.is_integer(buf) or not buf:
                    buf += '.'
                    self.eat(cur)
                    continue
                if utils.is_float(buf):
                    raise self.error(f"数字 {buf} 不能再出现多余的小数点。")
                self.stack.append(self.make_token(buf))
                self.stack.append(self.make_token(cur))
                self.eat(cur)
                break
            # Brackets are always single-character tokens.
            if cur in kw.brackets:
                if buf:
                    self.stack.append(self.make_token(buf))
                self.stack.append(self.make_token(cur))
                self.eat(cur)
                break
            # Operator: greedily extend while a longer operator is possible.
            if kw.is_operator_start(cur):
                if buf:
                    self.stack.append(self.make_token(buf))
                    buf = ''
                buf += cur
                self.eat(cur)
                while kw.operator_has_next(buf):
                    nxt = self.current()
                    # Stop at EOF, or when the extended text is no longer an
                    # operator prefix.
                    if nxt is None or not kw.is_operator_start(buf + nxt):
                        break
                    buf += nxt
                    self.eat(nxt)
                if buf:
                    nxt = self.current()
                    # A lone "-" directly followed by a digit starts a negative
                    # number literal, not an operator: keep accumulating.
                    if buf == '-' and nxt is not None and nxt in string.digits:
                        continue
                    self.stack.append(self.make_token(buf))
                else:
                    raise self.error("没能成功构建符号。" + (f"卡在了 {buf}" if buf else ""))
                break

            buf += cur
            self.eat(cur)

        return self.stack.pop(0)

    def __iter__(self) -> 'Lexer':
        return self

    def __next__(self) -> LexToken:
        """Iterator protocol: stop when next() reports exhaustion."""
        n = self.next()
        if n:
            return n
        else:
            raise StopIteration()


if __name__ == '__main__':
    # Quick manual check: lex a tiny snippet and dump each token.
    demo = Lexer(r'''
    a += -2
    ''')
    for tok in demo:
        print(f"{tok.code:<20}{tok.ttype.name}\t\t{tok.line_num}")
