from ast import literal_eval
from tokenize import (
    NAME, STRING, NUMBER, NEWLINE, DEDENT, INDENT,
    ENDMARKER, TokenInfo
)
from typing import List, Optional

from .grammar import (
    Grammar,
    Rule,
    Meta,
    Alternative,
    Item,
    Action,
)
from .memo import memoize
from .parser import BaseParser


class GrammarParser(BaseParser):
    """
    Classic hand-written LL parser for the grammar meta-language.

    Consumes tokens via the tokenizer held by ``BaseParser`` and builds a
    ``Grammar`` AST: a list of ``@meta`` declarations followed by rules.
    """

    def __now_tok(self) -> TokenInfo:
        """Return the lookahead token without consuming it."""
        return self._tokenizer.peek()

    def __next_tok(self):
        """Consume and return the current token."""
        return self._tokenizer.next_token()

    def parse_grammar(self) -> Grammar:
        """Parse the whole input: zero or more ``@meta`` lines, then rules."""
        meta_list = []

        # All meta declarations start with '@' and precede the rules.
        while self.__now_tok().string == '@':
            meta_list.append(self._parse_meta())

        rules = self._parse_rules()

        return Grammar(meta_list, rules)

    def _parse_meta(self) -> Meta:
        """Parse one ``@name <STRING-or-NUMBER>`` meta declaration."""
        self.expect('@')
        if not (tok := self.expect(NAME)):
            self._syntax_error('rule meta except NAME')
        meta_name = tok.string

        if not (tok := self.expect(STRING) or self.expect(NUMBER)):
            self._syntax_error('content of meta only support NUMBER or STRING')
        # literal_eval converts the raw token text into the Python value
        # (unquotes strings, parses numbers).
        meta_content = literal_eval(tok.string)

        self.expect(NEWLINE)

        return Meta(meta_name, meta_content)

    def _parse_rules(self) -> List[Rule]:
        """Parse rules until the end of input."""
        rules = []

        while self.__now_tok().type != ENDMARKER:
            rules.append(self._parse_rule())

        return rules

    def _parse_rule(self) -> Rule:
        """Parse one rule: ``name:`` followed by its alternatives.

        Supports both the single-line form (alternatives on the same line
        as the colon) and the indented multi-line '|' form.
        """
        if not (tok := self.expect(NAME)):
            self._syntax_error('require rule name')
        rule_name = tok.string

        if not self.expect(':'):
            # BUGFIX: the original message said 'except NAME' although the
            # token that failed to match here is the ':' after the rule name.
            self._syntax_error("except ':'")

        if not self.expect(NEWLINE):
            # Single-line rule: the alternatives follow the colon directly.
            alts = self._parse_alts()
            self.expect(NEWLINE)
            # NOTE(review): expecting a DEDENT here looks odd for a rule
            # that produced no INDENT -- presumably the Tokenizer emits one
            # anyway; confirm against Tokenizer behaviour.
            self.expect(DEDENT)
            return Rule(rule_name, [alts])

        if not self.expect(INDENT):
            self._syntax_error('except indent')

        alts_list = self._parse_alts_list()
        self.expect(DEDENT)

        return Rule(rule_name, alts_list)

    def _parse_alts_list(self) -> List[List[Alternative]]:
        """Parse the '|'-prefixed alternative lines of an indented rule."""
        alts_list = []
        self.expect('|', must=True)

        while alts := self._parse_alts():
            alts_list.append(alts)
            self.expect(NEWLINE)

            # Every subsequent line must open with '|'; anything else ends
            # the alternatives list.
            if not self.expect('|'):
                break

        return alts_list

    def _parse_alts(self) -> List[Alternative]:
        """Parse consecutive alternatives until one fails to parse."""
        alts = []
        while (alt := self._parse_alt()) is not None:
            alts.append(alt)
        return alts

    def _parse_alt(self) -> Optional[Alternative]:
        """Parse one alternative: items plus an optional ``{ action }``.

        Returns ``None`` when no item can be parsed (soft failure).
        """
        if not (items := self._parse_items()):
            return None  # fail

        action = None
        if self.__now_tok().string == '{':
            action = self._parse_action()
        return Alternative(items, action)

    def _parse_items(self) -> List[Item]:
        """Parse consecutive items until one fails to parse."""
        items = []
        while (item := self._parse_item()) is not None:
            items.append(item)
        return items

    def _parse_item(self) -> Optional[Item]:
        """Parse one item: a string literal, a name, or ``alias=value``.

        Returns ``None`` when the lookahead starts no item (soft failure).
        """
        if (tok := self.__now_tok()).type == STRING:
            self.__next_tok()
            return Item(literal_eval(tok.string), tok.type, None)

        if not (may_alias := self.expect(NAME)):
            return None  # fail

        if self.__now_tok().string == '=':
            self.__next_tok()
            if not (value := self.expect(STRING) or self.expect(NAME)):
                self._syntax_error('except NAME or STRING')
            # NOTE(review): a STRING value here keeps its quotes and is
            # tagged NAME, unlike the bare-string branch above -- confirm
            # this asymmetry is intended.
            return Item(value.string, NAME, may_alias.string)

        return Item(may_alias.string, NAME, None)

    def _parse_action(self) -> Action:
        """Parse a ``{ ... }`` action; the leading '{' is the current token."""
        self.__next_tok()
        return Action(self._parse_expr_string())

    def _parse_expr_string(self) -> str:
        """Collect token text until the matching '}', handling nesting.

        Tokens are joined with single spaces; nested ``{...}`` groups are
        re-wrapped in literal braces. Raises a syntax error on EOF before
        the closing brace.
        """
        result = ''
        while (tok := self.__now_tok()).string != '}':
            if tok.type == ENDMARKER:
                self._syntax_error('EOF while parsing action')
            if tok.string == '{':
                self.__next_tok()  # consume the '{'
                result += f'{{{self._parse_expr_string()}}}'
                # BUGFIX: restart the loop here. The original fell through,
                # appending the stale '{' token text a second time and then
                # consuming (skipping) the token after the nested group.
                continue
            result += tok.string + ' '
            self.__next_tok()
        self.__next_tok()  # consume the closing '}'
        return result


def main():
    """CLI entry point: parse a grammar file and pretty-print its AST."""
    from argparse import ArgumentParser
    from tokenize import generate_tokens
    from .tokenizer import Tokenizer
    from .formatter import _format_object

    arg_parser = ArgumentParser()
    arg_parser.add_argument(
        'grammar', nargs='?', help='grammar file (default = memodeath/meta.gram)',
        default='memodeath/meta.gram')

    gram_file = arg_parser.parse_args().grammar

    with open(gram_file, encoding='UTF-8') as f:
        parser = GrammarParser(
            Tokenizer(generate_tokens(f.readline)),
            gram_file,
        )
        grammar = parser.parse_grammar()
        print(_format_object(grammar))


# Script entry: run the grammar-file pretty-printer CLI.
if __name__ == '__main__':
    main()
