from contextlib import contextmanager
from token import STRING, NUMBER, NAME
from typing import Dict, Iterable, Set, TextIO, Tuple, TypeVar

from .exceptions import GeneratorException
from .grammar import *
from .sccutils import strongly_connected_components

# Debug switch: when True, _generate_rule emits an `assert 0` into the
# generated 'rule' rule so parse failures fail loudly.
_TEST_FAILED = False

# Import header prepended to every generated parser module.
# NOTE(review): name looks like a typo for _BASIC_HANDLER; it is referenced
# in Generator.generate, so renaming needs a coordinated change.
_BASIC_HANDER = '''
from memodeath.memo import memoize, memoize_for_left_recursion
from memodeath.parser import BaseParser
from memodeath.objects import NONE
'''

_generate_test_main = lambda cls_name, test_method, print_result=True: f'''
def main():
    from argparse import ArgumentParser
    from io import StringIO
    from tokenize import generate_tokens

    from memodeath.formatter import format_object
    from memodeath.tokenizer import Tokenizer

    a_parser = ArgumentParser()
    a_parser.add_argument(
        'file', nargs='?', help='grammar file (default = memodeath/meta.gram)',
        default='memodeath/meta.gram')

    args = a_parser.parse_args()
    file = args.file

    with open(file, encoding='UTF-8') as f:
        gram = {cls_name}(
            Tokenizer(generate_tokens(f.readline)),
            file
        ).{test_method}()

        {'print(format_object(gram))' if print_result else ''}


if __name__ == '__main__':
    main()
'''

T = TypeVar('T')


def _big_union(target: List[List[T]]) -> List[T]:
    result = []
    for x in target:
        result += x
    return result


def _set_big_union(target: Set[Set[T]]) -> Set[T]:
    result = set()
    for x in target:
        result |= x
    return result


def _find_item_may_first(items: List[Item]) -> List:
    """Collect the Basic nodes that may start a match of *items*.

    Wrappers that can match empty (Maybe/Lookahead/Forced/Loop) do not stop
    the scan, so their inner Basic is recorded and scanning continues; the
    first mandatory Gather or Basic item terminates the scan.  Items of any
    other kind (e.g. cuts) are skipped entirely.
    """
    firsts = []

    for entry in items:
        node = entry.value

        # Exact type check (not isinstance) — mirrors the original dispatch
        # so subclasses are deliberately not caught here.
        if type(node) in (Maybe, Lookahead, Forced, Loop):
            firsts.append(node.basic)
        elif isinstance(node, Gather):
            firsts.append(node.basic)
            break
        elif isinstance(node, Basic):
            firsts.append(node)
            break

    return firsts


def _prepare_scc(rules: List[Rule]) -> Tuple[Set[str], Dict[str, Set[str]]]:
    """Build the rule-reference graph consumed by ``strongly_connected_components``.

    For each rule, collect the names that may appear *first* in any of its
    alternatives; of those, only lower-case NAME tokens refer to other rules
    (STRING literals and UPPER-CASE names are terminals) and become outgoing
    edges of the rule's node.

    Returns a ``(vertices, edges)`` pair.  (Removed a commented-out debug
    ``pprint`` call; dead code should not linger.)
    """
    edges: Dict[str, Set[str]] = {}

    for rule in rules:
        edges[rule.name] = set()

        for alt in _big_union(rule.alts):
            for first_val in _find_item_may_first(alt.items):
                if first_val.type != STRING and \
                        (val := first_val.value).islower():
                    edges[rule.name].add(val)

    return set(edges), edges


def _is_left_recursion(rule: Rule):
    """Detect *direct* left recursion: an alternative whose very first item
    refers back to the rule itself (either as a Basic node or a bare string).
    """
    # TODO: finish it with a real left recursion checking
    for group in rule.alts:
        for alternative in group:
            head = alternative.items[0].value
            if isinstance(head, Basic) and head.value == rule.name:
                return True
            if isinstance(head, str) and head == rule.name:
                return True
    return False


def _check_cut(items: List[Item]) -> bool:
    """Return True if any item in *items* is a Cut marker."""
    for candidate in items:
        if isinstance(candidate.value, Cut):
            return True
    return False


class Generator:
    """Emits Python source for a generated parser from a ``Grammar`` tree.

    Construct once, then call :meth:`generate` per output file.  Emitted
    text is order-sensitive, so the generation methods below write lines
    immediately via ``__fprint`` with an indentation prefix managed by the
    ``__indent`` context manager.
    """

    # One indentation level of the *emitted* source.
    INDENT = '    '

    def __init__(self, ):
        # Real initialisation happens in generate(); these are placeholders.
        self.file: TextIO = None  # output stream currently written to
        self.grammar: Grammar = None  # grammar currently being generated
        self._left_recursive_rule_names: Set[str] = None
        self.__indent_string = ''  # current indent prefix used by __fprint
        self._parser_cls_name = ''  # class name of the generated parser

    @contextmanager
    def __indent(self):
        """Context manager: deepen the output indentation by one INDENT."""
        old_indent = self.__indent_string
        try:
            self.__indent_string += self.INDENT
            yield
        finally:
            # Always restore, even if generation raises mid-block.
            self.__indent_string = old_indent

    def __fprint(self, msg: str, file=..., end='\n'):
        """Write one indented line; ``...`` (Ellipsis) is the sentinel for
        'use self.file' so that ``None``/falsy files stay distinguishable."""
        if file is ...:
            file = self.file
        file.write(f'{self.__indent_string}{msg}{end}')

    def prepare_left_recursive_names(self, grammar):
        """Return a tuple of all rule names taking part in a reference cycle.

        Rules inside a strongly connected component of the rule-reference
        graph need the left-recursion-aware memoizer.

        Raises GeneratorException when the SCC walk hits an edge pointing
        at a rule name that was never defined.
        """
        try:
            cycles = strongly_connected_components(*_prepare_scc(grammar.rules))
            return tuple(_set_big_union(cycles))
        except KeyError as e:
            raise GeneratorException(
                'failed to process the SCC graph, maybe the rule \'%s\' is undefined' %
                e.args)

    def _generate_rules(self, rules: List[Rule]):
        """Emit one ``rule_<name>`` method per rule, blank-line separated."""
        for rule in rules:
            self._generate_rule(rule)
            self.__fprint('')

    def _generate_rule(self, rule: Rule):
        """Emit the ``rule_<name>`` method for one rule.

        Each alternative becomes an ``if (True and ... and ...)`` chain;
        on failure the parser position is reset and the next alternative
        is tried.  Falls through to ``return None`` when nothing matched.
        """
        if rule.name in self._left_recursive_rule_names:
            self.__fprint('@memoize_for_left_recursion')
        else:
            self.__fprint('@memoize')

        self.__fprint(f'def rule_{rule.name}(self):')
        with self.__indent():
            self.__fprint('_pos = self._pos()')

            all_alts = _big_union(rule.alts)

            # Declare the cut flag only when some alternative uses a cut.
            if any((_check_cut(x.items) for x in all_alts)):
                self.__fprint('_cut_ = False')

            for alts in all_alts:
                self.__fprint('if (True')

                with self.__indent():
                    for i, item in enumerate(alts.items):
                        _expr, _alias = self._generate_item(item, i)
                        self.__fprint('and %s' % _expr)

                self.__fprint('):')

                with self.__indent():
                    if alts.action is None:
                        # NOTE(review): _alias is whatever the LAST item of
                        # the loop above produced, and is unbound if
                        # alts.items is empty — presumably the grammar
                        # guarantees at least one item; confirm.
                        self.__fprint(f'return {_alias if _alias else "True"}')
                    else:
                        self.__fprint(f'_result = {alts.action.string}')
                        self.__fprint('if _result is not None:')
                        self.__fprint(f'{self.INDENT}return _result')

                self.__fprint('')
                self.__fprint('self._reset(_pos)\n')

                # A cut taken inside a failed alternative aborts the rule.
                if _check_cut(alts.items):
                    self.__fprint('if _cut_: return None')

            if _TEST_FAILED and rule.name == 'rule':
                # Debug aid (see _TEST_FAILED): make failures fail loudly.
                self.__fprint(
                    f'assert 0, \'{rule.name} failed at \\\'%s\\\'\' % self.peek().string')
            self.__fprint('return None')

    def _generate_item(
            self, item: Item, index: int, for_director=False) -> Tuple[str, str]:
        """Return an ``(expression, variable)`` pair for one alternative item.

        The expression is non-None on a successful match; the variable
        (possibly '') names the walrus target that holds the match result.
        With ``for_director=True`` the raw call is returned without the
        ``(var := expr) is not None`` wrapping, for embedding into a
        director lambda.
        """
        if isinstance(item, Item0):
            # Normalise the Item0 form into a regular Basic item first.
            return self._generate_item(
                Item(Basic(item.value, item.item_type), item.alias),
                index
            )

        val, alias = item.value, item.alias

        expr_str = ''
        var = ''

        if not for_director and item.director is not None:
            # Wrap the bare item expression in a self._director(...) call.
            _expr_str, _var = self._generate_item(item, index, True)
            return f'({"%s := " % _var if _var else ""}' + \
                   'self._director(%s, lambda RESULT, OK: %s)) is not None' % \
                   (_expr_str, item.director.string), _var

        if isinstance(val, Basic):
            expr_str, var = self._generate_basic(val, index)
        elif isinstance(val, Maybe):
            expr_str, var = self._generate_maybe(val, index)
        elif isinstance(val, Loop):
            _func, _args = self.__get_basic_call_info(val.basic)
            expr_str, var = f'self.loop({_func}, ({_args}), {val.min_len})', 'loop'
        elif isinstance(val, Lookahead):
            _func, _args = self.__get_basic_call_info(val.basic)
            # NOTE(review): var 'loop' here looks copy-pasted from the Loop
            # branch above — likely meant 'lookahead'; confirm nothing in
            # generated actions relies on the name before changing it.
            expr_str, var = f'self.lookahead({_func}, ({_args}), {val.negative})', \
                            'loop'
        elif isinstance(val, Gather):
            expr_str, var = self._generate_gather(val)
        elif isinstance(val, Forced):
            expr_str, var = self._generate_forced(val, index)
        elif isinstance(val, Cut):
            # A cut only flips the flag; it produces no match result/var.
            expr_str = '(_cut_ := True)'

        if alias:
            # An explicit alias overrides the derived variable name.
            var = alias

        if for_director:
            return expr_str, var

        return f'({"%s := " % var if var else ""}{expr_str}) is not None', var

    def _generate_forced(self, force: Forced, index: int) -> Tuple[str, str]:
        """Emit a ``self.forced(...)`` wrapper around a Basic match."""
        basic_expr, var = self._generate_basic(force.basic, index)
        basic_str = force.basic.value
        expr = f'self.forced({basic_expr}, {repr(basic_str)})'
        return expr, var

    def _generate_gather(self, gather: Gather):
        """Emit a ``self.gather(...)`` call from (separator, item) call infos."""
        _sep_func, _sep_args = self.__get_basic_call_info(gather.separator)
        _bas_func, _bas_args = self.__get_basic_call_info(gather.basic)

        return f'self.gather(({_sep_func}, ({_sep_args})), ({_bas_func}, ({_bas_args})))', \
               'gather'

    def _generate_maybe(self, maybe: Maybe, index: int) -> Tuple[str, str]:
        """Emit an optional item: falls back to NONE so the and-chain stays truthy."""
        basic = maybe.basic
        expr_str, alias = self._generate_basic(basic, index)

        return '(%s or NONE)' % expr_str, alias

    def __get_basic_call_info(self, basic: Basic) -> Tuple[str, str]:
        """Map a Basic to an *uncalled* ``(function, argument-string)`` pair.

        STRING literals and UPPER-CASE names become ``self.expect`` calls
        on the token; lower-case names call the generated rule method.
        """
        if basic.type == STRING:
            func = 'self.expect'
            args = f'{repr(basic.value)},'
        else:
            assert basic.type == NAME, \
                'invalid item type: %s' % basic.type
            if basic.value.isupper():
                func = 'self.expect'
                args = f'{basic.value},'
            else:
                # NOTE(review): the leading space in ' self.rule_...' leaks
                # into the generated source; harmless inside parentheses
                # but looks unintentional — confirm before cleaning up.
                func = f' self.rule_{basic.value}'
                args = ''
        return func, args

    def _generate_basic(self, basic: Basic, index: int) -> Tuple[str, str]:
        """Emit the immediate call expression for a Basic plus its alias.

        Unnamed (non-NAME) items get a positional ``_unnamed_<index>`` alias.
        """
        func, args = self.__get_basic_call_info(basic)
        expr_str = f'{func}({args})'
        check_none = False  # NOTE(review): assigned below but never read — dead.
        alias = f'_unnamed_{index}'

        if basic.type == NAME:
            if not basic.value.isupper():
                check_none = True
            alias = basic.value.lower()

        return expr_str, alias

    def _generate_metas_head(self, metas: List[Meta]):
        """Handle metas emitted before the parser class: @subheader, @parserclass."""
        for meta in metas:
            if meta.meta_name == 'subheader':
                self.file.write(meta.meta_content)
            elif meta.meta_name == 'parserclass':
                # Overrides the cls_name passed to generate().
                self._parser_cls_name = meta.meta_content

    def _generate_metas_tail(self, metas: List[Meta]):
        """Handle metas emitted after the parser class: @subtail, @boot."""
        for meta in metas:
            if meta.meta_name == 'subtail':
                self.file.write(meta.meta_content)
            if meta.meta_name == 'boot':
                self.__fprint(
                    f'__BOOTINFO__ = {self._parser_cls_name}, \'{meta.meta_content}\'')

    def generate(
            self, cls_name: str, grammar: Grammar, file: TextIO, test_main=False):
        """Write the complete generated parser module to *file*.

        *cls_name* may be overridden by a @parserclass meta.  When
        *test_main* is true, a runnable ``main()`` harness is appended.
        """
        self.file = file
        self.grammar = grammar

        self._left_recursive_rule_names = self.prepare_left_recursive_names(grammar)

        self._parser_cls_name = cls_name

        self.__fprint('\n# This parser is generated by memodeath!!')
        self.__fprint(_BASIC_HANDER)

        self._generate_metas_head(grammar.metas)

        self.__fprint(f'\n# parser class: {self._parser_cls_name}\n')

        with self.__indent():
            # NOTE(review): the leading '\n' puts the indent prefix *before*
            # the newline, so the 'class' line itself still lands at column 0
            # while the rule methods below get one INDENT level — it works,
            # but only by this quirk of __fprint.
            self.__fprint(f'\nclass {self._parser_cls_name}(BaseParser):')
            self._generate_rules(grammar.rules)

        self._generate_metas_tail(grammar.metas)

        if test_main:
            self.__fprint(
                _generate_test_main(self._parser_cls_name, 'rule_grammar', True)
            )


def main():
    """Command-line driver: parse a grammar file and emit the parser source.

    Chooses the bootstrap parser (-x) or the generated one, then prints
    the generated module unless -np is given.
    """
    from argparse import ArgumentParser
    from io import StringIO
    from tokenize import generate_tokens

    from .tokenizer import Tokenizer

    cli = ArgumentParser()
    cli.add_argument(
        'grammar', nargs='?', help='grammar file (default = memodeath/meta.gram)',
        default='memodeath/meta.gram')
    cli.add_argument(
        '-x', action='store_true', help='use backup Grammar parser')
    cli.add_argument(
        '-nt', action='store_true', help='do not generate test main')
    cli.add_argument(
        '-np', action='store_true', help='(debug only) do not print source code')

    options = cli.parse_args()
    grammar_path = options.grammar

    with open(grammar_path, encoding='UTF-8') as stream:
        tokenizer = Tokenizer(generate_tokens(stream.readline))

        if options.x:
            # Backup (hand-written) grammar parser.
            from .gramparser0 import GrammarParser
            parsed = GrammarParser(tokenizer, grammar_path).parse_grammar()
        else:
            # Self-hosted generated grammar parser.
            from .gramparser import Dodoco
            parsed = Dodoco(tokenizer, grammar_path).rule_grammar()

        buffer = StringIO()
        Generator().generate('Dodoco', parsed, buffer, test_main=not options.nt)

        if not options.np:
            print(buffer.getvalue())


if __name__ == '__main__':
    main()
