
from io import StringIO
from typing import TextIO, Any

from .exceptions import PlaygroundError
from .generator import Generator
from .grammar import Grammar
from .gramparser import Dodoco
from .parser import BaseParser
from .tokenizer import AbstractTokenizer, Tokenizer as _BuiltinTokenizer


class _GenerationMiddle:
    """Intermediate result of a parser generation.

    Wraps a deferred generation callback so the caller can pick the output
    target: a plain string (`to_source`), an arbitrary writable stream
    (`write_to`), or an in-memory playground that compiles and executes the
    generated source (`get_playground`).
    """

    def __init__(self, cls_name: str, run_func):
        # run_func: callable taking one writable text stream; it writes the
        # generated parser source into that stream.
        self.__run_func = run_func
        self.__cls_name = cls_name

    def to_source(self) -> str:
        """Run the generation and return the produced source as a string."""
        string_buffer = StringIO()
        self.__run_func(string_buffer)
        return string_buffer.getvalue()

    def write_to(self, writable: TextIO):
        """Run the generation, writing the produced source into *writable*."""
        self.__run_func(writable)

    def get_playground(self) -> '_Playground':
        """Compile and exec the generated source; return a playground over it.

        NOTE: this ultimately calls exec() on the generated source — only
        ever feed it output of this package's own generator, never untrusted
        text.
        """
        # BUG FIX: the old one-liner evaluated
        # `run_func(buf) or buf.getvalue()`, which would silently return
        # run_func's result instead of the buffer contents whenever the
        # callback returned a truthy value.  Reusing to_source() is both
        # correct and consistent with the rest of the class.
        source = self.to_source()
        env = {}
        exec(compile(source, '<playground>', 'exec'), env)
        return _Playground(self.__cls_name, env)

    def __str__(self) -> str:
        return f'<GenerationMiddle of \'{self.__cls_name}\'>'

    __repr__ = __str__


class Generation:
    """Entry point for turning a Grammar into parser source code.

    Holds the target parser class name plus one Generator instance that is
    reused for every generation request made through this object.
    """

    def __init__(self, class_name: str):
        self.class_name = class_name
        self.__gen = Generator()

    def generate_from_grammar(self, grammar: Grammar) -> _GenerationMiddle:
        """Defer generation of *grammar* behind a `_GenerationMiddle`.

        Nothing is generated until the returned object is asked for output.
        """
        def run(writable):
            # Name-mangled attribute access still resolves here because the
            # inner function is defined inside the class body.
            self.__gen.generate(self.class_name, grammar, writable)

        return _GenerationMiddle(self.class_name, run)


class _Playground:
    """Access point into a generated parser that lives in an exec()'d env."""

    def __init__(self, parser_cls_name: str, parser_source_env: dict):
        # parser_source_env: the globals dict the generated source was
        # exec()'d into; it holds the parser class and, for bootable
        # grammars, a `__BOOTINFO__` (class, rule_name) pair.
        self.__env = parser_source_env
        self.__cls_name = parser_cls_name

    def get_parser_class(self) -> BaseParser:
        """Return the generated parser class.

        Raises PlaygroundError when the expected class name is absent from
        the environment.
        """
        cls = self.__env.get(self.__cls_name)
        if cls is None:
            raise PlaygroundError('parser class not found')
        return cls

    def boot(self, input: str = ..., tokenizer: AbstractTokenizer = ...) -> Any:
        """Run the grammar's boot rule against *input* and return its result.

        Both parameters default to the `...` sentinel: a missing *input*
        means the empty string, and a missing *tokenizer* means the built-in
        tokenize-based one.  Raises PlaygroundError when the grammar declares
        no `__BOOTINFO__` or the named boot rule is missing.
        """
        cls, rule_name = self.__env.get('__BOOTINFO__', (None, None))
        if cls is None:
            raise PlaygroundError('this grammar file is not bootable')

        if input is ...:
            input = ''

        if tokenizer is ...:
            from tokenize import generate_tokens
            tokenizer = _BuiltinTokenizer(generate_tokens(StringIO(input).readline))
        else:
            tokenizer.init_by_source(input)

        # BUG FIX: getattr() without a default raises AttributeError, which
        # made the original `if method is None` check unreachable.  Pass
        # None as the default so the PlaygroundError path actually fires.
        method = getattr(cls(tokenizer, '<playground input>'), f'rule_{rule_name}', None)
        if method is None:
            raise PlaygroundError('rule to boot is not found')

        return method()
        

def grammar_from_source(
        source: str, filename: str = '<source>', 
        tokenizer: AbstractTokenizer = ...) -> Grammar:
    """Parse grammar text into a Grammar object.

    Parameters:
        source: the grammar text to parse.
        filename: name reported in syntax errors.
        tokenizer: optional pre-built tokenizer; when omitted (the `...`
            sentinel), the built-in tokenize-based tokenizer is used.

    A syntax error is raised (via the Dodoco parser) when *source* is not a
    valid grammar.
    """
    if tokenizer is ...:
        from tokenize import generate_tokens
        tokenizer = _BuiltinTokenizer(generate_tokens(StringIO(source).readline))
    else:
        # BUG FIX: the original called init_by_source() with no argument,
        # silently dropping the grammar text.  Pass `source`, mirroring the
        # `tokenizer.init_by_source(input)` call in _Playground.boot.
        tokenizer.init_by_source(source)

    dodoco = Dodoco(tokenizer, filename)
    if (gram := dodoco.rule_grammar()) is None:
        dodoco.raise_syntax_error_from_last_token()
    return gram


# Public API: only the generation entry point and the source->Grammar helper
# are exported; the underscore-prefixed middle/playground classes are reached
# through their return values.
__all__ = (
    'grammar_from_source', 
    'Generation', 
)
