"""
Frontend Factory


@author: Ruyman Reyes Castro <rreyes@ull.es>

"""


import sys
import os
import config

import copy


class GeneralError(Exception):
    """Generic factory error (e.g. a requested language is not installed).

    The message is stored on ``self.str``; the legacy attribute name is
    kept for backward compatibility with existing callers.
    """

    def __init__(self, str):
        # NOTE: the parameter shadows the builtin ``str``; the name is kept
        # unchanged so any keyword callers keep working.
        self.str = str

    def __str__(self):
        # Bug fix: __str__ must *return* the text, not print it.  The old
        # body printed (Python 2 print statement) and implicitly returned
        # None, which made ``str(exc)`` raise a TypeError.
        return ":: " + str(self.str)


# These module-scope dictionaries hold the cache for the different
# parser/lexer/prepro classes.  They are keyed by the '-'-joined language
# list (e.g. "C99-OpenMP") so repeated requests for the same combination
# reuse the dynamically built class instead of re-importing the modules.
parser_cache = {}
lexer_cache = {}
prepro_cache = {}



class LanguageNotDefined(Exception):
    """Raised when no source language list is supplied to the factory."""


class FrontendFactory(object):
    """Dynamic creation of lexer, parser and preprocessor classes.

    Frontend modules live under ``config.FRONTEND_DIR``; for a language
    ``Foo`` the factory imports ``<FRONTEND_MODULE>.Foo.foo_lexer`` (and
    the ``_parser`` / ``_prepro`` counterparts) and composes the
    per-language classes into a single class with ``type()``.  Built
    classes are cached in the module-level ``*_cache`` dictionaries,
    keyed by the '-'-joined language list.
    """

    def getInstalledFrontends(self):
        """Return the set of language names with an installed frontend.

        A frontend counts as installed when its directory under
        ``config.FRONTEND_DIR`` contains a ``_<name>_ast.cfg`` file.
        """
        installed = set()
        for elem in os.listdir(config.FRONTEND_DIR):
            if os.path.isdir(os.path.join(config.FRONTEND_DIR, elem)):
                cfg_path = os.path.join(
                    config.FRONTEND_DIR,
                    str(elem) + '/_' + str(elem).lower() + '_ast.cfg')
                if os.path.isfile(cfg_path):
                    installed.add(elem)
        return installed

    def checkLanguageList(self, languages):
        """Raise :class:`GeneralError` if any language lacks a frontend.

        Returns True when every entry of *languages* is installed.
        """
        installed = self.getInstalledFrontends()
        for elem in languages:
            if elem not in installed:
                raise GeneralError('Language ' + str(elem) + " not installed. Try one of : " + str(installed))
        return True

    def getLexerFromLanguageList(self, languages):
        """Build (or fetch from cache) a composite lexer class.

        The returned class inherits from every per-language ``<Name>Lexer``
        class and exposes ``get_tokens()`` / ``get_states()`` returning the
        union of tokens and states across all listed languages.
        """
        cache_key = "-".join(languages)
        if cache_key in lexer_cache:
            return lexer_cache[cache_key]
        # Check that we have modules for listed languages
        self.checkLanguageList(languages)
        lexer_class_tuple = ()
        # The list is reversed before composing the base-class tuple;
        # kept as-is because it determines MRO precedence among lexers.
        tmp = copy.copy(languages)
        tmp.reverse()
        tokens = []
        states = tuple()
        for name in tmp:
            myclass = __import__(config.FRONTEND_MODULE + '.' + str(name) + "." + str(name).lower() + '_lexer', globals(), locals(), [str(name), ])
            lexer_class = getattr(myclass, name + 'Lexer')
            # Accumulate the token/state contributions of each language.
            tokens += lexer_class.get_tokens()
            states += lexer_class.get_states()
            lexer_class_tuple += (lexer_class,)

        def get_tokens(self,):
            return tokens

        def get_states(self,):
            return states

        my_lexer = type("CurrentLexer", lexer_class_tuple, {})
        # ``staticmethod(f).__get__(None, cls)`` unwraps back to the plain
        # function so the closures above override any inherited
        # get_tokens/get_states on the composite class.
        static_get_tokens = staticmethod(get_tokens).__get__(None, my_lexer)
        static_get_states = staticmethod(get_states).__get__(None, my_lexer)
        setattr(my_lexer, 'get_tokens', static_get_tokens)
        setattr(my_lexer, 'get_states', static_get_states)

        lexer_cache[cache_key] = my_lexer
        return my_lexer

    def getParserFromLanguageList(self, languages,):
        """Build (or fetch from cache) a composite parser class.

        The returned class inherits from every per-language ``<Name>Parser``
        class, composed in reverse order of *languages* (as for lexers).
        """
        cache_key = "-".join(languages)
        if cache_key in parser_cache:
            return parser_cache[cache_key]
        # Check that we have modules for listed languages
        self.checkLanguageList(languages)
        parser_class_tuple = ()
        tmp = copy.copy(languages)
        tmp.reverse()
        for name in tmp:
            myclass = __import__(config.FRONTEND_MODULE + '.' + str(name) + "." + str(name).lower() + '_parser', globals(), locals(), [str(name), ])
            parser_class = getattr(myclass, name + 'Parser')
            parser_class_tuple += (parser_class,)
        # Create the virtual class
        my_parser = type("CurrentParser", parser_class_tuple, {})
        parser_cache[cache_key] = my_parser
        return my_parser

    def getPreproFromLanguageList(self, languages, includes=None):
        """Return the preprocessor class for the FIRST listed language.

        :param includes: optional list of extra include paths, stored on
            the preprocessor *module* as ``INCLUDEPATH``.  Defaults to
            ``None`` (treated like an empty list); the former mutable
            ``[]`` default was a shared-default pitfall.

        The prepro cache is not consulted here because the include list
        may differ between calls for the same language.
        """
        self.checkLanguageList(languages)
        name = languages[0]
        myclass = __import__(config.FRONTEND_MODULE + "." + str(name) + "." + str(name).lower() + '_prepro', globals(), locals(), [str(name), ])
        if includes:
            # Original behavior: only set for a non-empty include list.
            myclass.INCLUDEPATH = includes
        prepro_class = getattr(myclass, name + 'Prepro')
        prepro_cache["-".join(languages)] = prepro_class
        return prepro_class

    def parse_with_language_list(self, source, source_name, languages, includes=None):
        """Preprocess and parse *source* for the given language list.

        :param source: source text to preprocess and parse.
        :param source_name: name reported by the parser in diagnostics.
        :param languages: non-empty list of language names.
        :param includes: optional extra include paths for the preprocessor
            (None sentinel replaces the former mutable ``[]`` default).
        :returns: ``[ast, prepro_class, lexer_class, parser_class]``.
        :raises LanguageNotDefined: if *languages* is None or empty.
        """
        if not languages:
            raise LanguageNotDefined
        # TODO: Implement a Flyweight pattern here, reuse the parsers when languages are the same!
        # NOTE(review): the preprocessor is hard-coded to C99 regardless of
        # the requested languages -- confirm this is intentional.
        prepro_class = self.getPreproFromLanguageList(['C99', ], includes)
        lexer_class = self.getLexerFromLanguageList(languages)
        parser_class = self.getParserFromLanguageList(languages,)
        prepro = prepro_class()
        if config.MULTIPLE_FILE_PREPRO:
            ## TODO: New implementation in preprocess
            stripped_source = prepro.preprocess2(source)
        else:
            ## The following line is the old preprocessor
            stripped_source = prepro.preprocess(source)
        parser = parser_class(lex_optimize=config.LEX_OPTIMIZE, yacc_optimize=config.YACC_OPTIMIZE, lexer_class=lexer_class, yacc_debug=config.YACC_DEBUG)
        ast = parser.parse(stripped_source, source_name)
        return [ast, prepro_class, lexer_class, parser_class, ]

#from yacf.Frontend.InternalRepr import CompoundAnnotation

# TODO: IMPLEMENT THIS PROPERLY
# TODO: Move this to Shortcuts
def partial_parse(source, source_name="Partial", languages=None):
    """Parse a snippet of code and return the AST node of its last statement.

    The snippet is wrapped in ``int main() { ...; }`` so a full translation
    unit can be parsed, then the final statement of that synthetic body is
    extracted and returned.

    :param source: statement/expression text (without enclosing function).
    :param source_name: name reported by the parser in diagnostics.
    :param languages: language list; defaults to ``['C99']``.  The None
        sentinel replaces the former mutable-list default argument.
    """
    if languages is None:
        languages = ['C99']
    ff = FrontendFactory()
    c99_prepro_class = ff.getPreproFromLanguageList(languages)
    c99_lexer_class = ff.getLexerFromLanguageList(languages)
    c99_parser_class = ff.getParserFromLanguageList(languages)
    parser = c99_parser_class(lex_optimize=config.LEX_OPTIMIZE, yacc_optimize=config.YACC_OPTIMIZE, lexer_class=c99_lexer_class, yacc_debug=config.YACC_DEBUG)
    stripped_source = c99_prepro_class().preprocess("int main() { " + source + "; }")
    # print "Stripped source:" + str(stripped_source)
    ast = parser.parse(stripped_source, source_name)
    # Snippets should be annotated with stmts and decls
    # CompoundAnnotation().visit(ast)

    return ast.ext[-1].body.stmts[-1]


