#! /usr/bin/env python
#
# URL:      http://www.fiber-space.de
# Author:   Kay Schluehr <kay@fiber-space.de>
# Creation: 15 Oct 2009

from __future__ import with_statement

import os
import string
from langscape.util import get_traceback
from langscape.util.path import path
import langscape.base.loader as loader

# Filesystem anchors derived from this module's own location.
# Per the generated-file banner below, this module lives in langscape/base,
# so: own_path -> .../langscape/base, parent_path -> .../langscape,
# langlets_path -> .../langscape/langlets (default home for new langlets).
own_path      = path(__file__).dirname()
parent_path   = own_path.dirname()
langlets_path = parent_path.joinpath("langlets")

class LangletGenerator(object):
    """
    Generates a new langlet on disk.

    Copies the ``langlet_template`` directory tree to the destination,
    assigns a fresh langlet id from the ``ls_id`` counter file, fills in
    the ``.strtempl`` string templates (langlet_config, lex_token) and
    bootstraps the langlet's grammar/NFA data.
    """
    def __init__(self, kwds):
        # kwds: option dict from the command-line frontend.
        # "langlet_name" is required; other entries ("parent", "prompt",
        # "target", "source_ext", "compiled_ext", "target_compiler",
        # "location") may be empty and are completed by update_options().
        self.name = kwds["langlet_name"].strip()
        self.options = kwds

    def update_options(self):
        '''
        Overrides empty options settings by parent options.

        If a parent langlet was named, every option the user left at its
        default (empty string, or the default prompt "> ") is inherited
        from the parent's langlet config. Finally an empty "target" falls
        back to the new langlet's own name.
        '''
        if self.options["parent"]:
            pth, module = loader.find_langlet(self.options["parent"])
            # "> " is treated as "prompt not set by the user"
            if self.options["prompt"] == "> ":
                self.options["prompt"] = module.config.prompt
            if self.options["target"] == "":
                self.options["target"] = module.config.target_langlet
            if self.options["source_ext"] == "":
                self.options["source_ext"] = module.config.source_ext
            if self.options["compiled_ext"] == "":
                self.options["compiled_ext"] = module.config.compiled_ext
            if self.options["target_compiler"] == "":
                self.options["target_compiler"] = module.config.target_compiler
        if self.options["target"] == "":
            self.options["target"] = self.name

    def run(self):
        '''
        Entry point: create all langlet files, then print the result tree.
        '''
        self.create_files()
        self.print_status()

    def create_new_langlet_counter(self, default = -1):
        '''
        Updates langscape/ls_id file by incrementing offset counter by 1.

        Reads the current "langlet_cnt" value from ls_id -- unless a
        non-negative ``default`` is passed, which is then used as the
        current value -- increments it, rewrites the whole file and
        returns the new counter.

        NOTE(review): if ls_id contains no "langlet_cnt" line, ``count``
        stays at the -1 default and 0 is written back, silently restarting
        the counter -- confirm this is intended rather than an error.
        '''
        count = default
        fs_path  = parent_path.joinpath("ls_id")
        if count<0:
            with open(fs_path) as fs:
                for line in fs.readlines():
                    if line.startswith("langlet_cnt"):
                        count = int(line.split("langlet_cnt = ")[1])
        count+=1

        # Rewrite ls_id completely; the file is machine-generated on purpose.
        with open(fs_path,"w") as fs:
            print >> fs, "# -- defines langlet counter --"
            print >> fs, "# This was automatically generated by langscape.base.langlet_gen.py"
            print >> fs, "# Hack at your own risk."
            print >> fs, ""
            print >> fs, "langlet_cnt = %d"%count
        return count

    def create_langlet_data(self, pth_langlet_py):
        '''
        Bootstraps grammar data for the freshly created langlet: compiles
        the lexer and parser grammars, then loads the langlet and builds
        its NFA tables.
        '''
        # Imported locally, presumably to avoid a circular import at module
        # load time -- TODO confirm.
        from langscape.base.grammar_gen import ParserGrammar, LexerGrammar

        LexerGrammar(self.name, pth_langlet_py, {"build_langlet": True}).load_grammar()
        ParserGrammar(self.name, pth_langlet_py, {"build_langlet": True}).load_grammar()

        module_path, langlet = loader.find_langlet(self.name)
        langlet_obj = langlet.Langlet()
        langlet_obj.package = loader.get_package(module_path)
        # NOTE(review): a second, fresh Langlet() is passed here although
        # langlet_obj -- with .package just assigned -- already exists, and
        # is otherwise unused. Confirm load_nfas should not receive
        # langlet_obj instead.
        loader.load_nfas(langlet.Langlet(), True, True)


    def create_langlet_config_py(self, langlet_id, pth_conf):
        '''
        Returns the text of the new langlet_config.py, produced by
        substituting this langlet's settings into the string template
        file at ``pth_conf``.
        '''
        # create new conf.py file
        # Values are pre-quoted because they are spliced verbatim into
        # Python source. The inline fallbacks for PROMPT / TARGET_LANGLET /
        # TARGET_COMPILER duplicate defaults already applied by
        # update_options(); they are kept as a second line of defense.
        d_langlet_config = {
            "LANGLET_ID" : str(langlet_id),
            "COMPILED_EXT": '"'+self.options["compiled_ext"]+'"',
            "SOURCE_EXT": '"'+self.options["source_ext"]+'"',
            "PROMPT": '"'+self.options["prompt"]+'"' if self.options["prompt"] else '"> "',
            "LANGLET_NAME": '"'+self.name+'"',
            "PARENT_LANGLET": '"'+self.options["parent"]+'"',
            "TARGET_LANGLET": '"'+self.options["target"]+'"' if self.options["target"] else '"'+self.name+'"',
            "TARGET_COMPILER": '"'+self.options["target_compiler"]+'"' if self.options["target_compiler"] else '"default"'
        }
        with open(pth_conf) as f_conf:
            st = string.Template(f_conf.read())
            return st.substitute(**d_langlet_config)

    def update_lex_token(self, langlet_id, pth_lex_token_template):
        '''
        Returns the text of the new lex_token.py: the string template at
        ``pth_lex_token_template`` with LANGLET_ID substituted.
        '''
        # update LANGLET_ID in lex_token.py
        with open(pth_lex_token_template) as f:
            st = string.Template(f.read())
            return st.substitute(LANGLET_ID = str(langlet_id))

    def create_files(self):
        '''
        Creates the langlet directory, copies the template tree and
        instantiates all string templates.

        On any failure the traceback is printed, the partially created
        directory is removed again and the process exits with status 2.
        '''
        self.update_options()
        # Destination: an explicit --location wins (the langlet name is
        # appended unless the location already ends in it); otherwise the
        # langlet goes below langscape/langlets/.
        if self.options["location"]:
            loc = path(self.options["location"])
            if loc.basename() != self.name:
                pth_langlet = loc.joinpath(self.name)
            else:
                pth_langlet = loc
        else:
            pth_langlet = langlets_path.joinpath(self.name)

        self.new_langlet_pth = pth_langlet
        # Refuse to overwrite an existing langlet.
        if pth_langlet.exists():
            raise IOError("Langlet directory already exists: '%s'"%pth_langlet)
        try:
            pth_langlet_module = pth_langlet.joinpath("langlet.py")

            # Langlet ids are spaced 10**4 apart: each langlet gets a block
            # of 10000 node ids starting at count*10000.
            count  = self.create_new_langlet_counter()
            lnlt_id = count*(10**4)

            pth_template = parent_path.joinpath("langlet_template")
            pth_conf = pth_template.joinpath("langlet_config.strtempl")

            # NOTE(review): pth_langlet_py / langlet_py (and
            # pth_langlet_module above) appear unused below -- dead code?
            pth_langlet_py = pth_template.joinpath("langlet.strtempl")
            langlet_py = ""

            # Copy the whole template tree to the destination.
            pth_template.copytree(pth_langlet)

            # When deriving from a parent, the parent's generated grammar and
            # token definitions become the new langlet's base definitions.
            if self.options["parent"]:
                parent_langlet   = path(loader.find_langlet(self.options["parent"])[1].__file__).dirname()
                grammar_gen_path = parent_langlet.joinpath("parsedef", "GrammarGen.g")
                grammar_gen_path.copy(pth_langlet.joinpath("parsedef", "GrammarBase.g"))
                token_gen_path = parent_langlet.joinpath("lexdef", "TokenGen.g")
                token_gen_path.copy(pth_langlet.joinpath("lexdef", "TokenBase.g"))

            # operate on destination folder...
            # (pth_conf is rebound here from the template path to the
            # destination's langlet_config.py)
            pth_conf = pth_langlet.joinpath("langlet_config.py")
            pth_conf_template = pth_langlet.joinpath("langlet_config.strtempl")
            with open(pth_conf,"w") as f_conf:
                f_conf.write(self.create_langlet_config_py(lnlt_id, pth_conf_template))
            pth_langlet.joinpath("run.py").rename(pth_langlet.joinpath("run_"+self.name+".py"))
            pth_langlet.joinpath("langlet_config.strtempl").remove()

            # update LANGLET_ID in lex_token.py
            pth_lex_token = pth_langlet.joinpath("lexdef","lex_token.py")
            pth_lex_token_template = path(pth_lex_token.replace(".py",".strtempl"))
            with open(pth_lex_token, "w") as f_lex:
                f_lex.write(self.update_lex_token(lnlt_id, pth_lex_token_template))

            # NOTE(review): create_langlet_data's parameter is named
            # pth_langlet_py but receives the langlet_config.py path here --
            # confirm which path is actually expected.
            self.create_langlet_data(pth_conf)
            pth_lex_token_template.remove()
        except Exception, e:
            # Best-effort cleanup: print the traceback, remove the partial
            # langlet directory, and abort.
            # NOTE(review): `e` is bound but unused, and exit() is the
            # interactive helper injected by the site module -- sys.exit(2)
            # or raise SystemExit(2) would be more robust. Left unchanged.
            print get_traceback()
            self.new_langlet_pth.rmtree()
            exit(2)


    def print_status(self):
        '''
        Prints a confirmation message and an ASCII tree of the files the
        new langlet is expected to contain.
        '''
        print "New langlet '%s' created:"%self.name
        print
        # Show the two directory levels above the langlet for orientation.
        f_1, f_2 = self.new_langlet_pth.splitall()[-3:-1]
        s = "    [%s]+-[%s]"%(f_1,f_2)
        print s
        white = " "*(s.find("+-")+3)
        print white+"+- [%s]"%self.name
        print white+"    +- __init__.py"
        print white+"    +- cstfunction.py"
        print white+"    +- langlet.py"
        print white+"    +- langlet_config.py"
        print white+"    +- postlexer.py"
        print white+"    +- run_%s.py"%self.name
        print white+"    +- transformer.py"
        # NOTE(review): the [cstdef] section below lists lex_* files,
        # identical to [lexdef] -- looks like a copy/paste slip in this
        # display-only listing; confirm against the template tree.
        print white+"    +- [cstdef]"
        print white+"        +- __init__.py"
        print white+"        +- lex_symbol.py"
        print white+"        +- lex_token.py"
        print white+"        +- lex_nfa.py"
        print white+"    +- [lexdef]"
        print white+"        +- __init__.py"
        print white+"        +- lex_symbol.py"
        print white+"        +- lex_token.py"
        print white+"        +- lex_nfa.py"
        print white+"        +- TokenGen.g"
        print white+"        +- TokenBase.g"
        print white+"        +- TokenExt.g"
        print white+"    +- [parsedef]"
        print white+"        +- __init__.py"
        print white+"        +- parse_symbol.py"
        print white+"        +- parse_token.py"
        print white+"        +- parse_nfa.py"
        print white+"        +- GrammarGen.g"
        print white+"        +- GrammarBase.g"
        print white+"        +- GrammarExt.g"
        print white+"    +- [reports]"
        print white+"    +- [tests]"


