#!/usr/bin/env python3
# -*- coding:utf-8 -*-

import os
import sys

try:
    # Package-relative imports when loaded as part of the package.
    from .parser_common import *
    from .gvc_lexer import GVCLexer
except ImportError:
    # Bare `except:` previously masked *any* error (including
    # KeyboardInterrupt/SystemExit and genuine bugs inside the modules);
    # only an ImportError should trigger the flat-layout fallback.
    from parser_common import *
    from gvc_lexer import GVCLexer

from pycparser.plyparser import template

from ctools.common.treenode import TNode

from ctools.ly_parser.func_parser import GVCFuncParser
try:
    from ctools.ly_parser.type_parser import GVCTypeParser
except ImportError:
    # The real type parser is optional.  Previously a bare `except:`
    # masked any failure inside type_parser; narrow it to ImportError so
    # genuine bugs in that module surface instead of being shadowed by
    # this stub.
    from ctools.ly_parser.parser_common import GVCParserBase

    class GVCTypeParser(GVCParserBase):
        """Minimal stub: records the type name and yields placeholder nodes."""

        def __init__(self, *args, **kwargs):
            # NOTE(review): super().__init__() is intentionally not called;
            # the stub only needs these two attributes.  Confirm
            # GVCParserBase tolerates being skipped here.
            self._typename = ''
            self.coord = Coord('unkown', 0, 0)

        def start_parse(self, typename, *args, **kwargs):
            # Remember the type name for the subsequent parse_text() call.
            self._typename = typename

        def parse_text(self, *args, **kwargs):
            # Placeholder node carrying only the type name; the body text
            # is ignored entirely.
            return UnkownNode(self._typename)

import logging
from ctools.logext import *

# Directory containing this module; used at the bottom of the file to
# locate/generate the PLY parser table next to the source.
_fpath = os.path.dirname(os.path.realpath(__file__))
# Four module loggers from ctools.logext; only _mlog and _tlog are used
# in this file (rule-level debug tracing).
_rlog, _plog, _mlog, _tlog = get_loggers(__file__, __name__, propagate=False)

@template
class GVCParser(GVCParserBase):
    """PLY-based parser for GVC input.

    Grammar productions live in the docstrings of the ``p_*`` methods
    (PLY convention), so those docstrings are load-bearing and must not
    be edited.  A successful parse returns ``self.root``, a ``TNode``
    whose children are the parsed top-level declarations.
    """

    @staticmethod
    def get_dir_style(op):
        """Map an edge-operator string to a ``(dir, style)`` pair.

        The operator's first character selects the direction and its
        length selects the line style; the returned keywords are
        Graphviz edge attribute values.  ``'~'`` is special-cased as a
        hidden edge.  Raises ``KeyError`` for an unknown first character
        or a length outside 1..5.
        """
        if op == '~':
            return 'none', 'invis'

        d_dict = {
            '+': 'both',
            '-': 'none',
            '>': 'forward',
            '<': 'back',
        }
        s_dict = {
            1: 'solid',
            2: 'dashed',
            3: 'dotted',
            4: 'bold',
            5: 'tapered',
        }

        return d_dict[op[0]], s_dict[len(op)]

    def __init__(self, lexer=GVCLexer) -> None:
        # Fixed yacctab module name and debug file for this grammar; the
        # lexer class is injectable.
        super().__init__(lexer=lexer,
                        yacctab='ctools.ly_parser.gvc_yacctab',
                        debugfile='gvc_yacc.out')

        # Parse results are accumulated under this root node.
        self.root = TNode('None')

    def start_parse(self, name: str):
        """Reset the result tree, naming its root *name*."""
        self.root = TNode(name)

    # Wrapper around a translation unit, to allow for empty input.
    # Not strictly part of the C99 Grammar, but useful in practice.
    def p_translation_unit_or_empty(self, p):
        """ translation_unit_or_empty   : translation_unit
                                        | empty
        """

        # Recursively flatten arbitrarily nested lists/tuples, attaching
        # every Node/TNode found as a child of root.
        def __add_node(root, nds):
            for nd in nds:
                if isinstance(nd, Node):
                    root.add_child(nd)
                elif isinstance(nd, TNode):
                    root.add_child(nd)
                elif isinstance(nd, (list, tuple)):
                    __add_node(root, nd)

        if isinstance(p[1], (list, tuple)):
            # Note: translation_unit is already a list
            __add_node(self.root, p[1])

        # the last value
        p[0] = self.root
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_translation_unit_1(self, p):
        """ translation_unit    : external_declaration
        """
        # Note: external_declaration is already a list
        p[0] = p[1]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_translation_unit_2(self, p):
        """ translation_unit    : translation_unit external_declaration
        """
        # Accumulate declarations into the existing list.
        p[1].extend(p[2])
        p[0] = p[1]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')


    def p_external_declaration_2(self, p):
        """ external_declaration : initializer_decl
                                | typedef_specifier_decl
                                | struct_or_union_decl
                                | keep_decl
                                | ctree_decl
                                | function_decl
        """
        # Normalize to a list so translation_unit can extend() uniformly.
        if isinstance(p[1], list):
            p[0] = p[1]
        else:
            p[0] = [p[1]]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    # params
    def p_initializer_array_decl(self, p):
        """ initializer_decl : initializer_type ID LBRACKET RBRACKET EQUALS initializer_value SEMI
                            | initializer_ptype ID LBRACKET RBRACKET EQUALS initializer_value SEMI
        """
        # Array-style initializer, e.g. `int a[] = {...};` — the brackets
        # themselves are discarded; only name and value are kept.
        name = p[2]
        val = p[6]
        node = ParamNode(name, val, self._token_coord(p, 2))
        p[0] = node
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_initializer_decl(self, p):
        """ initializer_decl : initializer_type ID EQUALS initializer_value SEMI
                            | initializer_ptype ID EQUALS initializer_value SEMI
        """
        # Scalar initializer, e.g. `int a = x;`.
        name = p[2]
        val = p[4]
        node = ParamNode(name, val, self._token_coord(p, 2))
        p[0] = node
        # Trace every symbol of the production with its coordinate.
        for i in range(1, len(p)):
            _mlog.debug(f'{self._token_coord(p, i)} {p[i]}')


    def p_initializer_ptype(self, p):
        """ initializer_ptype : CONST CHAR TIMES
                            | CHAR TIMES
        """
        # Only the first keyword ('const' or 'char') is kept as the tag.
        p[0] = p[1]

    def p_initializer_type(self, p):
        """ initializer_type : VOID
                            | CHAR
                            | SHORT
                            | INT
                            | LONG
                            | FLOAT
                            | DOUBLE
                            | SIGNED
                            | UNSIGNED
                            | __INT128
        """
        p[0] = p[1]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_initializer_1(self, p):
        """ initializer_value : primary_expression
                            | CBLOCK
        """
        # a = xx;
        # a = {xxxxx, xxx};
        p[0] = p[1]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_typedef_specifier_decl(self, p):
        """ typedef_specifier_decl : typedef_specifier SEMI
        """
        p[0] = p[1]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_typedef_specifier_1(self, p):
        """ typedef_specifier : TYPEDEF struct_or_union_specifier ID
                        | TYPEDEF struct_or_union ID ID
        """
        token_idx = 1
        # NOTE(review): this initial value is dead — both branches below
        # reassign `alias` before it is used.
        alias = p[-1]
        if len(p) == 4:
            # `typedef struct X {...} alias` — specifier already parsed.
            alias = p[3]
            token_idx = 3
            target = p[2].name
        else:
            # `typedef struct X alias` — target is the bare tag name.
            alias = p[4]
            token_idx = 4
            target = "{} {}".format(p[2], p[3])
        node = AliasNode(alias, target, coord=self._token_coord(p, token_idx))
        p[0] = node
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def _process_typenode(self, _type, name:str, body, coord:Coord=None):
        """Delegate a struct/union body to a GVCTypeParser sub-parse.

        *_type* is the struct/union keyword, *name* the tag name and
        *body* the raw brace block; returns the sub-parser's node with
        *name* set and *coord* attached.
        """
        typename = ' '.join((_type, name))
        typename = typename.strip()
        lineno = 1
        type_parser = GVCTypeParser()

        if coord:
            # Propagate our position so the sub-parser reports
            # coordinates relative to the enclosing file.
            lineno = coord.line
            _mlog.debug(f'{coord.line} {lineno}')
            type_parser.set_coord_filename(coord.file)

        type_parser.start_parse(typename, lineno)
        tnode = type_parser.parse_text(body)
        tnode.set_name(name)
        # FIXME:
        tnode.coord = coord
        _mlog.debug(f"type: {typename} coord: {coord}")

        return tnode

    def p_typedef_specifier_2(self, p):
        """ typedef_specifier : TYPEDEF struct_or_union CBLOCK ID
        """
        typename = p[2]
        name = p[4]
        body = p[3]

        # Compute the CBLOCK token's (line, column) from the raw lexer
        # data: column = lexpos - position of the preceding newline.
        token_idx = 3
        last_cr = p.lexer.lexer.lexdata.rfind('\n', 0, p.lexpos(token_idx))
        if last_cr < 0:
            last_cr = -1

        column = (p.lexpos(token_idx) - (last_cr))
        coord = self._coord(p.lineno(token_idx), column)
        _mlog.debug(f'body: {body[:15]}')
        _mlog.debug(f'{p.lineno(token_idx)} {coord}')

        node = self._process_typenode(typename, name, body, coord)
        p[0] = node
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    # struct/union
    def p_struct_or_union_decl(self, p):
        """ struct_or_union_decl : struct_or_union_specifier SEMI
                                    | struct_or_union_specifier ID SEMI
        """
        # An optional trailing ID (declarator) is accepted but ignored.
        p[0] = p[1]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')


    def p_struct_or_union_specifier(self, p):
        """ struct_or_union_specifier : struct_or_union ID CBLOCK
        """
        typename = p[1]
        name = p[2]
        body = p[3]

        # Same coordinate computation as p_typedef_specifier_2: derive
        # the CBLOCK's column from the raw lexer input.
        token_idx = 3
        last_cr = p.lexer.lexer.lexdata.rfind('\n', 0, p.lexpos(token_idx))
        if last_cr < 0:
            last_cr = -1

        column = (p.lexpos(token_idx) - (last_cr))
        coord = self._coord(p.lineno(token_idx), column)
        _tlog.debug(f'body: {body[:15]}')
        _tlog.debug(f'{p.lineno(token_idx)} {coord}')

        node = self._process_typenode(typename, name, body, coord)
        p[0] = node
        _tlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_struct_or_union(self, p):
        """ struct_or_union : STRUCT
                            | INVIS_STRUCT
                            | UNION
        """
        p[0] = p[1]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_keep_decl(self, p):
        """ keep_decl : keep_decl_body SEMI
                    | keep_decl_body
        """
        # The trailing semicolon is optional.
        p[0] = p[1]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_keep_decl_body(self, p):
        """ keep_decl_body : KEEP KEEPBLOCK
        """
        # The node is named by its source-coordinate string.
        name = "{}".format(self._token_coord(p, 1))
        kn = KeepNode(name, p[2], self._token_coord(p, 1))
        p[0] = [kn]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_ctree_decl(self, p):
        """ ctree_decl : ctree_decl_body SEMI
                    | ctree_decl_body
        """
        # The trailing semicolon is optional.
        p[0] = p[1]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_ctree_decl_body(self, p):
        """ ctree_decl_body : CTREE CTREEBLOCK
        """
        # The node is named by its source-coordinate string.
        name = "{}".format(self._token_coord(p, 1))
        ctn = CTreeNode(name, p[2], self._token_coord(p, 1))
        p[0] = [ctn]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def _process_funcnode(self, name:str, body,
                        coord:Coord=None) -> FuncNode:
        """Delegate a function body to a GVCFuncParser sub-parse.

        Mirrors _process_typenode: seeds the sub-parser with our file
        name and line so its coordinates line up, then attaches *coord*
        to the returned node.
        """
        lineno = 1

        func_parser = GVCFuncParser()

        if coord:
            lineno = coord.line
            _mlog.debug(f'{coord.line} {lineno}')
            func_parser.set_coord_filename(coord.file)

        func_parser.start_parse(name, lineno)
        tnode = func_parser.parse_text(body)
        # FIXME:
        tnode.coord = coord

        return tnode

    # functions
    def p_function_decl(self, p):
        """ function_decl : initializer_type ID LPAREN RPAREN CBLOCK
                          | initializer_type ID LPAREN RPAREN CBLOCK SEMI
        """
        # int xxx() {...} ;
        #            ^
        #            |
        #           p[5]
        name = p[2]
        body = p[5]

        # Coordinate of the CBLOCK (see p_typedef_specifier_2 for the
        # column derivation).
        token_idx = 5
        last_cr = p.lexer.lexer.lexdata.rfind('\n', 0, p.lexpos(token_idx))
        if last_cr < 0:
            last_cr = -1

        column = (p.lexpos(token_idx) - (last_cr))
        coord = self._coord(p.lineno(token_idx), column)
        _mlog.debug(f'body: {body[:15]}')
        _mlog.debug(f'{p.lineno(token_idx)} {coord}')
        node = self._process_funcnode(name, body, coord)
        p[0] = node
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_primary_expression_2(self, p):
        """ primary_expression  : constant """
        p[0] = p[1]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_primary_expression_3(self, p):
        """ primary_expression  : unified_string_literal
                                | unified_wstring_literal
        """
        p[0] = p[1]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_constant_1(self, p):
        """ constant    : INT_CONST_DEC
                        | INT_CONST_OCT
                        | INT_CONST_HEX
                        | INT_CONST_BIN
                        | INT_CONST_CHAR
        """
        # Count u/U and l/L suffix characters among the last 3 chars to
        # validate the constant's suffix combination.
        uCount = 0
        lCount = 0
        for x in p[1][-3:]:
            if x in ('l', 'L'):
                lCount += 1
            elif x in ('u', 'U'):
                uCount += 1
        t = ''
        if uCount > 1:
             raise ValueError('Constant cannot have more than one u/U suffix.')
        elif lCount > 2:
             raise ValueError('Constant cannot have more than two l/L suffix.')
        # NOTE(review): `prefix` (and `t`) are computed but never used —
        # the suffix scan serves only as validation; the raw token text
        # is passed through unchanged.
        prefix = 'unsigned ' * uCount + 'long ' * lCount

        p[0] = p[1]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_constant_2(self, p):
        """ constant    : FLOAT_CONST
                        | HEX_FLOAT_CONST
        """
        # NOTE(review): the type classification `t` below is computed but
        # unused; the raw token text is passed through unchanged.
        if 'x' in p[1].lower():
            t = 'float'
        else:
            if p[1][-1] in ('f', 'F'):
                t = 'float'
            elif p[1][-1] in ('l', 'L'):
                t = 'long double'
            else:
                t = 'double'

        p[0] = p[1]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_constant_3(self, p):
        """ constant    : CHAR_CONST
                        | WCHAR_CONST
                        | U8CHAR_CONST
                        | U16CHAR_CONST
                        | U32CHAR_CONST
        """
        p[0] = p[1]
        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    # The "unified" string and wstring literal rules are for supporting
    # concatenation of adjacent string literals.
    # I.e. "hello " "world" is seen by the C compiler as a single string literal
    # with the value "hello world"
    #
    def p_unified_string_literal(self, p):
        """ unified_string_literal  : STRING_LITERAL
                                    | unified_string_literal STRING_LITERAL
        """
        if len(p) == 2: # single literal
            p[0] = p[1]
        else:
            # Concatenate: drop the left literal's closing quote and the
            # right literal's opening quote.
            p[1].value = p[1].value[:-1] + p[2][1:]
            p[0] = p[1]

    def p_unified_wstring_literal(self, p):
        """ unified_wstring_literal : WSTRING_LITERAL
                                    | U8STRING_LITERAL
                                    | U16STRING_LITERAL
                                    | U32STRING_LITERAL
                                    | unified_wstring_literal WSTRING_LITERAL
                                    | unified_wstring_literal U8STRING_LITERAL
                                    | unified_wstring_literal U16STRING_LITERAL
                                    | unified_wstring_literal U32STRING_LITERAL
        """
        if len(p) == 2: # single literal
            p[0] = p[1]
        else:
            # Concatenate: strip the left literal's closing quote (after
            # trailing whitespace) and the right literal's 2-char prefix
            # (encoding prefix + opening quote).
            p[1].value = p[1].value.rstrip()[:-1] + p[2][2:]
            p[0] = p[1]

        _mlog.debug(f'{self._coord(p.lineno(1), 1)} {p[1]}')

    def p_empty(self, p):
        'empty : '
        p[0] = None

    def p_error(self, p):
        # If error recovery is added here in the future, make sure
        # _get_yacc_lookahead_token still works!
        if p:
            self._parse_error(
                'before: %s' % p.value,
                self._coord(lineno=p.lineno,
                            column=self.clex.find_tok_column(p)))
        else:
            self._parse_error('At end of input', self.clex.filename)
import os.path
# Generate the PLY parser table next to this module (so it is importable
# as 'ctools.ly_parser.gvc_yacctab') the first time the module is loaded.
if not os.path.exists(os.path.join(_fpath, 'gvc_yacctab.py')):
    curr = os.getcwd()
    os.chdir(_fpath)
    try:
        prepare_yacctab(GVCParser())
    finally:
        # Previously an exception in prepare_yacctab() left the process
        # stuck in _fpath; always restore the caller's working directory.
        os.chdir(curr)
