# coding: utf-8

import copy

from ala_classes import SyntaxTreeNode, Token, TokenType, GlobalSymbolTable, LocalSymbolTable, TypeEnum, Type, AlaTypeCheckerError, AlaTypeError, AlaBuiltinRedefineError, AlaBuiltinFunArgumentsError, AlaGeneralSemanticError


class SemanticAnalyzer(object):
    """Two-pass semantic analyzer for the Ala syntax tree.

    Pass 1 (:meth:`run1`) assigns a :class:`Type` to value tokens and
    desugars compound assignments (``+=``, ``-=``, ``*=``, ``/=``) into a
    plain assignment whose right-hand side is an explicit binary
    expression node.

    Pass 2 (:meth:`run2`) walks the tree bottom-up performing type
    checking, constant folding of integer/boolean expressions,
    symbol-table population, and argument validation for the built-in
    functions ``range``/``input``/``raw_input``.  Folding may replace a
    sub-tree with a single token, so the (possibly new) root is stored
    back into ``self.tree``.
    """

    # Productions whose body does not open an independent visibility
    # area: symbols first assigned inside them must be registered one
    # 'start' scope further out (see __add_symbol).
    PRODUCTIONS_WITH_EXTENDED_VIEW_AREA = ['if_expr', 'for_expr', 'while_expr']

    # Identifiers that are always considered defined (built-in functions).
    BUILTIN_IDS = ['range', 'input', 'raw_input']

    tree = None                 # root SyntaxTreeNode of the program
    global_symbol_table = None  # GlobalSymbolTable shared by every scope

    def __init__(self, tree):
        self.tree = tree
        self.global_symbol_table = GlobalSymbolTable()

    # INTERFACE

    def run(self):
        """Run both analysis passes in order."""
        self.run1()
        self.run2()

    def run1(self):
        """First pass: type annotation of tokens and desugaring.

        AlaGeneralSemanticError (and its subclasses) raised by the pass
        propagate to the caller unchanged.
        """
        self.__process1(self.tree)

    def run2(self):
        """Second pass: type checking, folding and symbol tables.

        Domain-specific errors propagate unchanged; any other exception
        is wrapped into a generic AlaGeneralSemanticError.
        """
        try:
            self.tree = self.__process2(self.tree, self.tree, None)
        except AlaGeneralSemanticError:
            # Bare 'raise' re-raises with the original traceback
            # (Python 2 'raise e' would discard it).
            raise
        except Exception:
            raise AlaGeneralSemanticError(u"Неизвестная семантическая ошибка")

    # PRIVATE METHODS

    def __process1(self, node):
        """Recursively annotate tokens and rewrite ``x OP= y`` in place.

        A compound-assignment node ``[id, OP=, value]`` becomes
        ``[id, =, SyntaxTreeNode(rule, [id, OP, value])]`` so that later
        passes only ever see plain assignments.
        """
        if type(node) == Token:
            node.variable_type = SemanticAnalyzer.__to_type(node.type)
        elif type(node) == SyntaxTreeNode:
            node.local_symbol_table = LocalSymbolTable(self.global_symbol_table)
            if node.production == "assign_expr":
                assign_type = node.parts[1].type
                line = node.line
                if assign_type != TokenType.ASSIGN:
                    # Map the compound operator to its plain counterpart
                    # and to the production of the generated expression.
                    operator_type = None
                    rule = None
                    if assign_type == TokenType.ASSIGN_PLUS:
                        operator_type = TokenType.PLUS
                        rule = "ve_plus_minus"
                    elif assign_type == TokenType.ASSIGN_MINUS:
                        operator_type = TokenType.MINUS
                        rule = "ve_plus_minus"
                    elif assign_type == TokenType.ASSIGN_MULT:
                        operator_type = TokenType.MULT
                        rule = "ve_mult_div"
                    elif assign_type == TokenType.ASSIGN_DIV:
                        operator_type = TokenType.DIV
                        rule = "ve_mult_div"
                    # Shallow copies: the target id appears both on the
                    # left-hand side and inside the generated expression.
                    target = copy.copy(node.parts[0])
                    operator = Token(operator_type, token_line=line)
                    value = copy.copy(node.parts[2])
                    new_node = SyntaxTreeNode(rule, [target, operator, value])
                    node.parts[1] = Token(TokenType.ASSIGN, token_line=line)
                    node.parts[1].parent = node
                    node.parts[2] = new_node
                    node.parts[2].parent = node

            for part in node.parts:
                self.__process1(part)

    def __process2(self, node, nearest_start_node1, nearest_start_node2):
        """Type-check and fold *node*; return the node that replaces it.

        :param nearest_start_node1: innermost enclosing 'start' node
            (owner of the current local symbol table).
        :param nearest_start_node2: the 'start' node one level further
            out, used for productions with an extended view area.
        :returns: *node* itself, or a folded constant ``Token``.
        """
        node_to_return = node
        if type(node_to_return) != SyntaxTreeNode:
            # Tokens were fully annotated in pass 1; nothing to do.
            return node_to_return
        prod = node_to_return.production
        if prod == "start":
            # Entering a new scope: shift the start-node window.
            nearest_start_node2 = nearest_start_node1
            nearest_start_node1 = node_to_return
            if nearest_start_node2 is None:
                nearest_start_node2 = node_to_return
        elif prod == "for_expr":
            if type(node_to_return.parts[0]) != Token or node_to_return.parts[0].type != TokenType.ID:
                raise AlaTypeError(u"Итератор цикла 'for' должен быть переменной", line_number=node_to_return.line)
            # The loop iterator is always an integer variable.
            self.__add_symbol(node_to_return, nearest_start_node1, nearest_start_node2, node_to_return.parts[0].value,
                              Type(TypeEnum.INT))

        # Process children first (bottom-up), replacing folded sub-trees.
        for i in xrange(len(node_to_return.parts)):
            returned_node = self.__process2(node_to_return.parts[i], nearest_start_node1, nearest_start_node2)
            node_to_return.parts[i] = returned_node

        if prod == "ve_plus_minus" or prod == "ve_mult_div":
            SemanticAnalyzer.__set_type_if_not(node_to_return.parts[0])
            SemanticAnalyzer.__set_type_if_not(node_to_return.parts[2])
            if SemanticAnalyzer.__compare_node_types(node_to_return.parts[0], node_to_return.parts[2]):
                node_to_return.variable_type = node_to_return.parts[0].variable_type
                if SemanticAnalyzer.__is_int_val_nodes(node_to_return.parts[0], node_to_return.parts[2]):
                    # Both operands are integer literals: fold into a
                    # single VAL_INT token.
                    line = node_to_return.line
                    if node_to_return.parts[1].type == TokenType.PLUS:
                        node_to_return = \
                            Token(TokenType.VAL_INT,
                                  str(int(node_to_return.parts[0].value) + int(node_to_return.parts[2].value)),
                                  token_line=line)
                    elif node_to_return.parts[1].type == TokenType.MINUS:
                        node_to_return = \
                            Token(TokenType.VAL_INT,
                                  str(int(node_to_return.parts[0].value) - int(node_to_return.parts[2].value)),
                                  token_line=line)
                    elif node_to_return.parts[1].type == TokenType.MULT:
                        node_to_return = \
                            Token(TokenType.VAL_INT,
                                  str(int(node_to_return.parts[0].value) * int(node_to_return.parts[2].value)),
                                  token_line=line)
                    elif node_to_return.parts[1].type == TokenType.DIV:
                        # NOTE: Python 2 '/' on ints is floor division,
                        # matching the language's integer-only arithmetic.
                        node_to_return = \
                            Token(TokenType.VAL_INT,
                                  str(int(node_to_return.parts[0].value) / int(node_to_return.parts[2].value)),
                                  token_line=line)
                    node_to_return.variable_type = Type(TypeEnum.INT)
            else:
                raise AlaTypeError(u"Несоответсвие типов {0} и {1}".format(node_to_return.parts[0].variable_type,
                                                                           node_to_return.parts[2].variable_type),
                                   line_number=node_to_return.line)
        elif prod == "ve_brackets":
            # Parenthesized literal: drop the brackets entirely.
            if SemanticAnalyzer.__is_int_val_node(node_to_return.parts[0]):
                node_to_return = node_to_return.parts[0]
            else:
                node_to_return.variable_type = node_to_return.parts[0].variable_type
        elif prod == "ve_or" or prod == "ve_and":
            SemanticAnalyzer.__set_type_if_not(node_to_return.parts[0])
            SemanticAnalyzer.__set_type_if_not(node_to_return.parts[2])
            if not SemanticAnalyzer.__compare_node_types(node_to_return.parts[0], node_to_return.parts[2],
                                                         TypeEnum.BOOL):
                raise AlaTypeError(u"Недопустимый тип для оператора '{0}'".format(node_to_return.parts[1].value),
                                   line_number=node_to_return.line)
            # Language rule: literal true/false may not appear as a
            # direct operand of and/or.
            if (SemanticAnalyzer.__is_token_node(node_to_return.parts[0], TokenType.TRUE) or
                    SemanticAnalyzer.__is_token_node(node_to_return.parts[0], TokenType.FALSE) or
                    SemanticAnalyzer.__is_token_node(node_to_return.parts[2], TokenType.TRUE) or
                    SemanticAnalyzer.__is_token_node(node_to_return.parts[2], TokenType.FALSE)):
                raise AlaTypeError(u"Использование булевой константы в операторе '{0}'".format(
                    node_to_return.parts[1].value),
                    line_number=node_to_return.line)
            if SemanticAnalyzer.__is_val_nodes(node_to_return.parts[0], node_to_return.parts[2], TypeEnum.BOOL):
                # Fold constant boolean expression into a single token.
                line = node_to_return.line
                res = None
                if prod == "ve_or":
                    res = (node_to_return.parts[0].type == TokenType.TRUE or
                           node_to_return.parts[2].type == TokenType.TRUE)
                elif prod == "ve_and":
                    res = (node_to_return.parts[0].type == TokenType.TRUE and
                           node_to_return.parts[2].type == TokenType.TRUE)
                if res:
                    node_to_return = Token(TokenType.TRUE, token_line=line)
                else:
                    node_to_return = Token(TokenType.FALSE, token_line=line)
            node_to_return.variable_type = Type(TypeEnum.BOOL)
        elif prod == "ve_compare":
            SemanticAnalyzer.__set_type_if_not(node_to_return.parts[0])
            SemanticAnalyzer.__set_type_if_not(node_to_return.parts[2])
            if SemanticAnalyzer.__compare_node_types(node_to_return.parts[0], node_to_return.parts[2]):
                if SemanticAnalyzer.__is_val_nodes(node_to_return.parts[0], node_to_return.parts[2]):
                    line = node_to_return.line
                    res = None
                    if node_to_return.parts[1].type == TokenType.EQUAL:
                        res = node_to_return.parts[0].value == node_to_return.parts[2].value
                    elif node_to_return.parts[1].type == TokenType.NOT_EQUAL:
                        res = node_to_return.parts[0].value != node_to_return.parts[2].value
                    elif SemanticAnalyzer.__is_int_val_nodes(node_to_return.parts[0], node_to_return.parts[2]):
                        # BUGFIX: token values are strings, so they must
                        # be converted before ordering comparisons —
                        # lexicographically "10" < "9" would be true.
                        left = int(node_to_return.parts[0].value)
                        right = int(node_to_return.parts[2].value)
                        if node_to_return.parts[1].type == TokenType.MORE:
                            res = left > right
                        elif node_to_return.parts[1].type == TokenType.LESS:
                            res = left < right
                        elif node_to_return.parts[1].type == TokenType.MORE_E:
                            res = left >= right
                        elif node_to_return.parts[1].type == TokenType.LESS_E:
                            res = left <= right
                    else:
                        # Ordering comparison on non-integer literals.
                        # BUGFIX: pass the line number like every other
                        # raise in this method.
                        raise AlaTypeError(u"Недопустимый тип {0} для такого сравнения".format(
                            node_to_return.parts[0].variable_type),
                            line_number=node_to_return.line)
                    if res:
                        node_to_return = Token(TokenType.TRUE, token_line=line)
                    else:
                        node_to_return = Token(TokenType.FALSE, token_line=line)
                node_to_return.variable_type = Type(TypeEnum.BOOL)
            else:
                raise AlaTypeError(u"Недопустимые типы {0} и {1} для сравнения".format(
                    node_to_return.parts[0].variable_type, node_to_return.parts[2].variable_type))
        elif prod == "ve_not":
            SemanticAnalyzer.__set_type_if_not(node_to_return.parts[1])
            if node_to_return.parts[1].variable_type.type != TypeEnum.BOOL:
                raise AlaTypeError(u"Недопустимый тип для оператора '{0}'".format(node_to_return.parts[0].value),
                                   line_number=node_to_return.line)
            # Language rule: 'not' may not be applied to a boolean literal.
            if (SemanticAnalyzer.__is_token_node(node_to_return.parts[1], TokenType.TRUE) or
                    SemanticAnalyzer.__is_token_node(node_to_return.parts[1], TokenType.FALSE)):
                raise AlaTypeError(u"Использование булевой константы в операторе '{0}'".format(
                    node_to_return.parts[0].value),
                    line_number=node_to_return.line)
            node_to_return.variable_type = Type(TypeEnum.BOOL)
        elif prod == "ve_unary_plus_minus":
            SemanticAnalyzer.__set_type_if_not(node_to_return.parts[1])
            if node_to_return.parts[1].variable_type.type != TypeEnum.INT:
                raise AlaTypeError(u"Недопустимый тип операнда для унарного плюса/минуса: {0}".format(
                    node_to_return.parts[1].variable_type), line_number=node_to_return.line)
            if node_to_return.parts[0].type == TokenType.MINUS:
                # Fold unary minus on an integer literal by negating it.
                if SemanticAnalyzer.__is_token_node(node_to_return.parts[1], TokenType.VAL_INT):
                    node_to_return.parts[1].parent = node_to_return.parent
                    node_to_return = node_to_return.parts[1]
                    node_to_return.value = str(-int(node_to_return.value))
            else:
                # Unary plus is the identity: drop the wrapper node.
                node_to_return.parts[1].parent = node_to_return.parent
                node_to_return = node_to_return.parts[1]

        elif prod == "ve_array":
            if len(node_to_return.parts) == 0:
                raise AlaTypeError(u"Массив не должен быть пустым", line_number=node_to_return.line)
            if type(node_to_return.parts[0]) == SyntaxTreeNode and node_to_return.parts[0].production == "ve_array":
                raise AlaTypeError(u"Не допускаются вложенные массивы",
                                   line_number=node_to_return.line)
            # A single-element array wraps the element directly; otherwise
            # the elements sit under a list node.
            if SemanticAnalyzer.__is_token_node(node_to_return.parts[0]):
                parts = [node_to_return.parts[0]]
                single = True
            else:
                parts = node_to_return.parts[0].parts
                single = False
            for part in parts:
                SemanticAnalyzer.__set_type_if_not(part)
                if type(part) == SyntaxTreeNode and part.production == "ve_array":
                    raise AlaTypeError(u"Не допускаются вложенные массивы",
                                       line_number=part.line)
                if not single and not SemanticAnalyzer.__compare_node_types(node_to_return.parts[0].parts[0], part):
                    raise AlaTypeError(u"Массив должен содержать элементы одного типа",
                                       line_number=node_to_return.line)
            arr_type = parts[0].variable_type.type
            if arr_type == TypeEnum.INT:
                node_to_return.variable_type = Type(TypeEnum.ARRAY_OF_INT)
            else:
                raise AlaTypeError(u"Недопустимый тип элемента массива", line_number=node_to_return.line)
        elif prod == "ve_index":
            SemanticAnalyzer.__set_type_if_not(node_to_return.parts[0])
            SemanticAnalyzer.__set_type_if_not(node_to_return.parts[1])
            if node_to_return.parts[0].variable_type.type != TypeEnum.ARRAY_OF_INT:
                raise AlaTypeError(u"Попытка взятия индекса от типа {0}".format(node_to_return.parts[0].variable_type),
                                   line_number=node_to_return.line)
            if node_to_return.parts[1].variable_type.type == TypeEnum.INT:
                # A literal index can be range-checked right here.
                if type(node_to_return.parts[1]) == Token and node_to_return.parts[1].type == TokenType.VAL_INT:
                    if int(node_to_return.parts[1].value) < 0:
                        raise AlaTypeError(u"Попытка взятия отрицательного индекса",
                                           line_number=node_to_return.line)
                if node_to_return.parts[0].variable_type.type == TypeEnum.ARRAY_OF_INT:
                    node_to_return.variable_type = Type(TypeEnum.INT)
                else:
                    raise AlaTypeError(u"Попытка взятия индекса от типа {0}".format(
                        node_to_return.parts[0].variable_type), line_number=node_to_return.line)
            else:
                # BUGFIX: the index node may be a Token (no .parts), so
                # read variable_type from the node itself — the original
                # 'parts[1].parts[0]' raised AttributeError here.
                raise AlaTypeError(u"Попытка взятия индекса типа {0}".format(node_to_return.parts[1].variable_type),
                                   line_number=node_to_return.line)
        elif prod == "assign_expr":
            SemanticAnalyzer.__set_type_if_not(node_to_return.parts[2])
            assignment_type = node_to_return.parts[2].variable_type
            if assignment_type is None:
                assignment_type = Type(TypeEnum.UNKNOWN)

            if SemanticAnalyzer.__is_token_node(node_to_return.parts[0], TokenType.ID):
                # res is (is_defined, defining_node, declared_type).
                res = SemanticAnalyzer.__is_defined(node_to_return.parts[0].value, node_to_return)
                if res[0]:
                    # Re-assignment: arrays and strings are immutable
                    # bindings, and the type must stay stable.
                    if assignment_type.type == TypeEnum.ARRAY_OF_INT:
                        raise AlaGeneralSemanticError(u"Попытка переопределить массив", line_number=node_to_return.line)
                    if assignment_type.type == TypeEnum.STRING:
                        raise AlaGeneralSemanticError(u"Попытка переопределить строку", line_number=node_to_return.line)
                    if res[2].type != assignment_type.type and assignment_type.type != TypeEnum.UNKNOWN:
                        raise AlaTypeError(u"Попытка приведения типа '{0}' к '{1}'".format(assignment_type, res[2]),
                                           line_number=node_to_return.line)
                    node_to_return.parts[0].variable_type = assignment_type
                else:
                    # First assignment: declare the symbol in the
                    # appropriate scope.
                    node_to_return.parts[0].variable_type = assignment_type
                    self.__add_symbol(node_to_return, nearest_start_node1, nearest_start_node2,
                                      node_to_return.parts[0].value, node_to_return.parts[0].variable_type)
            else:
                if SemanticAnalyzer.__is_token_node(node_to_return.parts[0]):
                    raise AlaTypeError(u"Присвоение возможно только идентификатору или значению по его индексу",
                                       line_number=node_to_return.line)
        elif prod == "print_expr":
            for part in node_to_return.parts:
                SemanticAnalyzer.__set_type_if_not(part)
        elif prod == "ve_funcall_id_or_value" and node_to_return.parts[0].value == "range":
            # Built-in range(): only valid as the iterable of a for loop,
            # with 1..3 integer arguments.
            line = node_to_return.line
            if node_to_return.parent.production != 'for_expr':
                raise AlaGeneralSemanticError(u"Функция '{0}()' может использоваться только в цикле for".format(
                    node_to_return.parts[0].value), line_number=line)
            if len(node_to_return.parts[1].parts) == 0:
                raise AlaBuiltinFunArgumentsError(u"Функция '{0}()' должна принимать аргументы".format(
                    node_to_return.parts[0].value), line_number=line)
            if ((type(node_to_return.parts[1].parts[0]) != SyntaxTreeNode or node_to_return.parts[1].parts[0].production != "ve_list") and
                    type(node_to_return.parts[1].parts[0]) != Token):
                raise AlaBuiltinFunArgumentsError(u"Ошибка в аргументах встроенной функции '{0}()'".format(
                    node_to_return.parts[0].value), line_number=line)
            # A lone argument is a Token; several come as a ve_list node.
            if type(node_to_return.parts[1].parts[0]) == Token:
                parts = [node_to_return.parts[1].parts[0], ]
            else:
                parts = node_to_return.parts[1].parts[0].parts
            if len(parts) == 0 or len(parts) > 3:
                raise AlaBuiltinFunArgumentsError(u"Функция '{0}()' должна принимать от 1 до 3 аргументов".format(node_to_return.parts[0].value),
                                                  line_number=line)
            for range_argument in parts:
                SemanticAnalyzer.__set_type_if_not(range_argument)
                if range_argument.variable_type.type != TypeEnum.INT:
                    raise AlaBuiltinFunArgumentsError(
                        u"Функция '{0}()' должна принимать только целочисленные аргументы".format(node_to_return.parts[0].value), line_number=line)
        elif (prod == "ve_funcall_id_or_value" and
                (node_to_return.parts[0].value == "input" or node_to_return.parts[0].value == "raw_input")):
            # Built-ins input()/raw_input(): optional single prompt token;
            # input() yields INT, raw_input() yields STRING.
            line = node_to_return.line
            if len(node_to_return.parts[1].parts) > 0:
                SemanticAnalyzer.__set_type_if_not(node_to_return.parts[1].parts[0])
                if type(node_to_return.parts[1].parts[0]) != Token:
                    raise AlaBuiltinFunArgumentsError(u"Функция '{0}()' должна принимать 0 или 1 аргументов".format(node_to_return.parts[0].value),
                                                      line_number=line)
            if node_to_return.parts[0].value == "input":
                node_to_return.variable_type = Type(TypeEnum.INT)
            if node_to_return.parts[0].value == "raw_input":
                node_to_return.variable_type = Type(TypeEnum.STRING)
        elif prod == "ve_funcall_id_or_value":
            raise AlaGeneralSemanticError(u"Пользовательские функции не реализованы", line_number=node_to_return.line)
        return node_to_return

    def __add_symbol(self, node, nearest_start_node1, nearest_start_node2, name, variable_type):
        """Register *name* in the symbol table of the owning 'start' node.

        Symbols created inside if/for/while bodies are stored one scope
        further out so they stay visible after the construct ends.
        """
        node_to_take = nearest_start_node1
        if node.parent.parent.production in SemanticAnalyzer.PRODUCTIONS_WITH_EXTENDED_VIEW_AREA:
            node_to_take = nearest_start_node2
        node_to_take.local_symbol_table.add_symbol(node, name, variable_type)

    @staticmethod
    def __set_type_if_not(node):
        """Resolve the type of an untyped ID token from the symbol tables.

        Raises AlaTypeError if the identifier was never defined.
        """
        if node.variable_type is None and type(node) == Token and node.type == TokenType.ID:
            res = SemanticAnalyzer.__is_defined(node.value, node.parent)
            if not res[0]:
                raise AlaTypeError(u"Обращение к необъявленной переменной '{0}'".format(node.value),
                                   line_number=node.line)
            node.variable_type = res[2]

    @staticmethod
    def __is_defined(variable_name, node):
        """Walk the tree upwards looking for *variable_name*.

        :returns: ``(found, defining_node, declared_type)``.
            NOTE(review): for a builtin that is not in any symbol table
            the returned type is ``res[1]`` (likely None) — callers only
            rely on the 'found' flag in that case.
        """
        res = node.local_symbol_table.has_symbol(variable_name)
        if res[0] or variable_name in SemanticAnalyzer.BUILTIN_IDS:
            return True, node, res[1]
        elif node.parent:
            return SemanticAnalyzer.__is_defined(variable_name, node.parent)
        else:
            return False, None, None

    @staticmethod
    def __to_type(token_type):
        """Map a value-token type to the language Type, or None for others."""
        if token_type == TokenType.TRUE or token_type == TokenType.FALSE:
            return Type(TypeEnum.BOOL)
        elif token_type == TokenType.VAL_INT:
            return Type(TypeEnum.INT)
        elif token_type == TokenType.VAL_STR:
            return Type(TypeEnum.STRING)
        else:
            return None

    @staticmethod
    def __compare_node_types(node1, node2, necessary_type=None):
        """True when both nodes share a type (optionally *necessary_type*)."""
        return (node1.variable_type.type == node2.variable_type.type and
               (necessary_type is None or node1.variable_type.type == necessary_type))

    @staticmethod
    def __is_val_node(node, necessary_type=None):
        """True for a literal token (bool/int/string), optionally typed."""
        return (type(node) == Token and (node.type == TokenType.TRUE or
                                        node.type == TokenType.FALSE or
                                        node.type == TokenType.VAL_INT or
                                        node.type == TokenType.VAL_STR) and
                (necessary_type is None or node.variable_type.type == necessary_type))

    @staticmethod
    def __is_val_nodes(node1, node2, necessary_type=None):
        """True when both nodes are literal tokens (optionally typed)."""
        return SemanticAnalyzer.__is_val_node(node1, necessary_type) and SemanticAnalyzer.__is_val_node(node2, necessary_type)

    @staticmethod
    def __is_int_val_node(node):
        """True for an integer-literal token."""
        return type(node) == Token and node.type == TokenType.VAL_INT

    @staticmethod
    def __is_int_val_nodes(node1, node2):
        """True when both nodes are integer-literal tokens."""
        return SemanticAnalyzer.__is_int_val_node(node1) and SemanticAnalyzer.__is_int_val_node(node2)

    @staticmethod
    def __is_token_node(node, token_type=None):
        """True for a Token, optionally of the given token type."""
        return type(node) == Token and (token_type is None or node.type == token_type)