from collections import defaultdict
from typing import List
from tvm_graph.node import Node
from tvm_graph.graph import Graph
from loguru import logger


class RecoverUtil:
    """Translate a :class:`Graph` of :class:`Node` layer descriptions into
    runnable PyTorch model source code.

    The emitted code defines ``RecoverModel(torch.nn.Module)`` whose
    ``__init__`` instantiates one ``torch.nn`` layer per graph node and whose
    ``forward`` wires them together following the graph topology.
    """

    def __init__(self, graph: Graph) -> None:
        self.graph = graph
        # Fixed fragments of the emitted file; the two lists below are filled
        # in by generate_init_codes() / generate_forward_codes().
        self.import_codes = ["import torch", "import torchvision", "from torchvision.transforms.functional import crop"]
        self.model_code = "class RecoverModel(torch.nn.Module):"
        self.init_code_head = "    def __init__(self):"
        self.init_codes = []

        # TODO: We may need to consider multiple model inputs.
        # NOTE: torch.nn.Module dispatches through `forward`, not `__forward__`.
        self.forward_code_head = "    def forward(self, x):"
        self.forward_codes = []
        self.return_code = "return x_{}"
        # Per-layer-type counter used to build unique generated layer names.
        self.name_idx_dict = defaultdict(int)

    def output_model_code(self) -> str:
        """Assemble and return the complete generated model source as a string."""
        space = "        "
        lines = list(self.import_codes)
        lines.append(self.model_code)
        lines.append(self.init_code_head)
        # The generated module must initialise torch.nn.Module itself before
        # any layer attribute is assigned.
        lines.append(space + "super().__init__()")
        lines.extend(space + init_code for init_code in self.init_codes)
        lines.append(self.forward_code_head)
        lines.extend(space + forward_code for forward_code in self.forward_codes)
        lines.append(space + self.return_code)
        return "\n".join(lines)

    def get_name_idx(self, func_type):
        """Return the next 1-based index for layers of type *func_type*."""
        self.name_idx_dict[func_type] += 1
        return self.name_idx_dict[func_type]

    def generate_activation(self, act_type):
        """Build the ``__init__`` assignment for an activation layer.

        Returns ``(code, name)`` where *code* is the generated assignment
        (e.g. ``'self.relu_1 = torch.nn.ReLU()'``) and *name* is the attribute
        name used to call the activation from ``forward``.
        """
        act_name = "{}_{}".format(act_type, self.get_name_idx(act_type))
        # Keras activation keywords -> torch.nn layer constructors.
        # 'softmax', 'softplus', 'sigmoid', 'elu', 'softsign', 'linear', 'tanh', 'relu', 'selu', None, 'hard_sigmoid'
        act_layers = {
            "relu": "torch.nn.ReLU()",
            "softmax": "torch.nn.Softmax()",
            "softplus": "torch.nn.Softplus()",
            "sigmoid": "torch.nn.Sigmoid()",
            "elu": "torch.nn.ELU()",
            "softsign": "torch.nn.Softsign()",
            # TODO: 'linear' is the identity in Keras; mapped to ReLU for now.
            "linear": "torch.nn.ReLU()",
            "tanh": "torch.nn.Tanh()",
            "selu": "torch.nn.SELU()",
            "hard_sigmoid": "torch.nn.Hardsigmoid()",
        }
        # Unknown types fall through to an empty right-hand side, matching the
        # previous if/elif chain's behaviour.
        act_code = "self.{} = ".format(act_name) + act_layers.get(act_type, "")
        return act_code, act_name

    def generate_initcode_from_single_node(self, node: Node) -> List:
        """Emit the ``__init__`` code lines for one graph node.

        Side effects on *node*: sets ``recover_layer_name`` (layer types) or
        ``recover_code`` (functional ops, with ``$`` as the input placeholder),
        plus ``has_activation`` / ``activation_name`` when the layer carries a
        fused activation.

        Raises ValueError for nodes whose function type is ``"Unknown"``.
        """
        code = []
        if node.function_type == "Unknown":
            logger.error("There are unknown function node")
            # Fail loudly instead of exiting with a *success* status code.
            raise ValueError("cannot recover a node with unknown function type")
        if node.function_type == "Conv2D":
            layer_name = "conv2d_{}".format(self.get_name_idx(node.function_type))
            node.recover_layer_name = layer_name
            # Shapes are channels-last (Keras style), so channel counts come
            # from the trailing dimension.  torch's kwarg is `dilation`, not
            # Keras' `dilation_rate`.
            code.append(
                "self.{} = torch.nn.Conv2d(in_channels={}, out_channels={}, kernel_size={}, stride=({},{}), padding=\"{}\", dilation=({},{}))"
                .format(
                    layer_name,
                    node.in_shape[-1],
                    node.out_shape[-1],
                    node.function_param["kernel_size"],
                    node.function_param["strides"][0],
                    node.function_param["strides"][1],
                    node.function_param["padding"],
                    node.function_param["dilation_rate"][0],
                    node.function_param["dilation_rate"][1],
                ))
            if node.function_param["activation"] is not None:
                act_type = node.function_param["activation"]
                node.has_activation = True
                act_code, node.activation_name = self.generate_activation(act_type)
                code.append(act_code)

        elif node.function_type == "SeparableConv2D":
            # TODO Need new implementation
            pass
        elif node.function_type == "DepthwiseConv2D":
            layer_name = "dw_conv2d_{}".format(self.get_name_idx(node.function_type))
            node.recover_layer_name = layer_name
            # Depthwise = grouped conv with groups == in_channels.
            code.append(
                "self.{} = torch.nn.Conv2d(in_channels={}, out_channels={}, kernel_size={}, stride=({},{}), padding=\"{}\", dilation=({},{}), groups={})"
                .format(layer_name, node.in_shape[-1], node.in_shape[-1], node.function_param["kernel_size"],
                        node.function_param["strides"][0], node.function_param["strides"][1],
                        node.function_param["padding"], node.function_param["dilation_rate"][0],
                        node.function_param["dilation_rate"][1], node.in_shape[-1]))
            if node.function_param["activation"] is not None:
                act_type = node.function_param["activation"]
                node.has_activation = True
                # generate_activation returns (code, name) — keep the same
                # unpack order as the Conv2D branch.
                act_code, node.activation_name = self.generate_activation(act_type)
                code.append(act_code)

        elif node.function_type == "Cropping2D":
            # NOTE(review): torchvision's crop signature is
            # (img, top, left, height, width); assumes the stored `cropping`
            # list is already in that order — TODO confirm against the parser.
            node.recover_code = "crop($, {}, {}, {}, {})".format(
                node.function_param["cropping"][0],
                node.function_param["cropping"][1],
                node.function_param["cropping"][2],
                node.function_param["cropping"][3],
            )

        elif node.function_type == "UpSampling2D":
            layer_name = "up2d_{}".format(self.get_name_idx(node.function_type))
            node.recover_layer_name = layer_name
            # The previous mapping was inverted: 'bilinear' produced a
            # nearest-neighbour layer and vice versa.
            if node.function_param["interpolation"] == "bilinear":
                code.append("self.{} = torch.nn.UpsamplingBilinear2d(scale_factor={})".format(
                    layer_name, node.function_param["size"]))
            else:
                code.append("self.{} = torch.nn.UpsamplingNearest2d(scale_factor={})".format(
                    layer_name, node.function_param["size"]))

        elif node.function_type == "UpSampling3D":
            layer_name = "up3d_{}".format(self.get_name_idx(node.function_type))
            node.recover_layer_name = layer_name
            code.append("self.{} = torch.nn.Upsample(scale_factor={})".format(layer_name, node.function_param["size"]))
        elif node.function_type == "ZeroPadding2D":
            layer_name = "zp2d_{}".format(self.get_name_idx(node.function_type))
            node.recover_layer_name = layer_name
            code.append("self.{} = torch.nn.ZeroPad2d(padding={})".format(layer_name, node.function_param["padding"]))
        elif node.function_type == "ZeroPadding3D":
            layer_name = "zp3d_{}".format(self.get_name_idx(node.function_type))
            node.recover_layer_name = layer_name
            code.append("self.{} = torch.nn.ConstantPad3d(padding={}, value=0)".format(layer_name,
                                                                                       node.function_param["padding"]))
        elif node.function_type == "Concatenate":
            # Functional ops keep a `$` placeholder for the forward inputs.
            node.recover_code = "torch.cat(($), dim={})".format(node.function_param["axis"])
        elif node.function_type == "Add":
            node.recover_code = "torch.add($)"
        elif node.function_type == "Subtract":
            node.recover_code = "torch.sub($)"
        elif node.function_type == "Multiply":
            node.recover_code = "torch.mul($)"
        elif node.function_type == "Dot":
            pass

        return code

    def generate_init_codes(self):
        """Populate ``self.init_codes`` from every node in the graph."""
        node_set: List[Node] = self.graph.node_set
        for node in node_set:
            self.init_codes.extend(self.generate_initcode_from_single_node(node))

    def topology_sort(self):
        """Return the graph's nodes ordered so every node follows its parents.

        Raises ValueError when the graph contains a cycle (otherwise the
        original implementation would spin forever).
        """
        node_list: List[Node] = self.graph.node_set
        in_degree = {node.name: len(node.father_nodes) for node in node_list}
        sorted_list = []

        while len(sorted_list) < len(node_list):
            progressed = False
            for nodename in in_degree:
                if in_degree[nodename] == 0:
                    tmp_node = self.graph.find_node_by_name(nodename)
                    sorted_list.append(tmp_node)
                    in_degree[nodename] = -1  # mark as consumed
                    for nextnode in tmp_node.next_nodes:
                        in_degree[nextnode.name] -= 1
                    progressed = True
            if not progressed:
                raise ValueError("graph contains a cycle; topology sort impossible")

        return sorted_list

    def generate_forward_codes(self):
        """Populate ``self.forward_codes`` in topological order.

        Each node's output is bound to ``x_<k>``; the final ``return``
        statement references the last produced name.
        """
        counter = 0
        for node in self.topology_sort():
            counter += 1
            if len(node.father_nodes) == 0:
                # Root nodes consume the model input directly.
                inputs = "x"
            else:
                inputs = ", ".join(tmp_node.recover_output_name for tmp_node in node.father_nodes)
            if node.recover_code == "":
                # Layer node: call the module attribute registered in __init__.
                self.forward_codes.append("x_{} = self.{}({})".format(counter, node.recover_layer_name, inputs))
            else:
                # Functional node: substitute the inputs for the `$` placeholder.
                self.forward_codes.append("x_{} = {}".format(counter, node.recover_code.replace("$", inputs)))

            node.recover_output_name = "x_{}".format(counter)
            if node.has_activation:
                self.forward_codes.append("x_{} = self.{}(x_{})".format(counter, node.activation_name, counter))

        self.return_code = self.return_code.format(counter)

if __name__ == "__main__":
    # Smoke test: recover a small two-layer convolutional chain.
    def make_conv_node(name, ctor_shape, params, in_shape, out_shape):
        # Build a Conv2D node and attach the metadata RecoverUtil reads.
        node = Node(name, 1, ctor_shape)
        node.function_type = "Conv2D"
        node.function_param = params
        node.in_shape = in_shape
        node.out_shape = out_shape
        return node

    first = make_conv_node(
        "conv2d-1",
        [1, 29, 198, 512],
        {
            'filters': 16,
            'kernel_size': 3,
            'strides': [7, 1],
            'padding': 'valid',
            'activation': 'relu',
            'input_shape': [200, 200, 10],
            'dilation_rate': [1, 1],
            'paramsShape': [(3, 3, 10, 512), (1, 1, 1, 512)],
            'outputShape': [1, 29, 198, 512]
        },
        [200, 200, 10],
        [1, 29, 198, 512],
    )
    second = make_conv_node(
        "conv2d-2",
        [1, 27, 196, 32],
        {
            'filters': 16,
            'kernel_size': 3,
            'strides': [1, 1],
            'padding': 'same',
            'activation': 'tanh',
            'input_shape': [1, 29, 198, 512],
            'dilation_rate': [1, 1],
            'paramsShape': [(3, 3, 10, 512), (1, 1, 1, 512)],
            'outputShape': [1, 29, 198, 32]
        },
        [1, 29, 198, 512],
        [1, 29, 198, 32],
    )

    first.add_next_node(second)
    second.add_father_node(first)

    graph = Graph()
    for node in (first, second):
        graph.add_node(node)

    recover_util = RecoverUtil(graph)
    recover_util.generate_init_codes()
    recover_util.generate_forward_codes()
    logger.info(recover_util.output_model_code())
