# -*- coding: utf-8 -*-
"""Mean field B-CNN model."""


import mindspore
# import torchvision
import mindspore.nn as nn
import mindspore.ops as ops
import mindspore.ops.operations as P
from mindspore.common import initializer as init
from mindspore.common.initializer import initializer
from mindspore.train.serialization import load_param_into_net, load_checkpoint
import math
from src.var_init import KaimingNormal
from src.vgg import vgg16
from src.resnet import resnet101
from src.densenet import _densenet121

import time

# torch.cuda.manual_seed_all(0)
# torch.backends.cudnn.benckmark = True


# __all__ = ['BCNN']
# __author__ = 'Hao Zhang'
# __copyright__ = '2018 LAMDA'
# __date__ = '2018-01-09'
# __email__ = 'zhangh0214@gmail.com'
# __license__ = 'CC BY-SA 3.0'
# __status__ = 'Development'
# __updated__ = '2018-05-21'
# __version__ = '13.7'

class BCNN(nn.Cell):
    """Bilinear CNN (B-CNN) for fine-grained classification.

    A convolutional backbone (VGG-16, ResNet-101 or DenseNet-121) yields a
    512-channel feature map; classical bilinear pooling (the outer product
    of the feature map with itself, averaged over spatial positions) is
    followed by signed-sqrt and L2 normalization and a single
    fully-connected classification layer.
    """

    def __init__(self, num_classes, pre_trained, train_all, cnn_name):
        """Declare all needed layers.

        Args:
            num_classes (int): number of output classes.
            pre_trained (str): checkpoint path loaded into the backbone;
                falsy to skip loading (train from scratch).
            train_all (bool): if False, freeze the backbone parameters
                (fc-only training phase). The 1024->512 projection head
                added for resnet/densenet is never frozen here.
            cnn_name (str): one of "vgg", "resnet", "densenet".

        Raises:
            ValueError: if ``cnn_name`` is not a supported backbone.
        """
        super(BCNN, self).__init__()

        head_cells = []
        if cnn_name == "vgg":
            backbone = vgg16()
            if pre_trained:
                load_param_into_net(backbone, load_checkpoint(pre_trained))
            # Drop the final pool5 so the feature map keeps the spatial
            # resolution needed by bilinear pooling (pool5 has no
            # parameters, so freezing is unaffected).
            backbone_cells = backbone.layers.cell_list[:-1]
        elif cnn_name == "resnet":
            backbone = resnet101()
            if pre_trained:
                load_param_into_net(backbone, load_checkpoint(pre_trained))
            # Stop after layer3 (1024 channels); project down to the 512
            # channels the bilinear pooling / fc layer expect.
            backbone_cells = [backbone.conv1, backbone.bn1, backbone.relu,
                              backbone.maxpool, backbone.layer1,
                              backbone.layer2, backbone.layer3]
            head_cells = [
                nn.Conv2d(1024, 512, kernel_size=3, stride=1,
                          has_bias=False, padding=0, pad_mode="same"),
                nn.BatchNorm2d(512),
                nn.ReLU(),
            ]
        elif cnn_name == "densenet":
            backbone = _densenet121()
            if pre_trained:
                print(pre_trained)
                load_param_into_net(backbone, load_checkpoint(pre_trained))
                backbone.set_train(True)
            backbone_cells = [backbone]
            head_cells = [
                nn.Conv2d(1024, 512, kernel_size=3, stride=1,
                          has_bias=False, padding=0, pad_mode="same"),
                nn.BatchNorm2d(512),
                nn.ReLU(),
            ]
        else:
            raise ValueError("unsupported backbone: {}".format(cnn_name))

        # Freeze only the backbone in the fc-only phase; the freshly
        # initialized projection head (if any) must stay trainable.
        if not train_all:
            print("ban features")
            for cell in backbone_cells:
                for param in cell.get_parameters():
                    param.requires_grad = False

        self.features = nn.SequentialCell(backbone_cells + head_cells)

        # Classification layer over the flattened 512x512 bilinear matrix.
        self.fc = nn.Dense(in_channels=512**2, out_channels=num_classes, has_bias=True)
        if cnn_name == "vgg":
            self.fc.weight.set_data(init.initializer(
                KaimingNormal(a=0, mode='fan_out', nonlinearity='relu'),
                self.fc.weight.shape, self.fc.weight.dtype))
            if self.fc.bias is not None:
                self.fc.bias.set_data(init.initializer('zeros', self.fc.bias.shape, self.fc.bias.dtype))

    def construct(self, X):
        """Forward pass of the network.

        Args:
            X (Tensor): input images, shape (N, 3, 448, 448).

        Returns:
            Tensor: class scores, shape (N, num_classes).
        """
        X = self.features(X)
        shape = P.Shape()(X)
        N, C, H, W = shape[0], shape[1], shape[2], shape[3]

        # Classical bilinear pooling: (N, C, H*W) x (N, H*W, C) -> (N, C, C),
        # averaged over the H*W spatial positions.
        reshape = ops.Reshape()
        X = reshape(X, (N, C, H * W))
        X_t = P.Transpose()(X, (0, 2, 1))
        X = P.BatchMatMul()(X, X_t) / (H * W)
        X = reshape(X, (N, C * C))

        # Signed square-root normalization. The bilinear matrix has
        # negative off-diagonal entries, so a plain sqrt(X + 1e-5) yields
        # NaNs; use sign(X) * sqrt(|X| + 1e-5) as in the original
        # torch implementation.
        X = P.Sign()(X) * P.Sqrt()(P.Abs()(X) + 1e-5)

        # L2-normalize each sample's feature vector along axis 1 (the
        # default axis 0 would normalize across the batch, making
        # per-sample outputs depend on the rest of the batch).
        X = P.L2Normalize(axis=1)(X)

        return self.fc(X)

    def set_train_all(self, train_all):
        """Unfreeze (train_all=True) or freeze (False) every feature-extractor parameter."""
        requires_grad = bool(train_all)
        for param in self.features.get_parameters():
            param.requires_grad = requires_grad
