# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""RFCN head."""

import mindspore.numpy as np
import numpy
import mindspore.common.dtype as mstype
import mindspore.nn as nn
from mindspore.ops import operations as P
from mindspore.common.tensor import Tensor
import mindspore.ops as ops


class RfcnRegClsBlock(nn.Cell):
    """Produce position-sensitive classification and regression score maps.

    A shared 1x1 convolution reduces the backbone feature map, then two
    parallel 1x1 convolutions emit one channel per (bin, class) for
    classification and one channel per (bin, coordinate) for regression.

    Args:
        in_channels (int): channels of the incoming feature map.
        out_channels (int): channels of the shared intermediate feature map.
        k_bins (int): spatial bins per axis used by PS-RoI pooling.
        num_classes (int): number of classes (background included).
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 k_bins,
                 num_classes,
                 ):
        super(RfcnRegClsBlock, self).__init__()
        num_bins = k_bins * k_bins
        # Shared 1x1 reduction applied before both branches.
        self.shard_conv = nn.Conv2d(in_channels, out_channels, kernel_size=1,
                                    stride=1, pad_mode='same', has_bias=True)
        # Classification branch: k*k position-sensitive maps per class.
        self.rfcn_cls_conv = nn.Conv2d(out_channels, num_bins * num_classes,
                                       kernel_size=1, stride=1, pad_mode='same', has_bias=True)
        # Regression branch: k*k position-sensitive maps per box coordinate.
        self.rfcn_reg_conv = nn.Conv2d(out_channels, num_bins * 4,
                                       kernel_size=1, stride=1, pad_mode='same', has_bias=True)

    def construct(self, x):
        """Return (cls_maps, reg_maps) computed from feature map ``x``."""
        shared = self.shard_conv(x)
        return self.rfcn_cls_conv(shared), self.rfcn_reg_conv(shared)


class PSRoIAlign(nn.Cell):
    """Position-sensitive RoI align built from plain ROIAlign plus bin masks.

    Each spatial bin of the pooled output is restricted (via a precomputed
    0/1 mask) to its own group of input channels; the masked responses are
    summed per bin and then averaged over all bins (the R-FCN "vote").

    Args:
        pooled_height (int): output bins along the vertical axis.
        pooled_width (int): output bins along the horizontal axis.
        spatial_scale (float): scale from box coordinates to feature map.
        sample_num (int): sampling ratio forwarded to ROIAlign.
        classes_num (int): number of classes (background included).
    """

    def __init__(self, pooled_height, pooled_width, spatial_scale=1.0, sample_num=-1, classes_num=81):
        super(PSRoIAlign, self).__init__()

        self.pooled_height = pooled_height
        self.pooled_width = pooled_width
        self.bin_num = pooled_height * pooled_width
        self.classes_num = classes_num
        # Tile factors to broadcast one bin mask over all class / coord channels.
        self.tile_cls_size = (classes_num, 1, 1)
        self.tile_reg_size = (4, 1, 1)
        self.roi_align = ops.ROIAlign(pooled_height, pooled_width, spatial_scale, sample_num)
        self.reshape = ops.Reshape()
        self.softmax = nn.Softmax()
        self.strided_slice = ops.StridedSlice()
        self.concat = ops.Concat()
        self.cls_mask, self.reg_mask = self.build_mask()
        self.reduce_sum = ops.ReduceSum(keep_dims=True)

    def construct(self, input_cls, input_reg, rois):
        """Return per-RoI (softmaxed cls scores, softmaxed reg scores)."""
        num_rois = rois.shape[0]
        # Keep only the channel group belonging to each spatial bin.
        pooled_cls = self.roi_align(input_cls, rois) * self.cls_mask
        pooled_reg = self.roi_align(input_reg, rois) * self.reg_mask

        # Sum over the bin's spatial cell, then average (vote) across bins.
        cls_score = self.reshape(self.reduce_sum(pooled_cls, (2, 3)),
                                 (num_rois, self.bin_num, self.classes_num)).mean(axis=1)
        reg_score = self.reshape(self.reduce_sum(pooled_reg, (2, 3)),
                                 (num_rois, self.bin_num, 4)).mean(axis=1)
        return self.softmax(cls_score), self.softmax(reg_score)

    def build_mask(self):
        """Precompute the 0/1 position-sensitive channel masks.

        For bin ``i`` the mask is 1 only at cell ``i`` of the pooled grid,
        tiled over the class channels (cls) and coordinate channels (reg).
        """
        cls_parts = ()
        reg_parts = ()
        for bin_idx in range(self.bin_num):
            one_hot = np.zeros(self.bin_num)
            one_hot[bin_idx] = 1
            cell = np.expand_dims(one_hot.reshape((self.pooled_height, self.pooled_width)), axis=0)
            cls_parts += (np.tile(cell, self.tile_cls_size),)
            reg_parts += (np.tile(cell, self.tile_reg_size),)
        return self.concat(cls_parts), self.concat(reg_parts)


class Rfcn_Head(nn.Cell):
    """RFCN detection head.

    Runs the position-sensitive score-map convolutions (RfcnRegClsBlock) and
    PS-RoI align over the stage-5 feature map. In training mode `construct`
    returns the combined loss tuple; otherwise it returns per-RoI scores.

    Args:
        config: network config; only num_expected_pos_stage2 and
            num_expected_neg_stage2 are read here.
        in_channels (int): channels of the input feature map (res_block5).
        out_channels (int): channels of the shared 1x1 conv in the score block.
        k_bins (int): spatial bins per axis for position-sensitive pooling.
        num_classes (int): number of classes (background included).
        batch_size (int): images per step; scales the expected RoI count.
        spatial_scale (float): feature-map scale passed to PSRoIAlign.
        sample_num (int): sampling ratio passed to PSRoIAlign.
    """

    def __init__(self,
                 config,
                 in_channels,
                 out_channels,
                 k_bins,
                 num_classes,
                 batch_size,
                 spatial_scale,
                 sample_num,
                 ):
        super(Rfcn_Head, self).__init__()
        cfg = config
        self.dtype = numpy.float32
        self.num_classes = num_classes
        # NOTE(review): SoftmaxCrossEntropyWithLogits expects raw logits, but
        # PSRoIAlign already applies Softmax to cls_score before it reaches
        # this loss (a second softmax) — confirm this is intentional.
        self.loss_cls = P.SoftmaxCrossEntropyWithLogits()
        self.loss_bbox = P.SmoothL1Loss(beta=1.0)
        self.logicaland = P.LogicalAnd()
        self.cast = P.Cast()
        self.onehot = P.OneHot()
        self.greater = P.Greater()
        # Relative weights of the two loss terms (both 1.0 here).
        self.rcnn_loss_cls_weight = Tensor(np.array(1).astype(self.dtype))
        self.rcnn_loss_reg_weight = Tensor(np.array(1).astype(self.dtype))
        # Total RoIs sampled per step across the batch.
        self.num_bboxes = (cfg.num_expected_pos_stage2 + cfg.num_expected_neg_stage2) * batch_size
        self.ms_type = mstype.float32
        self.on_value = Tensor(1.0, mstype.float32)
        self.off_value = Tensor(0.0, mstype.float32)
        self.value = Tensor(1.0, self.ms_type)
        self.reshape = P.Reshape()
        self.sum_loss = P.ReduceSum()
        self.tile = P.Tile()
        self.expandims = P.ExpandDims()
        self.clip = ops.clip_by_value
        # Mask that zeroes the background column (class 0) per RoI.
        # NOTE(review): rmv_first_tensor is built but not referenced in this
        # file's construct/loss — possibly kept for checkpoint compatibility
        # or external use; verify before removing.
        rmv_first = np.ones((self.num_bboxes, self.num_classes))
        rmv_first[:, 0] = np.zeros((self.num_bboxes,))
        self.rmv_first_tensor = Tensor(rmv_first.astype(self.dtype))
        self.rfcn_reg_cls_block = RfcnRegClsBlock(in_channels, out_channels, k_bins, num_classes)
        self.ps_roi_align = PSRoIAlign(k_bins, k_bins, spatial_scale, sample_num, num_classes)

    def construct(self, res_block5, roi_data, bbox_targets, labels, mask):
        """Score RoIs and, in training, compute the head loss.

        Args:
            res_block5: backbone stage-5 feature map.
            roi_data: RoIs in the format expected by ROIAlign.
            bbox_targets: regression targets per RoI (used in training only).
            labels: integer class label per RoI (used in training only).
            mask: validity mask per RoI (used in training only).

        Returns:
            Training: (loss, loss_cls, loss_reg, loss_print).
            Eval: (cls_score, cls_score / 1.0, reg_score, cls_score) — a
            4-tuple shaped to match the training output arity.
        """
        cls_conv, reg_conv = self.rfcn_reg_cls_block(res_block5)
        cls_score, reg_score = self.ps_roi_align(cls_conv, reg_conv, roi_data)

        if self.training:
            # 1 for valid foreground RoIs, 0 otherwise: (label > 0) AND mask,
            # scaled by the label then clipped back to {0, 1}.
            bbox_weights = self.cast(self.logicaland(self.greater(labels, 0), mask), mstype.int32) * labels  # (1280,)
            bbox_weights = self.cast(self.clip(bbox_weights, self.off_value, self.on_value), mstype.int32)
            #bbox_weights: [28  1 28 ...  0  0  0] -> clipped to [1 1 1 ... 0 0 0]
            labels = self.onehot(labels, self.num_classes, self.on_value, self.off_value)  # (1280, 81)
            loss, loss_cls, loss_reg, loss_print = self.loss(cls_score, reg_score, bbox_targets, bbox_weights, labels, mask)
            out = (loss, loss_cls, loss_reg, loss_print)
        else:
            out = (cls_score, (cls_score / self.value), reg_score, cls_score)

        return out

    def loss(self, cls_score, bbox_pred, bbox_targets, bbox_weights, labels, weights):
        """Loss method.

        Combines masked softmax cross-entropy over classes with a
        foreground-only SmoothL1 regression loss; both terms are normalized
        by the number of valid RoIs (sum of `weights`).

        Args:
            cls_score: per-RoI class scores from PSRoIAlign.
            bbox_pred: per-RoI regression output from PSRoIAlign.
            bbox_targets: per-RoI regression targets.
            bbox_weights: 0/1 foreground indicator per RoI.
            labels: one-hot class labels, (num_bboxes, num_classes).
            weights: 0/1 validity mask per RoI.

        Returns:
            (loss, loss_cls, loss_reg, loss_print) where loss_print is the
            tuple (loss_cls, loss_reg) for logging.
        """
        loss_print = ()
        loss_cls, _ = self.loss_cls(cls_score, labels)
        weights = self.cast(weights, self.ms_type)
        # Average cls loss over valid RoIs only.
        loss_cls = loss_cls * weights
        loss_cls = self.sum_loss(loss_cls, (0,)) / self.sum_loss(weights, (0,))
        # -> bbox_weights: (1280, )
        # Regression: sum SmoothL1 over the 4 coords, keep foreground RoIs,
        # normalize by the count of valid RoIs, then sum.
        loss_reg = self.loss_bbox(bbox_pred, bbox_targets)
        loss_reg = self.sum_loss(loss_reg, (1,))
        loss_reg = loss_reg * bbox_weights
        loss_reg = loss_reg / self.sum_loss(weights, (0,))
        loss_reg = self.sum_loss(loss_reg, 0)
        loss = self.rcnn_loss_cls_weight * loss_cls + self.rcnn_loss_reg_weight * loss_reg
        loss_print += (loss_cls, loss_reg)
        return loss, loss_cls, loss_reg, loss_print
