import math
import numpy as np
from mindspore import nn, Parameter, Tensor, context, ParallelMode
import mindspore as ms
import mindspore.common.dtype as mstype
from mindspore.ops import operations as P, constexpr
from mindspore.ops import functional as F
import mindspore.common.initializer as init
import mindspore.common.initializer as weight_init
from .pos_embed import *

class LayerNorm(nn.LayerNorm):
    # pylint: disable=W0212
    r"""
    A self-defined layer norm operation using reduce sum and reduce mean.

    Thin subclass that only fixes defaults — ``eps=1e-6`` and a configurable
    parameter-initialization dtype — and forwards them to the parent
    unchanged.

    NOTE(review): stock ``mindspore.nn.LayerNorm`` exposes ``epsilon=`` and
    no ``param_init_type`` keyword in several releases; confirm the pinned
    MindSpore version (or a project-patched ``nn``) accepts these keywords.
    """

    def __init__(self, normalized_shape, eps=1e-6, param_init_type=mstype.float32):
        super(LayerNorm, self).__init__(
            normalized_shape,
            eps=eps,
            param_init_type=param_init_type)


class PatchEmbed3D(nn.Cell):
    """3D volume -> patch-token embedding.

    Splits a (B, C, H, W, D) volume into non-overlapping patches and
    projects each flattened patch to ``embed_dim`` with a single Dense
    layer (a linear patch embedding).

    Args:
        img_size (tuple): input volume size (H, W, D).
        patch_size (tuple): patch size per axis; must divide ``img_size``.
        in_chans (int): number of input channels.
        embed_dim (int): output embedding dimension per patch token.
        norm_layer: accepted for API compatibility; currently unused.
        parallel_config: accepted for API compatibility; currently unused.

    Raises:
        ValueError: if any image dimension is not divisible by the
            corresponding patch dimension.
    """

    def __init__(self, img_size=(96, 96, 96), patch_size=(16, 16, 16), in_chans=1, embed_dim=768, norm_layer=None, parallel_config=None):
        super().__init__()
        # Explicit check instead of `assert` so validation survives `python -O`.
        if any(s % p != 0 for s, p in zip(img_size, patch_size)):
            raise ValueError('image size must be divisible by patch size')

        self.img_size = img_size
        self.patch_size = patch_size
        # Number of patches along each axis, and in total.
        self.grid_size = [s // p for s, p in zip(img_size, patch_size)]
        self.num_patches = int(np.prod(self.grid_size))

        self.embed_dim = embed_dim
        self.reshape = P.Reshape()
        self.transpose = P.Transpose()
        # Length of one flattened patch vector: C * p0 * p1 * p2.
        self.patch_dim = in_chans * int(np.prod(self.patch_size))
        self.proj = nn.Dense(self.patch_dim, embed_dim, weight_init='xavier_uniform')

    def construct(self, x):
        """(B, C, H, W, D) -> (B, num_patches, embed_dim)."""
        b, c, h, w, d = x.shape
        # Split every spatial axis into a (grid, patch) pair.
        x = self.reshape(x, (
            b, c,
            self.grid_size[0], self.patch_size[0],
            self.grid_size[1], self.patch_size[1],
            self.grid_size[2], self.patch_size[2]))
        # -> (b, g0, g1, g2, c, p0, p1, p2): channel-first patch layout,
        # matching Patchify3D in this file.
        x = self.transpose(x, (0, 2, 4, 6, 1, 3, 5, 7))
        # Flatten all patches and project each one independently.
        x = self.reshape(x, (b * self.num_patches, self.patch_dim))
        x = self.proj(x)
        x = self.reshape(x, (b, self.num_patches, self.embed_dim))
        return x


class PosEmbed3D(nn.Cell):
    '''Separate Positional Embedding, follow MAE_ST.

    The patch positional embedding is factorized into a spatial table of
    shape (1, H*W, embed_dim) and a temporal/depth table of shape
    (1, T, embed_dim); the full table is their aligned sum, prefixed by a
    cls-token embedding.  With ``sincos=True`` both tables are fixed
    sin-cos encodings (non-trainable) and the cls embedding is frozen at
    zero; otherwise all three are trainable.

    Args:
        embed_dim: embedding dimension.
        grid_size: patch grid sizes; ``grid_size[0]`` is treated as the
            temporal/depth axis, ``grid_size[1:]`` as the spatial grid.
        sincos: use fixed sin-cos tables instead of learned ones.
        initializer_range: stddev of the truncated-normal init for the
            trainable tables.
    '''
    def __init__(self, embed_dim, grid_size, sincos=False, initializer_range=0.02):
        super().__init__()
        self.embed_dim = embed_dim
        self.grid_size = grid_size
        # grid_size is ordered (depth/temporal, height, width).
        self.spatial_size = grid_size[1] * grid_size[2]
        self.depth_size = grid_size[0]
        self.reshape = P.Reshape()

        # Trainable unless sincos; fixed tables are written in via set_data below.
        self.pos_embed_spatial = Parameter(
            weight_init.initializer(weight_init.TruncatedNormal(sigma=initializer_range),(1, grid_size[1] * grid_size[2], embed_dim)), name='pos_embed_spatial', requires_grad=(not sincos))
        self.pos_embed_temporal = Parameter(
            weight_init.initializer(weight_init.TruncatedNormal(sigma=initializer_range),(1, grid_size[0], embed_dim)), name='pos_embed_temporal', requires_grad=(not sincos))
        
        if sincos:
            # Frozen zero embedding for the cls token.
            self.pos_embed_cls = Parameter(
                weight_init.initializer(weight_init.Zero(),(1, 1, embed_dim)), name='pos_embed_cls', requires_grad=False)
            # Fixed 2D sin-cos table over the (height, width) spatial grid.
            pos_embed = ms.Tensor(
                get_2d_sincos_pos_embed(embed_dim, grid_size[1:], cls_token=False),
                mstype.float32
            )
            pos_embed = P.ExpandDims()(pos_embed, 0)
            self.pos_embed_spatial.set_data(pos_embed)
            
            # Fixed 1D sin-cos table along the depth/temporal axis.
            # NOTE(review): *_from_grid helpers are usually called with a
            # position array, not a bare length — confirm against pos_embed.py.
            pos_embed = ms.Tensor(
                get_1d_sincos_pos_embed_from_grid(embed_dim, grid_size[0]),
                mstype.float32
            )
            pos_embed = P.ExpandDims()(pos_embed, 0)
            self.pos_embed_temporal.set_data(pos_embed)
        else:
            # Learned cls-token positional embedding.
            self.pos_embed_cls = Parameter(
                weight_init.initializer(weight_init.TruncatedNormal(sigma=initializer_range),(1, 1, embed_dim)), name='pos_embed_cls', requires_grad=True)
            
        self.tile = P.Tile()
        self.cat = P.Concat(axis=1)

    def get_pos_embed_cls(self):
        # (1, 1, embed_dim) positional embedding for the cls token.
        return self.pos_embed_cls
    
    def get_pos_embed_patch(self):
        # Spatial table tiled over depth (token index = t * spatial_size + s)
        # plus the depth table with each entry repeated spatial_size times —
        # both orderings align on t * spatial_size + s.
        # NOTE(review): relies on numpy-style Tensor.repeat(axis=1) semantics
        # (element-wise repeat) — confirm for the pinned MindSpore version.
        return self.tile(self.pos_embed_spatial, (1, self.depth_size, 1)) + self.pos_embed_temporal.repeat(self.spatial_size, axis=1)
    
    def construct(self):
        """Return the full (1, 1 + T*H*W, embed_dim) positional embedding."""
        pos_embed_patch = self.get_pos_embed_patch()
        pos_embed_cls = self.get_pos_embed_cls()
        pos_embed = self.cat((pos_embed_cls, pos_embed_patch))
        return pos_embed





class Patchify3D(nn.Cell):
    """Flatten a (B, C, H, W, D) volume into per-patch vectors.

    Each patch is flattened channel-first — (C, p0, p1, p2) — producing a
    (B, num_patches, C * p0 * p1 * p2) tensor.
    """

    def __init__(self, patch_size, parallel_config=None):
        super(Patchify3D, self).__init__()
        # Shard the transpose over the batch axis when a parallel config is given.
        dp = parallel_config.data_parallel if parallel_config else 1
        self.patch_size = patch_size
        self.reshape = P.Reshape()
        self.transpose = P.Transpose().shard(((dp, 1, 1, 1, 1, 1, 1, 1),))

    def construct(self, img):
        bs, channels, h, w, d = img.shape
        p0 = self.patch_size[0]
        p1 = self.patch_size[1]
        p2 = self.patch_size[2]
        # (bs, c, h, w, d) -> (bs, c, h/p0, p0, w/p1, p1, d/p2, p2).
        split = self.reshape(img, (bs, channels, h // p0, p0, w // p1, p1, d // p2, p2))
        # Move the grid axes forward, gathering (c, p0, p1, p2) per patch.
        split = self.transpose(split, (0, 2, 4, 6, 1, 3, 5, 7))
        # Collapse the grid into one token axis and each patch into one vector.
        return self.reshape(split, (bs, -1, channels * p0 * p1 * p2))


class UnPatchify3D(nn.Cell):
    """Inverse of Patchify3D: per-patch vectors -> (B, C, H, W, D) volume.

    Expects the channel-first patch layout (C, p0, p1, p2) that both
    Patchify3D and PatchEmbed3D in this file produce.

    Args:
        patch_size (tuple): per-axis patch size (p0, p1, p2).
        grid_size (tuple): per-axis patch counts (h, w, d).
        parallel_config: optional; supplies ``data_parallel`` for sharding.
    """

    def __init__(self, patch_size, grid_size, parallel_config=None):
        super(UnPatchify3D, self).__init__()
        if parallel_config:
            dp = parallel_config.data_parallel
        else:
            dp = 1
        self.patch_size = tuple(patch_size)
        self.grid_size = grid_size

        self.reshape = P.Reshape()
        self.transpose = P.Transpose().shard(((dp, 1, 1, 1, 1, 1, 1, 1),))

    def construct(self, x):
        """(B, num_patches, C*p0*p1*p2) -> (B, C, h*p0, w*p1, d*p2)."""
        bs = x.shape[0]
        h, w, d = self.grid_size
        p = self.patch_size
        # BUGFIX: Patchify3D flattens each patch channel-FIRST (c, p0, p1, p2),
        # but the previous code unpacked it as (p0, p1, p2, c) (channel-last),
        # scrambling reconstructions whenever in_chans > 1.  Unpack
        # channel-first instead; for a single channel the result is identical.
        x = self.reshape(x, (bs, h, w, d, -1, p[0], p[1], p[2]))
        # (bs, h, w, d, c, p0, p1, p2) -> (bs, c, h, p0, w, p1, d, p2).
        x = self.transpose(x, (0, 4, 1, 5, 2, 6, 3, 7))
        images = self.reshape(x, (bs, -1, h*p[0], w*p[1], d*p[2]))
        return images