import logging
import mindspore as ms
from mindspore import nn
#import torch.nn as nn
from PositionAttention import *
from ms_Retsranformer import ResTranformer
from ms_resnet import resnet45
from ms_model import Model,_default_tfmer_cfg
import numpy as np
from ms_utils import CharsetMapper
#from modules.attention import *
#from modules.backbone import ResTranformer
#from modules.model import Model
#from modules.resnet import resnet45
class BCNLanguage(Model):
    """Bidirectional cloze network (BCN) language model, ABINet-style, for MindSpore.

    Refines per-character class probabilities: the (N, T, C) token
    probabilities from the vision branch are projected into the model
    dimension and decoded by a transformer decoder whose queries are
    positional encodings, producing corrected logits per position.
    """

    def __init__(self, config):
        super().__init__(config)
        d_model = _default_tfmer_cfg['d_model']
        nhead = _default_tfmer_cfg['nhead']
        d_inner = _default_tfmer_cfg['d_inner']
        dropout = _default_tfmer_cfg['dropout']
        activation = _default_tfmer_cfg['activation']
        num_layers = 4
        self.d_model = d_model
        self.detach = config.model_language_detach
        self.use_self_attn = config.model_language_use_self_attn
        self.loss_weight = config.model_language_loss_weight
        self.max_length = config.dataset_max_length + 1  # +1 for the stop token
        self.debug = False
        # NOTE(review): batch size (4) and sequence lengths (256 here, 26 in
        # construct()) are hard-coded; they must match the runtime batch and
        # sequence configuration — TODO make these configurable.
        self.encoder_mask = ms.Tensor(np.ones((4, 256, 256)), ms.float16)
        # NOTE(review): self.transformer and self.encoder_mask are built but
        # never used in construct(); kept as-is because removing them would
        # change the cell's parameter/checkpoint layout — confirm before pruning.
        self.transformer = nn.TransformerEncoder(batch_size=4, num_layers=num_layers, hidden_size=self.d_model, num_heads=nhead,
                ffn_hidden_size=d_inner, hidden_dropout_rate=dropout, attention_dropout_rate=dropout, hidden_act=activation, seq_length=256)
        # Projects class probabilities (C) into the model dimension (E).
        self.proj = nn.Dense(self.charset.num_classes, d_model, weight_init='uniform', bias_init='uniform', has_bias=False)
        self.token_encoder = PositionalEncoding(d_model, max_len=self.max_length)
        # NOTE(review): dropout=1.0 — verify PositionalEncoding's dropout
        # semantics (MindSpore's historical keep_prob vs. torch's drop
        # probability); the torch reference passes 0 here.
        self.pos_encoder = PositionalEncoding(d_model, dropout=1.0, max_len=self.max_length)
        self.model = nn.TransformerDecoder(batch_size=4, num_layers=num_layers, hidden_size=self.d_model, num_heads=nhead, ffn_hidden_size=d_inner,
                        hidden_dropout_rate=dropout, attention_dropout_rate=dropout, hidden_act=activation, src_seq_length=26, tgt_seq_length=26)
        # Maps decoder features (E) back to class logits (C).
        self.cls = nn.Dense(self.d_model, self.charset.num_classes, weight_init='uniform', bias_init='uniform')

        #if config.model_language_checkpoint is not None:
            #logging.info(f'Read language model from {config.model_language_checkpoint}.')
            #self.load(config.model_language_checkpoint)

    def construct(self, tokens):
        """Run the language model over one batch of token probabilities.

        Args:
            tokens: (N, T, C) tensor — N batch size, T sequence length,
                C number of character classes.

        Returns:
            dict with 'feature' (N, T, E) decoder output, 'logits' (N, T, C),
            'pt_lengths' predicted sequence lengths, 'loss_weight' and 'name'.
        """
        # NOTE(review): self.detach is stored in __init__ but the detach of
        # `tokens` is currently disabled — confirm whether gradients should
        # flow back into the vision branch.
        embed = self.proj(tokens)         # (N, T, E)
        embed = embed.transpose(1, 0, 2)  # (T, N, E) for the positional encoder
        embed = self.token_encoder(embed)
        embed = embed.transpose(1, 0, 2)  # back to (N, T, E)
        # All-ones masks, i.e. nothing is masked out; shapes hard-coded to
        # batch 4 and length 26 (see the note in __init__).
        padding_mask = ms.Tensor(ms.numpy.ones((4, 26, 26)), ms.dtype.float32)
        zeroo = ms.ops.Zeros()
        zeros = zeroo((4, 26, 512), ms.float16)
        zeros = zeros.transpose(1, 0, 2)  # (T, N, E)
        query = self.pos_encoder(zeros)   # positional queries for the decoder
        query = query.transpose(1, 0, 2)  # (N, T, E)
        location_mask = ms.Tensor(ms.numpy.ones((4, 26, 26)), ms.dtype.float32)

        # Decoder attends from the positional queries to the embedded tokens;
        # the second return value (layer-past cache) is unused.
        output, _ = self.model(hidden_states=query, encoder_output=embed,
                attention_mask=padding_mask,
                memory_mask=location_mask)
        logits = self.cls(output)  # (N, T, C)
        pt_lengths = self._get_length(logits)
        return {'feature': output, 'logits': logits, 'pt_lengths': pt_lengths,
                'loss_weight': self.loss_weight, 'name': 'language'}
