import logging
import mindspore as ms
from mindspore import nn
#import torch.nn as nn
from PositionAttention import *
from ms_Retsranformer import ResTranformer
from ms_resnet import resnet45
from ms_model import Model,_default_tfmer_cfg
#from modules.attention import *

class BaseAlignment(Model):
    """Gated fusion ("alignment") head over language and vision features.

    Blends the two feature streams with a learned sigmoid gate and maps the
    fused features to per-position character logits.
    """

    def __init__(self, config):
        super().__init__(config)
        d_model = _default_tfmer_cfg['d_model']

        self.loss_weight = config.model_alignment_loss_weight
        self.max_length = config.dataset_max_length + 1  # additional stop token
        self.w_att = nn.Dense(2 * d_model, d_model,
                              weight_init='uniform', bias_init='uniform')
        self.cls = nn.Dense(d_model, self.charset.num_classes,
                            weight_init='uniform', bias_init='uniform')
        # Instantiate operators once here instead of on every forward call:
        # MindSpore convention, and avoids re-creating op objects per step.
        self.concat = ms.ops.Concat(axis=2)
        self.sigmoid = ms.ops.Sigmoid()

    def construct(self, l_feature, v_feature):
        """Fuse the two feature streams and classify each position.

        Args:
            l_feature: (N, T, E) language features, where N is batch size,
                T is sequence length and E is the model dimension.
            v_feature: (N, T, E) vision features, same shape as ``l_feature``.

        Returns:
            dict with:
                'logits': (N, T, C) per-position character logits,
                'pt_lengths': predicted sequence lengths from ``_get_length``,
                'loss_weight': configured alignment loss weight,
                'name': 'alignment'.
        """
        f = self.concat((l_feature, v_feature))  # (N, T, 2E)
        f_att = self.sigmoid(self.w_att(f))      # per-element gate in [0, 1]
        # Convex combination of vision and language features, gated per element.
        output = f_att * v_feature + (1 - f_att) * l_feature
        logits = self.cls(output)  # (N, T, C)
        pt_lengths = self._get_length(logits)
        return {'logits': logits, 'pt_lengths': pt_lengths,
                'loss_weight': self.loss_weight, 'name': 'alignment'}
