import torch
import torch.nn as nn
import numpy as np

class TimeAttention(nn.Module):
    """Temporal attention gate: A_t = sigmoid(x / mean(x, dim=2)).

    Produces element-wise weights in (0, 1) that scale each value by how
    large it is relative to its per-sample mean along dim 2.
    """

    def __init__(self):
        super(TimeAttention, self).__init__()
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        """Compute attention weights for ``x``.

        Args:
            x: tensor with at least 3 dims; the mean is taken over dim 2.
               (Callers here pass shape (N, 1, L) — confirm for other uses.)

        Returns:
            Tensor of the same shape as ``x`` with values in (0, 1).
        """
        # Per-sample mean along dim 2.
        mean_x = x.mean(dim=2, keepdim=True)

        # Avoid division by zero. Done out-of-place with torch.where:
        # the original boolean-indexed in-place write on a non-leaf tensor
        # can raise/break during autograd backward.
        safe_mean = torch.where(mean_x == 0,
                                torch.full_like(mean_x, 1e-8),
                                mean_x)

        normalized_x = x / safe_mean

        # Squash the normalized values into (0, 1) attention weights.
        A_t = self.sigmoid(normalized_x)

        return A_t

class BiLSTM(nn.Module):
    """Bidirectional LSTM preceded by a temporal-attention gate.

    The input is first scaled element-wise by ``TimeAttention`` weights,
    then fed through a single-layer bidirectional LSTM.
    """

    def __init__(self, L, hidden_size):
        super(BiLSTM, self).__init__()
        # Bidirectional long short-term memory over feature size L.
        self.bilstm = nn.LSTM(L, hidden_size, bidirectional=True)
        self.TimeAttention = TimeAttention()

    def forward(self, x):
        """Gate ``x`` with attention weights and run the BiLSTM.

        Returns the full output sequence; the final (h, c) state is discarded.
        """
        gated = self.TimeAttention(x) * x
        output, _ = self.bilstm(gated)
        return output


# Model hyperparameter.
hidden_size = 128

# Example: N samples, each a single-channel sequence of length L.
N = 240
L = 100

x = torch.randn(N, 1, L)

# Pass the constant instead of re-hard-coding 128 (kept the two in sync).
bilstm = BiLSTM(L, hidden_size=hidden_size)

output = bilstm(x)
output_data = output.squeeze(1)
# Bidirectional output doubles the hidden size: [N, 2 * hidden_size].
print(output_data.size())  # torch.Size([240, 256])
print(type(x))

