import numpy as np
import torch
import torch.nn as nn

class TimeAttention(nn.Module):
    """Compute element-wise time-attention weights for a signal.

    Each element is divided by the mean of its last (time) axis, and the
    result is passed through a sigmoid, yielding attention values in (0, 1)
    with the same shape as the input.
    """

    def __init__(self):
        super(TimeAttention, self).__init__()
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        """Return sigmoid-normalized attention values for ``x``.

        Args:
            x: Input tensor; the last dimension is treated as the time axis
               (e.g. shape (N, 1, L)).

        Returns:
            Tensor of attention values in (0, 1), same shape as ``x``.
        """
        # Mean over the time (last) axis. dim=-1 rather than a hard-coded
        # dim=2, so the module works for inputs of any rank — identical
        # behavior for the documented (N, 1, L) shape.
        mean_x = x.mean(dim=-1, keepdim=True)

        # Replace exact-zero means with a tiny epsilon to avoid division by
        # zero. torch.where is used instead of the original in-place
        # assignment (mean_x[mean_x == 0] = 1e-8), which would raise a
        # RuntimeError under autograd when x requires gradients.
        safe_mean = torch.where(mean_x == 0, torch.full_like(mean_x, 1e-8), mean_x)

        # Normalize by the per-series mean, then squash to (0, 1).
        normalized_x = x / safe_mean
        return self.sigmoid(normalized_x)

# Example usage: guarded so the demo only runs when this file is executed
# directly, not as a side effect of importing the module.
if __name__ == "__main__":
    N = 4  # Batch size
    L = 5  # Length of the time axis (samples per series)

    # Create a random input signal T of shape (N, 1, L)
    T = torch.randn(N, 1, L)

    # Instantiate the TimeAttention class
    attention = TimeAttention()

    # Calculate attention values
    A_t = attention(T)

    # Output the results
    print("Input T:\n", T)
    print("Attention Values A_t:\n", A_t)
