import torch
import torch.nn as nn
from modules.nonLocal_module import NonLocal_Module
from modules.temporal_se import TemporalAttention

class SpatioTempAttentionLayer(nn.Module):
    """Fuse temporal and spatial attention branches over a 5-D feature map.

    Runs a temporal attention branch (``TemporalAttention``) and/or a spatial
    non-local branch (``NonLocal_Module``) on the same input and sums their
    outputs. Either branch can be disabled via the constructor flags; with
    both disabled the layer degenerates to an identity mapping.

    Args:
        seqLen: expected size of the input's third dimension (temporal
            length, validated in ``forward``).
        channel: number of input channels, forwarded to both branches.
        temporal_reduction: channel-reduction factor for the temporal branch.
        spatio_reduction: channel-reduction factor for the non-local branch.
        use_temp: build and apply the temporal attention branch.
        use_spatio: build and apply the spatial (non-local) branch.
    """

    def __init__(self, seqLen, channel,
                 temporal_reduction=4,
                 spatio_reduction=4,
                 use_temp=True,
                 use_spatio=True):
        super().__init__()

        self.seqLen = seqLen
        self.use_temp = use_temp
        self.use_spatio = use_spatio
        # Submodules are only instantiated when enabled, so a disabled
        # branch contributes no parameters to the model.
        if use_temp:
            self.temporal_attention = TemporalAttention(seqLen, channel, temporal_reduction)
        if use_spatio:
            self.spatial_attention = NonLocal_Module(seqLen, channel, spatio_reduction)

    def forward(self, x):
        """Apply the enabled attention branches to ``x`` and sum the results.

        Args:
            x: 5-D tensor whose third dimension must equal ``seqLen``
               (presumably (batch, channel, time, height, width) — layout
               beyond the first three axes is not checked here).

        Returns:
            Tensor of the same shape as ``x``: the sum of the enabled
            branches' outputs, or ``x`` unchanged when both branches are
            disabled.

        Raises:
            ValueError: if the temporal dimension of ``x`` does not match
                the ``seqLen`` given at construction.
        """
        b, c, d, _, _ = x.size()
        # Explicit check instead of `assert`: asserts are stripped under -O
        # and would let a shape mismatch propagate into the branches.
        if d != self.seqLen:
            raise ValueError(
                f"expected temporal length {self.seqLen}, got {d}"
            )

        # With both branches disabled the layer is an identity mapping;
        # previously this path returned the int 0, silently zeroing the
        # features and changing the return type.
        if not (self.use_temp or self.use_spatio):
            return x

        feat1 = 0
        feat2 = 0

        if self.use_temp:
            feat1 = self.temporal_attention(x)
        if self.use_spatio:
            feat2 = self.spatial_attention(x)

        return feat1 + feat2
