from typing import Type

import torch
import torch.nn as nn
import torch.nn.functional as F
# from utils import display_np_arrays_as_images
import einops
from einops.layers.torch import Reduce, Rearrange
from collections import OrderedDict


class DummyLayer(nn.Module):
    """Minimal Conv2d + ReLU block.

    For stride=1 the spatial dimensions are preserved exactly (padding='same').

    Args:
        in_channels: number of input channels.
        out_channels: number of output channels.
        kernel_size: convolution kernel size (default 3).
        stride: convolution stride (default 1).
    """

    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1):
        super(DummyLayer, self).__init__()
        # Bug fix: nn.Conv2d raises ValueError for padding='same' when
        # stride != 1, which made the `stride` parameter unusable. For strided
        # convs fall back to symmetric padding kernel_size // 2, which for odd
        # kernels gives the 'same'-style output size ceil(H / stride).
        padding = 'same' if stride == 1 else kernel_size // 2
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride,
                              padding=padding)

    def forward(self, x):
        """Apply convolution then ReLU.

        Args:
            x: input tensor of shape (N, in_channels, H, W).

        Returns:
            Tensor of shape (N, out_channels, H', W') with non-negative values.
        """
        return F.relu(self.conv(x))


if __name__ == '__main__':
    # Smoke-test: push a fake batch through each layer and confirm the output
    # can be element-wise multiplied back against the input (attention-style
    # gating keeps the same shape).
    batch_size, channels, height, width = 32, 8, 224, 224
    fake_batch = torch.randn(batch_size, channels, height, width)

    layers = [DummyLayer(channels, channels)]

    for layer in layers:
        attention = layer(fake_batch)
        gated = torch.mul(fake_batch, attention)
        print('Layer:', type(layer), '----output shape', attention.shape,
              '----multiply to input,and get shape:', gated.shape)
