import torch
from torch import nn

from network.backbone.bottleneck_transformer_pytorch import Attention

# Smoke test: push a random feature map through a standalone Attention layer
# from the bottleneck-transformer backbone and print input/output shapes.
img = torch.randn(2, 2, 8, 8)  # (batch, channels, height, width)
rel_pos_emb = False            # absolute (not relative) positional embedding
fmap_size = (8, 8)             # spatial size of the input feature map
heads = 4
dim_head = 4
attn_dim_in = 2                # must match the channel dim of `img`
model = nn.Sequential(
    Attention(
        dim=attn_dim_in,
        fmap_size=fmap_size,
        heads=heads,
        dim_head=dim_head,
        rel_pos_emb=rel_pos_emb,
    )
)
print(img.shape)
# NOTE(review): output channel count is presumably heads * dim_head, with
# spatial dims preserved — confirm against the Attention implementation.
preds = model(img)
print(preds.shape)
