import torch
import torch.nn.functional as F
import torch.nn as nn
from .bconv import Bconv

class RF(nn.Module):
    """Receptive-field block.

    Runs the input through three parallel branches (b1-b3), each a 1x1
    reduction followed by a factorized kxk conv pair ((1,k) then (k,1))
    and a 3x3 dilated conv, plus a plain 1x1 branch (b4). The four branch
    outputs are concatenated on the channel axis and fused by a 3x3 conv
    (b6); a 1x1 shortcut (b5) of the input is added before the final ReLU.

    Args:
        in_channels: number of channels of the input feature map.
        out_channels: number of channels produced by every branch and by
            the fused output.
    """

    def __init__(self, in_channels, out_channels):
        super().__init__()

        # Branches b1, b2, b3 share one template, parameterized by the
        # factorized kernel size k (3, 5, 7); the trailing 3x3 conv uses
        # dilation == k to enlarge the receptive field.
        for idx, k in enumerate((3, 5, 7), start=1):
            branch = nn.Sequential(
                Bconv(in_channels, out_channels, 1),
                Bconv(out_channels, out_channels, (1, k), padding=(0, k // 2)),
                Bconv(out_channels, out_channels, (k, 1), padding=(k // 2, 0)),
                Bconv(out_channels, out_channels, 3, padding=k, dilation=k),
            )
            setattr(self, f"b{idx}", branch)

        # Plain 1x1 branch that joins the concatenation.
        self.b4 = Bconv(in_channels, out_channels, 1)
        # 1x1 shortcut added to the fused output (residual path).
        self.b5 = Bconv(in_channels, out_channels, 1)
        # Fuses the 4*out_channels concatenation back to out_channels.
        self.b6 = Bconv(4 * out_channels, out_channels, 3, padding=1)

        self.relu = nn.ReLU()

    def forward(self, x):
        # Evaluate all parallel branches on the same input.
        branch_outs = [self.b1(x), self.b2(x), self.b3(x), self.b4(x)]
        shortcut = self.b5(x)

        fused = self.b6(torch.cat(branch_outs, dim=1))

        # Residual add, then non-linearity.
        return self.relu(fused + shortcut)
    

if __name__ == '__main__':
    # Smoke test: run a random batch through the block and show the
    # output shape. `x` avoids shadowing the builtin `input`.
    x = torch.randn(64, 3, 32, 32)
    model = RF(3, 64)
    output = model(x)
    # Spatial size is preserved; channels become out_channels (64).
    print(output.size())