import torch
import torch.nn as nn
import torch.nn.functional as F

from .bconv import Bconv
from .cfm import CFM
from .fam import FAM
from .pdc import PDC
from .resnet import ResNet18,init_weight
from .rf import RF
from .sa import SA
from .sca import SpatialAttention, ChannelwiseAttention

class MyNet_fam(nn.Module):
    """Decoder network built on a ResNet18 backbone with five FAM fusion stages.

    The backbone produces five feature maps (X0..X4); the decoder fuses them
    top-down through FAM blocks, narrowing channels 512 -> 256 -> 128 -> 64
    -> 64 -> 1, then upsamples the 1-channel result by 4x (two 2x bilinear
    steps). ``forward`` returns the final map plus the four intermediate
    stage outputs (useful for deep supervision).
    """

    def __init__(self):
        super(MyNet_fam, self).__init__()
        self.resnet = ResNet18()
        init_weight(self.resnet)
        # NOTE(review): downsample2 is never used in forward(); kept so the
        # attribute remains available to any external code that references it.
        self.downsample2 = nn.MaxPool2d(2, stride=2)
        self.upsample2 = nn.Upsample(scale_factor=2, mode='bilinear',
                                     align_corners=True)
        # Backward-compatible alias for the original misspelled attribute
        # name; nn.Upsample has no parameters, so state_dict is unaffected.
        self.upsameple2 = self.upsample2

        # Decoder stages: FAM(in_channels, out_channels[, upsample_flag]).
        # The first stage takes the deepest backbone feature alone; the rest
        # fuse the previous stage output with a skip connection.
        self.conv1 = FAM(512, 256)
        self.conv2 = FAM(256, 128, True)
        self.conv3 = FAM(128, 64, True)
        self.conv4 = FAM(64, 64, True)
        self.conv5 = FAM(64, 1, True)

    def forward(self, x):
        """Run backbone + decoder.

        Args:
            x: input image batch tensor.

        Returns:
            Tuple ``(out, out1, out2, out3, out4)`` — the final 1-channel
            map (upsampled 4x) followed by the intermediate decoder
            outputs from shallowest to deepest stage.
        """
        feats = self.resnet(x)
        # Shape comments are the original author's notes (C, H, W);
        # presumably for a 352x352 input — TODO confirm against ResNet18.
        x0 = feats['X0']    # 64, 88, 88
        x1 = feats['X1']    # 64, 88, 88
        x2 = feats['X2']    # 128, 44, 44
        x3 = feats['X3']    # 256, 22, 22
        x4 = feats['X4']    # 512, 11, 11

        # Top-down fusion: each stage consumes the previous output plus the
        # matching-resolution backbone skip feature.
        out4 = self.conv1(x4)
        out3 = self.conv2(out4, x3)
        out2 = self.conv3(out3, x2)
        out1 = self.conv4(out2, x1)
        out = self.conv5(out1, x0)

        # Two 2x bilinear upsamples -> 4x total, restoring input resolution.
        out = self.upsample2(self.upsample2(out))

        return out, out1, out2, out3, out4