import torch
import torch.nn as nn
from model.resnet101 import resnet101
import math
import numpy as np


def bilinear_kernel(in_channels, out_channels, kernel_size):
    """Build an FCN-style bilinear-upsampling weight for a ConvTranspose2d.

    Channel i of the input is upsampled into channel i of the output with a
    2-D bilinear interpolation filter; all cross-channel weights are zero.

    Args:
        in_channels: input channel count of the transposed convolution.
        out_channels: output channel count of the transposed convolution.
        kernel_size: side length of the square kernel.

    Returns:
        torch.FloatTensor of shape
        (in_channels, out_channels, kernel_size, kernel_size).
    """
    factor = (kernel_size + 1) // 2
    # Bug fix: the filter center depends on kernel parity. The previous
    # `center = kernel_size / 2` put the peak on the wrong tap — e.g. for
    # kernel_size=2 it produced [[0, 0], [0, 1]] instead of the correct
    # all-0.25 bilinear filter.
    if kernel_size % 2 == 1:
        center = factor - 1
    else:
        center = factor - 0.5
    og = np.ogrid[:kernel_size, :kernel_size]
    filt = (1 - abs(og[0] - center) / factor) * (1 - abs(og[1] - center) / factor)
    weight = np.zeros((in_channels, out_channels, kernel_size, kernel_size), dtype='float32')
    # Paired fancy indexing fills only the "diagonal" (channel-preserving)
    # filters; use min() so unequal channel counts no longer raise.
    diag = range(min(in_channels, out_channels))
    weight[diag, diag, :, :] = filt
    return torch.from_numpy(weight)


def Downblock(in_ch, out_ch):
    """Two unpadded 3x3 convolutions, each followed by ReLU then BatchNorm.

    Because neither conv uses padding, each call shrinks H and W by 4
    in total (2 pixels per conv).
    """
    layers = [
        nn.Conv2d(in_ch, out_ch, kernel_size=3),
        nn.ReLU(inplace=True),
        nn.BatchNorm2d(out_ch),
        nn.Conv2d(out_ch, out_ch, kernel_size=3),
        nn.ReLU(inplace=True),
        nn.BatchNorm2d(out_ch),
    ]
    return nn.Sequential(*layers)

class UnetUpBlock(nn.Module):
    """One U-Net decoder stage: upsample, crop-and-concat the skip, refine.

    Args:
        in_ch: channels of the incoming decoder tensor ``d``; the skip
            tensor ``e`` is expected to carry ``in_ch // 2`` channels.
        mid_ch: channels after the first padded 3x3 conv.
        out_ch: channels this stage emits.
        up_mode: ``'upconv'`` (learned ConvTranspose2d) or ``'upsample'``
            (bilinear interpolation followed by a 1x1 conv).
        stride: stride of the transposed convolution (default 2 = x2 upsample).
    """

    def __init__(self, in_ch, mid_ch, out_ch, up_mode, stride=2):
        super(UnetUpBlock, self).__init__()
        if up_mode == "upconv":
            self.up = nn.ConvTranspose2d(in_ch, in_ch // 2, kernel_size=2, stride=stride)
        elif up_mode == "upsample":
            # Bug fix: the 1x1 conv must emit in_ch // 2 channels (as the
            # 'upconv' branch does) so that concatenation with the skip
            # tensor yields exactly in_ch channels for self.block. The old
            # code emitted out_ch channels, which broke whenever
            # out_ch != in_ch // 2.
            self.up = nn.Sequential(
                nn.Upsample(mode='bilinear', scale_factor=2),
                nn.Conv2d(in_ch, in_ch // 2, kernel_size=1)
            )
        self.block = nn.Sequential(
            nn.Conv2d(in_ch, mid_ch, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.BatchNorm2d(mid_ch),
            nn.Conv2d(mid_ch, out_ch, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.BatchNorm2d(out_ch)
        )

    def forward(self, e, d):
        """Upsample ``d``, center-crop ``e`` to match, concat, and refine.

        Args:
            e: encoder skip feature map (may be spatially larger than
               the upsampled ``d``).
            d: decoder feature map from the previous (deeper) stage.

        Returns:
            Tensor with ``out_ch`` channels at the upsampled resolution.
        """
        d = self.up(d)
        # Center-crop the encoder map to the upsampled decoder size.
        diffY = int(math.fabs(e.size()[2] - d.size()[2]))
        diffX = int(math.fabs(e.size()[3] - d.size()[3]))
        e = e[..., diffY // 2:e.size()[2] - (diffY - diffY // 2),
              diffX // 2:e.size()[3] - (diffX - diffX // 2)]

        cat = torch.cat([e, d], dim=1)
        out = self.block(cat)
        return out
def final_block(in_ch, class_num):
    """Per-pixel classification head: 1x1 conv to ``class_num`` channels.

    Follows the Conv -> ReLU -> BatchNorm ordering used by the other
    blocks in this file.
    """
    head = nn.Sequential(
        nn.Conv2d(in_ch, class_num, kernel_size=1),
        nn.ReLU(inplace=True),
        nn.BatchNorm2d(class_num),
    )
    return head

class ResNet101_UNet(nn.Module):
    """U-Net decoder on top of a pretrained ResNet-101 encoder.

    The encoder's forward pass is expected to return five feature maps
    (e0..e4), highest resolution first; each decoder stage doubles the
    spatial size and fuses the matching encoder skip connection, and a
    final transposed conv restores the input resolution before the 1x1
    classification head.
    """

    def __init__(self, class_num):
        super(ResNet101_UNet, self).__init__()
        # Backbone; assumed to yield a 5-tuple of feature maps in forward().
        self.encoder = resnet101(pretrained=True)

        # NOTE(review): constructed but never used in forward(); kept so
        # parameter init order and existing checkpoints stay unchanged.
        self.bottleneck = nn.Sequential(
            nn.Conv2d(2048, 1024, kernel_size=3),
            nn.ReLU(inplace=True),
            nn.BatchNorm2d(1024),
            nn.Conv2d(1024, 1024, kernel_size=3),
            nn.ReLU(inplace=True),
            nn.BatchNorm2d(1024),
        )
        # Decoder stages, deepest first. ("deconder" spelling is kept for
        # state-dict compatibility.)
        self.deconder4 = UnetUpBlock(2048, 1024, 1024, 'upconv')
        self.deconder3 = UnetUpBlock(1024, 512, 512, 'upconv')
        self.deconder2 = UnetUpBlock(512, 256, 256, 'upconv')
        self.deconder1 = UnetUpBlock(256, 128, 128, 'upconv')
        # Lifts the shallow e0 features (64 ch) to 128 ch so they can
        # serve as the skip input of deconder1.
        self.block = nn.Sequential(
            nn.Conv2d(64, 128, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.BatchNorm2d(128),
        )
        # Final x2 upsampling back toward the input resolution.
        self.up = nn.ConvTranspose2d(128, 64, kernel_size=2, stride=2)
        self.final_layer = final_block(64, class_num)

    def forward(self, x):
        """Return per-pixel class scores for input image batch ``x``."""
        e0, e1, e2, e3, e4 = self.encoder(x)

        d3 = self.deconder4(e3, e4)
        d2 = self.deconder3(e2, d3)
        d1 = self.deconder2(e1, d2)
        d0 = self.deconder1(self.block(e0), d1)

        upsampled = self.up(d0)
        return self.final_layer(upsampled)
if __name__ == '__main__':
    # Smoke test: one 352x480 RGB image through a 12-class model.
    dummy = torch.rand((1, 3, 352, 480))
    net = ResNet101_UNet(12)
    prediction = net(dummy)
    print(prediction.size())
