from typing import Sequence, Tuple, Union

import torch
import torch.nn as nn

from monai.networks.blocks.convolutions import Convolution, ResidualUnit
from monai.networks.layers.factories import Act, Norm
from monai.networks.layers.simplelayers import SkipConnection
class UNet_double(nn.Module):
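    """
    A double-decoder UNet: one shared encoder feeds two parallel decoder
    branches, ``model1`` and ``model2`` (built in ``_create_block`` below).
    The constructor arguments mirror ``monai.networks.nets.UNet``.
    """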
def __init__(
self,
dimensions: int,
in_channels: int,
out_channels: int,
channels: Sequence[int],
strides: Sequence[int],
kernel_size: Union[Sequence[int], int] = 3,
up_kernel_size: Union[Sequence[int], int] = 3,
num_res_units: int = 0,
act=Act.PRELU,
norm=Norm.INSTANCE,
        dropout: float = 0.0,
    ) -> None:
super().__init__()
self.dimensions = dimensions
self.in_channels = in_channels
self.out_channels = out_channels
self.channels = channels
self.strides = strides
self.kernel_size = kernel_size
self.up_kernel_size = up_kernel_size
self.num_res_units = num_res_units
self.act = act
self.norm = norm
self.dropout = dropout
        def _create_block(
            inc: int, outc: int, channels: Sequence[int], strides: Sequence[int], is_top: bool
        ) -> Tuple[nn.Sequential, nn.Sequential]:
            """
            Recursively build one encoder/decoder level, returning a pair of
            nn.Sequential models: the downsampling path is shared, while each
            element of the pair carries its own decoder branch.
            """
            c = channels[0]
            s = strides[0]

            if len(channels) > 2:
                # continue recursion down; each decoder branch gets its own subblock
                subblock1, subblock2 = _create_block(c, c, channels[1:], strides[1:], False)
                upc = c * 2
            else:
                # the next layer is the bottom, so stop the recursion and use the
                # bottom layer as the subblock for this level (shared by both branches)
                subblock1 = subblock2 = self._get_bottom_layer(c, channels[1])
                upc = c + channels[1]

            down = self._get_down_layer(inc, c, s, is_top)  # shared layer in the downsampling path
            up1 = self._get_up_layer(upc, outc, s, is_top)  # upsampling layer, branch 1
            up2 = self._get_up_layer(upc, outc, s, is_top)  # upsampling layer, branch 2
            return (
                nn.Sequential(down, SkipConnection(subblock1), up1),
                nn.Sequential(down, SkipConnection(subblock2), up2),
            )

        self.model1, self.model2 = _create_block(in_channels, out_channels, self.channels, self.strides, True)
        self.activation = nn.Sigmoid()
def _get_down_layer(self, in_channels: int, out_channels: int, strides: int, is_top: bool) -> nn.Module:
"""
Args:
in_channels: number of input channels.
out_channels: number of output channels.
strides: convolution stride.
is_top: True if this is the top block.
"""
if self.num_res_units > 0:
return ResidualUnit(
self.dimensions,
in_channels,
out_channels,
strides=strides,
kernel_size=self.kernel_size,
subunits=self.num_res_units,
act=self.act,
norm=self.norm,
dropout=self.dropout,
)
return Convolution(
self.dimensions,
in_channels,
out_channels,
strides=strides,
kernel_size=self.kernel_size,
act=self.act,
norm=self.norm,
dropout=self.dropout,
)
def _get_bottom_layer(self, in_channels: int, out_channels: int) -> nn.Module:
"""
Args:
in_channels: number of input channels.
out_channels: number of output channels.
"""
return self._get_down_layer(in_channels, out_channels, 1, False)
def _get_up_layer(self, in_channels: int, out_channels: int, strides: int, is_top: bool) -> nn.Module:
"""
Args:
in_channels: number of input channels.
out_channels: number of output channels.
strides: convolution stride.
is_top: True if this is the top block.
"""
conv: Union[Convolution, nn.Sequential]
conv = Convolution(
self.dimensions,
in_channels,
out_channels,
strides=strides,
kernel_size=self.up_kernel_size,
act=self.act,
norm=self.norm,
dropout=self.dropout,
conv_only=is_top and self.num_res_units == 0,
is_transposed=True,
)
if self.num_res_units > 0:
ru = ResidualUnit(
self.dimensions,
out_channels,
out_channels,
strides=1,
kernel_size=self.kernel_size,
subunits=1,
act=self.act,
norm=self.norm,
dropout=self.dropout,
last_conv_only=is_top,
)
conv = nn.Sequential(conv, ru)
return conv
    def forward(self, x: torch.Tensor, box=None) -> torch.Tensor:
        # note: only the first decoder branch is used here; ``box`` and
        # ``self.model2`` are accepted/built but unused in this forward pass
        return self.activation(self.model1(x))
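
# Minimal usage sketch (not part of the original file): the channel/stride
# configuration below is illustrative, not the original training setup.
# Input spatial sizes must be divisible by the product of the strides
# (2 * 2 * 2 = 8 here).
if __name__ == "__main__":
    net = UNet_double(
        dimensions=2,
        in_channels=1,
        out_channels=1,
        channels=(16, 32, 64, 128),
        strides=(2, 2, 2),
        num_res_units=2,
    )
    y = net(torch.randn(1, 1, 64, 64))  # forward uses only the first decoder branch
    print(y.shape)  # -> torch.Size([1, 1, 64, 64])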