import math
import torch.nn as nn
import pdb  # Python debugger (kept from the original; unused below)
# Conv2D (3,3)
def conv3x3(in_planes, out_planes, stride=1):
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
                     padding=1, bias=False)

# Conv2D (1,1) + BatchNorm2D
def downsample_basic_block(inplanes, outplanes, stride):
    return nn.Sequential(
        nn.Conv2d(inplanes, outplanes, kernel_size=1, stride=stride, bias=False),
        nn.BatchNorm2d(outplanes),
    )

# AvgPool2D + Conv2D (1,1) + BatchNorm2D
def downsample_basic_block_v2(inplanes, outplanes, stride):
    return nn.Sequential(
        nn.AvgPool2d(kernel_size=stride, stride=stride, ceil_mode=True, count_include_pad=False),
        nn.Conv2d(inplanes, outplanes, kernel_size=1, stride=1, bias=False),
        nn.BatchNorm2d(outplanes),
    )
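
# Note (editorial): both helpers project the residual from `inplanes` to
# `outplanes` channels while shrinking the spatial size by `stride`; v2
# additionally smooths with AvgPool2d before the 1x1 conv. For example,
# either variant maps (N, 64, 28, 28) -> (N, 128, 14, 14) with stride=2.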

# Basic 2D residual block
class BasicBlock(nn.Module):
    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None, relu_type='relu'):
        super(BasicBlock, self).__init__()
        # Guarantee relu_type holds one of the supported values ('relu' or
        # 'prelu'); anything else raises an AssertionError.
        assert relu_type in ['relu', 'prelu']
        self.conv1 = conv3x3(inplanes, planes, stride)  # Conv2D (3,3)
        self.bn1 = nn.BatchNorm2d(planes)  # BatchNorm2D
        # The type of ReLU is an input option
        if relu_type == 'relu':  # ReLU
            self.relu1 = nn.ReLU(inplace=True)
            self.relu2 = nn.ReLU(inplace=True)
        elif relu_type == 'prelu':  # PReLU
            self.relu1 = nn.PReLU(num_parameters=planes)
            self.relu2 = nn.PReLU(num_parameters=planes)
        else:
            raise Exception('relu type not implemented')  # unreachable after the assert above
        # --------
        self.conv2 = conv3x3(planes, planes)  # Conv2D (3,3)
        self.bn2 = nn.BatchNorm2d(planes)  # BatchNorm2D
        self.downsample = downsample
        self.stride = stride
    # Forward propagation over the input batch
    def forward(self, x):
        residual = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu1(out)
        out = self.conv2(out)
        out = self.bn2(out)
        if self.downsample is not None:
            residual = self.downsample(x)
        out += residual
        out = self.relu2(out)
        return out
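
# Usage sketch (illustrative, not from the original file): a strided block
# needs a matching downsample module so the residual shapes agree, e.g.
#   blk = BasicBlock(64, 128, stride=2,
#                    downsample=downsample_basic_block(64, 128, 2))
# maps (N, 64, H, W) -> (N, 128, H/2, W/2).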

# 2D ResNet trunk: four residual stages, no stem conv and no fc head
class ResNet(nn.Module):

    def __init__(self, block, layers, num_classes=1000, relu_type='relu', gamma_zero=False, avg_pool_downsample=False):
        super(ResNet, self).__init__()
        self.inplanes = 64
        self.relu_type = relu_type
        self.gamma_zero = gamma_zero
        # Use v2 (AvgPool2D first) if avg_pool_downsample is set, else v1
        self.downsample_block = downsample_basic_block_v2 if avg_pool_downsample else downsample_basic_block

        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
        self.avgpool = nn.AdaptiveAvgPool2d(1)

        # default init
        for m in self.modules():
            if isinstance(m, nn.Conv2d):  # Conv2D instance?
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):  # BatchNorm2D instance?
                m.weight.data.fill_(1)
                m.bias.data.zero_()
                # nn.init.ones_(m.weight)
                # nn.init.zeros_(m.bias)

        if self.gamma_zero:
            for m in self.modules():
                if isinstance(m, BasicBlock):  # BasicBlock instance?
                    m.bn2.weight.data.zero_()
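        # Note (editorial): zero-initializing the last BatchNorm gamma makes
        # each residual block start out as an identity mapping, a common
        # trick for stabilizing early training.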

    # Build one residual stage from `blocks` blocks
    def _make_layer(self, block, planes, blocks, stride=1):
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            # shortcut path: (AvgPool2D) + Conv2D (1,1) + BatchNorm2D
            downsample = self.downsample_block(inplanes=self.inplanes,
                                               outplanes=planes * block.expansion,
                                               stride=stride)
        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample, relu_type=self.relu_type))
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes, relu_type=self.relu_type))
        return nn.Sequential(*layers)  # return the configured stage

    # Forward propagation over the input batch
    def forward(self, x):
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.avgpool(x)
        x = x.view(x.size(0), -1)  # flatten to (batch, 512 * block.expansion)
        return x
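
# Minimal smoke test (editorial sketch, not part of the original module).
# Assumptions: the ResNet-18-style depth configuration [2, 2, 2, 2] and the
# 64-channel dummy input are illustrative; the trunk has no stem conv, so
# inputs must already have 64 channels.
if __name__ == "__main__":
    import torch

    model = ResNet(BasicBlock, [2, 2, 2, 2], relu_type='prelu')
    dummy = torch.randn(2, 64, 28, 28)  # (batch, channels, height, width)
    feats = model(dummy)
    print(feats.shape)  # expected: torch.Size([2, 512])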