"""
CNN models for binary and multi-class classification.
"""
import torch
from torch import nn
|
|
class Convnet(nn.Module):
    """
    Convolutional Neural Network for binary or multi-class classification.

    Input args: n_classes (int) --> number of output classes

    Expected input shape per sample: [1, 60, 60]

    Matrix shapes (conv layers), written as [N, C_in, H, W]:
        - N: batch size
        - C_in: number of input channels
        - H: height of the input planes
        - W: width of the input planes

    With stride 1 and no padding, each Conv2d shrinks H by (kernel_height - 1)
    and W by (kernel_width - 1); each MaxPool2d((2, 1)) halves H and keeps W.

        - Conv2d(1, 64, (5, 3), 1)      --> [64, 56, 58]
        - MaxPool2d(kernel_size=(2, 1)) --> [64, 28, 58]
        - Conv2d(64, 128, (5, 3), 1)    --> [128, 24, 56]
        - MaxPool2d(kernel_size=(2, 1)) --> [128, 12, 56]
        - Conv2d(128, 256, (5, 3), 1)   --> [256, 8, 54]
        - MaxPool2d(kernel_size=(2, 1)) --> [256, 4, 54]

    Matrix shapes (fully connected layers):
        - Linear(256 * 4 * 54, 1024) --> [1024]
        - Linear(1024, 512)          --> [512]
        - Linear(512, 128)           --> [128]
        - Linear(128, 64)            --> [64]
        - Linear(64, n_classes)      --> [n_classes]

    Softmax(dim=1) --> class probabilities
    """
|
    def __init__(self, n_classes: int) -> None:
        super().__init__()
        # Convolutional feature extractor: three Conv2d -> BatchNorm -> LeakyReLU -> MaxPool blocks.
        self.cnn = nn.Sequential(
            nn.Conv2d(in_channels=1, out_channels=64, kernel_size=(5, 3), stride=1),
            nn.BatchNorm2d(64),
            nn.LeakyReLU(negative_slope=0.01),
            nn.MaxPool2d(kernel_size=(2, 1)),
            nn.Conv2d(64, 128, (5, 3), 1),
            nn.BatchNorm2d(128),
            nn.LeakyReLU(negative_slope=0.01),
            nn.MaxPool2d(kernel_size=(2, 1)),
            nn.Conv2d(128, 256, (5, 3), 1),
            nn.BatchNorm2d(256),
            nn.LeakyReLU(negative_slope=0.01),
            nn.MaxPool2d(kernel_size=(2, 1)),
        )
        # Dropout applied to the pooled feature maps before flattening.
        self.dropout = nn.Dropout(0.5)
        # Fully connected head; Softmax(dim=1) turns the logits into class probabilities.
        self.fc = nn.Sequential(
            nn.Linear(256 * 4 * 54, 1024),
            nn.Linear(1024, 512),
            nn.Linear(512, 128),
            nn.Linear(128, 64),
            nn.Linear(64, n_classes),
            nn.Softmax(dim=1),
        )
        # Xavier (Glorot) initialization for conv weights, zero for conv biases.
        for layer in self.cnn:
            if isinstance(layer, nn.Conv2d):
                nn.init.xavier_normal_(layer.weight)
                nn.init.constant_(layer.bias, 0.0)
|
    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """
        Forward pass: conv features -> dropout -> flatten -> fully connected head.
        """
        x = self.cnn(x)
        x = self.dropout(x)
        # Flatten [N, 256, 4, 54] into [N, 256 * 4 * 54] for the linear layers.
        x = x.view(x.size(0), -1)
        x = self.fc(x)
        return x
|
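# A minimal usage sketch, not part of the original module: it assumes a dummy batch of
# four single-channel 60x60 inputs, traces the documented feature-map shapes through
# the conv stack, and checks that the output has one probability row per sample
# (each row sums to ~1 because of the Softmax).
if __name__ == "__main__":
    model = Convnet(n_classes=3)
    model.eval()  # disable dropout and use BatchNorm running statistics
    x = torch.randn(4, 1, 60, 60)  # [N, C_in, H, W]
    with torch.no_grad():
        feats = x
        for layer in model.cnn:
            feats = layer(feats)
            if isinstance(layer, (nn.Conv2d, nn.MaxPool2d)):
                # e.g. Conv2d (4, 64, 56, 58) ... MaxPool2d (4, 256, 4, 54)
                print(type(layer).__name__, tuple(feats.shape))
        probs = model(x)
    print(probs.shape)       # torch.Size([4, 3])
    print(probs.sum(dim=1))  # each row is approximately 1.0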