import torch
import torch.nn as nn
import torch.nn.functional as F


class Net(nn.Module):
    def __init__(self):
        super().__init__()
        # Five conv blocks; each is followed by ReLU + 2x2 max pooling in forward().
        self.conv1 = nn.Conv2d(3, 32, 5)
        self.conv2 = nn.Conv2d(32, 64, 5)
        self.conv3 = nn.Conv2d(64, 128, 5)
        self.conv4 = nn.Conv2d(128, 256, 5)
        self.conv5 = nn.Conv2d(256, 512, 5)
        # LazyLinear (PyTorch >= 1.8) infers in_features on the first forward
        # pass, so the flattened conv output size need not be hard-coded.
        # Unlike constructing the layer inside forward(), the module is
        # registered here, so an optimizer built before the first forward
        # pass still sees fc1's parameters.
        self.fc1 = nn.LazyLinear(512)
        self.fc2 = nn.Linear(512, 128)
        self.fc3 = nn.Linear(128, 64)
        self.fc4 = nn.Linear(64, 2)

    def forward(self, x):
        x = x.float()
        x = F.max_pool2d(F.relu(self.conv1(x)), 2)
        x = F.max_pool2d(F.relu(self.conv2(x)), 2)
        x = F.max_pool2d(F.relu(self.conv3(x)), 2)
        x = F.max_pool2d(F.relu(self.conv4(x)), 2)
        x = F.max_pool2d(F.relu(self.conv5(x)), 2)
        x = torch.flatten(x, 1)  # flatten all dims except batch
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = F.relu(self.fc3(x))
        return self.fc4(x)  # raw logits for the 2 output classes
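

# Usage sketch. The input shape (3x224x224) and the Adam optimizer are
# assumptions for illustration, not part of the original code. With
# nn.LazyLinear, one dummy forward pass materializes fc1's weights; doing
# this BEFORE constructing the optimizer ensures fc1's parameters are
# included in it. Note the input must be large enough to survive five
# 5x5 convs and 2x2 pools (224x224 works; much smaller inputs will not).
if __name__ == "__main__":
    net = Net()
    with torch.no_grad():
        net(torch.zeros(1, 3, 224, 224))  # dry run to initialize fc1

    optimizer = torch.optim.Adam(net.parameters(), lr=1e-3)

    out = net(torch.randn(4, 3, 224, 224))
    print(out.shape)  # torch.Size([4, 2])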