import torch
from torch import nn


class Generator(nn.Module):
    """DCGAN-style generator: maps a latent vector z to a 64x64 image."""

    def __init__(self):
        super(Generator, self).__init__()
        self.ngpu = 0
        nz = 100   # size of the latent vector z
        ngf = 64   # base number of generator feature maps
        nc = 4     # number of output image channels
        self.main = nn.Sequential(
            # input is Z, going into a convolution
            nn.ConvTranspose2d(nz, ngf * 8, 4, 1, 0, bias=False),
            nn.BatchNorm2d(ngf * 8),
            nn.ReLU(True),
            # state size. (ngf*8) x 4 x 4
            nn.ConvTranspose2d(ngf * 8, ngf * 4, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf * 4),
            nn.ReLU(True),
            # state size. (ngf*4) x 8 x 8
            nn.ConvTranspose2d(ngf * 4, ngf * 2, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf * 2),
            nn.ReLU(True),
            # state size. (ngf*2) x 16 x 16
            nn.ConvTranspose2d(ngf * 2, ngf, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf),
            nn.ReLU(True),
            # state size. (ngf) x 32 x 32
            nn.ConvTranspose2d(ngf, nc, 4, 2, 1, bias=False),
            nn.Tanh()
            # state size. (nc) x 64 x 64
        )

    def forward(self, input):
        return self.main(input)
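

# A minimal inference sketch (not part of the original Space): the batch size,
# seed-free noise sampling, and CPU execution below are illustrative assumptions.
if __name__ == "__main__":
    netG = Generator()
    netG.eval()

    # Sample a batch of latent vectors with shape (batch, nz, 1, 1), nz = 100.
    noise = torch.randn(8, 100, 1, 1)

    with torch.no_grad():
        fake = netG(noise)

    # Each sample is a 4-channel 64x64 image with values in [-1, 1] from Tanh.
    print(fake.shape)  # torch.Size([8, 4, 64, 64])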