GenBaB / cifar / models / gelu.py
import torch.nn as nn
import numpy as np
import torch


class GELUOp(torch.autograd.Function):
    """GELU as a custom autograd Function with an explicit ONNX symbolic.

    Exporting through this op emits a single custom::Gelu node instead of the
    decomposed erf/mul graph, so downstream tools can match GELU directly.
    """

    @staticmethod
    def symbolic(g, x):
        return g.op('custom::Gelu', x)

    @staticmethod
    def forward(ctx, x):
        ctx.save_for_backward(x)
        return torch.nn.functional.gelu(x)

    @staticmethod
    def backward(ctx, grad_output):
        x, = ctx.saved_tensors
        # Exact GELU derivative: d/dx [x * Phi(x)] = Phi(x) + x * phi(x).
        grad = 0.5 * (1 + torch.erf(x / np.sqrt(2))) + x * torch.exp(-0.5 * x ** 2) / np.sqrt(2 * torch.pi)
        return grad_output * grad


class GELU(nn.Module):
    """nn.Module wrapper that applies GELUOp."""
    def forward(self, x):
        return GELUOp.apply(x)


def gelu_fc(in_ch=3, in_dim=32, width=100, depth=4, omega=0.3, num_classes=10):
    # Fully-connected network: Flatten, `depth` Linear+GELU blocks, then a
    # final Linear classifier. `omega` is kept in the signature but unused.
    layers = [nn.Flatten(), nn.Linear(in_ch * in_dim ** 2, width), GELU()]
    for _ in range(depth - 1):
        layers.extend([nn.Linear(width, width), GELU()])
    layers.append(nn.Linear(width, num_classes))
    return nn.Sequential(*layers)


def gelu_4fc_100(in_ch=3, in_dim=32):
    return gelu_fc(in_ch, in_dim, width=100, depth=4)


def gelu_4fc_200(in_ch=3, in_dim=32):
    return gelu_fc(in_ch, in_dim, width=200, depth=4)


def gelu_4fc_500(in_ch=3, in_dim=32):
    return gelu_fc(in_ch, in_dim, width=500, depth=4)
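

if __name__ == "__main__":
    # Minimal sanity-check sketch, assuming CIFAR-shaped inputs (3x32x32, 10
    # classes): verify the hand-written backward against finite differences,
    # then run a forward pass through one of the model constructors.
    x = torch.randn(8, dtype=torch.double, requires_grad=True)
    assert torch.autograd.gradcheck(GELUOp.apply, (x,), eps=1e-6, atol=1e-4)

    model = gelu_4fc_100()
    out = model(torch.randn(2, 3, 32, 32))
    assert out.shape == (2, 10)
    print('GELUOp gradcheck passed; gelu_4fc_100 output shape:', tuple(out.shape))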