diff --git a/op/fused_act.py b/op/fused_act.py
index 5d46e10..bc522ed 100755
--- a/op/fused_act.py
+++ b/op/fused_act.py
@@ -1,5 +1,3 @@
-import os
-
 import torch
 from torch import nn
 from torch.nn import functional as F
@@ -7,16 +5,6 @@ from torch.autograd import Function
 from torch.utils.cpp_extension import load
 
 
-module_path = os.path.dirname(__file__)
-fused = load(
-    "fused",
-    sources=[
-        os.path.join(module_path, "fused_bias_act.cpp"),
-        os.path.join(module_path, "fused_bias_act_kernel.cu"),
-    ],
-)
-
-
 class FusedLeakyReLUFunctionBackward(Function):
     @staticmethod
     def forward(ctx, grad_output, out, bias, negative_slope, scale):
diff --git a/op/upfirdn2d.py b/op/upfirdn2d.py
index 67e0375..6c5840e 100755
--- a/op/upfirdn2d.py
+++ b/op/upfirdn2d.py
@@ -1,5 +1,4 @@
 from collections import abc
-import os
 
 import torch
 from torch.nn import functional as F
@@ -7,16 +6,6 @@ from torch.autograd import Function
 from torch.utils.cpp_extension import load
 
 
-module_path = os.path.dirname(__file__)
-upfirdn2d_op = load(
-    "upfirdn2d",
-    sources=[
-        os.path.join(module_path, "upfirdn2d.cpp"),
-        os.path.join(module_path, "upfirdn2d_kernel.cu"),
-    ],
-)
-
-
 class UpFirDn2dBackward(Function):
     @staticmethod
     def forward(