LSZTT committed
Commit 49e032a
1 Parent(s): 9037f36

Update models/common.py

Files changed (1): models/common.py +8 -13
models/common.py CHANGED
@@ -35,15 +35,15 @@ from utils.plots import Annotator, colors, save_one_box
 from utils.torch_utils import copy_attr, smart_inference_mode
 
 
-
-def autopad(k, p, d):
+def autopad(k, p=None, d=1):  # kernel, padding, dilation
+    # Pad to 'same' shape outputs
+    if d > 1:
+        k = d * (k - 1) + 1 if isinstance(k, int) else [d * (x - 1) + 1 for x in k]  # actual kernel-size
     if p is None:
-        if d != 1:
-            p = d * (k - 1) // 2
-        else:
-            p = k // 2
+        p = k // 2 if isinstance(k, int) else [x // 2 for x in k]  # auto-pad
     return p
 
+
 class Conv(nn.Module):
     # Standard convolution with args(ch_in, ch_out, kernel, stride, padding, groups, dilation, activation)
     default_act = nn.SiLU()  # default activation
@@ -55,15 +55,10 @@ class Conv(nn.Module):
         self.act = self.default_act if act is True else act if isinstance(act, nn.Module) else nn.Identity()
 
     def forward(self, x):
-        out = self.conv(x)
-        out = self.bn(out)
-        out = self.act(out)
-        return out
+        return self.act(self.bn(self.conv(x)))
 
     def forward_fuse(self, x):
-        out = self.conv(x)
-        out = self.act(out)
-        return out
+        return self.act(self.conv(x))
 
 
 class DWConv(Conv):
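
For reference, a minimal standalone sketch of what the updated autopad does (the nn.Conv2d call, channel counts, and input size below are illustrative assumptions, not part of the commit): with dilation d, the effective kernel size becomes d * (k - 1) + 1, and padding by half of that keeps 'same' spatial dimensions.

import torch
import torch.nn as nn

def autopad(k, p=None, d=1):  # kernel, padding, dilation
    # Pad to 'same' shape outputs (same logic as the updated function above)
    if d > 1:
        k = d * (k - 1) + 1 if isinstance(k, int) else [d * (x - 1) + 1 for x in k]  # actual kernel-size
    if p is None:
        p = k // 2 if isinstance(k, int) else [x // 2 for x in k]  # auto-pad
    return p

# Illustrative check: a 3x3 kernel with dilation 2 has an effective size of 5,
# so autopad(3, None, 2) returns 2 and a 64x64 input keeps its spatial shape.
conv = nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=autopad(3, None, 2), dilation=2)
x = torch.randn(1, 3, 64, 64)
print(conv(x).shape)  # torch.Size([1, 16, 64, 64])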