ibaiGorordo committed
Commit 915d009
Parent: 498007e

Update model.py

Files changed (1):
  1. model.py +21 -21
model.py CHANGED
@@ -130,9 +130,9 @@ class myConv(nn.Module):
 
 # Deep Feature Fxtractor
 class deepFeatureExtractor_ResNext101(nn.Module):
-    def __init__(self, args, lv6=False):
+    def __init__(self, lv6=False):
         super(deepFeatureExtractor_ResNext101, self).__init__()
-        self.args = args
+
         # after passing ReLU : H/2 x W/2
         # after passing Layer1 : H/4 x W/4
         # after passing Layer2 : H/8 x W/8
@@ -317,25 +317,25 @@ class Dilated_bottleNeck_lv6(nn.Module):
 
 # Laplacian Decoder Network
 class Lap_decoder_lv5(nn.Module):
-    def __init__(self, args, dimList):
+    def __init__(self, dimList, norm="BN", rank=0, act='ReLU', max_depth=80):
         super(Lap_decoder_lv5, self).__init__()
-        norm = args.norm
+
         conv = conv_ws
         if norm == 'GN':
-            if args.rank == 0:
+            if rank == 0:
                 print("==> Norm: GN")
         else:
-            if args.rank == 0:
+            if rank == 0:
                 print("==> Norm: BN")
 
-        if args.act == 'ELU':
+        if act == 'ELU':
             act = 'ELU'
-        elif args.act == 'Mish':
+        elif act == 'Mish':
             act = 'Mish'
         else:
             act = 'ReLU'
         kSize = 3
-        self.max_depth = args.max_depth
+        self.max_depth = max_depth
         self.ASPP = Dilated_bottleNeck(norm, act, dimList[3])
         self.dimList = dimList
         ############################################ Pyramid Level 5 ###################################################
@@ -474,25 +474,25 @@ class Lap_decoder_lv5(nn.Module):
 
 
 class Lap_decoder_lv6(nn.Module):
-    def __init__(self, args, dimList):
+    def __init__(self, dimList, norm="BN", rank=0, act='ReLU', max_depth=80):
         super(Lap_decoder_lv6, self).__init__()
-        norm = args.norm
+        norm = norm
         conv = conv_ws
         if norm == 'GN':
-            if args.rank == 0:
+            if rank == 0:
                 print("==> Norm: GN")
         else:
-            if args.rank == 0:
+            if rank == 0:
                 print("==> Norm: BN")
 
-        if args.act == 'ELU':
+        if act == 'ELU':
             act = 'ELU'
-        elif args.act == 'Mish':
+        elif act == 'Mish':
            act = 'Mish'
         else:
             act = 'ReLU'
         kSize = 3
-        self.max_depth = args.max_depth
+        self.max_depth = max_depth
         self.ASPP = Dilated_bottleNeck_lv6(norm, act, dimList[4])
         dimList[4] = dimList[4] // 2
         self.dimList = dimList
@@ -659,15 +659,15 @@ class Lap_decoder_lv6(nn.Module):
 
 # Laplacian Depth Residual Network
 class LDRN(nn.Module):
-    def __init__(self, args):
+    def __init__(self, lv6=False, norm="BN", rank=0, act='ReLU', max_depth=80):
         super(LDRN, self).__init__()
-        lv6 = args.lv6
-        self.encoder = deepFeatureExtractor_ResNext101(args, lv6)
+
+        self.encoder = deepFeatureExtractor_ResNext101(lv6)
 
         if lv6 is True:
-            self.decoder = Lap_decoder_lv6(args, self.encoder.dimList)
+            self.decoder = Lap_decoder_lv6(self.encoder.dimList, norm, rank, act, max_depth)
         else:
-            self.decoder = Lap_decoder_lv5(args, self.encoder.dimList)
+            self.decoder = Lap_decoder_lv5(self.encoder.dimList, norm, rank, act, max_depth)
 
     def forward(self, x):
         out_featList = self.encoder(x)
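
For reference, a minimal usage sketch of the refactored interface after this commit. It assumes model.py from this repository is importable; the input tensor shape and the variable names below are illustrative only, and the structure of the value returned by forward() is defined elsewhere in model.py.

import torch
from model import LDRN  # model.py from this repository

# The constructor no longer takes an argparse namespace; options are plain
# keyword arguments with defaults (lv6=False, norm="BN", rank=0, act='ReLU', max_depth=80).
model = LDRN(lv6=False, norm='BN', act='ReLU', max_depth=80)
model.eval()

x = torch.randn(1, 3, 352, 704)   # illustrative RGB input (shape is an assumption)
with torch.no_grad():
    out = model(x)                # return structure as defined by LDRN.forward() in model.py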