KyanChen committed on
Commit
6fb655b
1 Parent(s): c203731

Update mmseg/ttp/models.py

Browse files
Files changed (1) hide show
  1. mmseg/ttp/models.py +9 -2
mmseg/ttp/models.py CHANGED
@@ -52,6 +52,13 @@ class MMPretrainSamVisionEncoder(BaseModule):
52
  # freeze the vision encoder
53
  for param in self.vision_encoder.parameters():
54
  param.requires_grad = False
 
 
 
 
 
 
 
55
 
56
  def forward(self, x):
57
  return self.vision_encoder(x)
@@ -265,7 +272,7 @@ class TimeFusionTransformerEncoderLayer(BaseModule):
265
  dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate),
266
  act_cfg=act_cfg)
267
 
268
- if self.window_size > 0:
269
  in_channels = embed_dims * 2
270
  self.down_channel = nn.Conv2d(in_channels, 1, kernel_size=1, stride=1, bias=False)
271
  self.down_channel.weight.data.fill_(1.0/in_channels)
@@ -300,7 +307,7 @@ class TimeFusionTransformerEncoderLayer(BaseModule):
300
 
301
  x = self.ffn(self.ln2(x), identity=x)
302
  # # time phase fusion
303
- if self.window_size > 0:
304
  x = einops.rearrange(x, 'b h w d -> b d h w') # 2B, C, H, W
305
  x0 = x[:x.size(0)//2]
306
  x1 = x[x.size(0)//2:] # B, C, H, W
 
52
  # freeze the vision encoder
53
  for param in self.vision_encoder.parameters():
54
  param.requires_grad = False
55
+ for name, param in self.vision_encoder.named_parameters():
56
+ if 'down_channel' in name:
57
+ param.requires_grad = True
58
+ if 'soft_ffn' in name:
59
+ param.requires_grad = True
60
+ if is_main_process() and peft_cfg is not None:
61
+ self.vision_encoder.print_trainable_parameters()
62
 
63
  def forward(self, x):
64
  return self.vision_encoder(x)
 
272
  dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate),
273
  act_cfg=act_cfg)
274
 
275
+ if self.window_size == 0:
276
  in_channels = embed_dims * 2
277
  self.down_channel = nn.Conv2d(in_channels, 1, kernel_size=1, stride=1, bias=False)
278
  self.down_channel.weight.data.fill_(1.0/in_channels)
 
307
 
308
  x = self.ffn(self.ln2(x), identity=x)
309
  # # time phase fusion
310
+ if self.window_size == 0:
311
  x = einops.rearrange(x, 'b h w d -> b d h w') # 2B, C, H, W
312
  x0 = x[:x.size(0)//2]
313
  x1 = x[x.size(0)//2:] # B, C, H, W