MykolaL committed on
Commit
b0bac6d
1 Parent(s): 093516f

Upload EVPRefer_warp

Browse files
Files changed (3) hide show
  1. config.json +2 -2
  2. model.py +13 -5
  3. model.safetensors +2 -2
config.json CHANGED
@@ -1,10 +1,10 @@
1
  {
2
  "architectures": [
3
- "EVPRefer"
4
  ],
5
  "auto_map": {
6
  "AutoConfig": "evpconfig.EVPConfig",
7
- "AutoModel": "model.EVPRefer"
8
  },
9
  "model_type": "EVP",
10
  "torch_dtype": "float32",
 
1
  {
2
  "architectures": [
3
+ "EVPRefer_warp"
4
  ],
5
  "auto_map": {
6
  "AutoConfig": "evpconfig.EVPConfig",
7
+ "AutoModel": "model.EVPRefer_warp"
8
  },
9
  "model_type": "EVP",
10
  "torch_dtype": "float32",
model.py CHANGED
@@ -243,23 +243,31 @@ class InverseMultiAttentiveFeatureRefinement(nn.Module):
243
  '''
244
  return [x_c4, x_c3, x_c2, x_c1]
245
 
 
 
 
 
 
 
 
 
 
246
 
247
-
248
- class EVPRefer(PreTrainedModel):
249
  """Encoder Decoder segmentors.
250
 
251
  EncoderDecoder typically consists of backbone, decode_head, auxiliary_head.
252
  Note that auxiliary_head is only used for deep supervision during training,
253
  which could be dumped during inference.
254
  """
255
- config_class = EVPConfig
256
- def __init__(self, config,
257
  sd_path=None,
258
  base_size=512,
259
  token_embed_dim=768,
260
  neck_dim=[320,680,1320,1280],
261
  **args):
262
- super().__init__(config)
263
  config = OmegaConf.load('./v1-inference.yaml')
264
  if os.path.exists(f'{sd_path}'):
265
  config.model.params.ckpt_path = f'{sd_path}'
 
243
  '''
244
  return [x_c4, x_c3, x_c2, x_c1]
245
 
246
+
247
+ class EVPRefer_warp(PreTrainedModel):
248
+ config_class = EVPConfig
249
+ def __init__(self, config):
250
+ super().__init__(config)
251
+ self.model = EVPRefer()
252
+ def forward(self, img, sentences):
253
+ return self.model(img, sentences)
254
+
255
 
256
+ class EVPRefer(nn.Module):
 
257
  """Encoder Decoder segmentors.
258
 
259
  EncoderDecoder typically consists of backbone, decode_head, auxiliary_head.
260
  Note that auxiliary_head is only used for deep supervision during training,
261
  which could be dumped during inference.
262
  """
263
+ #config_class = EVPConfig
264
+ def __init__(self, #config,
265
  sd_path=None,
266
  base_size=512,
267
  token_embed_dim=768,
268
  neck_dim=[320,680,1320,1280],
269
  **args):
270
+ super().__init__()
271
  config = OmegaConf.load('./v1-inference.yaml')
272
  if os.path.exists(f'{sd_path}'):
273
  config.model.params.ckpt_path = f'{sd_path}'
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:657dc9c51ec9bbcb561120186fe0f02e9381fc789cf9bb70f5006bceba8b1377
3
- size 4317946624
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8cb0158d61ae68cf82b2a478c35521123c8f7e89af87888349c97363392824a0
3
+ size 4317953152