ariG23498 (HF staff) committed
Commit ed74ac9 • 1 Parent(s): 35301df
app.py CHANGED
@@ -28,7 +28,7 @@ check_path = 'clip-dinoiser/checkpoints/last.pt'
 device = "cuda" if torch.cuda.is_available() else "cpu"
 
 check = torch.load(check_path, map_location=device)
-dinoclip_cfg = "clip_dinoiser.yaml"
+dinoclip_cfg = "configs/clip_dinoiser.yaml"
 cfg = compose(config_name=dinoclip_cfg)
 
 model = build_model(cfg.model, class_names=PascalVOCDataset.CLASSES).to(device)
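For context, a minimal sketch of how this compose() call resolves the relocated config through Hydra; the initialize call and its config_path sit outside this hunk, so the "." search path below is an assumption, not the app's actual setup:

from hydra import compose, initialize

# Assumed setup: Hydra search path rooted at the Space checkout ("." is a guess);
# the hunk above only shows the compose() call itself.
with initialize(config_path="."):
    cfg = compose(config_name="configs/clip_dinoiser.yaml")  # new path after this commit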
clip_dinoiser.yaml → configs/clip_dinoiser.yaml RENAMED
File without changes
configs/default.yml ADDED
@@ -0,0 +1,5 @@
+
+model_name: "default" # display name in the logger
+tag: default
+print_freq: 20
+seed: 0
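A hedged sketch of inspecting the new base keys with OmegaConf (the Space composes its configs through Hydra; this only shows what the added file contains):

from omegaconf import OmegaConf

# Load the newly added base config and print its four keys.
base = OmegaConf.load("configs/default.yml")
print(base.model_name, base.tag, base.print_freq, base.seed)  # default default 20 0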
configs/maskclip.yaml ADDED
@@ -0,0 +1,51 @@
+_base_: "default.yml"
+
+defaults:
+  - _self_
+
+model:
+  type: MaskClip
+  clip_model: CLIP-ViT-B-16-laion2B-s34B-b88K
+  backbone:
+    img_size: 448
+    patch_size: 16
+    patch_bias: False
+    in_channels: 3
+    embed_dims: 768
+    num_layers: 12
+    num_heads: 12
+    mlp_ratio: 4
+    out_indices: -1
+    qkv_bias: True
+    drop_rate: 0.0
+    attn_drop_rate: 0.0
+    drop_path_rate: 0.0
+    with_cls_token: True
+    output_cls_token: False
+    norm_cfg:
+      type: 'LN'
+      eps: 1e-6
+    act_cfg:
+      type: 'GELU'
+    patch_norm: False
+    pre_norm: True
+    final_norm: True
+    return_qkv: True
+    interpolate_mode: 'bicubic'
+    num_fcs: 2
+    norm_eval: False
+    pretrained: 'checkpoints/ViT-16-laion_clip_backbone.pth'
+
+  decode_head:
+    type: MaskClipHead
+    in_channels: 768
+    channels: 0
+    text_channels: 512
+    in_index: -1
+    norm_cfg:
+      type: 'SyncBN'
+      requires_grad: False
+    align_corners: False
+    visual_projs_path: 'checkpoints/ViT-16-laion_clip_proj.pth'
+    model_prefix: 'hf-hub:laion'
+    use_templates: True
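The _base_: "default.yml" line points at the file added above. A hedged sketch of one way that inheritance can be resolved by hand with OmegaConf; the repo's own config loader may merge the files differently:

from omegaconf import OmegaConf

# Merge the base config under the MaskClip config so keys like print_freq and
# seed are inherited unless maskclip.yaml overrides them.
child = OmegaConf.load("configs/maskclip.yaml")
base = OmegaConf.load("configs/" + child.pop("_base_", "default.yml"))
merged = OmegaConf.merge(base, child)
print(merged.model.backbone.embed_dims)  # 768
print(merged.print_freq)                 # 20, inherited from default.yml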