camenduru committed 923695d (verified) · 1 parent: 1f38028

thanks to yisol ❤

image_encoder/config.json CHANGED
@@ -1,23 +1,25 @@
 {
-  "_name_or_path": "./image_encoder",
+  "_name_or_path": "camenduru/IDM-VTON-F16",
   "architectures": [
-    "CLIPVisionModelWithProjection"
+    "CLIPTextModelWithProjection"
   ],
   "attention_dropout": 0.0,
+  "bos_token_id": 0,
   "dropout": 0.0,
+  "eos_token_id": 2,
   "hidden_act": "gelu",
   "hidden_size": 1280,
-  "image_size": 224,
   "initializer_factor": 1.0,
   "initializer_range": 0.02,
   "intermediate_size": 5120,
   "layer_norm_eps": 1e-05,
-  "model_type": "clip_vision_model",
-  "num_attention_heads": 16,
-  "num_channels": 3,
+  "max_position_embeddings": 77,
+  "model_type": "clip_text_model",
+  "num_attention_heads": 20,
   "num_hidden_layers": 32,
-  "patch_size": 14,
-  "projection_dim": 1024,
+  "pad_token_id": 1,
+  "projection_dim": 1280,
   "torch_dtype": "float16",
-  "transformers_version": "4.28.0.dev0"
+  "transformers_version": "4.40.0",
+  "vocab_size": 49408
 }
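
The new config describes a CLIP text encoder with projection in place of the vision encoder config the folder previously held. A minimal sanity check of the published values, written as a sketch: the repo id camenduru/IDM-VTON-F16 and the image_encoder subfolder are assumptions taken from the diff above (the id the new _name_or_path points at), not a verified loading path.

# Hypothetical check; repo id and subfolder are assumptions taken from the diff above.
from transformers import CLIPTextConfig

config = CLIPTextConfig.from_pretrained(
    "camenduru/IDM-VTON-F16", subfolder="image_encoder"
)
assert config.model_type == "clip_text_model"
assert config.num_attention_heads == 20 and config.projection_dim == 1280
assert config.vocab_size == 49408 and config.max_position_embeddings == 77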
image_encoder/model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6ca9667da1ca9e0b0f75e46bb030f7e011f44f86cbfb8d5a36590fcd7507b030
-size 2528373448
+oid sha256:ec310df2af79c318e24d20511b601a591ca8cd4f1fce1d8dff822a356bcdb1f4
+size 1389382176
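
A minimal sketch for checking a locally downloaded copy of the weights against the new LFS pointer (the oid and size shown above); the local path is an assumption.

import hashlib
import os

path = "image_encoder/model.safetensors"  # assumed local download location
expected_oid = "ec310df2af79c318e24d20511b601a591ca8cd4f1fce1d8dff822a356bcdb1f4"
expected_size = 1389382176

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        digest.update(chunk)

assert os.path.getsize(path) == expected_size, "size does not match LFS pointer"
assert digest.hexdigest() == expected_oid, "sha256 does not match LFS pointer"
print("image_encoder/model.safetensors matches the new LFS pointer")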