Bingsu committed on
Commit
d2b256b
1 Parent(s): fc96f45

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +3 -3
config.json CHANGED
@@ -8,9 +8,9 @@
8
  "AutoConfig": "modeling_clip_masked_lm.CLIPTextConfig",
9
  "AutoModelForMaskedLM": "modeling_clip_masked_lm.CLIPTextModelForMaskedLM"
10
  },
11
- "bos_token_id": 0,
12
  "dropout": 0.0,
13
- "eos_token_id": 2,
14
  "hidden_act": "quick_gelu",
15
  "hidden_size": 768,
16
  "initializer_factor": 1.0,
@@ -21,7 +21,7 @@
21
  "model_type": "clip_text_model",
22
  "num_attention_heads": 12,
23
  "num_hidden_layers": 12,
24
- "pad_token_id": 1,
25
  "projection_dim": 768,
26
  "torch_dtype": "float32",
27
  "transformers_version": "4.24.0",
 
8
  "AutoConfig": "modeling_clip_masked_lm.CLIPTextConfig",
9
  "AutoModelForMaskedLM": "modeling_clip_masked_lm.CLIPTextModelForMaskedLM"
10
  },
11
+ "bos_token_id": 49406,
12
  "dropout": 0.0,
13
+ "eos_token_id": 49407,
14
  "hidden_act": "quick_gelu",
15
  "hidden_size": 768,
16
  "initializer_factor": 1.0,
 
21
  "model_type": "clip_text_model",
22
  "num_attention_heads": 12,
23
  "num_hidden_layers": 12,
24
+ "pad_token_id": 49407,
25
  "projection_dim": 768,
26
  "torch_dtype": "float32",
27
  "transformers_version": "4.24.0",