Lugaborg committed on
Commit
3915170
1 Parent(s): 162300e

Upload StableLmForCausalLM

Files changed (3)
  1. config.json +3 -3
  2. generation_config.json +4 -4
  3. model.safetensors +1 -1
config.json CHANGED
@@ -4,8 +4,8 @@
    "StableLmForCausalLM"
  ],
  "attention_dropout": 0.0,
- "bos_token_id": 100289,
- "eos_token_id": 100290,
+ "bos_token_id": 100257,
+ "eos_token_id": 100257,
  "hidden_act": "silu",
  "hidden_dropout": 0.0,
  "hidden_size": 2048,
@@ -22,7 +22,7 @@
  "rope_theta": 10000,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
- "transformers_version": "4.38.0",
+ "transformers_version": "4.38.2",
  "use_cache": false,
  "use_qkv_bias": true,
  "vocab_size": 100352
generation_config.json CHANGED
@@ -1,7 +1,7 @@
  {
  "_from_model_config": true,
- "bos_token_id": 100289,
- "eos_token_id": 100290,
- "transformers_version": "4.38.0",
- "use_cache": false
+ "bos_token_id": 100257,
+ "do_sample": true,
+ "eos_token_id": 100257,
+ "transformers_version": "4.38.2"
  }
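
The regenerated generation_config.json turns on sampling by default (do_sample: true) and drops the explicit use_cache: false. A sketch of how these defaults surface through transformers' GenerationConfig, again with a placeholder repo id:

# Minimal sketch: inspect the new generation defaults.
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("Lugaborg/model-repo")  # placeholder id
print(gen.do_sample)     # True: generate() samples unless told otherwise
print(gen.eos_token_id)  # 100257, matching config.json
# With "use_cache" removed from this file, GenerationConfig falls back to its
# library default (True); config.json's use_cache: false still applies to
# plain forward passes.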
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:d8daf726fa6dce309c591a540c45710390785ef6c08764d6c6ced7515c6c9629
+ oid sha256:7cb37aa73e2c35ec59c739c2aecff9977a5437ff16402d585f8fb015789ada42
  size 3289069520
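
Only the LFS pointer changed for model.safetensors: a new sha256 oid at an identical byte size, which suggests re-saved weights with the same tensor shapes but different values. A sketch for verifying a downloaded copy against the new pointer; the local path is an assumption:

# Minimal sketch: verify a local model.safetensors against the new pointer.
import hashlib
import os

path = "model.safetensors"  # adjust to the actual download location
expected_oid = "7cb37aa73e2c35ec59c739c2aecff9977a5437ff16402d585f8fb015789ada42"

assert os.path.getsize(path) == 3289069520  # size is unchanged by this commit

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
        h.update(chunk)

assert h.hexdigest() == expected_oid, "file does not match this commit"
print("ok: local weights match the new LFS oid")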