whizzzzkid committed
Commit 9de18a7
1 Parent(s): d0b5813

Upload StableLmForCausalLM

Files changed (3)
  1. config.json +3 -4
  2. generation_config.json +2 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -1,12 +1,11 @@
 {
-  "_name_or_path": "axalotl_ft",
+  "_name_or_path": "sn6-finetuning/test_471",
   "architectures": [
     "StableLmForCausalLM"
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 100289,
-  "eos_token_id": 100257,
-  "eos_token_id_was": 100290,
+  "eos_token_id": 100290,
   "hidden_act": "silu",
   "hidden_dropout": 0.0,
   "hidden_size": 2048,
@@ -24,7 +23,7 @@
   "rope_theta": 10000,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.38.2",
+  "transformers_version": "4.40.0",
   "use_cache": false,
   "use_parallel_residual": false,
   "use_qkv_bias": true,
generation_config.json CHANGED
@@ -2,5 +2,6 @@
   "_from_model_config": true,
   "bos_token_id": 100289,
   "eos_token_id": 100290,
-  "transformers_version": "4.38.2"
+  "transformers_version": "4.40.0",
+  "use_cache": false
 }
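
generation_config.json now also pins use_cache to false in the generation defaults. A minimal sketch of loading it with transformers, again with a hypothetical local path:

from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("path/to/local/clone")  # hypothetical path
assert gen_config.eos_token_id == 100290  # generation stops on this token id
assert gen_config.use_cache is False      # KV cache disabled by default as of this commit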
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8b6dc04b44dbb3019e6850d82d1748a6ed9fe5f777eefd6859854ff99ba13821
+oid sha256:282d2671343a18168fd862561fb3d042d9a7337a5b8cd1828d5ed3befe6a12eb
 size 3289069520
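
The weights file keeps the same byte size but gets a new Git LFS object id. A minimal sketch of verifying a downloaded model.safetensors against the pointer above (the local file path is an assumption):

import hashlib
from pathlib import Path

path = Path("model.safetensors")  # assumed location of the downloaded weights file
sha256 = hashlib.sha256()
with path.open("rb") as f:
    # Hash in 1 MiB chunks so the ~3.3 GB file is not read into memory at once.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)

assert path.stat().st_size == 3289069520
assert sha256.hexdigest() == "282d2671343a18168fd862561fb3d042d9a7337a5b8cd1828d5ed3befe6a12eb"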