gate369 committed on
Commit 0887c5f
1 Parent(s): 79dacea

(Trained with Unsloth)

config.json CHANGED
@@ -12,14 +12,17 @@
   "hidden_size": 2304,
   "initializer_range": 0.1,
   "intermediate_size": 5760,
-  "max_position_embeddings": 8192,
+  "max_position_embeddings": 32768,
   "model_type": "llama",
   "num_attention_heads": 36,
   "num_hidden_layers": 40,
   "num_key_value_heads": 36,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
-  "rope_scaling": null,
+  "rope_scaling": {
+    "factor": 4.0,
+    "type": "linear"
+  },
   "rope_theta": 10000.0,
   "scale_depth": 1.4,
   "scale_emb": 12,
pytorch_model-00001-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6e6306fd324d43c93a09f1af05b48424827f8c0b72f55723639b68d1f7d698aa
+oid sha256:a51b0ca88bab3fca5379779ce51e5633dfdff89eb38f2e63b1be56f13129b7dd
 size 4993313262
pytorch_model-00002-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b9fb7d5b0f57a8936b837e343d19f2334649b102619c5744c887eb570315cce7
+oid sha256:1629609cd855d782e8c7a5fd96f28aadc7191d342ff9a64c4743fe3d3ef42ce1
 size 1022223648
tokenizer_config.json CHANGED
@@ -38,7 +38,7 @@
     "input_ids",
     "attention_mask"
   ],
-  "model_max_length": 8192,
+  "model_max_length": 32768,
   "pad_token": "</s>",
   "padding_side": "right",
   "sep_token": null,