sequelbox committed
Commit b55fd00
1 parent: 7568e1c

Upload folder using huggingface_hub (#2)


- 798409a979d450e735696ebd1040081a798abba827e809137afcff8eab7acdf0 (c8a22b9cbec6d1b747bb9fd0122020dfb23e1026)
- a955320a8068680043c032ebd495ff5d90ad2382fdb0f696713f7c4ec67f7f13 (45aa27e1bb59424732295b32327c214cb0e35c21)
- e90bf7c7d894903acd370d10cad60d0ac50fc35681ac3f68a7ce313df9750cb8 (a4bc6bb5d32c965710538b46a475f69b978c1da8)
- d6ef2ac8dfca7a720e8c8813523ed7d85e6e49dbf59a4350bbd500d27000cd34 (c08340c35d6408e0693321ec5ee6d1e2bc9a1737)
- b692fe35e974ccf4e6471e5580c339aaee83d89258eadc43aa1a378ef80c6269 (60292a513778c38eb517274bf45f231809cc3397)
- 5b210dec1ad61e51869debb7cd22a8c984fa074a555b8bba40c4217ef4835884 (c3220398f104cb1e1b1c0584ff74f5cbaad05377)
- 2a075e57de335431ef072747491fdc9127cbb7d3125eac0f36e2819ed1928fce (0acacded49271a0d208977aad2b93be866665392)
- a6993eea56e56763af175dacd2e2fcb1bc18fe7da66ab8706c7b7e23fb9153a5 (f377db6e924e0e461ecb81e130017f1a13a3c47e)
- 77bd4dcb1a5a7f3b466c3a6d60b5e1530b69c0d2389d48f1214e5fffa8d7effd (7956978b2615b07de114adf5216d7ef1598f9cc5)
- 71cb2d9b4bd69f4a29d9bc1182ebb2fe55d991892558cb9610f341a4efe58bb8 (d81f456eaa4e7278b664e25b4d5f2ed105dba689)
- 5dc20712cc735d5029bfae52b76bb200bab3ee792b4141f51a4aaa653bb6e9f2 (8a2d7cab1cbaf899bac69f30f3e38e5b622417b7)
- 38e0013b1f4d29ad620e05c4fc8e7c7bdf5f9a3e493c838c9ada5cfc4716de5c (8bc22264786fb2fe57ae626db09662d0b5551840)
- 96ed557f923f5a5bdd69927cbd5cef5a2aadd66fe0e75f9b15af7df965f72e68 (65fb6320e37bec4dc68f7529d9bc4aab50a349af)
- 8a0d825c6450d033726a6752910d621e00b0292a743913b49c9f1351ec72d8e6 (974a5ceeb7ff0e383e98460fab2a6f3b8bfe37be)
- 0f886cee80c803d6bdadc90520de2a8588409ba731dde7677254ed96066a2f8c (69ee36cfa711a1ae24f7397b4659beaefa700843)
- 614bcedb4017ed5fe9f68fb8267a3d27f4e1a73ae99af3332d730897be05adac (2bbc2841b12b9ddc3e5aefce3feb7b4772380abc)
- d1d9de547b99d915b2552725477f0bdd48ea26a7cb2800b4cc4c87db46b859ae (9b68024941656baf574c653141328cef754a8a87)
- 1fe85f3368d26e48ed28e1e4a57392415441a36dc323c059b133264e655cd858 (9d56c8dd0dfc8d0922dd291b35a43ba49d31517c)
- 61c0ced816f09886269c46486ba64cc9b3dca88c9951a6f37028439d4450f125 (c5d986e94c051eaae5139b8bf1b80201236d0eff)
- cd7bf3aa23230bf276a5b5f855961f986c06be3ada1ed5bfb9b7bc4908686b85 (ffd457ffd507a112523c3a2d4e61441623ecd24d)
- efec8ef9ffc86848cac1f2e223c16dfd0e8e716cff88121bb80ec987cd4c76a8 (89aaf25bd62ce16cdeeba9236c37b40c11e91d81)
- 413ba75510ccd610ca06d2e8f97d56a06ab9c35e30eb0c7b10b94fe5e3c86f9c (d39024c630cb4734da6d7b7e93a80a08fa4c68f5)
- 44a0ff047671aa22ff068855caae617ec367e2a54b08607eeda7f985fb685abc (8f434a406efb850656c5c6014c92f1a52a432645)
- 69adbb8b0a0ad47065955247bed524ad611adba001a8633b5e342ad45ea5a0a9 (ba1f4706369c2b04bc319116a9f1fb4542c50661)
- a99a746a3f91797e99f57dabc8c7029af3a03192ecf88068f9899649a03256e9 (095e56a0e68b7a722f7d460eefe4b726c7e79754)
- 3b72a65957af1602a31d490747ec1157a33e7c8689b929697abab86e96a3ccc0 (a57c73ffbcc2a78e32c26ffbad00b7aad0f1e17d)
- 7e1675d0d1928f1adc92c7a9990772a67eeee8cd6f52aabefa04817472752749 (3b278bb60b10b0ae41f9e9abb004d97bcd3f4668)
- 844ad3692c9972783db89ec4d184052a199e05fda9a2b4120b2ec5ca48a07f91 (52c08f28d339517225b8a33802b0c3354cbc1dd3)
- 24abebb192240eb92013b9496908b17e991cd68178b36a74b1e51b22f8c738c1 (7341048a9df5316aa793700e59ec69ce7d6057ca)
- 2b23af56914d2fe770e9a61b08e8f865fd72d79a583137af704678de289c27dd (798352a59b28c59d8ea84a23507596139180b40e)
- a505d4506580310dafd838f835f957d858bcdc54bedd81efdfb71c61db1393ad (64acf850704cc81f73bac6ea0796c817642a33e8)
- fcdcb8b521623d44fa163263e407737877eea8f93071b4ae3f3abea9f4a27b2f (e03fb4a9052b75ffb23bedc65fde08f0ea0a5890)
- 4f03389a547f8dc3e87cc27d46c2b64c3c33c05b1f86b6e40e05eef512df6889 (8086d8f8229ed80a410c75159986f6912d0c338c)
- 78d3e1734d2b87d80b02f5fc717a002a4a731f98300c48437b3d3cba1b4291e8 (fc416c1bf419aff2a48f338531bdbbd30a7e94f0)
- f91824db826c51be7b4c574d1db256146f05301311ce603ac3371617d247992f (53ac90a76edf6c24921152b50f266d3c07535ebb)
- a6d370b598634e4ba607bd9e09e7ead6d4f1af6d845fd762c39f0bc15c22232f (c89ed9271cb8783ab90c694aaabaa7f09fa44c7d)
- 517420c08776b3ad19cee8392380da7d7f39dc39ad68f9eb8260b23bad343297 (42588e0bd7b98b08744bb74c4f9d3115beb916a4)
- 520f9ca27e83e87971ef989cbafe7b66f31e1afa199bbbc27ada47ace2c578b1 (2938cddc8ff5a8e53ea7c7b6e68fd35be6ca0f33)
- 6a42a43211582fdadf2936aacd2b39a86cc4a526918634b22db9d89094fb68da (a7897df04b5db2db1f4e75fae3370d00be42ab61)
- 8e16e42f487195dd7bfb05a8362e81d797057b43a2d94555e6df1791ac24e748 (730f8d57a87daee09147906303d4206f3dd85f98)
- 64199c1d077dafe8e662fa604f534ddee29a144b8022890d6e55bc90876132a1 (53ac39e023d4c83b8110401a042c6a4a8dbe8ab1)
- c948484a736333877f297355ea483adcd02a169834b722e7a7c42a89bd456780 (643b293f954c6342afc0f38fe47e84dbb3f224d8)
- 732f835f867672a13d878374c5eb07b1e14948cfe6513e9d43c4f220edc09348 (1ed127b3756bf594d390da3b3bccedc4f949b3b5)
- 27e9640da75a16d20de9c5681d466dcf0503b29da0abaa39ca3694a4e7bb37b7 (ffefcd3c9e632809470fe681778f804202967a95)
- d91b0db8fd4a95432063612843bf06eb0122961ba8998a6d8e378ba7d76e855e (6649c0947554b04debf8edca17d7f60e1a37ae1d)
- 1ccb648f918baaff7d4d982bbc2c779a0b8f33e185e8ca4d308ca43b5759af12 (d71ce08338876bde4285dd8952d4368d58b3c4f2)
- 0a489f67b4223c15cc5457d1ee4c2b25582594f34cf6885976f8b4f4925704bb (aef9400a825d37f8f79cf03aa8f6535d7449a1bf)
- f2fdde45bfa0889ddb462c2edce3b60943648c59fcbcca973e5b38b852c47aae (c268c54a2a7da08608ff297870a01ef0bf22d3bd)
- 8bbb28d9d92a358a76edc976620fe7cb51376d0f8f214d1273179607640dbd66 (3a6fbc1b3bc39401996e4424d72aec0a4574c680)
- 6f2f988a5ebd1e6c7f8dd5ecfbd6c3bf080d6812d89e388934e5414c6d17f113 (fb0b32ed330f823ab27610802df2a18763af7828)
- 8d70a6111eecc6753e1f42349356f9fdf7568bd41113af1c488c0ec10dbde705 (b54cb9291476344405d23c7ba776b55392c88027)
- 745ebf9e3a4ad88c995612acad31841bbe45ea78302b20dfdc9f8bd05e8156c3 (db8303fabe55c7e7d79fdfc24569aed9e70c68cd)
- 666fd69eedfc3e715a0ed5b230eba196e780aca68097076c5af4208f314ea6a7 (8c228bf9655ccd9946d93092d5c661a1c852952c)
- da77f193e87b2399af6b4ef25f520c0614c4943dbd99f95963d076da8f04c243 (1cf60d71713a8e9d296daa3b2620dca73aada0f3)
- 5199a10694939a782eb133e141f1e11b722e054f2498e6df65442cd7fa76a254 (0502516245a6858bc9f7e66243d21143df4a514b)
- 135faca87a89b08d7d0bd96ad741174cf758c7f15c945a546fa92d38735ecab5 (c22c55fae2dfd89516e36316f4514ea3e1c83567)
- c9ca4cb121c975a151698c242c86e47132c3b873e44b1078d20f57455c09f55e (6da1440fe8be2537aecdc4659be443ba9dd56e39)
- 9394a8e5522145824c0c86fbde9e39e2b1e42e1243b223e3a8f200640e0cfa2c (c9f8949246f21301590a5cfdeefa9ac178d2b803)
- 7de5d419872858434761d7f2d51d46e4c363d5da1ba8225c70d4821a1068cb27 (9e29fea84b3a1bee03dd6e27f7ae43eb73a8fd4e)
- e16c3245911b7c36da730eaac65105fdbe63e47b423283687ab4ff638992981a (84af724b3371b5db438f0094251a4e3e15fc457a)
- f95f93fe6e938a8c213401060925799137b9fcfd2deb84432065b04918407324 (5a8d5477a3ea89b3d06d7de0d898a23579aac42f)

This view is limited to 50 files because it contains too many changes. See the raw diff for the complete change list.

Files changed (50):
  1. .ipynb_checkpoints/config-checkpoint.json +29 -0
  2. .ipynb_checkpoints/generation_config-checkpoint.json +8 -0
  3. .ipynb_checkpoints/model.safetensors.index-checkpoint.json +730 -0
  4. .ipynb_checkpoints/special_tokens_map-checkpoint.json +24 -0
  5. .ipynb_checkpoints/tokenizer-checkpoint.json +0 -0
  6. config.json +3 -2
  7. generation_config.json +1 -1
  8. model-00001-of-00061.safetensors +3 -0
  9. model-00002-of-00061.safetensors +3 -0
  10. model-00003-of-00061.safetensors +3 -0
  11. model-00004-of-00061.safetensors +3 -0
  12. model-00005-of-00061.safetensors +3 -0
  13. model-00006-of-00061.safetensors +3 -0
  14. model-00007-of-00061.safetensors +3 -0
  15. model-00008-of-00061.safetensors +3 -0
  16. model-00009-of-00061.safetensors +3 -0
  17. model-00010-of-00061.safetensors +3 -0
  18. model-00011-of-00061.safetensors +3 -0
  19. model-00012-of-00061.safetensors +3 -0
  20. model-00013-of-00061.safetensors +3 -0
  21. model-00014-of-00061.safetensors +3 -0
  22. model-00015-of-00061.safetensors +3 -0
  23. model-00016-of-00061.safetensors +3 -0
  24. model-00017-of-00061.safetensors +3 -0
  25. model-00018-of-00061.safetensors +3 -0
  26. model-00019-of-00061.safetensors +3 -0
  27. model-00020-of-00061.safetensors +3 -0
  28. model-00021-of-00061.safetensors +3 -0
  29. model-00022-of-00061.safetensors +3 -0
  30. model-00023-of-00061.safetensors +3 -0
  31. model-00024-of-00061.safetensors +3 -0
  32. model-00025-of-00061.safetensors +3 -0
  33. model-00026-of-00061.safetensors +3 -0
  34. model-00027-of-00061.safetensors +3 -0
  35. model-00028-of-00061.safetensors +3 -0
  36. model-00029-of-00061.safetensors +3 -0
  37. model-00030-of-00061.safetensors +3 -0
  38. model-00031-of-00061.safetensors +3 -0
  39. model-00032-of-00061.safetensors +3 -0
  40. model-00033-of-00061.safetensors +3 -0
  41. model-00034-of-00061.safetensors +3 -0
  42. model-00035-of-00061.safetensors +3 -0
  43. model-00036-of-00061.safetensors +3 -0
  44. model-00037-of-00061.safetensors +3 -0
  45. model-00038-of-00061.safetensors +3 -0
  46. model-00039-of-00061.safetensors +3 -0
  47. model-00040-of-00061.safetensors +3 -0
  48. model-00041-of-00061.safetensors +3 -0
  49. model-00042-of-00061.safetensors +3 -0
  50. model-00043-of-00061.safetensors +3 -0
.ipynb_checkpoints/config-checkpoint.json ADDED
@@ -0,0 +1,29 @@
+{
+  "_name_or_path": "sequelbox/StellarBright",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 8192,
+  "initializer_range": 0.02,
+  "intermediate_size": 28672,
+  "max_position_embeddings": 4096,
+  "model_type": "llama",
+  "num_attention_heads": 64,
+  "num_hidden_layers": 80,
+  "num_key_value_heads": 8,
+  "pad_token_id": 0,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float32",
+  "transformers_version": "4.36.2",
+  "use_cache": false,
+  "vocab_size": 32000
+}
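
The config above is a stock transformers Llama config (80 layers, hidden size 8192, 8 KV heads, i.e. a Llama-2-70B shape with grouped-query attention). As a minimal sketch of consuming it, assuming the transformers library is installed and using "sequelbox/StellarBright" (the _name_or_path recorded above) as an illustrative repo id:

    # Sketch only: inspect the config fields shown in the diff above.
    from transformers import AutoConfig

    config = AutoConfig.from_pretrained("sequelbox/StellarBright")
    print(config.model_type)           # "llama"
    print(config.num_hidden_layers)    # 80
    print(config.num_key_value_heads)  # 8 -> grouped-query attention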
.ipynb_checkpoints/generation_config-checkpoint.json ADDED
@@ -0,0 +1,8 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "pad_token_id": 0,
+  "transformers_version": "4.36.2",
+  "use_cache": false
+}
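
Likewise, the generation defaults above can be read back with GenerationConfig; a short sketch under the same assumptions:

    # Sketch only: the ids match the bos/eos/pad values in the diff above.
    from transformers import GenerationConfig

    gen = GenerationConfig.from_pretrained("sequelbox/StellarBright")
    print(gen.bos_token_id, gen.eos_token_id, gen.pad_token_id)  # 1 2 0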
.ipynb_checkpoints/model.safetensors.index-checkpoint.json ADDED
@@ -0,0 +1,730 @@
+{
+  "metadata": {
+    "total_size": 275906592768
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00061-of-00061.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00061.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00061.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00061.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00061.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00061.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00061.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00061.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00061.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00061.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00061.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00002-of-00061.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00002-of-00061.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00002-of-00061.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00002-of-00061.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00002-of-00061.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00061.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00002-of-00061.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00061.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00061.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00009-of-00061.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00009-of-00061.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00008-of-00061.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00009-of-00061.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00009-of-00061.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00008-of-00061.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00008-of-00061.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00008-of-00061.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00008-of-00061.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00010-of-00061.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00010-of-00061.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00009-of-00061.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00009-of-00061.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00010-of-00061.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00009-of-00061.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00009-of-00061.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00009-of-00061.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00009-of-00061.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00010-of-00061.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00010-of-00061.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00010-of-00061.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00010-of-00061.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00010-of-00061.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00010-of-00061.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00010-of-00061.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00010-of-00061.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00010-of-00061.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00011-of-00061.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00011-of-00061.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00011-of-00061.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00011-of-00061.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00011-of-00061.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00010-of-00061.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00010-of-00061.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00010-of-00061.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00010-of-00061.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00012-of-00061.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00012-of-00061.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00011-of-00061.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00012-of-00061.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00012-of-00061.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00011-of-00061.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00011-of-00061.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00011-of-00061.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00011-of-00061.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00013-of-00061.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00013-of-00061.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00012-of-00061.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00012-of-00061.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00013-of-00061.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00012-of-00061.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00012-of-00061.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00012-of-00061.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00012-of-00061.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00013-of-00061.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00013-of-00061.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00013-of-00061.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00013-of-00061.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00013-of-00061.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00013-of-00061.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00013-of-00061.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00013-of-00061.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00013-of-00061.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00014-of-00061.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00014-of-00061.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00014-of-00061.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00014-of-00061.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00014-of-00061.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00013-of-00061.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00013-of-00061.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00013-of-00061.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00013-of-00061.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00015-of-00061.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00015-of-00061.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00014-of-00061.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00015-of-00061.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00015-of-00061.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00014-of-00061.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00014-of-00061.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00014-of-00061.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00014-of-00061.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00016-of-00061.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00016-of-00061.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00015-of-00061.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00015-of-00061.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00016-of-00061.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00015-of-00061.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00015-of-00061.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00015-of-00061.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00015-of-00061.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00003-of-00061.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00003-of-00061.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00002-of-00061.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00003-of-00061.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00003-of-00061.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00002-of-00061.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00002-of-00061.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00002-of-00061.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00002-of-00061.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00016-of-00061.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00016-of-00061.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00016-of-00061.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00016-of-00061.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00016-of-00061.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00016-of-00061.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00016-of-00061.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00016-of-00061.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00016-of-00061.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00017-of-00061.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00017-of-00061.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00017-of-00061.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00017-of-00061.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00017-of-00061.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00016-of-00061.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00016-of-00061.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00016-of-00061.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00016-of-00061.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00018-of-00061.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00018-of-00061.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00017-of-00061.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00018-of-00061.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00018-of-00061.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00017-of-00061.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00017-of-00061.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00017-of-00061.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00017-of-00061.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00019-of-00061.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00019-of-00061.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00018-of-00061.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00018-of-00061.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00019-of-00061.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00018-of-00061.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00018-of-00061.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00018-of-00061.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00018-of-00061.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00019-of-00061.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00019-of-00061.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00019-of-00061.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00019-of-00061.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00019-of-00061.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00019-of-00061.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00019-of-00061.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00019-of-00061.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00019-of-00061.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00020-of-00061.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00020-of-00061.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00020-of-00061.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00020-of-00061.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00020-of-00061.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00019-of-00061.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00019-of-00061.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00019-of-00061.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00019-of-00061.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00021-of-00061.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00021-of-00061.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00020-of-00061.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00021-of-00061.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00021-of-00061.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00020-of-00061.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00020-of-00061.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00020-of-00061.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00020-of-00061.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00022-of-00061.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00022-of-00061.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00021-of-00061.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00021-of-00061.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00022-of-00061.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00021-of-00061.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00021-of-00061.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00021-of-00061.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00021-of-00061.safetensors",
+    "model.layers.28.input_layernorm.weight": "model-00022-of-00061.safetensors",
+    "model.layers.28.mlp.down_proj.weight": "model-00022-of-00061.safetensors",
+    "model.layers.28.mlp.gate_proj.weight": "model-00022-of-00061.safetensors",
+    "model.layers.28.mlp.up_proj.weight": "model-00022-of-00061.safetensors",
+    "model.layers.28.post_attention_layernorm.weight": "model-00022-of-00061.safetensors",
+    "model.layers.28.self_attn.k_proj.weight": "model-00022-of-00061.safetensors",
+    "model.layers.28.self_attn.o_proj.weight": "model-00022-of-00061.safetensors",
+    "model.layers.28.self_attn.q_proj.weight": "model-00022-of-00061.safetensors",
+    "model.layers.28.self_attn.v_proj.weight": "model-00022-of-00061.safetensors",
+    "model.layers.29.input_layernorm.weight": "model-00023-of-00061.safetensors",
+    "model.layers.29.mlp.down_proj.weight": "model-00023-of-00061.safetensors",
+    "model.layers.29.mlp.gate_proj.weight": "model-00023-of-00061.safetensors",
+    "model.layers.29.mlp.up_proj.weight": "model-00023-of-00061.safetensors",
+    "model.layers.29.post_attention_layernorm.weight": "model-00023-of-00061.safetensors",
+    "model.layers.29.self_attn.k_proj.weight": "model-00022-of-00061.safetensors",
+    "model.layers.29.self_attn.o_proj.weight": "model-00022-of-00061.safetensors",
+    "model.layers.29.self_attn.q_proj.weight": "model-00022-of-00061.safetensors",
+    "model.layers.29.self_attn.v_proj.weight": "model-00022-of-00061.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00004-of-00061.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00004-of-00061.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00003-of-00061.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00003-of-00061.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00004-of-00061.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00003-of-00061.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00003-of-00061.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00003-of-00061.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00003-of-00061.safetensors",
+    "model.layers.30.input_layernorm.weight": "model-00024-of-00061.safetensors",
+    "model.layers.30.mlp.down_proj.weight": "model-00024-of-00061.safetensors",
+    "model.layers.30.mlp.gate_proj.weight": "model-00023-of-00061.safetensors",
+    "model.layers.30.mlp.up_proj.weight": "model-00024-of-00061.safetensors",
+    "model.layers.30.post_attention_layernorm.weight": "model-00024-of-00061.safetensors",
+    "model.layers.30.self_attn.k_proj.weight": "model-00023-of-00061.safetensors",
+    "model.layers.30.self_attn.o_proj.weight": "model-00023-of-00061.safetensors",
+    "model.layers.30.self_attn.q_proj.weight": "model-00023-of-00061.safetensors",
+    "model.layers.30.self_attn.v_proj.weight": "model-00023-of-00061.safetensors",
+    "model.layers.31.input_layernorm.weight": "model-00025-of-00061.safetensors",
+    "model.layers.31.mlp.down_proj.weight": "model-00025-of-00061.safetensors",
+    "model.layers.31.mlp.gate_proj.weight": "model-00024-of-00061.safetensors",
+    "model.layers.31.mlp.up_proj.weight": "model-00024-of-00061.safetensors",
+    "model.layers.31.post_attention_layernorm.weight": "model-00025-of-00061.safetensors",
+    "model.layers.31.self_attn.k_proj.weight": "model-00024-of-00061.safetensors",
+    "model.layers.31.self_attn.o_proj.weight": "model-00024-of-00061.safetensors",
+    "model.layers.31.self_attn.q_proj.weight": "model-00024-of-00061.safetensors",
+    "model.layers.31.self_attn.v_proj.weight": "model-00024-of-00061.safetensors",
+    "model.layers.32.input_layernorm.weight": "model-00025-of-00061.safetensors",
+    "model.layers.32.mlp.down_proj.weight": "model-00025-of-00061.safetensors",
+    "model.layers.32.mlp.gate_proj.weight": "model-00025-of-00061.safetensors",
+    "model.layers.32.mlp.up_proj.weight": "model-00025-of-00061.safetensors",
+    "model.layers.32.post_attention_layernorm.weight": "model-00025-of-00061.safetensors",
+    "model.layers.32.self_attn.k_proj.weight": "model-00025-of-00061.safetensors",
+    "model.layers.32.self_attn.o_proj.weight": "model-00025-of-00061.safetensors",
+    "model.layers.32.self_attn.q_proj.weight": "model-00025-of-00061.safetensors",
+    "model.layers.32.self_attn.v_proj.weight": "model-00025-of-00061.safetensors",
+    "model.layers.33.input_layernorm.weight": "model-00026-of-00061.safetensors",
+    "model.layers.33.mlp.down_proj.weight": "model-00026-of-00061.safetensors",
+    "model.layers.33.mlp.gate_proj.weight": "model-00026-of-00061.safetensors",
+    "model.layers.33.mlp.up_proj.weight": "model-00026-of-00061.safetensors",
+    "model.layers.33.post_attention_layernorm.weight": "model-00026-of-00061.safetensors",
+    "model.layers.33.self_attn.k_proj.weight": "model-00025-of-00061.safetensors",
+    "model.layers.33.self_attn.o_proj.weight": "model-00025-of-00061.safetensors",
+    "model.layers.33.self_attn.q_proj.weight": "model-00025-of-00061.safetensors",
+    "model.layers.33.self_attn.v_proj.weight": "model-00025-of-00061.safetensors",
+    "model.layers.34.input_layernorm.weight": "model-00027-of-00061.safetensors",
+    "model.layers.34.mlp.down_proj.weight": "model-00027-of-00061.safetensors",
+    "model.layers.34.mlp.gate_proj.weight": "model-00026-of-00061.safetensors",
+    "model.layers.34.mlp.up_proj.weight": "model-00027-of-00061.safetensors",
+    "model.layers.34.post_attention_layernorm.weight": "model-00027-of-00061.safetensors",
+    "model.layers.34.self_attn.k_proj.weight": "model-00026-of-00061.safetensors",
+    "model.layers.34.self_attn.o_proj.weight": "model-00026-of-00061.safetensors",
+    "model.layers.34.self_attn.q_proj.weight": "model-00026-of-00061.safetensors",
+    "model.layers.34.self_attn.v_proj.weight": "model-00026-of-00061.safetensors",
+    "model.layers.35.input_layernorm.weight": "model-00028-of-00061.safetensors",
+    "model.layers.35.mlp.down_proj.weight": "model-00028-of-00061.safetensors",
+    "model.layers.35.mlp.gate_proj.weight": "model-00027-of-00061.safetensors",
+    "model.layers.35.mlp.up_proj.weight": "model-00027-of-00061.safetensors",
+    "model.layers.35.post_attention_layernorm.weight": "model-00028-of-00061.safetensors",
+    "model.layers.35.self_attn.k_proj.weight": "model-00027-of-00061.safetensors",
+    "model.layers.35.self_attn.o_proj.weight": "model-00027-of-00061.safetensors",
+    "model.layers.35.self_attn.q_proj.weight": "model-00027-of-00061.safetensors",
+    "model.layers.35.self_attn.v_proj.weight": "model-00027-of-00061.safetensors",
+    "model.layers.36.input_layernorm.weight": "model-00028-of-00061.safetensors",
+    "model.layers.36.mlp.down_proj.weight": "model-00028-of-00061.safetensors",
+    "model.layers.36.mlp.gate_proj.weight": "model-00028-of-00061.safetensors",
+    "model.layers.36.mlp.up_proj.weight": "model-00028-of-00061.safetensors",
+    "model.layers.36.post_attention_layernorm.weight": "model-00028-of-00061.safetensors",
+    "model.layers.36.self_attn.k_proj.weight": "model-00028-of-00061.safetensors",
+    "model.layers.36.self_attn.o_proj.weight": "model-00028-of-00061.safetensors",
+    "model.layers.36.self_attn.q_proj.weight": "model-00028-of-00061.safetensors",
+    "model.layers.36.self_attn.v_proj.weight": "model-00028-of-00061.safetensors",
+    "model.layers.37.input_layernorm.weight": "model-00029-of-00061.safetensors",
+    "model.layers.37.mlp.down_proj.weight": "model-00029-of-00061.safetensors",
+    "model.layers.37.mlp.gate_proj.weight": "model-00029-of-00061.safetensors",
+    "model.layers.37.mlp.up_proj.weight": "model-00029-of-00061.safetensors",
+    "model.layers.37.post_attention_layernorm.weight": "model-00029-of-00061.safetensors",
+    "model.layers.37.self_attn.k_proj.weight": "model-00028-of-00061.safetensors",
+    "model.layers.37.self_attn.o_proj.weight": "model-00028-of-00061.safetensors",
+    "model.layers.37.self_attn.q_proj.weight": "model-00028-of-00061.safetensors",
+    "model.layers.37.self_attn.v_proj.weight": "model-00028-of-00061.safetensors",
+    "model.layers.38.input_layernorm.weight": "model-00030-of-00061.safetensors",
+    "model.layers.38.mlp.down_proj.weight": "model-00030-of-00061.safetensors",
+    "model.layers.38.mlp.gate_proj.weight": "model-00029-of-00061.safetensors",
+    "model.layers.38.mlp.up_proj.weight": "model-00030-of-00061.safetensors",
+    "model.layers.38.post_attention_layernorm.weight": "model-00030-of-00061.safetensors",
+    "model.layers.38.self_attn.k_proj.weight": "model-00029-of-00061.safetensors",
+    "model.layers.38.self_attn.o_proj.weight": "model-00029-of-00061.safetensors",
+    "model.layers.38.self_attn.q_proj.weight": "model-00029-of-00061.safetensors",
+    "model.layers.38.self_attn.v_proj.weight": "model-00029-of-00061.safetensors",
+    "model.layers.39.input_layernorm.weight": "model-00031-of-00061.safetensors",
+    "model.layers.39.mlp.down_proj.weight": "model-00031-of-00061.safetensors",
+    "model.layers.39.mlp.gate_proj.weight": "model-00030-of-00061.safetensors",
+    "model.layers.39.mlp.up_proj.weight": "model-00030-of-00061.safetensors",
+    "model.layers.39.post_attention_layernorm.weight": "model-00031-of-00061.safetensors",
+    "model.layers.39.self_attn.k_proj.weight": "model-00030-of-00061.safetensors",
+    "model.layers.39.self_attn.o_proj.weight": "model-00030-of-00061.safetensors",
+    "model.layers.39.self_attn.q_proj.weight": "model-00030-of-00061.safetensors",
+    "model.layers.39.self_attn.v_proj.weight": "model-00030-of-00061.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00004-of-00061.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00004-of-00061.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00004-of-00061.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00004-of-00061.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00004-of-00061.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00004-of-00061.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00004-of-00061.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00004-of-00061.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00004-of-00061.safetensors",
+    "model.layers.40.input_layernorm.weight": "model-00031-of-00061.safetensors",
+    "model.layers.40.mlp.down_proj.weight": "model-00031-of-00061.safetensors",
+    "model.layers.40.mlp.gate_proj.weight": "model-00031-of-00061.safetensors",
+    "model.layers.40.mlp.up_proj.weight": "model-00031-of-00061.safetensors",
+    "model.layers.40.post_attention_layernorm.weight": "model-00031-of-00061.safetensors",
+    "model.layers.40.self_attn.k_proj.weight": "model-00031-of-00061.safetensors",
+    "model.layers.40.self_attn.o_proj.weight": "model-00031-of-00061.safetensors",
+    "model.layers.40.self_attn.q_proj.weight": "model-00031-of-00061.safetensors",
+    "model.layers.40.self_attn.v_proj.weight": "model-00031-of-00061.safetensors",
+    "model.layers.41.input_layernorm.weight": "model-00032-of-00061.safetensors",
+    "model.layers.41.mlp.down_proj.weight": "model-00032-of-00061.safetensors",
+    "model.layers.41.mlp.gate_proj.weight": "model-00032-of-00061.safetensors",
+    "model.layers.41.mlp.up_proj.weight": "model-00032-of-00061.safetensors",
+    "model.layers.41.post_attention_layernorm.weight": "model-00032-of-00061.safetensors",
+    "model.layers.41.self_attn.k_proj.weight": "model-00031-of-00061.safetensors",
+    "model.layers.41.self_attn.o_proj.weight": "model-00031-of-00061.safetensors",
+    "model.layers.41.self_attn.q_proj.weight": "model-00031-of-00061.safetensors",
+    "model.layers.41.self_attn.v_proj.weight": "model-00031-of-00061.safetensors",
+    "model.layers.42.input_layernorm.weight": "model-00033-of-00061.safetensors",
+    "model.layers.42.mlp.down_proj.weight": "model-00033-of-00061.safetensors",
+    "model.layers.42.mlp.gate_proj.weight": "model-00032-of-00061.safetensors",
+    "model.layers.42.mlp.up_proj.weight": "model-00033-of-00061.safetensors",
+    "model.layers.42.post_attention_layernorm.weight": "model-00033-of-00061.safetensors",
+    "model.layers.42.self_attn.k_proj.weight": "model-00032-of-00061.safetensors",
+    "model.layers.42.self_attn.o_proj.weight": "model-00032-of-00061.safetensors",
+    "model.layers.42.self_attn.q_proj.weight": "model-00032-of-00061.safetensors",
+    "model.layers.42.self_attn.v_proj.weight": "model-00032-of-00061.safetensors",
+    "model.layers.43.input_layernorm.weight": "model-00034-of-00061.safetensors",
+    "model.layers.43.mlp.down_proj.weight": "model-00034-of-00061.safetensors",
+    "model.layers.43.mlp.gate_proj.weight": "model-00033-of-00061.safetensors",
+    "model.layers.43.mlp.up_proj.weight": "model-00033-of-00061.safetensors",
+    "model.layers.43.post_attention_layernorm.weight": "model-00034-of-00061.safetensors",
+    "model.layers.43.self_attn.k_proj.weight": "model-00033-of-00061.safetensors",
+    "model.layers.43.self_attn.o_proj.weight": "model-00033-of-00061.safetensors",
+    "model.layers.43.self_attn.q_proj.weight": "model-00033-of-00061.safetensors",
+    "model.layers.43.self_attn.v_proj.weight": "model-00033-of-00061.safetensors",
+    "model.layers.44.input_layernorm.weight": "model-00034-of-00061.safetensors",
+    "model.layers.44.mlp.down_proj.weight": "model-00034-of-00061.safetensors",
+    "model.layers.44.mlp.gate_proj.weight": "model-00034-of-00061.safetensors",
+    "model.layers.44.mlp.up_proj.weight": "model-00034-of-00061.safetensors",
+    "model.layers.44.post_attention_layernorm.weight": "model-00034-of-00061.safetensors",
+    "model.layers.44.self_attn.k_proj.weight": "model-00034-of-00061.safetensors",
+    "model.layers.44.self_attn.o_proj.weight": "model-00034-of-00061.safetensors",
+    "model.layers.44.self_attn.q_proj.weight": "model-00034-of-00061.safetensors",
+    "model.layers.44.self_attn.v_proj.weight": "model-00034-of-00061.safetensors",
+    "model.layers.45.input_layernorm.weight": "model-00035-of-00061.safetensors",
+    "model.layers.45.mlp.down_proj.weight": "model-00035-of-00061.safetensors",
+    "model.layers.45.mlp.gate_proj.weight": "model-00035-of-00061.safetensors",
+    "model.layers.45.mlp.up_proj.weight": "model-00035-of-00061.safetensors",
+    "model.layers.45.post_attention_layernorm.weight": "model-00035-of-00061.safetensors",
+    "model.layers.45.self_attn.k_proj.weight": "model-00034-of-00061.safetensors",
+    "model.layers.45.self_attn.o_proj.weight": "model-00034-of-00061.safetensors",
+    "model.layers.45.self_attn.q_proj.weight": "model-00034-of-00061.safetensors",
+    "model.layers.45.self_attn.v_proj.weight": "model-00034-of-00061.safetensors",
+    "model.layers.46.input_layernorm.weight": "model-00036-of-00061.safetensors",
+    "model.layers.46.mlp.down_proj.weight": "model-00036-of-00061.safetensors",
+    "model.layers.46.mlp.gate_proj.weight": "model-00035-of-00061.safetensors",
+    "model.layers.46.mlp.up_proj.weight": "model-00036-of-00061.safetensors",
+    "model.layers.46.post_attention_layernorm.weight": "model-00036-of-00061.safetensors",
+    "model.layers.46.self_attn.k_proj.weight": "model-00035-of-00061.safetensors",
+    "model.layers.46.self_attn.o_proj.weight": "model-00035-of-00061.safetensors",
+    "model.layers.46.self_attn.q_proj.weight": "model-00035-of-00061.safetensors",
+    "model.layers.46.self_attn.v_proj.weight": "model-00035-of-00061.safetensors",
+    "model.layers.47.input_layernorm.weight": "model-00037-of-00061.safetensors",
+    "model.layers.47.mlp.down_proj.weight": "model-00037-of-00061.safetensors",
+    "model.layers.47.mlp.gate_proj.weight": "model-00036-of-00061.safetensors",
+    "model.layers.47.mlp.up_proj.weight": "model-00036-of-00061.safetensors",
+    "model.layers.47.post_attention_layernorm.weight": "model-00037-of-00061.safetensors",
+    "model.layers.47.self_attn.k_proj.weight": "model-00036-of-00061.safetensors",
+    "model.layers.47.self_attn.o_proj.weight": "model-00036-of-00061.safetensors",
+    "model.layers.47.self_attn.q_proj.weight": "model-00036-of-00061.safetensors",
+    "model.layers.47.self_attn.v_proj.weight": "model-00036-of-00061.safetensors",
+    "model.layers.48.input_layernorm.weight": "model-00037-of-00061.safetensors",
+    "model.layers.48.mlp.down_proj.weight": "model-00037-of-00061.safetensors",
+    "model.layers.48.mlp.gate_proj.weight": "model-00037-of-00061.safetensors",
+    "model.layers.48.mlp.up_proj.weight": "model-00037-of-00061.safetensors",
+    "model.layers.48.post_attention_layernorm.weight": "model-00037-of-00061.safetensors",
+    "model.layers.48.self_attn.k_proj.weight": "model-00037-of-00061.safetensors",
+    "model.layers.48.self_attn.o_proj.weight": "model-00037-of-00061.safetensors",
+    "model.layers.48.self_attn.q_proj.weight": "model-00037-of-00061.safetensors",
+    "model.layers.48.self_attn.v_proj.weight": "model-00037-of-00061.safetensors",
+    "model.layers.49.input_layernorm.weight": "model-00038-of-00061.safetensors",
+    "model.layers.49.mlp.down_proj.weight": "model-00038-of-00061.safetensors",
+    "model.layers.49.mlp.gate_proj.weight": "model-00038-of-00061.safetensors",
+    "model.layers.49.mlp.up_proj.weight": "model-00038-of-00061.safetensors",
+    "model.layers.49.post_attention_layernorm.weight": "model-00038-of-00061.safetensors",
+    "model.layers.49.self_attn.k_proj.weight": "model-00037-of-00061.safetensors",
+    "model.layers.49.self_attn.o_proj.weight": "model-00037-of-00061.safetensors",
+    "model.layers.49.self_attn.q_proj.weight": "model-00037-of-00061.safetensors",
+    "model.layers.49.self_attn.v_proj.weight": "model-00037-of-00061.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00005-of-00061.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00005-of-00061.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00005-of-00061.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00005-of-00061.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00005-of-00061.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00004-of-00061.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00004-of-00061.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00004-of-00061.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00004-of-00061.safetensors",
+    "model.layers.50.input_layernorm.weight": "model-00039-of-00061.safetensors",
+    "model.layers.50.mlp.down_proj.weight": "model-00039-of-00061.safetensors",
+    "model.layers.50.mlp.gate_proj.weight": "model-00038-of-00061.safetensors",
+    "model.layers.50.mlp.up_proj.weight": "model-00039-of-00061.safetensors",
+    "model.layers.50.post_attention_layernorm.weight": "model-00039-of-00061.safetensors",
+    "model.layers.50.self_attn.k_proj.weight": "model-00038-of-00061.safetensors",
+    "model.layers.50.self_attn.o_proj.weight": "model-00038-of-00061.safetensors",
+    "model.layers.50.self_attn.q_proj.weight": "model-00038-of-00061.safetensors",
+    "model.layers.50.self_attn.v_proj.weight": "model-00038-of-00061.safetensors",
+    "model.layers.51.input_layernorm.weight": "model-00040-of-00061.safetensors",
+    "model.layers.51.mlp.down_proj.weight": "model-00040-of-00061.safetensors",
+    "model.layers.51.mlp.gate_proj.weight": "model-00039-of-00061.safetensors",
+    "model.layers.51.mlp.up_proj.weight": "model-00039-of-00061.safetensors",
+    "model.layers.51.post_attention_layernorm.weight": "model-00040-of-00061.safetensors",
+    "model.layers.51.self_attn.k_proj.weight": "model-00039-of-00061.safetensors",
+    "model.layers.51.self_attn.o_proj.weight": "model-00039-of-00061.safetensors",
+    "model.layers.51.self_attn.q_proj.weight": "model-00039-of-00061.safetensors",
+    "model.layers.51.self_attn.v_proj.weight": "model-00039-of-00061.safetensors",
+    "model.layers.52.input_layernorm.weight": "model-00040-of-00061.safetensors",
+    "model.layers.52.mlp.down_proj.weight": "model-00040-of-00061.safetensors",
+    "model.layers.52.mlp.gate_proj.weight": "model-00040-of-00061.safetensors",
+    "model.layers.52.mlp.up_proj.weight": "model-00040-of-00061.safetensors",
+    "model.layers.52.post_attention_layernorm.weight": "model-00040-of-00061.safetensors",
+    "model.layers.52.self_attn.k_proj.weight": "model-00040-of-00061.safetensors",
+    "model.layers.52.self_attn.o_proj.weight": "model-00040-of-00061.safetensors",
+    "model.layers.52.self_attn.q_proj.weight": "model-00040-of-00061.safetensors",
+    "model.layers.52.self_attn.v_proj.weight": "model-00040-of-00061.safetensors",
+    "model.layers.53.input_layernorm.weight": "model-00041-of-00061.safetensors",
+    "model.layers.53.mlp.down_proj.weight": "model-00041-of-00061.safetensors",
+    "model.layers.53.mlp.gate_proj.weight": "model-00041-of-00061.safetensors",
+    "model.layers.53.mlp.up_proj.weight": "model-00041-of-00061.safetensors",
+    "model.layers.53.post_attention_layernorm.weight": "model-00041-of-00061.safetensors",
+    "model.layers.53.self_attn.k_proj.weight": "model-00040-of-00061.safetensors",
+    "model.layers.53.self_attn.o_proj.weight": "model-00040-of-00061.safetensors",
+    "model.layers.53.self_attn.q_proj.weight": "model-00040-of-00061.safetensors",
+    "model.layers.53.self_attn.v_proj.weight": "model-00040-of-00061.safetensors",
+    "model.layers.54.input_layernorm.weight": "model-00042-of-00061.safetensors",
+    "model.layers.54.mlp.down_proj.weight": "model-00042-of-00061.safetensors",
+    "model.layers.54.mlp.gate_proj.weight": "model-00041-of-00061.safetensors",
+    "model.layers.54.mlp.up_proj.weight": "model-00042-of-00061.safetensors",
+    "model.layers.54.post_attention_layernorm.weight": "model-00042-of-00061.safetensors",
+    "model.layers.54.self_attn.k_proj.weight": "model-00041-of-00061.safetensors",
+    "model.layers.54.self_attn.o_proj.weight": "model-00041-of-00061.safetensors",
+    "model.layers.54.self_attn.q_proj.weight": "model-00041-of-00061.safetensors",
+    "model.layers.54.self_attn.v_proj.weight": "model-00041-of-00061.safetensors",
+    "model.layers.55.input_layernorm.weight": "model-00043-of-00061.safetensors",
+    "model.layers.55.mlp.down_proj.weight": "model-00043-of-00061.safetensors",
+    "model.layers.55.mlp.gate_proj.weight": "model-00042-of-00061.safetensors",
+    "model.layers.55.mlp.up_proj.weight": "model-00042-of-00061.safetensors",
+    "model.layers.55.post_attention_layernorm.weight": "model-00043-of-00061.safetensors",
+    "model.layers.55.self_attn.k_proj.weight": "model-00042-of-00061.safetensors",
+    "model.layers.55.self_attn.o_proj.weight": "model-00042-of-00061.safetensors",
+    "model.layers.55.self_attn.q_proj.weight": "model-00042-of-00061.safetensors",
+    "model.layers.55.self_attn.v_proj.weight": "model-00042-of-00061.safetensors",
+    "model.layers.56.input_layernorm.weight": "model-00043-of-00061.safetensors",
+    "model.layers.56.mlp.down_proj.weight": "model-00043-of-00061.safetensors",
+    "model.layers.56.mlp.gate_proj.weight": "model-00043-of-00061.safetensors",
+    "model.layers.56.mlp.up_proj.weight": "model-00043-of-00061.safetensors",
+    "model.layers.56.post_attention_layernorm.weight": "model-00043-of-00061.safetensors",
+    "model.layers.56.self_attn.k_proj.weight": "model-00043-of-00061.safetensors",
+    "model.layers.56.self_attn.o_proj.weight": "model-00043-of-00061.safetensors",
+    "model.layers.56.self_attn.q_proj.weight": "model-00043-of-00061.safetensors",
+    "model.layers.56.self_attn.v_proj.weight": "model-00043-of-00061.safetensors",
+    "model.layers.57.input_layernorm.weight": "model-00044-of-00061.safetensors",
+    "model.layers.57.mlp.down_proj.weight": "model-00044-of-00061.safetensors",
+    "model.layers.57.mlp.gate_proj.weight": "model-00044-of-00061.safetensors",
+    "model.layers.57.mlp.up_proj.weight": "model-00044-of-00061.safetensors",
+    "model.layers.57.post_attention_layernorm.weight": "model-00044-of-00061.safetensors",
+    "model.layers.57.self_attn.k_proj.weight": "model-00043-of-00061.safetensors",
+    "model.layers.57.self_attn.o_proj.weight": "model-00043-of-00061.safetensors",
+    "model.layers.57.self_attn.q_proj.weight": "model-00043-of-00061.safetensors",
+    "model.layers.57.self_attn.v_proj.weight": "model-00043-of-00061.safetensors",
+    "model.layers.58.input_layernorm.weight": "model-00045-of-00061.safetensors",
+    "model.layers.58.mlp.down_proj.weight": "model-00045-of-00061.safetensors",
+    "model.layers.58.mlp.gate_proj.weight": "model-00044-of-00061.safetensors",
+    "model.layers.58.mlp.up_proj.weight": "model-00045-of-00061.safetensors",
+    "model.layers.58.post_attention_layernorm.weight": "model-00045-of-00061.safetensors",
+    "model.layers.58.self_attn.k_proj.weight": "model-00044-of-00061.safetensors",
+    "model.layers.58.self_attn.o_proj.weight": "model-00044-of-00061.safetensors",
+    "model.layers.58.self_attn.q_proj.weight": "model-00044-of-00061.safetensors",
+    "model.layers.58.self_attn.v_proj.weight": "model-00044-of-00061.safetensors",
+    "model.layers.59.input_layernorm.weight": "model-00046-of-00061.safetensors",
+    "model.layers.59.mlp.down_proj.weight": "model-00046-of-00061.safetensors",
+    "model.layers.59.mlp.gate_proj.weight": "model-00045-of-00061.safetensors",
+    "model.layers.59.mlp.up_proj.weight": "model-00045-of-00061.safetensors",
+    "model.layers.59.post_attention_layernorm.weight": "model-00046-of-00061.safetensors",
+    "model.layers.59.self_attn.k_proj.weight": "model-00045-of-00061.safetensors",
+    "model.layers.59.self_attn.o_proj.weight": "model-00045-of-00061.safetensors",
+    "model.layers.59.self_attn.q_proj.weight": "model-00045-of-00061.safetensors",
+    "model.layers.59.self_attn.v_proj.weight": "model-00045-of-00061.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00006-of-00061.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00006-of-00061.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00005-of-00061.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00006-of-00061.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00006-of-00061.safetensors",
+    "model.layers.6.self_attn.k_proj.weight": "model-00005-of-00061.safetensors",
+    "model.layers.6.self_attn.o_proj.weight": "model-00005-of-00061.safetensors",
+    "model.layers.6.self_attn.q_proj.weight": "model-00005-of-00061.safetensors",
+    "model.layers.6.self_attn.v_proj.weight": "model-00005-of-00061.safetensors",
+    "model.layers.60.input_layernorm.weight": "model-00046-of-00061.safetensors",
+    "model.layers.60.mlp.down_proj.weight": "model-00046-of-00061.safetensors",
+    "model.layers.60.mlp.gate_proj.weight": "model-00046-of-00061.safetensors",
+    "model.layers.60.mlp.up_proj.weight": "model-00046-of-00061.safetensors",
+    "model.layers.60.post_attention_layernorm.weight": "model-00046-of-00061.safetensors",
+    "model.layers.60.self_attn.k_proj.weight": "model-00046-of-00061.safetensors",
+    "model.layers.60.self_attn.o_proj.weight": "model-00046-of-00061.safetensors",
+    "model.layers.60.self_attn.q_proj.weight": "model-00046-of-00061.safetensors",
+    "model.layers.60.self_attn.v_proj.weight": "model-00046-of-00061.safetensors",
+    "model.layers.61.input_layernorm.weight": "model-00047-of-00061.safetensors",
+    "model.layers.61.mlp.down_proj.weight": "model-00047-of-00061.safetensors",
+    "model.layers.61.mlp.gate_proj.weight": "model-00047-of-00061.safetensors",
+    "model.layers.61.mlp.up_proj.weight": "model-00047-of-00061.safetensors",
+    "model.layers.61.post_attention_layernorm.weight": "model-00047-of-00061.safetensors",
+    "model.layers.61.self_attn.k_proj.weight": "model-00046-of-00061.safetensors",
+    "model.layers.61.self_attn.o_proj.weight": "model-00046-of-00061.safetensors",
+    "model.layers.61.self_attn.q_proj.weight": "model-00046-of-00061.safetensors",
+    "model.layers.61.self_attn.v_proj.weight": "model-00046-of-00061.safetensors",
+    "model.layers.62.input_layernorm.weight": "model-00048-of-00061.safetensors",
+    "model.layers.62.mlp.down_proj.weight": "model-00048-of-00061.safetensors",
+    "model.layers.62.mlp.gate_proj.weight": "model-00047-of-00061.safetensors",
+    "model.layers.62.mlp.up_proj.weight": "model-00048-of-00061.safetensors",
+    "model.layers.62.post_attention_layernorm.weight": "model-00048-of-00061.safetensors",
+    "model.layers.62.self_attn.k_proj.weight": "model-00047-of-00061.safetensors",
+    "model.layers.62.self_attn.o_proj.weight": "model-00047-of-00061.safetensors",
+    "model.layers.62.self_attn.q_proj.weight": "model-00047-of-00061.safetensors",
+    "model.layers.62.self_attn.v_proj.weight": "model-00047-of-00061.safetensors",
+    "model.layers.63.input_layernorm.weight": "model-00049-of-00061.safetensors",
+    "model.layers.63.mlp.down_proj.weight": "model-00049-of-00061.safetensors",
+    "model.layers.63.mlp.gate_proj.weight": "model-00048-of-00061.safetensors",
+    "model.layers.63.mlp.up_proj.weight": "model-00048-of-00061.safetensors",
+    "model.layers.63.post_attention_layernorm.weight": "model-00049-of-00061.safetensors",
+    "model.layers.63.self_attn.k_proj.weight": "model-00048-of-00061.safetensors",
+    "model.layers.63.self_attn.o_proj.weight": "model-00048-of-00061.safetensors",
+    "model.layers.63.self_attn.q_proj.weight": "model-00048-of-00061.safetensors",
+    "model.layers.63.self_attn.v_proj.weight": "model-00048-of-00061.safetensors",
+    "model.layers.64.input_layernorm.weight": "model-00049-of-00061.safetensors",
+    "model.layers.64.mlp.down_proj.weight": "model-00049-of-00061.safetensors",
+    "model.layers.64.mlp.gate_proj.weight": "model-00049-of-00061.safetensors",
+    "model.layers.64.mlp.up_proj.weight": "model-00049-of-00061.safetensors",
+    "model.layers.64.post_attention_layernorm.weight": "model-00049-of-00061.safetensors",
+    "model.layers.64.self_attn.k_proj.weight": "model-00049-of-00061.safetensors",
+    "model.layers.64.self_attn.o_proj.weight": "model-00049-of-00061.safetensors",
+    "model.layers.64.self_attn.q_proj.weight": "model-00049-of-00061.safetensors",
+    "model.layers.64.self_attn.v_proj.weight": "model-00049-of-00061.safetensors",
+    "model.layers.65.input_layernorm.weight": "model-00050-of-00061.safetensors",
+    "model.layers.65.mlp.down_proj.weight": "model-00050-of-00061.safetensors",
+    "model.layers.65.mlp.gate_proj.weight": "model-00050-of-00061.safetensors",
+    "model.layers.65.mlp.up_proj.weight": "model-00050-of-00061.safetensors",
+    "model.layers.65.post_attention_layernorm.weight": "model-00050-of-00061.safetensors",
+    "model.layers.65.self_attn.k_proj.weight": "model-00049-of-00061.safetensors",
+    "model.layers.65.self_attn.o_proj.weight": "model-00049-of-00061.safetensors",
+    "model.layers.65.self_attn.q_proj.weight": "model-00049-of-00061.safetensors",
+    "model.layers.65.self_attn.v_proj.weight": "model-00049-of-00061.safetensors",
+    "model.layers.66.input_layernorm.weight": "model-00051-of-00061.safetensors",
+    "model.layers.66.mlp.down_proj.weight": "model-00051-of-00061.safetensors",
+    "model.layers.66.mlp.gate_proj.weight": "model-00050-of-00061.safetensors",
+    "model.layers.66.mlp.up_proj.weight": "model-00051-of-00061.safetensors",
+    "model.layers.66.post_attention_layernorm.weight": "model-00051-of-00061.safetensors",
+    "model.layers.66.self_attn.k_proj.weight": "model-00050-of-00061.safetensors",
+    "model.layers.66.self_attn.o_proj.weight": "model-00050-of-00061.safetensors",
582
+ "model.layers.66.self_attn.q_proj.weight": "model-00050-of-00061.safetensors",
583
+ "model.layers.66.self_attn.v_proj.weight": "model-00050-of-00061.safetensors",
584
+ "model.layers.67.input_layernorm.weight": "model-00052-of-00061.safetensors",
585
+ "model.layers.67.mlp.down_proj.weight": "model-00052-of-00061.safetensors",
586
+ "model.layers.67.mlp.gate_proj.weight": "model-00051-of-00061.safetensors",
587
+ "model.layers.67.mlp.up_proj.weight": "model-00051-of-00061.safetensors",
588
+ "model.layers.67.post_attention_layernorm.weight": "model-00052-of-00061.safetensors",
589
+ "model.layers.67.self_attn.k_proj.weight": "model-00051-of-00061.safetensors",
590
+ "model.layers.67.self_attn.o_proj.weight": "model-00051-of-00061.safetensors",
591
+ "model.layers.67.self_attn.q_proj.weight": "model-00051-of-00061.safetensors",
592
+ "model.layers.67.self_attn.v_proj.weight": "model-00051-of-00061.safetensors",
593
+ "model.layers.68.input_layernorm.weight": "model-00052-of-00061.safetensors",
594
+ "model.layers.68.mlp.down_proj.weight": "model-00052-of-00061.safetensors",
595
+ "model.layers.68.mlp.gate_proj.weight": "model-00052-of-00061.safetensors",
596
+ "model.layers.68.mlp.up_proj.weight": "model-00052-of-00061.safetensors",
597
+ "model.layers.68.post_attention_layernorm.weight": "model-00052-of-00061.safetensors",
598
+ "model.layers.68.self_attn.k_proj.weight": "model-00052-of-00061.safetensors",
599
+ "model.layers.68.self_attn.o_proj.weight": "model-00052-of-00061.safetensors",
600
+ "model.layers.68.self_attn.q_proj.weight": "model-00052-of-00061.safetensors",
601
+ "model.layers.68.self_attn.v_proj.weight": "model-00052-of-00061.safetensors",
602
+ "model.layers.69.input_layernorm.weight": "model-00053-of-00061.safetensors",
603
+ "model.layers.69.mlp.down_proj.weight": "model-00053-of-00061.safetensors",
604
+ "model.layers.69.mlp.gate_proj.weight": "model-00053-of-00061.safetensors",
605
+ "model.layers.69.mlp.up_proj.weight": "model-00053-of-00061.safetensors",
606
+ "model.layers.69.post_attention_layernorm.weight": "model-00053-of-00061.safetensors",
607
+ "model.layers.69.self_attn.k_proj.weight": "model-00052-of-00061.safetensors",
608
+ "model.layers.69.self_attn.o_proj.weight": "model-00052-of-00061.safetensors",
609
+ "model.layers.69.self_attn.q_proj.weight": "model-00052-of-00061.safetensors",
610
+ "model.layers.69.self_attn.v_proj.weight": "model-00052-of-00061.safetensors",
611
+ "model.layers.7.input_layernorm.weight": "model-00007-of-00061.safetensors",
612
+ "model.layers.7.mlp.down_proj.weight": "model-00007-of-00061.safetensors",
613
+ "model.layers.7.mlp.gate_proj.weight": "model-00006-of-00061.safetensors",
614
+ "model.layers.7.mlp.up_proj.weight": "model-00006-of-00061.safetensors",
615
+ "model.layers.7.post_attention_layernorm.weight": "model-00007-of-00061.safetensors",
616
+ "model.layers.7.self_attn.k_proj.weight": "model-00006-of-00061.safetensors",
617
+ "model.layers.7.self_attn.o_proj.weight": "model-00006-of-00061.safetensors",
618
+ "model.layers.7.self_attn.q_proj.weight": "model-00006-of-00061.safetensors",
619
+ "model.layers.7.self_attn.v_proj.weight": "model-00006-of-00061.safetensors",
620
+ "model.layers.70.input_layernorm.weight": "model-00054-of-00061.safetensors",
621
+ "model.layers.70.mlp.down_proj.weight": "model-00054-of-00061.safetensors",
622
+ "model.layers.70.mlp.gate_proj.weight": "model-00053-of-00061.safetensors",
623
+ "model.layers.70.mlp.up_proj.weight": "model-00054-of-00061.safetensors",
624
+ "model.layers.70.post_attention_layernorm.weight": "model-00054-of-00061.safetensors",
625
+ "model.layers.70.self_attn.k_proj.weight": "model-00053-of-00061.safetensors",
626
+ "model.layers.70.self_attn.o_proj.weight": "model-00053-of-00061.safetensors",
627
+ "model.layers.70.self_attn.q_proj.weight": "model-00053-of-00061.safetensors",
628
+ "model.layers.70.self_attn.v_proj.weight": "model-00053-of-00061.safetensors",
629
+ "model.layers.71.input_layernorm.weight": "model-00055-of-00061.safetensors",
630
+ "model.layers.71.mlp.down_proj.weight": "model-00055-of-00061.safetensors",
631
+ "model.layers.71.mlp.gate_proj.weight": "model-00054-of-00061.safetensors",
632
+ "model.layers.71.mlp.up_proj.weight": "model-00054-of-00061.safetensors",
633
+ "model.layers.71.post_attention_layernorm.weight": "model-00055-of-00061.safetensors",
634
+ "model.layers.71.self_attn.k_proj.weight": "model-00054-of-00061.safetensors",
635
+ "model.layers.71.self_attn.o_proj.weight": "model-00054-of-00061.safetensors",
636
+ "model.layers.71.self_attn.q_proj.weight": "model-00054-of-00061.safetensors",
637
+ "model.layers.71.self_attn.v_proj.weight": "model-00054-of-00061.safetensors",
638
+ "model.layers.72.input_layernorm.weight": "model-00055-of-00061.safetensors",
639
+ "model.layers.72.mlp.down_proj.weight": "model-00055-of-00061.safetensors",
640
+ "model.layers.72.mlp.gate_proj.weight": "model-00055-of-00061.safetensors",
641
+ "model.layers.72.mlp.up_proj.weight": "model-00055-of-00061.safetensors",
642
+ "model.layers.72.post_attention_layernorm.weight": "model-00055-of-00061.safetensors",
643
+ "model.layers.72.self_attn.k_proj.weight": "model-00055-of-00061.safetensors",
644
+ "model.layers.72.self_attn.o_proj.weight": "model-00055-of-00061.safetensors",
645
+ "model.layers.72.self_attn.q_proj.weight": "model-00055-of-00061.safetensors",
646
+ "model.layers.72.self_attn.v_proj.weight": "model-00055-of-00061.safetensors",
647
+ "model.layers.73.input_layernorm.weight": "model-00056-of-00061.safetensors",
648
+ "model.layers.73.mlp.down_proj.weight": "model-00056-of-00061.safetensors",
649
+ "model.layers.73.mlp.gate_proj.weight": "model-00056-of-00061.safetensors",
650
+ "model.layers.73.mlp.up_proj.weight": "model-00056-of-00061.safetensors",
651
+ "model.layers.73.post_attention_layernorm.weight": "model-00056-of-00061.safetensors",
652
+ "model.layers.73.self_attn.k_proj.weight": "model-00055-of-00061.safetensors",
653
+ "model.layers.73.self_attn.o_proj.weight": "model-00055-of-00061.safetensors",
654
+ "model.layers.73.self_attn.q_proj.weight": "model-00055-of-00061.safetensors",
655
+ "model.layers.73.self_attn.v_proj.weight": "model-00055-of-00061.safetensors",
656
+ "model.layers.74.input_layernorm.weight": "model-00057-of-00061.safetensors",
657
+ "model.layers.74.mlp.down_proj.weight": "model-00057-of-00061.safetensors",
658
+ "model.layers.74.mlp.gate_proj.weight": "model-00056-of-00061.safetensors",
659
+ "model.layers.74.mlp.up_proj.weight": "model-00057-of-00061.safetensors",
660
+ "model.layers.74.post_attention_layernorm.weight": "model-00057-of-00061.safetensors",
661
+ "model.layers.74.self_attn.k_proj.weight": "model-00056-of-00061.safetensors",
662
+ "model.layers.74.self_attn.o_proj.weight": "model-00056-of-00061.safetensors",
663
+ "model.layers.74.self_attn.q_proj.weight": "model-00056-of-00061.safetensors",
664
+ "model.layers.74.self_attn.v_proj.weight": "model-00056-of-00061.safetensors",
665
+ "model.layers.75.input_layernorm.weight": "model-00058-of-00061.safetensors",
666
+ "model.layers.75.mlp.down_proj.weight": "model-00058-of-00061.safetensors",
667
+ "model.layers.75.mlp.gate_proj.weight": "model-00057-of-00061.safetensors",
668
+ "model.layers.75.mlp.up_proj.weight": "model-00057-of-00061.safetensors",
669
+ "model.layers.75.post_attention_layernorm.weight": "model-00058-of-00061.safetensors",
670
+ "model.layers.75.self_attn.k_proj.weight": "model-00057-of-00061.safetensors",
671
+ "model.layers.75.self_attn.o_proj.weight": "model-00057-of-00061.safetensors",
672
+ "model.layers.75.self_attn.q_proj.weight": "model-00057-of-00061.safetensors",
673
+ "model.layers.75.self_attn.v_proj.weight": "model-00057-of-00061.safetensors",
674
+ "model.layers.76.input_layernorm.weight": "model-00058-of-00061.safetensors",
675
+ "model.layers.76.mlp.down_proj.weight": "model-00058-of-00061.safetensors",
676
+ "model.layers.76.mlp.gate_proj.weight": "model-00058-of-00061.safetensors",
677
+ "model.layers.76.mlp.up_proj.weight": "model-00058-of-00061.safetensors",
678
+ "model.layers.76.post_attention_layernorm.weight": "model-00058-of-00061.safetensors",
679
+ "model.layers.76.self_attn.k_proj.weight": "model-00058-of-00061.safetensors",
680
+ "model.layers.76.self_attn.o_proj.weight": "model-00058-of-00061.safetensors",
681
+ "model.layers.76.self_attn.q_proj.weight": "model-00058-of-00061.safetensors",
682
+ "model.layers.76.self_attn.v_proj.weight": "model-00058-of-00061.safetensors",
683
+ "model.layers.77.input_layernorm.weight": "model-00059-of-00061.safetensors",
684
+ "model.layers.77.mlp.down_proj.weight": "model-00059-of-00061.safetensors",
685
+ "model.layers.77.mlp.gate_proj.weight": "model-00059-of-00061.safetensors",
686
+ "model.layers.77.mlp.up_proj.weight": "model-00059-of-00061.safetensors",
687
+ "model.layers.77.post_attention_layernorm.weight": "model-00059-of-00061.safetensors",
688
+ "model.layers.77.self_attn.k_proj.weight": "model-00058-of-00061.safetensors",
689
+ "model.layers.77.self_attn.o_proj.weight": "model-00058-of-00061.safetensors",
690
+ "model.layers.77.self_attn.q_proj.weight": "model-00058-of-00061.safetensors",
691
+ "model.layers.77.self_attn.v_proj.weight": "model-00058-of-00061.safetensors",
692
+ "model.layers.78.input_layernorm.weight": "model-00060-of-00061.safetensors",
693
+ "model.layers.78.mlp.down_proj.weight": "model-00060-of-00061.safetensors",
694
+ "model.layers.78.mlp.gate_proj.weight": "model-00059-of-00061.safetensors",
695
+ "model.layers.78.mlp.up_proj.weight": "model-00060-of-00061.safetensors",
696
+ "model.layers.78.post_attention_layernorm.weight": "model-00060-of-00061.safetensors",
697
+ "model.layers.78.self_attn.k_proj.weight": "model-00059-of-00061.safetensors",
698
+ "model.layers.78.self_attn.o_proj.weight": "model-00059-of-00061.safetensors",
699
+ "model.layers.78.self_attn.q_proj.weight": "model-00059-of-00061.safetensors",
700
+ "model.layers.78.self_attn.v_proj.weight": "model-00059-of-00061.safetensors",
701
+ "model.layers.79.input_layernorm.weight": "model-00061-of-00061.safetensors",
702
+ "model.layers.79.mlp.down_proj.weight": "model-00061-of-00061.safetensors",
703
+ "model.layers.79.mlp.gate_proj.weight": "model-00060-of-00061.safetensors",
704
+ "model.layers.79.mlp.up_proj.weight": "model-00060-of-00061.safetensors",
705
+ "model.layers.79.post_attention_layernorm.weight": "model-00061-of-00061.safetensors",
706
+ "model.layers.79.self_attn.k_proj.weight": "model-00060-of-00061.safetensors",
707
+ "model.layers.79.self_attn.o_proj.weight": "model-00060-of-00061.safetensors",
708
+ "model.layers.79.self_attn.q_proj.weight": "model-00060-of-00061.safetensors",
709
+ "model.layers.79.self_attn.v_proj.weight": "model-00060-of-00061.safetensors",
710
+ "model.layers.8.input_layernorm.weight": "model-00007-of-00061.safetensors",
711
+ "model.layers.8.mlp.down_proj.weight": "model-00007-of-00061.safetensors",
712
+ "model.layers.8.mlp.gate_proj.weight": "model-00007-of-00061.safetensors",
713
+ "model.layers.8.mlp.up_proj.weight": "model-00007-of-00061.safetensors",
714
+ "model.layers.8.post_attention_layernorm.weight": "model-00007-of-00061.safetensors",
715
+ "model.layers.8.self_attn.k_proj.weight": "model-00007-of-00061.safetensors",
716
+ "model.layers.8.self_attn.o_proj.weight": "model-00007-of-00061.safetensors",
717
+ "model.layers.8.self_attn.q_proj.weight": "model-00007-of-00061.safetensors",
718
+ "model.layers.8.self_attn.v_proj.weight": "model-00007-of-00061.safetensors",
719
+ "model.layers.9.input_layernorm.weight": "model-00008-of-00061.safetensors",
720
+ "model.layers.9.mlp.down_proj.weight": "model-00008-of-00061.safetensors",
721
+ "model.layers.9.mlp.gate_proj.weight": "model-00008-of-00061.safetensors",
722
+ "model.layers.9.mlp.up_proj.weight": "model-00008-of-00061.safetensors",
723
+ "model.layers.9.post_attention_layernorm.weight": "model-00008-of-00061.safetensors",
724
+ "model.layers.9.self_attn.k_proj.weight": "model-00007-of-00061.safetensors",
725
+ "model.layers.9.self_attn.o_proj.weight": "model-00007-of-00061.safetensors",
726
+ "model.layers.9.self_attn.q_proj.weight": "model-00007-of-00061.safetensors",
727
+ "model.layers.9.self_attn.v_proj.weight": "model-00007-of-00061.safetensors",
728
+ "model.norm.weight": "model-00061-of-00061.safetensors"
729
+ }
730
+ }
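The block above closes out the `weight_map` of the sharded-checkpoint index (the `model.safetensors.index-checkpoint.json` file): a flat dictionary from parameter name to the shard that stores it, covering all 80 decoder layers plus `model.norm.weight` across 61 shards. Below is a minimal sketch of using such an index to read one tensor without touching the other shards; the file names come from the map above, and a local copy of the index and shards plus the `safetensors` package are assumed:

```python
import json
from safetensors import safe_open

# The index maps every parameter name to the shard file that holds it.
with open("model.safetensors.index.json") as f:
    weight_map = json.load(f)["weight_map"]

name = "model.norm.weight"
shard = weight_map[name]  # "model-00061-of-00061.safetensors" per the map above

# Open only that shard and read the single tensor, leaving the rest on disk.
with safe_open(shard, framework="pt", device="cpu") as f:
    tensor = f.get_tensor(name)
print(name, tuple(tensor.shape), "from", shard)
```

This is the same lookup `transformers` performs when loading a sharded checkpoint. Note in the map above that a layer's attention projections can land one shard earlier than its layernorms (e.g. layer 59's q/k/v/o in shard 45 but its `input_layernorm` in shard 46): shards are filled to a byte budget in parameter order, so layer boundaries and shard boundaries need not coincide.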
.ipynb_checkpoints/special_tokens_map-checkpoint.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "</s>",
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
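This checkpoint mirrors the standard Llama special-token setup, with `</s>` doing double duty as the pad token. A small sketch of how these fields surface once the tokenizer is loaded; the repo id here is the `_name_or_path` recorded in config.json below and stands in for wherever this folder actually lives:

```python
from transformers import AutoTokenizer

# special_tokens_map.json is picked up automatically alongside tokenizer.json.
tok = AutoTokenizer.from_pretrained("sequelbox/StellarBright")  # assumed repo id
print(tok.bos_token, tok.eos_token, tok.unk_token)  # <s> </s> <unk>
print(tok.pad_token == tok.eos_token)               # True: padding reuses </s>
```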
.ipynb_checkpoints/tokenizer-checkpoint.json ADDED
The diff for this file is too large to render. See raw diff
 
config.json CHANGED
@@ -1,9 +1,10 @@
  {
- "_name_or_path": "./new_model",
+ "_name_or_path": "sequelbox/StellarBright",
  "architectures": [
  "LlamaForCausalLM"
  ],
  "attention_bias": false,
+ "attention_dropout": 0.0,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
@@ -22,7 +23,7 @@
  "rope_theta": 10000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
- "transformers_version": "4.34.0",
+ "transformers_version": "4.36.2",
  "use_cache": false,
  "vocab_size": 32000
  }
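Three things change in config.json: `_name_or_path` now points at the upstream `sequelbox/StellarBright` instead of a local `./new_model` directory, `attention_dropout` appears (a Llama config field introduced in the transformers 4.36 era), and `transformers_version` is bumped to match. A quick way to confirm the updated fields, reading the JSON directly rather than through `AutoConfig` (which would overwrite `_name_or_path` on load):

```python
import json

# Inspect exactly the fields this commit touched in the updated config.
with open("config.json") as f:
    cfg = json.load(f)

for key in ("_name_or_path", "attention_dropout", "transformers_version"):
    print(key, "=", cfg[key])
# _name_or_path = sequelbox/StellarBright
# attention_dropout = 0.0
# transformers_version = 4.36.2
```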
generation_config.json CHANGED
@@ -3,6 +3,6 @@
  "bos_token_id": 1,
  "eos_token_id": 2,
  "pad_token_id": 0,
- "transformers_version": "4.34.0",
+ "transformers_version": "4.36.2",
  "use_cache": false
  }
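generation_config.json only picks up the same `transformers_version` bump; the decode-time defaults (BOS/EOS/pad ids, `use_cache`) are untouched. A quick sanity check after the update, assuming the file sits in the working directory:

```python
from transformers import GenerationConfig

# generation_config.json carries decoding defaults separate from config.json.
gen = GenerationConfig.from_pretrained(".")
print(gen.bos_token_id, gen.eos_token_id, gen.pad_token_id)  # 1 2 0
```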
model-00001-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d2ac53bb4dae6c10c6944076c7f3ec095e3dec9fbdca3644a7f551153753b4e1
+ size 4806739440
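Each of the shard diffs here and below is not the weights themselves but the Git LFS pointer committed in their place: a spec-version line, the SHA-256 of the payload, and its size in bytes (the first shard is roughly 4.8 GB). A minimal stdlib sketch of parsing one of these pointers, using the exact pointer text from this shard:

```python
def parse_lfs_pointer(text: str) -> dict:
    """Split a git-lfs pointer file into its key/value fields."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:d2ac53bb4dae6c10c6944076c7f3ec095e3dec9fbdca3644a7f551153753b4e1
size 4806739440"""

info = parse_lfs_pointer(pointer)
print(info["oid"].removeprefix("sha256:"))   # payload hash
print(int(info["size"]) / 1e9, "GB")         # 4.80673944 GB
```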
model-00002-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:121bcb109936f87f3b261d42626118b9c75a5232bb6001346b5f3c6b7715bb8b
+ size 4630578440
model-00003-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:42a045876286567cabad9e2939cadfd9a0567d25932ab5dc763bb737461d2040
+ size 4362142864
model-00004-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ab12cff6359e23672dc724a46cdba6ffa5779d7910571671eed5711b220d8165
+ size 4966188864
model-00005-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8c4e4bc72a7cd5d4b43bd8beb9b185779ce1decbe72d894fb70637768dd154e8
+ size 4362142864
model-00006-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6d3e461ca5e296ba89832180aac4e21a3beb6cf38eab8c49cfb0c2f7c90252d0
+ size 4362142864
model-00007-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8fd871e42bad877e03245ca91b1225a1dbb34b82c8a396f62e132498926ab43a
+ size 4966188864
model-00008-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dbb0e22bf6f924f1f85bb96418510f6c281784ba25a5e98ece5a97cface14fc4
+ size 4362142880
model-00009-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7c29ede7d9edc1f3885e6b19100e5b9a9847b06c5dea58356d625ac45caa8da
+ size 4362142872
model-00010-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:359f6cb3616691a1b948a38061abf8e427bda19582a96b67de4783a0b6e6f029
+ size 4966188880
model-00011-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da3872d7cd89ccfb3cc928d08cc44802b6731fe743cecaeb500861d609a2efc4
+ size 4362142872
model-00012-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:99117139a8acf648ee2a15dc5c963c720bc952b6009e3452cdf0e25cb44df6da
+ size 4362142872
model-00013-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bfffb8c3b589ec25371adf87db888b260631ebddd2b872dfe49fc77a47811bd2
+ size 4966188880
model-00014-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a7fb55f5b1669e873730eda9af7f34befcf6a209b6b52aac6c66aee4747fcfb6
+ size 4362142872
model-00015-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9bcb7af4d98b855b238b9b75e70527dfa78e7625c420784ea1c9fceb3ae692d9
+ size 4362142872
model-00016-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a95cf6d1372da205ee2ab8aae4cb1855125dbdd40126a2dfb556a2a39df3d977
+ size 4966188880
model-00017-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b35ef866852ab8ab13fc456b12cb4bbff89144fb00f0c188e8006d2089ae3572
+ size 4362142872
model-00018-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a52321fa890f893901ab5895c78f074ad3acbe1eaebe9f405e69cce1fa969d2
+ size 4362142872
model-00019-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e5d2d0e06ea60f75aefcc0263824aaaa8bb75a674cd371744342299bd1dc9179
+ size 4966188880
model-00020-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e05162d9c2bbfc46576c5a980bfcd2dcbe891ed7aca953ccfeedfaf5d99979f9
+ size 4362142872
model-00021-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:15b69b49ef5bcbcc962b8a2909edb0ce7e47db53396823ecf87844c1b14f10af
+ size 4362142872
model-00022-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e39c3030e24e1b81ba88661916b52a6d6e7cac08dbeb0fa1f7cfbe19cef30771
+ size 4966188880
model-00023-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b009d7ac90308841277d8bdde931bcfe05ad9d30da73ac9b1690e432016f1b4d
+ size 4362142872
model-00024-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:acdfb1f2251422fbb1a36810f761842dc1290aeed0b00c9eb30fb182d15d9bff
+ size 4362142872
model-00025-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:26ced8103331d342cfd426249a9df401393e4c2a8db37129695b51425425dcc7
+ size 4966188880
model-00026-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cb5214ce3e36966dd1e49d392a94e654bcec397b3b1db8cb3b77b9ba6b1e4bbd
+ size 4362142872
model-00027-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:33829eb6e9ba3cdbee802de513ed092e9f347da746d365ab365e7527171e11a5
+ size 4362142872
model-00028-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:45021bda2a66f5cb44c7df685fc023bf0e6170b6a9f5281a276192592076cbe2
+ size 4966188880
model-00029-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5c151e105789827f7a9eb2d53cac848a8302736beb19db35ce245bda4fb572c8
+ size 4362142872
model-00030-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d3e936af2482f785f3597814ff4aa1adf3dbe302c5e6c5d59a1b3eb08ff90b93
+ size 4362142872
model-00031-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:19675dad772c48218244e91bf4070869644b36e0a381362575b45e6e070d7523
+ size 4966188880
model-00032-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1900b2fe7d513a9b0272759aacce1bbb5c9f8cd4506078f0672fba4eb7da8401
+ size 4362142872
model-00033-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e37dc160cc89083d93350742e74c731b8143d7e5c1932bc579aa1af398f244af
+ size 4362142872
model-00034-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:38c2c6068bb09d76d8e0ef52bd4ff4b8d514ceb0cec9dfa12a324adbdfa83206
+ size 4966188880
model-00035-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:50b7f73e0aa26595c54320efcc6eed0a5742f4d5910693f8f52a2fdcb0068e2f
+ size 4362142872
model-00036-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d65101353e3fade85c0afe0a10408646ad2ee6be9f5c692c830a77555e627caf
+ size 4362142872
model-00037-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef5d58b0b6aa95ffc46544c4a90ff50240b54833eda835c4902d1faad329182f
+ size 4966188880
model-00038-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:66e721362208d1c3f62cad3514d27d0e67631947194edf3642ca2bad7a4f7bbe
+ size 4362142872
model-00039-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3c3444c52f516a686360e63d5d477ea4faf743bc462d96c34dc8965557bf5304
+ size 4362142872
model-00040-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8474c5811df68a0cd3d78094091a83266f260ae8b70abef3a31caa802cbf1543
+ size 4966188880
model-00041-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f1e9ba6eb12c60fb9bcce3ac299a163bac8c07ab3cfede4e52c55c8766f94b5b
+ size 4362142872
model-00042-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f47596c5a91d8531d561a267f334dbc60f7b6ad8c5d221c82e18ad2dc4269a09
+ size 4362142872
model-00043-of-00061.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:41db8b4a083246ea066c06e0c6ae51c0208b098af581cb34781b9f547843e928
+ size 4966188880