jburtoft committed
Commit 24694f5
1 Parent(s): b8709b9

Upload folder using huggingface_hub (#1)


- a6e585fcd25e65447d39f02b928d5c2b0347bbabc63ac9ead8d567df49138425 (0ddc3833fbbf972b7f3cc698e7046c130e4db7ff)
- d188510309c55b81686c87680a61f3f07dd555f99b6aeb50f02f175c77815135 (47e0705340537075b2df17f0da597b548d4aa169)
- 4217416e5444d0b1dad3a94feec26b3dbabbd9563a2746fc9d13edbf4da4633f (4408b0411fcdd52e700559306c21d28e8334211e)
- 8d02a724e008f4bd523b39b37613af602df54476df3638ef6567733ffd78e8de (b87c340f30c7928f1500e0cb69ceb4bd73d3bd5f)
- e67fcddc32db86d54704d781a4d8aaeec770ae190cb12333bf19ccb6cb4c648d (8926df2599646027797de567954e292ea7e1b955)
- b1e68f0277ee013bb1196093defd8dac1e70b911d413c024bbeff106bb103712 (ff202d8b1b74e99d854903d6d3b436f732dfec03)
- fb8b25295052a67aa0f89aacb9cba61d123409ddad8e70ee1bda0428932babd3 (d8c63d3fec597ee45bbe05d0cafe0b7e50923d76)
- 0a27c3706a7678239b4e923de75c294eb9a6cfe4efd30d7b70f512bf825b23ee (850aa19076e2632b0377b729d97270304ddc5c50)
- 0d4e15de2643ebc3b092474042ef1be9db02ef6ff8e538bb53babce74518efb7 (02a05045fca6bb7b5af5e340a4e8f7d961edb517)
- d0cadc8c0b896f79fe73d8afcb4ce700fe7fd91bbb9e21d893cfb4ce8f657501 (999959efa57375e99655d17465d0fddb02b49efa)
- 4c58c4947f6481e6844a96d7dfcf64315ca7b3ef55e700913c129ebb3f1098e4 (c87b7e0beca50959a7269a286cb980f6ccd30255)
- 6c524012584757d79b5010a48211814e20ac0de318480002e575a06d3059ab29 (1199b8aa1cfc2e14209ccf833737d198569ca9e7)
- 998b1c4d5092e36a29099a8826cd6679daea4c8294537a4aa669e6e6d887a35a (1edb75f2bb11524c8de6ab065e0952bfdee70d53)
- 3da82ed4b27fb3961d6d8b192b262fbdf71c44b5f53331c6a7a0c0298a39dc18 (b591315d9b6eb7b30686994c4ea3847d9edc8fda)
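
For reference, a commit like this one is typically produced with huggingface_hub's upload_folder helper, which uploads a local directory in one commit and can open the change as a pull request. The sketch below is only an illustration; the folder path and repo_id are hypothetical placeholders, not values taken from this repository.

# Minimal sketch: push a local folder to the Hub the way this commit was made.
# Requires a token (e.g. from `huggingface-cli login`); paths and repo_id are placeholders.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="./llama-2-7b-neuron",        # hypothetical local folder
    repo_id="your-username/your-model-repo",  # hypothetical target repo
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
    # create_pr=True,  # open the upload as a pull request instead of committing directly
)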

This view is limited to 50 files because the commit contains too many changes. See the raw diff for the full change set.
Files changed (50)
  1. .gitattributes +12 -0
  2. 1744545db8406da6398e.neff +3 -0
  3. 1bb198a13076d1853641.neff +3 -0
  4. 2e107447c6e0a7f19573.neff +3 -0
  5. 51f8a515591560780d48.neff +3 -0
  6. 6cab3b785bfcdd58ee6a.neff +3 -0
  7. a37ede77a746b866b69d.neff +3 -0
  8. b938f58ffd873c7146a7.neff +3 -0
  9. c3741198db69eb019273.neff +3 -0
  10. config.json +28 -0
  11. d632e5ec42990d4ee01b.neff +3 -0
  12. d95062416ef36beb6494.neff +3 -0
  13. dfcd5ec203680dc9542c.neff +3 -0
  14. e65e10acdd5186bd49cc.neff +3 -0
  15. generation_config.json +10 -0
  16. pytorch_model.bin/key_to_filename.json +3 -0
  17. pytorch_model.bin/p0.model.embed_tokens.weight +3 -0
  18. pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight +3 -0
  19. pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight +3 -0
  20. pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight +3 -0
  21. pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight +3 -0
  22. pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight +3 -0
  23. pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight +3 -0
  24. pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight +3 -0
  25. pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight +3 -0
  26. pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight +3 -0
  27. pytorch_model.bin/p107.model.layers.11.input_layernorm.weight +3 -0
  28. pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight +3 -0
  29. pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight +3 -0
  30. pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight +3 -0
  31. pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight +3 -0
  32. pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight +3 -0
  33. pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight +3 -0
  34. pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight +3 -0
  35. pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight +3 -0
  36. pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight +3 -0
  37. pytorch_model.bin/p116.model.layers.12.input_layernorm.weight +3 -0
  38. pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight +3 -0
  39. pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight +3 -0
  40. pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight +3 -0
  41. pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight +3 -0
  42. pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight +3 -0
  43. pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight +3 -0
  44. pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight +3 -0
  45. pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight +3 -0
  46. pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight +3 -0
  47. pytorch_model.bin/p125.model.layers.13.input_layernorm.weight +3 -0
  48. pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight +3 -0
  49. pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight +3 -0
  50. pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight +3 -0
.gitattributes CHANGED
@@ -33,3 +33,15 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ 1744545db8406da6398e.neff filter=lfs diff=lfs merge=lfs -text
+ 1bb198a13076d1853641.neff filter=lfs diff=lfs merge=lfs -text
+ 2e107447c6e0a7f19573.neff filter=lfs diff=lfs merge=lfs -text
+ 51f8a515591560780d48.neff filter=lfs diff=lfs merge=lfs -text
+ 6cab3b785bfcdd58ee6a.neff filter=lfs diff=lfs merge=lfs -text
+ a37ede77a746b866b69d.neff filter=lfs diff=lfs merge=lfs -text
+ b938f58ffd873c7146a7.neff filter=lfs diff=lfs merge=lfs -text
+ c3741198db69eb019273.neff filter=lfs diff=lfs merge=lfs -text
+ d632e5ec42990d4ee01b.neff filter=lfs diff=lfs merge=lfs -text
+ d95062416ef36beb6494.neff filter=lfs diff=lfs merge=lfs -text
+ dfcd5ec203680dc9542c.neff filter=lfs diff=lfs merge=lfs -text
+ e65e10acdd5186bd49cc.neff filter=lfs diff=lfs merge=lfs -text
1744545db8406da6398e.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7cd6c8edf70b901880554dfe27ffb4315bf35ab5c935f9ff42da59ee109f2dfe
+ size 7906304
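
Each .neff file (a compiled AWS Neuron executable) and weight shard in this commit is stored through Git LFS, so the diff only shows the three-line pointer file: the spec version, the sha256 oid of the real binary, and its size in bytes. As an illustration only, a pointer in this format can be read back into a dict with a few lines of Python; the filename below is just an example and assumes the pointer text, not the resolved binary, is what is on disk.

# Illustration: parse a Git LFS pointer file of the form shown above.
def parse_lfs_pointer(path):
    fields = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            if key:
                fields[key] = value
    return fields

# Example (hypothetical; works only when the pointer text itself is checked out):
# info = parse_lfs_pointer("1744545db8406da6398e.neff")
# print(info["oid"], info["size"])   # -> sha256:7cd6c8ed... 7906304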
1bb198a13076d1853641.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:788603c6079c4cb4e82bfbd03cda86d728c1fd405ad4ccc0ac3a7b74b6edeeec
+ size 12770304
2e107447c6e0a7f19573.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6969a90bade88e80741a807fad86cbaaedd487b8f7db9e60477fdafc9623eaba
+ size 7875584
51f8a515591560780d48.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:773594d7df843d374f1863d6c3207aea8014b12d2e54e5774a0be45cc5a5659a
+ size 10036224
6cab3b785bfcdd58ee6a.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:114405e4280982ab0db933b692071569a20acc723d8a60ca34756b2afee7e267
+ size 8551424
a37ede77a746b866b69d.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:60a6b087226f47997528e05f0e0b280cd6b10e705d320d906d4fdf4e75ac10ef
+ size 7998464
b938f58ffd873c7146a7.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6159abddee4a14ca90403894c588c6e01fdd9f913213cff49c3c7b095d25c800
+ size 23307264
c3741198db69eb019273.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0f1f8e6861b7f28e9b652658d254b192343fdf7009e4f16f15b0193ab08ec4a2
+ size 8162304
config.json ADDED
@@ -0,0 +1,28 @@
+ {
+   "_name_or_path": "meta-llama/Llama-2-7b-hf",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 11008,
+   "max_position_embeddings": 4096,
+   "model_type": "llama",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 32,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.36.2",
+   "use_cache": true,
+   "vocab_size": 32000
+ }
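
The config.json above is a standard Llama-2-7B configuration (32 hidden layers, hidden size 4096, 32 attention heads, vocab size 32000) stored in float32. As a small sketch, it can be inspected with transformers' AutoConfig; the local path below is a placeholder for wherever this repository is checked out.

# Sketch: read the configuration added above (placeholder path).
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./path/to/this/repo")
print(config.model_type)         # "llama"
print(config.num_hidden_layers)  # 32
print(config.hidden_size)        # 4096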
d632e5ec42990d4ee01b.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0ad07a26a48a343672f0bc8e4a6617b2d8f50fdf172682dadfdeff42750ee8b8
+ size 8479744
d95062416ef36beb6494.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e8195990bf3936cf0b5feb067ac0824ce375c8a5c437005f709073192c2cdc3e
+ size 8899584
dfcd5ec203680dc9542c.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e1493bd7d1e32a22a0b5481b1c956b3f5cddbad4c69902473756c0f04430e9ca
+ size 9186304
e65e10acdd5186bd49cc.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cbd968ef605127f3fd11d3c5d0f7a63d4adef33ed4bd10882b3f92f1f53a5345
+ size 47197184
generation_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "bos_token_id": 1,
+   "do_sample": true,
+   "eos_token_id": 2,
+   "max_length": 4096,
+   "pad_token_id": 0,
+   "temperature": 0.6,
+   "top_p": 0.9,
+   "transformers_version": "4.36.2"
+ }
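
generation_config.json records the default decoding settings for the model: sampling enabled with temperature 0.6 and top_p 0.9, a maximum length of 4096 tokens, and the usual Llama-2 special-token ids. A minimal sketch of reading it with transformers, again with a placeholder path:

# Sketch: load the default generation settings shown above (placeholder path).
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("./path/to/this/repo")
print(gen_config.do_sample, gen_config.temperature, gen_config.top_p)  # True 0.6 0.9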
pytorch_model.bin/key_to_filename.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:825d20f4a18183eff3963e805edd13ef7eb35b0aff7a850e8153ca1eeeb37970
+ size 26397
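
The checkpoint itself is stored as a split pytorch_model.bin/ directory rather than a single file: key_to_filename.json maps every parameter name to its own shard (p0.model.embed_tokens.weight, p1.model.layers.0.self_attn.q_proj.weight, and so on), which appears to be the layout transformers-neuronx uses when saving models for AWS Neuron. Assuming the JSON is a plain name-to-filename mapping and each shard is an individual torch-saved tensor (both are assumptions, not verified from this diff), a full state dict could be reassembled roughly as follows.

# Sketch under the stated assumptions: rebuild a state dict from the split checkpoint.
import json
import os
import torch

split_dir = "./pytorch_model.bin"  # placeholder path to this repo's split checkpoint
with open(os.path.join(split_dir, "key_to_filename.json")) as f:
    key_to_filename = json.load(f)  # assumed: {"model.embed_tokens.weight": "p0.model.embed_tokens.weight", ...}

state_dict = {
    key: torch.load(os.path.join(split_dir, filename), map_location="cpu")
    for key, filename in key_to_filename.items()
}
print(f"loaded {len(state_dict)} tensors")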
pytorch_model.bin/p0.model.embed_tokens.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da5c4acb278381b1fc040ff34ca7cdfa8264895cfad4fee5c90436c423a8f459
+ size 524288789
pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e4486401b9f7b27193d310385d3468fdd234936513b93a4d2e9662d745b9b74
+ size 67109756
pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:43a22f8644d0a6940f854aaf7882e60579972973c97575f7e25622f97280f977
+ size 67109759
pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:de5c80fae90ca3476520a485c29d07121eb270e0534cb2c9ce08f4aa764e90fd
+ size 67109765
pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3c7b3eefbb663e184e8ee804ea11b02ac98f29a30abdf361eab46e4da7983a00
+ size 67109765
pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3b58bd301b57fc9e3c23375ed119c0ceecd9962e1aa714cea3722885e08c9d6a
+ size 67109765
pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ed98fdbfda7fc25acc4f80728f69e7cc647477ed659009d9339a16fd780bd34
+ size 67109765
pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:75b7a49d0bb826f91c9547c1494367d5273678c2d221de9080ccec9ba1befc56
+ size 180355964
pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a54b7365baf60a919654b1f2f4feb8991a700459769a779c95094018a08c5291
+ size 180355958
pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c079b1f4963c658c0dd95c54ab6f31e55e95cb40b0242465cd78388631c6302
+ size 180355964
pytorch_model.bin/p107.model.layers.11.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08b697c974c581a4b64024f0dfa328941f597fa5b93312fff8dbe2140988b6f1
+ size 17282
pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:440dd212bb2e67c2ff83a2d42135f13589a04f622f41d523c241941e008ec1b8
+ size 17309
pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3046b38eb5f0dbd87c7c9cc320dfc3374311930d38ee43a03169c61a8b6eb19f
+ size 67109765
pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4afe803e9188698fe5b29e551e418da34e536dd5fc42d53f2d4d02acb06780a0
+ size 67109759
pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8b3ef1f510bd1955a076e2bbc7c509f1f28479bcf17755469e2ad201c70362cb
+ size 67109765
pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5dc6f96ead6b19d38d2bf5fdcb98dbc2baa34487ffb416029ad6bed9d37baf0
+ size 67109765
pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:46267dfe1859f8a4fd450e43523fb9603c849162eb53a5f72445daec88730607
+ size 67109765
pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d874134d19b3a5e0c28b4cb14a5c6ce416592cc0b280d22e6893c178f0bc4890
+ size 180355964
pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c6dbe94e47a28ab08e80a8069e06d9a6f31e16306ea138a356e90f90ff0139e
+ size 180355958
pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5afc26b5261f3c992c3a9e9b84cf42d9e5e6365c6d0dfb2a0397f52fdcf611ab
+ size 180355964
pytorch_model.bin/p116.model.layers.12.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af688ecb35a5f33863c2ea72f697027bc01e19ae891d13c878a9bb077aef9eaf
+ size 17282
pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f42abe22e1afc5696515dfb5e91d0541aaf789bb8777d5f47c48726f7425c326
+ size 17309
pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d57fbe0a4df894dbd3f98e744ad1775a43693b4ce7992991616576f66af86964
+ size 67109765
pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a7e1fb093b541409c38ec658f77bf301eba121873d557d8037b569a2a82b947c
+ size 67109765
pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c67935134cf42385cfc6ee23d9c41f3e0e146cf620fdfbfb7d61993332867779
+ size 67109759
pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7762b921206e6b9548e584537cd2634724d909ed2d73ed5a0342344a698d8528
+ size 67109765
pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:28120f754e95324e127cca63a761d124d834fd56cced2fd38d27e15ce7e4691b
+ size 67109765
pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:60f749c15d65ba3b6456e058611e0fd0dbcfce4d38041d814e0b778bb8a74593
+ size 180355964
pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ef1b6504ed2fb7f3d0b515a3ae3fb06e59fd2a2c92f107c89b8727e74dc9bcc
+ size 180355958
pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1bd51d04347f173637b730826633a263f1b25370bd446a1ff34c8f03c7c51e79
+ size 180355964
pytorch_model.bin/p125.model.layers.13.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5d96cfb55f2a0edd518209aed60673eb2cdbd5e6aee6c8cb51ef3ec618ff15bc
+ size 17282
pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:14703c6f0d6c692a048a16ec17d3bd5bb8e925dc6024558931041df48fa6d46c
+ size 17309
pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:75fc137e6fac484eb99a764320706ab7de62b162e12a40e4af706f33a916c2e0
+ size 67109765
pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:963cc6b708a5b9beb5c7648797394e8897e5bb7f3502602563e33fdb4731c5bd
+ size 67109765