dacorvo committed
Commit dbb2adb
1 Parent(s): 3115c45

Upload folder using huggingface_hub

This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
Files changed (50)
  1. .gitattributes +10 -0
  2. LICENSE.txt +126 -0
  3. README.md +60 -0
  4. checkpoint/config.json +27 -0
  5. checkpoint/generation_config.json +10 -0
  6. checkpoint/pytorch_model.bin/key_to_filename.json +3 -0
  7. checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight +3 -0
  8. checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight +3 -0
  9. checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight +3 -0
  10. checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight +3 -0
  11. checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight +3 -0
  12. checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight +3 -0
  13. checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight +3 -0
  14. checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight +3 -0
  15. checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight +3 -0
  16. checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight +3 -0
  17. checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight +3 -0
  18. checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight +3 -0
  19. checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight +3 -0
  20. checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight +3 -0
  21. checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight +3 -0
  22. checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight +3 -0
  23. checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight +3 -0
  24. checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight +3 -0
  25. checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight +3 -0
  26. checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight +3 -0
  27. checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight +3 -0
  28. checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight +3 -0
  29. checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight +3 -0
  30. checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight +3 -0
  31. checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight +3 -0
  32. checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight +3 -0
  33. checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight +3 -0
  34. checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight +3 -0
  35. checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight +3 -0
  36. checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight +3 -0
  37. checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight +3 -0
  38. checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight +3 -0
  39. checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight +3 -0
  40. checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight +3 -0
  41. checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight +3 -0
  42. checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight +3 -0
  43. checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight +3 -0
  44. checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight +3 -0
  45. checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight +3 -0
  46. checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight +3 -0
  47. checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight +3 -0
  48. checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight +3 -0
  49. checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight +3 -0
  50. checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight +3 -0
.gitattributes CHANGED
@@ -33,3 +33,13 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ compiled/157da229375c206ab7ef.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/1b51e03f5e884a90f2ef.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/3114efb5a11fc402a894.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/46114601e479a3ab86d5.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/7addf9d4ce2c4f32bcbc.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/b7a3add84c081da2eb6d.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/bde1bac5ad2a892268b0.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/cd329e9e234371dd1be8.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/cf14444e182904ed3fd6.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/e85e95c0cd23bcd18b0e.neff filter=lfs diff=lfs merge=lfs -text
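The ten lines added above register the compiled `*.neff` binaries with Git LFS, so a plain clone without LFS yields only pointer files. As a minimal sketch (assuming `huggingface_hub` is installed; the repo id `aws-neuron/Llama-2-7b-hf-neuron-latency` is taken from the README usage example further down, not from this diff), the compiled artifacts could be fetched selectively:

```python
# Minimal sketch: selectively download the LFS-tracked compiled artifacts.
# Assumption: the repo id below matches this repository (it is taken from
# the README usage example in this same commit).
from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="aws-neuron/Llama-2-7b-hf-neuron-latency",
    allow_patterns=["compiled/*.neff"],  # only the binaries declared above
)
print(local_dir)
```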
LICENSE.txt ADDED
@@ -0,0 +1,126 @@
+ LLAMA 2 COMMUNITY LICENSE AGREEMENT
+ Llama 2 Version Release Date: July 18, 2023
+
+ "Agreement" means the terms and conditions for use, reproduction, distribution and
+ modification of the Llama Materials set forth herein.
+
+ "Documentation" means the specifications, manuals and documentation
+ accompanying Llama 2 distributed by Meta at ai.meta.com/resources/models-and-
+ libraries/llama-downloads/.
+
+ "Licensee" or "you" means you, or your employer or any other person or entity (if
+ you are entering into this Agreement on such person or entity's behalf), of the age
+ required under applicable laws, rules or regulations to provide legal consent and that
+ has legal authority to bind your employer or such other person or entity if you are
+ entering in this Agreement on their behalf.
+
+ "Llama 2" means the foundational large language models and software and
+ algorithms, including machine-learning model code, trained model weights,
+ inference-enabling code, training-enabling code, fine-tuning enabling code and other
+ elements of the foregoing distributed by Meta at ai.meta.com/resources/models-and-
+ libraries/llama-downloads/.
+
+ "Llama Materials" means, collectively, Meta's proprietary Llama 2 and
+ Documentation (and any portion thereof) made available under this Agreement.
+
+ "Meta" or "we" means Meta Platforms Ireland Limited (if you are located in or, if you
+ are an entity, your principal place of business is in the EEA or Switzerland) and Meta
+ Platforms, Inc. (if you are located outside of the EEA or Switzerland).
+
+ By clicking "I Accept" below or by using or distributing any portion or element of the
+ Llama Materials, you agree to be bound by this Agreement.
+
+ 1. License Rights and Redistribution.
+
+ a. Grant of Rights. You are granted a non-exclusive, worldwide, non-
+ transferable and royalty-free limited license under Meta's intellectual property or
+ other rights owned by Meta embodied in the Llama Materials to use, reproduce,
+ distribute, copy, create derivative works of, and make modifications to the Llama
+ Materials.
+
+ b. Redistribution and Use.
+
+ i. If you distribute or make the Llama Materials, or any derivative works
+ thereof, available to a third party, you shall provide a copy of this Agreement to such
+ third party.
+ ii. If you receive Llama Materials, or any derivative works thereof, from
+ a Licensee as part of an integrated end user product, then Section 2 of this
+ Agreement will not apply to you.
+
+ iii. You must retain in all copies of the Llama Materials that you
+ distribute the following attribution notice within a "Notice" text file distributed as a
+ part of such copies: "Llama 2 is licensed under the LLAMA 2 Community License,
+ Copyright (c) Meta Platforms, Inc. All Rights Reserved."
+
+ iv. Your use of the Llama Materials must comply with applicable laws
+ and regulations (including trade compliance laws and regulations) and adhere to the
+ Acceptable Use Policy for the Llama Materials (available at
+ https://ai.meta.com/llama/use-policy), which is hereby incorporated by reference into
+ this Agreement.
+
+ v. You will not use the Llama Materials or any output or results of the
+ Llama Materials to improve any other large language model (excluding Llama 2 or
+ derivative works thereof).
+
+ 2. Additional Commercial Terms. If, on the Llama 2 version release date, the
+ monthly active users of the products or services made available by or for Licensee,
+ or Licensee's affiliates, is greater than 700 million monthly active users in the
+ preceding calendar month, you must request a license from Meta, which Meta may
+ grant to you in its sole discretion, and you are not authorized to exercise any of the
+ rights under this Agreement unless or until Meta otherwise expressly grants you
+ such rights.
+
+ 3. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE LAW, THE
+ LLAMA MATERIALS AND ANY OUTPUT AND RESULTS THEREFROM ARE
+ PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
+ EITHER EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY
+ WARRANTIES OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY, OR
+ FITNESS FOR A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE
+ FOR DETERMINING THE APPROPRIATENESS OF USING OR REDISTRIBUTING
+ THE LLAMA MATERIALS AND ASSUME ANY RISKS ASSOCIATED WITH YOUR
+ USE OF THE LLAMA MATERIALS AND ANY OUTPUT AND RESULTS.
+
+ 4. Limitation of Liability. IN NO EVENT WILL META OR ITS AFFILIATES BE
+ LIABLE UNDER ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, TORT,
+ NEGLIGENCE, PRODUCTS LIABILITY, OR OTHERWISE, ARISING OUT OF THIS
+ AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, SPECIAL,
+ CONSEQUENTIAL, INCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN
+ IF META OR ITS AFFILIATES HAVE BEEN ADVISED OF THE POSSIBILITY OF
+ ANY OF THE FOREGOING.
+
+ 5. Intellectual Property.
+
+ a. No trademark licenses are granted under this Agreement, and in
+ connection with the Llama Materials, neither Meta nor Licensee may use any name
+ or mark owned by or associated with the other or any of its affiliates, except as
+ required for reasonable and customary use in describing and redistributing the
+ Llama Materials.
+
+ b. Subject to Meta's ownership of Llama Materials and derivatives made by or
+ for Meta, with respect to any derivative works and modifications of the Llama
+ Materials that are made by you, as between you and Meta, you are and will be the
+ owner of such derivative works and modifications.
+
+ c. If you institute litigation or other proceedings against Meta or any entity
+ (including a cross-claim or counterclaim in a lawsuit) alleging that the Llama
+ Materials or Llama 2 outputs or results, or any portion of any of the foregoing,
+ constitutes infringement of intellectual property or other rights owned or licensable
+ by you, then any licenses granted to you under this Agreement shall terminate as of
+ the date such litigation or claim is filed or instituted. You will indemnify and hold
+ harmless Meta from and against any claim by any third party arising out of or related
+ to your use or distribution of the Llama Materials.
+
+ 6. Term and Termination. The term of this Agreement will commence upon your
+ acceptance of this Agreement or access to the Llama Materials and will continue in
+ full force and effect until terminated in accordance with the terms and conditions
+ herein. Meta may terminate this Agreement if you are in breach of any term or
+ condition of this Agreement. Upon termination of this Agreement, you shall delete
+ and cease use of the Llama Materials. Sections 3, 4 and 7 shall survive the
+ termination of this Agreement.
+
+ 7. Governing Law and Jurisdiction. This Agreement will be governed and
+ construed under the laws of the State of California without regard to choice of law
+ principles, and the UN Convention on Contracts for the International Sale of Goods
+ does not apply to this Agreement. The courts of California shall have exclusive
+ jurisdiction of any dispute arising out of this Agreement.
+
README.md ADDED
@@ -0,0 +1,60 @@
+ ---
+ language:
+ - en
+ pipeline_tag: text-generation
+ inference: false
+ tags:
+ - facebook
+ - meta
+ - pytorch
+ - llama
+ - llama-2
+ - inferentia2
+ - neuron
+ ---
+ # Neuronx model for [meta-llama/Llama-2-7b-hf](https://huggingface.co/meta-llama/Llama-2-7b-hf)
+
+ This repository contains [**AWS Inferentia2**](https://aws.amazon.com/ec2/instance-types/inf2/) and [`neuronx`](https://awsdocs-neuron.readthedocs-hosted.com/en/latest/) compatible checkpoints for [meta-llama/Llama-2-7b-hf](https://huggingface.co/meta-llama/Llama-2-7b-hf).
+ You can find detailed information about the base model on its [Model Card](https://huggingface.co/meta-llama/Llama-2-7b-hf).
+
+ This model has been exported to the `neuron` format using the specific `input_shapes` and `compiler` parameters detailed in the paragraphs below.
+
+ Please refer to the 🤗 `optimum-neuron` [documentation](https://huggingface.co/docs/optimum-neuron/main/en/guides/models#configuring-the-export-of-a-generative-model) for an explanation of these parameters.
+
+ ## Usage on Amazon SageMaker
+
+ _coming soon_
+
+ ## Usage with 🤗 `optimum-neuron`
+
+ ```python
+ >>> from optimum.neuron import pipeline
+
+ >>> p = pipeline('text-generation', 'aws-neuron/Llama-2-7b-hf-neuron-latency')
+ >>> p("My favorite place on earth is", max_new_tokens=64, do_sample=True, top_k=50)
+ [{'generated_text': 'My favorite place on earth is the ocean. It is where I feel most
+ at peace. I love to travel and see new places. I have a'}]
+ ```
+
+ This repository contains tags specific to versions of `neuronx`. When using it with 🤗 `optimum-neuron`, load the repository revision that matches the version of `neuronx` you are running, so that the right serialized checkpoints are picked up.
+
+ ## Arguments passed during export
+
+ **input_shapes**
+
+ ```json
+ {
+   "batch_size": 1,
+   "sequence_length": 2048
+ }
+ ```
+
+ **compiler_args**
+
+ ```json
+ {
+   "auto_cast_type": "fp16",
+   "num_cores": 24
+ }
+ ```
+
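For reference, here is a hedged sketch of how the `input_shapes` and `compiler_args` recorded above would map onto an export call. It follows the `optimum-neuron` generative-export guide linked in the README and needs an Inferentia2 instance to actually run, so treat it as illustrative rather than verified against this exact revision:

```python
# Hedged sketch: re-exporting the base model with the parameters recorded
# above. Requires an inf2 instance with optimum-neuron installed.
from optimum.neuron import NeuronModelForCausalLM

model = NeuronModelForCausalLM.from_pretrained(
    "meta-llama/Llama-2-7b-hf",
    export=True,
    # input_shapes
    batch_size=1,
    sequence_length=2048,
    # compiler_args
    auto_cast_type="fp16",
    num_cores=24,
)
model.save_pretrained("llama-2-7b-hf-neuron")  # serializes checkpoint and compiled artifacts
```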
checkpoint/config.json ADDED
@@ -0,0 +1,27 @@
+ {
+   "_name_or_path": "meta-llama/Llama-2-7b-hf",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 11008,
+   "max_position_embeddings": 4096,
+   "model_type": "llama",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 32,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.34.1",
+   "use_cache": true,
+   "vocab_size": 32000
+ }
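The shapes in this config line up with the sizes of the split weight files below. A quick back-of-envelope check (assumption: each `p*` file holds a single float32 tensor serialized with `torch.save`, so its size is roughly `numel * 4` bytes plus a small serialization header):

```python
# Rough size check against the LFS pointer sizes below (float32 = 4 bytes).
hidden, intermediate, vocab = 4096, 11008, 32000  # from config.json above

print(hidden * hidden * 4)        # 67108864  ~ q/k/v/o_proj files (~67109765 B)
print(intermediate * hidden * 4)  # 180355072 ~ mlp gate/up/down files (~180355964 B)
print(vocab * hidden * 4)         # 524288000 ~ embed_tokens file (~524288789 B)
print(hidden * 4)                 # 16384     ~ layernorm files (~17282 B)
```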
checkpoint/generation_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "bos_token_id": 1,
+   "do_sample": true,
+   "eos_token_id": 2,
+   "max_length": 4096,
+   "pad_token_id": 0,
+   "temperature": 0.6,
+   "top_p": 0.9,
+   "transformers_version": "4.34.1"
+ }
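These values are the sampling defaults that `generate()` falls back to when a call does not set its own; the README's pipeline example overrides them with `do_sample=True, top_k=50`. A hedged sketch of reading them back (assumption: the `subfolder` argument resolves the `checkpoint/` directory of this repository):

```python
# Hedged sketch: inspect the generation defaults stored in this commit.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained(
    "aws-neuron/Llama-2-7b-hf-neuron-latency",  # repo id from the README
    subfolder="checkpoint",                     # assumption: see lead-in
)
print(gen_config.temperature, gen_config.top_p, gen_config.do_sample)
# expected: 0.6 0.9 True
```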
checkpoint/pytorch_model.bin/key_to_filename.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:825d20f4a18183eff3963e805edd13ef7eb35b0aff7a850e8153ca1eeeb37970
+ size 26397
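Note that `pytorch_model.bin` is a directory here, not a single file: each tensor lives in its own `p<N>.<key>` file, and `key_to_filename.json` maps state-dict keys to those file names. A hedged sketch of loading one tensor (assumptions: the mapping values are the `p*` names listed below, and each file deserializes with `torch.load`, as in a split-weights checkpoint):

```python
# Hedged sketch: resolve and load a single tensor from the split checkpoint.
import json
import torch

with open("checkpoint/pytorch_model.bin/key_to_filename.json") as f:
    key_to_filename = json.load(f)  # state-dict key -> per-tensor file name

key = "model.embed_tokens.weight"
tensor = torch.load(f"checkpoint/pytorch_model.bin/{key_to_filename[key]}")
print(tensor.shape)  # expected torch.Size([32000, 4096]) per config.json
```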
checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da5c4acb278381b1fc040ff34ca7cdfa8264895cfad4fee5c90436c423a8f459
+ size 524288789
checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e4486401b9f7b27193d310385d3468fdd234936513b93a4d2e9662d745b9b74
+ size 67109756
checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:43a22f8644d0a6940f854aaf7882e60579972973c97575f7e25622f97280f977
+ size 67109759
checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:de5c80fae90ca3476520a485c29d07121eb270e0534cb2c9ce08f4aa764e90fd
+ size 67109765
checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3c7b3eefbb663e184e8ee804ea11b02ac98f29a30abdf361eab46e4da7983a00
+ size 67109765
checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3b58bd301b57fc9e3c23375ed119c0ceecd9962e1aa714cea3722885e08c9d6a
+ size 67109765
checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ed98fdbfda7fc25acc4f80728f69e7cc647477ed659009d9339a16fd780bd34
+ size 67109765
checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:75b7a49d0bb826f91c9547c1494367d5273678c2d221de9080ccec9ba1befc56
+ size 180355964
checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a54b7365baf60a919654b1f2f4feb8991a700459769a779c95094018a08c5291
+ size 180355958
checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c079b1f4963c658c0dd95c54ab6f31e55e95cb40b0242465cd78388631c6302
+ size 180355964
checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08b697c974c581a4b64024f0dfa328941f597fa5b93312fff8dbe2140988b6f1
+ size 17282
checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:440dd212bb2e67c2ff83a2d42135f13589a04f622f41d523c241941e008ec1b8
+ size 17309
checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3046b38eb5f0dbd87c7c9cc320dfc3374311930d38ee43a03169c61a8b6eb19f
+ size 67109765
checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4afe803e9188698fe5b29e551e418da34e536dd5fc42d53f2d4d02acb06780a0
+ size 67109759
checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8b3ef1f510bd1955a076e2bbc7c509f1f28479bcf17755469e2ad201c70362cb
+ size 67109765
checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5dc6f96ead6b19d38d2bf5fdcb98dbc2baa34487ffb416029ad6bed9d37baf0
+ size 67109765
checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:46267dfe1859f8a4fd450e43523fb9603c849162eb53a5f72445daec88730607
+ size 67109765
checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d874134d19b3a5e0c28b4cb14a5c6ce416592cc0b280d22e6893c178f0bc4890
+ size 180355964
checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c6dbe94e47a28ab08e80a8069e06d9a6f31e16306ea138a356e90f90ff0139e
+ size 180355958
checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5afc26b5261f3c992c3a9e9b84cf42d9e5e6365c6d0dfb2a0397f52fdcf611ab
+ size 180355964
checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af688ecb35a5f33863c2ea72f697027bc01e19ae891d13c878a9bb077aef9eaf
+ size 17282
checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f42abe22e1afc5696515dfb5e91d0541aaf789bb8777d5f47c48726f7425c326
+ size 17309
checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d57fbe0a4df894dbd3f98e744ad1775a43693b4ce7992991616576f66af86964
+ size 67109765
checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a7e1fb093b541409c38ec658f77bf301eba121873d557d8037b569a2a82b947c
+ size 67109765
checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c67935134cf42385cfc6ee23d9c41f3e0e146cf620fdfbfb7d61993332867779
+ size 67109759
checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7762b921206e6b9548e584537cd2634724d909ed2d73ed5a0342344a698d8528
+ size 67109765
checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:28120f754e95324e127cca63a761d124d834fd56cced2fd38d27e15ce7e4691b
+ size 67109765
checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:60f749c15d65ba3b6456e058611e0fd0dbcfce4d38041d814e0b778bb8a74593
+ size 180355964
checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ef1b6504ed2fb7f3d0b515a3ae3fb06e59fd2a2c92f107c89b8727e74dc9bcc
+ size 180355958
checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1bd51d04347f173637b730826633a263f1b25370bd446a1ff34c8f03c7c51e79
+ size 180355964
checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5d96cfb55f2a0edd518209aed60673eb2cdbd5e6aee6c8cb51ef3ec618ff15bc
+ size 17282
checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:14703c6f0d6c692a048a16ec17d3bd5bb8e925dc6024558931041df48fa6d46c
+ size 17309
checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:75fc137e6fac484eb99a764320706ab7de62b162e12a40e4af706f33a916c2e0
+ size 67109765
checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:963cc6b708a5b9beb5c7648797394e8897e5bb7f3502602563e33fdb4731c5bd
+ size 67109765
checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f156177fd8262b098c5f477bbeaa1137933ea23062c87c9d7b6f5e531fbbbb5f
+ size 67109765
checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b774652267c782b295972d3ad5a80cd9b8726460798c63b155cbbed629d5ee95
+ size 67109759
checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:12cec480791ecd87866ee79de6b6929135451fb948a6eaea47b52e21c5168e16
+ size 67109765
checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e8ae779c065179df5c262ef305c227ef4d8fbe4ea8fa673e18955e3d09c40bae
+ size 180355964
checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d6e4498fb9eb95f190b8304c5e57acf6a4a8f1aa44b4adbbbed14eda5dc6fe83
+ size 180355958
checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d5a2321a7081de7f76b053a22b5fcd1fdb854e5fbb15d298a8e2fd48b02c7abc
+ size 180355964
checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:37f5642ad300b1225a3f0f195317d674cc8b3a443b2145b39abc912b3e8f7e60
+ size 17282
checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4b7ed46e4eafb9182394036bdc4980359ca7770a8fd7d739eedf232ecd89de23
+ size 17309
checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5b8645153a8338589fafb0d6143e5b57d56d4298a424123c4c212e488b28b803
+ size 67109765
checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08ff0b29c8f6b08fe3a14ac6f08deee7a28c195afa9ddd05cecbba506875cf91
+ size 67109765