philschmid (HF staff) committed
Commit f5b31f9
1 parent: 0b44628

Upload folder using huggingface_hub

This view is limited to 50 files because the commit contains too many changes.
Files changed (50)
  1. .gitattributes +10 -0
  2. README.md +77 -0
  3. checkpoint/config.json +27 -0
  4. checkpoint/generation_config.json +10 -0
  5. checkpoint/pytorch_model.bin/key_to_filename.json +3 -0
  6. checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight +3 -0
  7. checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight +3 -0
  8. checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight +3 -0
  9. checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight +3 -0
  10. checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight +3 -0
  11. checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight +3 -0
  12. checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight +3 -0
  13. checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight +3 -0
  14. checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight +3 -0
  15. checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight +3 -0
  16. checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight +3 -0
  17. checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight +3 -0
  18. checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight +3 -0
  19. checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight +3 -0
  20. checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight +3 -0
  21. checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight +3 -0
  22. checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight +3 -0
  23. checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight +3 -0
  24. checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight +3 -0
  25. checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight +3 -0
  26. checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight +3 -0
  27. checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight +3 -0
  28. checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight +3 -0
  29. checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight +3 -0
  30. checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight +3 -0
  31. checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight +3 -0
  32. checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight +3 -0
  33. checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight +3 -0
  34. checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight +3 -0
  35. checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight +3 -0
  36. checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight +3 -0
  37. checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight +3 -0
  38. checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight +3 -0
  39. checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight +3 -0
  40. checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight +3 -0
  41. checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight +3 -0
  42. checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight +3 -0
  43. checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight +3 -0
  44. checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight +3 -0
  45. checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight +3 -0
  46. checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight +3 -0
  47. checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight +3 -0
  48. checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight +3 -0
  49. checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight +3 -0
  50. checkpoint/pytorch_model.bin/p138.model.layers.15.self_attn.v_proj.weight +3 -0
.gitattributes CHANGED
@@ -33,3 +33,13 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ compiled/0f12364c693e4bc350d6.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/11542220b8bf801e13f2.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/36e1ce7aff5023dcef55.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/3ef740eb467f762c3c93.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/59693de3aebe4137f425.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/6e492ea2fcb8c4365957.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/8df171844a096fe60a5c.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/97cb854bca564e595f78.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/b41916b24567005d8716.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/e56039fa0d8d702ce4b7.neff filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,77 @@
+ ---
+ language:
+ - en
+ tags:
+ - facebook
+ - meta
+ - pytorch
+ - llama
+ - llama-2
+ - inferentia2
+ - neuron
+ extra_gated_heading: Access Llama 2 on Hugging Face
+ extra_gated_description: This is a form to enable access to Llama 2 on Hugging Face
+   after you have been granted access from Meta. Please visit the [Meta website](https://ai.meta.com/resources/models-and-libraries/llama-downloads)
+   and accept our license terms and acceptable use policy before submitting this form.
+   Requests will be processed in 1-2 days.
+ extra_gated_prompt: '**Your Hugging Face account email address MUST match the email
+   you provide on the Meta website, or your request will not be approved.**'
+ extra_gated_button_content: Submit
+ extra_gated_fields:
+   ? I agree to share my name, email address and username with Meta and confirm that
+     I have already been granted download access on the Meta website
+   : checkbox
+ pipeline_tag: text-generation
+ inference: false
+ arxiv: 2307.09288
+ ---
+ # Neuronx model for [meta-llama/Llama-2-7b-chat-hf](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf)
+
+ This repository contains an [**AWS Inferentia2**](https://aws.amazon.com/ec2/instance-types/inf2/) and [`neuronx`](https://awsdocs-neuron.readthedocs-hosted.com/en/latest/) compatible checkpoint for [meta-llama/Llama-2-7b-chat-hf](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf). You can find detailed information about the base model on its [Model Card](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf).
+
+ ## Usage on Amazon SageMaker
+
+ _coming soon_
+
+ ## Usage with optimum-neuron
+
+ ```python
+ from optimum.neuron import pipeline
+
+ # Load pipeline from Hugging Face repository
+ pipe = pipeline("text-generation", "meta-llama/Llama-2-7b-chat-hf")
+
+ # We use the tokenizer's chat template to format each message - see https://huggingface.co/docs/transformers/main/en/chat_templating
+ messages = [
+     {"role": "user", "content": "What is 2+2?"},
+ ]
+ prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
+ # Run generation
+ outputs = pipe(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
+ print(outputs[0]["generated_text"])
+ ```
+
+ ## Compilation Arguments
+
+ **compilation arguments**
+
+ ```json
+ {
+   "num_cores": 2,
+   "auto_cast_type": "fp16"
+ }
+ ```
+
+ **input_shapes**
+
+ ```json
+ {
+   "sequence_length": 2048,
+   "batch_size": 4
+ }
+ ```
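For reference, a minimal sketch (not part of this commit) of how these two argument sets are typically passed to `optimum-neuron` when re-exporting the checkpoint on an Inferentia2 instance; the model id is the base checkpoint and the output directory name is illustrative:

```python
# Sketch only: re-export the base checkpoint with the compilation arguments
# and input shapes documented above (assumes optimum-neuron is installed
# on an inf2 instance).
from optimum.neuron import NeuronModelForCausalLM

compiler_args = {"num_cores": 2, "auto_cast_type": "fp16"}
input_shapes = {"sequence_length": 2048, "batch_size": 4}

# export=True triggers neuronx compilation with the given shapes
model = NeuronModelForCausalLM.from_pretrained(
    "meta-llama/Llama-2-7b-chat-hf",
    export=True,
    **compiler_args,
    **input_shapes,
)
model.save_pretrained("llama-2-7b-chat-neuronx")  # illustrative output path
```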
checkpoint/config.json ADDED
@@ -0,0 +1,27 @@
+ {
+   "_name_or_path": "meta-llama/Llama-2-7b-chat-hf",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 11008,
+   "max_position_embeddings": 4096,
+   "model_type": "llama",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 32,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.34.1",
+   "use_cache": true,
+   "vocab_size": 32000
+ }
checkpoint/generation_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "bos_token_id": 1,
+   "do_sample": true,
+   "eos_token_id": 2,
+   "max_length": 4096,
+   "pad_token_id": 0,
+   "temperature": 0.6,
+   "top_p": 0.9,
+   "transformers_version": "4.34.1"
+ }
checkpoint/pytorch_model.bin/key_to_filename.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:825d20f4a18183eff3963e805edd13ef7eb35b0aff7a850e8153ca1eeeb37970
+ size 26397
checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b95d85278b1d01a971a7c6a67c824f97601415652eec60abc5045bf72d09e29
+ size 524288789
checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d17934ba5277e8186e914184f219813929a9470e6e8f0f3f76e13e156c13f5d3
+ size 67109756
checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b1fff4d6edf30167b5ad1d47fd2b15593b028fa13c54c65cb7e7dc1e2989e401
+ size 67109759
checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:45197a954ec886d63a4a9c3ec7ac3fa7ab732950f8f823046f3297d01ee03188
+ size 67109765
checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:18a664a3d76a840b4377bb92053e7374506966827a3c5928758961371b973654
+ size 67109765
checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b53a11e8c45469610d063edcc227c0fcb4a889003095ed825da8bd360f32fb9
+ size 67109765
checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:599d75c511ffc03da3d1337b22f234ed015294be4bfa8601e64b4b1d9a99d02b
+ size 67109765
checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:78d6f75c4715bafcce844f9f7d265f338888d9dcec9018d4899adb89a30f01e8
+ size 180355964
checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f152f631ab5b9359940b96a9e8eddf2094cc405a521dd354a944292555846a4c
+ size 180355958
checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ca8316920b7f0185667b5d1a0d95f244e57cf7439ec34d244ba9767e2876c4da
+ size 180355964
checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:29e70854c02954ec73682a0e37083d842c556b5944356ab933464f5b1c899dfe
+ size 17282
checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:adc863fa2e9b113019f47c162fab1b2d8f3e24756aa313746b09470043821b4c
+ size 17309
checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f8569d2b33abf7e991cfb296fc0f251287352e34bdd7f69cb452f3245d0fdeab
+ size 67109765
checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1dca8032d3fd0d87ab1249a5d44b08c72340cf3b318fbefec6ebab1261a5667d
+ size 67109759
checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:23bbfaac8f3b90c8da89d9a8651da28bcd1bccdfc022271b0e0fa59f500b0b49
+ size 67109765
checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eac16b12c6ca686385212212a2c12aee043fe325bfb09ad5291e05a49268fd5e
+ size 67109765
checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0a5e2f7ad7f4242705a12bc8967b862a5aeb6bd86bd4d752175b3ef25ea1b793
+ size 67109765
checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cacdfdec958d51b22005632d21995b3dfa5b88741fc9f712711cf14b5b3c9a12
+ size 180355964
checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:93836f3e7b9d7da92390d227a8f9e9201708b6f796688453ce543d44881e1d93
+ size 180355958
checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3174e267c3229c1f09f131661b579ba0bc4b5bdceb56b21c070d27914ce5b4ce
+ size 180355964
checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6ec0751f78c8348c7b2b24eb04f8d62c9ddd35111dae08cef77a67c22f41fe06
+ size 17282
checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bc59b82792d45361f3013060c5043398d0bca37220f07320443cd244c1d3220b
+ size 17309
checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b3a826b357c699fc24ea574cecc8c565ff4cfba86a284ec818243050a27fda3c
+ size 67109765
checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e738683813f4ea2bc85a5cc2546dd0e9ce4369591d24c160014f96b3a2d42ebc
+ size 67109765
checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:57a822cf3c2bdd87ec94113f83496cbc2874e3880fadc5a0e74e70234750cd0b
+ size 67109759
checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6db91d48499930ceeba5946ab13954d2c4fa3559611d2aad824c41c0008e1c45
+ size 67109765
checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cdc053bb6caf1588bca49c2737f1e9f4e5dd033fa4d411a3cc7db25675f084a6
+ size 67109765
checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c77821767d64db6b230ff1f1a22150420c08f5c5bfd311a7072d8aaa44eb10d8
+ size 180355964
checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5c8ada4f7c4a85fcb593e892f7f537d6f04b9fc0a9ef499e7bd90fa4ed32bd4d
+ size 180355958
checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:489af295a7f782edcb1f91df4cadcfb4bf39cd20b12559cb8877e4ebf00b167c
+ size 180355964
checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f4328ff412e6b5b5a0f78c5240adfeb2a16072721cfc4f2cd58d01caf58fa480
+ size 17282
checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5bbf8f9056013c32446afd571f708e517daedb1166ec18c4672bbac4f8c22ca2
+ size 17309
checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:94d04f5fd0c2fd0f0c092aa0eee8bec179a3b9fca2d0c9086daea70b35b1dc8e
+ size 67109765
checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d4628507f45711fb91626a60a8ad3cff01590d61d11628f3b21aceef1bae8b5e
+ size 67109765
checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68f73858f359d8554437c704d0ce421fcd757f754a2959d1e258ec1e330d5736
+ size 67109765
checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:10955c037e132d0bf9b3c6b6340de96252108a97b4e7884558dd54a79ad117ce
+ size 67109759
checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:87d91ed175ea587c20492e9285baed83edca3370ac7f01d9d7137dadb4b58aae
+ size 67109765
checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:054d1d903e02c20018c5e56406ca8506140c7762d2363757890faeffda007675
+ size 180355964
checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e433551a3f3c2232e8ec856be5b52b13a88139c365272404558b7fb29e7258e
+ size 180355958
checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3f8439df10dce6ed8d9dc5908da81e2598d5c7b3346f71386a1b0daf266cd3ec
+ size 180355964
checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2cca827bd0129b66c51a289e89af9a3cba71bdc8757e1927a59aa1094ed17660
+ size 17282
checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8d8836455dedf1974045cb8cc31f259ed558d43a348dc593cd30a5c01f63d458
+ size 17309
checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5c594f879c85d8eb722ed8beae8a6c42efbabde558b8115d02bfab2cc1dd4961
+ size 67109765
checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dc983a20e0dc1b4fb130c83a717b8da167d3137ef1eb10aa03f748319ed3307a
+ size 67109765
checkpoint/pytorch_model.bin/p138.model.layers.15.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d42fe3c380f800d1b5508b21d313bdd7ef252940c902aac577ca827f9674483a
+ size 67109765