dwetzel committed
Commit 4d582cc · verified · 1 Parent(s): f9c64aa

Upload LlamaForCausalLM

config.json ADDED
@@ -0,0 +1,69 @@
+ {
+   "_name_or_path": "/home/ubuntu/.cache/huggingface/hub/models--meta-llama--Meta-Llama-3.1-70B-Instruct/snapshots/1d54af340dc8906a2d21146191a9c184c35e47bd",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 128000,
+   "compression_config": {
+     "config_groups": {
+       "group_0": {
+         "input_activations": null,
+         "output_activations": null,
+         "targets": [
+           "Linear"
+         ],
+         "weights": {
+           "block_structure": null,
+           "dynamic": false,
+           "group_size": 128,
+           "num_bits": 4,
+           "observer": "minmax",
+           "observer_kwargs": {},
+           "strategy": "group",
+           "symmetric": true,
+           "type": "int"
+         }
+       }
+     },
+     "format": "pack-quantized",
+     "global_compression_ratio": 1.9030208016028207,
+     "ignore": [
+       "lm_head"
+     ],
+     "kv_cache_scheme": null,
+     "quant_method": "compressed-tensors",
+     "quantization_status": "frozen"
+   },
+   "eos_token_id": [
+     128001,
+     128008,
+     128009
+   ],
+   "hidden_act": "silu",
+   "hidden_size": 8192,
+   "initializer_range": 0.02,
+   "intermediate_size": 28672,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 64,
+   "num_hidden_layers": 80,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 8.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.44.0",
+   "use_cache": true,
+   "vocab_size": 128256
+ }
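Note: the compression_config block describes a weight-only W4A16 scheme. The weights of every Linear module except lm_head (listed under ignore) are quantized to symmetric 4-bit integers with one scale per group of 128 input channels and packed in the compressed-tensors "pack-quantized" format, which is where the ~1.9x global compression ratio comes from; activations stay in bfloat16. A minimal sketch of serving the checkpoint with vLLM, which understands the compressed-tensors format (the repo id below is a hypothetical placeholder for wherever this checkpoint is published):

# Minimal sketch; "dwetzel/Meta-Llama-3.1-70B-Instruct-W4A16" is a
# hypothetical repo id, not confirmed by this commit.
# vLLM reads "quant_method": "compressed-tensors" from config.json and
# unpacks the int4, group-128, symmetric weights at load time.
from vllm import LLM, SamplingParams

llm = LLM(model="dwetzel/Meta-Llama-3.1-70B-Instruct-W4A16")
params = SamplingParams(temperature=0.6, top_p=0.9)  # mirrors generation_config.json
outputs = llm.generate(["Explain group-wise weight quantization briefly."], params)
print(outputs[0].outputs[0].text)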
generation_config.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "bos_token_id": 128000,
+   "do_sample": true,
+   "eos_token_id": [
+     128001,
+     128008,
+     128009
+   ],
+   "temperature": 0.6,
+   "top_p": 0.9,
+   "transformers_version": "4.44.0"
+ }
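Note: generation_config.json sets the default sampling behavior: nucleus sampling with temperature 0.6 and top_p 0.9, stopping on any of the three Llama 3.1 end-of-turn token ids. transformers' generate() picks these defaults up automatically; a minimal sketch of loading and inspecting them, assuming a hypothetical local directory holding the files above:

from transformers import GenerationConfig

# The local path is an assumption for illustration.
gen_cfg = GenerationConfig.from_pretrained("./Meta-Llama-3.1-70B-Instruct-W4A16")
assert gen_cfg.do_sample and gen_cfg.temperature == 0.6 and gen_cfg.top_p == 0.9
# model.generate(..., generation_config=gen_cfg) then samples with these
# settings and stops on eos ids 128001, 128008, or 128009.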
model-00001-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:11d6fcf810820cdc9c9397edc4bb32a7c5f0e98786b7a4b1164da2c7adbb2102
+ size 4947658664
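Note: each .safetensors entry is stored through Git LFS, so the diff shows only a pointer file: the oid line is the SHA-256 of the actual ~4.9 GB shard. A downloaded shard can be verified against the pointer with the standard library alone; a minimal sketch, assuming the shard sits in the current working directory:

import hashlib

# SHA-256 from the LFS pointer above; hash the shard in 1 MiB chunks.
EXPECTED = "11d6fcf810820cdc9c9397edc4bb32a7c5f0e98786b7a4b1164da2c7adbb2102"
h = hashlib.sha256()
with open("model-00001-of-00008.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
print("OK" if h.hexdigest() == EXPECTED else "checksum mismatch")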
model-00002-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a159cd8c9ee808a05fcd9c82c983d733ec6203f69a33c1f992d9d3185a794b2f
+ size 4974573352
model-00003-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4065845dae154b51e7bdd2bef23f33bba5c0b2d993b6e2432362df3d1f452f0b
+ size 4974606440
model-00004-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef9ec6b80c4a20a9e8453e1534b82b81ba364765653e0be3989d512a35ff1f7d
+ size 4931320776
model-00005-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:729acec917af0e3d0879c984083588374c53b1d90dcd98a91f39c486c9e59197
+ size 4974573440
model-00006-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5d905973d70cfd5fe1a5739b8c9ffee4a8a2c8a26c4f7cb1e4e95557e893f672
+ size 4974573440
model-00007-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cce8d04f466c284a31fa9ca3d663e8b5110e0c2553edf7eca2e5fb5c31974b53
+ size 4974606440
model-00008-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5d9e1da03d6aec2dff2bafa33e46889f46906d67a7afdeded7264e736ea68a11
+ size 4748705944
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
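Note: model.safetensors.index.json follows the standard safetensors index layout, with a metadata object holding the total checkpoint size in bytes and a weight_map dict from tensor name to shard filename; that is how loaders find which of the eight shards holds each tensor. A minimal sketch of inspecting it, assuming the file has been downloaded locally (its exact contents are not shown in this diff):

import json
from collections import Counter

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])        # total bytes across all shards
print(Counter(index["weight_map"].values()))  # tensor count per shard file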