psyche committed on
Commit c695479
1 Parent(s): 22087f7

Upload LlamaForCausalLM

config.json ADDED
@@ -0,0 +1,42 @@
+ {
+   "_name_or_path": "meta-llama-Meta-Llama-3-70B-Instruct-awq",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 128000,
+   "eos_token_id": 128009,
+   "hidden_act": "silu",
+   "hidden_size": 8192,
+   "initializer_range": 0.02,
+   "intermediate_size": 28672,
+   "max_position_embeddings": 8192,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 64,
+   "num_hidden_layers": 80,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "quantization_config": {
+     "backend": "autoawq",
+     "bits": 4,
+     "do_fuse": false,
+     "exllama_config": null,
+     "fuse_max_seq_len": null,
+     "group_size": 128,
+     "modules_to_fuse": null,
+     "modules_to_not_convert": null,
+     "quant_method": "awq",
+     "version": "gemm",
+     "zero_point": true
+   },
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.41.2",
+   "use_cache": false,
+   "vocab_size": 128256
+ }
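
The quantization_config above declares 4-bit AWQ weights (group_size 128, GEMM kernels, zero-point quantization), so transformers can load this checkpoint directly once the autoawq backend is installed. A minimal loading sketch; the repo id below is a placeholder, not confirmed by this commit:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hypothetical Hub id for illustration; substitute the actual repo.
model_id = "psyche/Meta-Llama-3-70B-Instruct-awq"

tokenizer = AutoTokenizer.from_pretrained(model_id)

# config.json already embeds quantization_config (awq, 4-bit, group_size 128),
# so no extra quantization arguments are needed at load time.
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",          # spread the 80 layers across available GPUs
    torch_dtype=torch.float16,  # matches "torch_dtype" in config.json
)
```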
generation_config.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "bos_token_id": 128000,
+   "do_sample": true,
+   "eos_token_id": [
+     128001,
+     128009
+   ],
+   "max_length": 4096,
+   "temperature": 0.6,
+   "top_p": 0.9,
+   "transformers_version": "4.41.2"
+ }
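
generation_config.json enables sampling by default (temperature 0.6, top-p 0.9) and treats both 128001 and 128009 as end-of-sequence tokens. A sketch of generation with these defaults, continuing from the loading sketch above; the prompt is illustrative:

```python
messages = [{"role": "user", "content": "Explain AWQ quantization briefly."}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

# generate() picks up do_sample, temperature, top_p and the eos_token_id
# list from generation_config.json; they are repeated here only to make
# the defaults visible.
outputs = model.generate(
    inputs,
    max_new_tokens=256,
    do_sample=True,
    temperature=0.6,
    top_p=0.9,
)
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))
```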
model-00001-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:541fb369e14a2b96c99954c184ca496a0ce1b5be64ad21696e623b77e9e911bb
+ size 4969219128
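
Each shard is checked in as a Git LFS pointer: the repository stores only the spec version, the sha256 object id, and the byte size, while the weights themselves live in LFS storage. A small integrity check for a downloaded shard against its pointer, using the values from the file above (the local path is an assumption):

```python
import hashlib

# Copied from the LFS pointer above; adjust the path to where the shard lives.
expected_sha256 = "541fb369e14a2b96c99954c184ca496a0ce1b5be64ad21696e623b77e9e911bb"
expected_size = 4969219128
path = "model-00001-of-00009.safetensors"

h = hashlib.sha256()
size = 0
with open(path, "rb") as f:
    # Hash in 1 MiB chunks so the ~5 GB shard never has to fit in memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
        size += len(chunk)

assert size == expected_size, f"size mismatch: {size} != {expected_size}"
assert h.hexdigest() == expected_sha256, "sha256 mismatch"
print("shard OK")
```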
model-00002-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7b95b65a3cd928727ccff4c1c1b5883884220041f250932f6aa66aafbbbb21e6
+ size 4890226896
model-00003-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1a2daebde0029ba2e52f2f556f18a532da041245f6db580f10da254f8f4d9bb9
+ size 4890226992
model-00004-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ce536253a29927d86b1eb9317eab42803670213167455d1710b68f67c0f2cfa0
+ size 4890226992
model-00005-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a20ac97b86f4b4b7d9f1a263f4bd43e7821ce2394ccb1690168313b943d4894f
+ size 4890226992
model-00006-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f4c94f301115f509a14f91f9b58edbb413a085813155010bd31d30228afb5a0d
+ size 4890226992
model-00007-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:44627a425fc5f059a643c96815eab712f546d8b2622e6d60cf749c4b96ceb40f
+ size 4890226992
model-00008-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8c06400758aeaa102d153f0bfeecb3d01bfb3301780e56f6cc535f815b658650
+ size 3356068840
model-00009-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c7f36afb2837cc4b8d6caa0b167f14fc354549c55d6afe96cccea3dd48e84598
+ size 2101346432
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
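
model.safetensors.index.json follows the standard sharded-checkpoint layout: a metadata block carrying the total tensor size plus a weight_map from each tensor name to the shard file holding it. The nine shard files above sum to 39,767,996,256 bytes (roughly 39.8 GB), consistent with 4-bit weights for a 70B model; metadata.total_size should be close to this figure, slightly smaller since it counts tensor bytes and excludes the small safetensors headers. A sketch for inspecting the index, assuming it has been downloaded locally:

```python
import json
from collections import Counter

# Assumes the index file sits alongside the shards on disk.
with open("model.safetensors.index.json") as f:
    index = json.load(f)

# metadata.total_size plus weight_map (tensor name -> shard file name)
# is the standard format transformers uses to resolve sharded checkpoints.
print("total_size:", index["metadata"]["total_size"])

per_shard = Counter(index["weight_map"].values())
for shard, n_tensors in sorted(per_shard.items()):
    print(f"{shard}: {n_tensors} tensors")
```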