nawage committed
Commit e543f4f
Parent: e894ba6

Upload folder using huggingface_hub

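The commit message is the default that huggingface_hub's upload_folder emits, so the files below were most likely pushed with something like the following sketch (the local path and repo id are hypothetical placeholders, not taken from this commit):

    from huggingface_hub import HfApi

    api = HfApi()
    # Uploads every file in the folder as one commit; files matching the
    # .gitattributes LFS patterns below are stored through Git LFS.
    api.upload_folder(
        folder_path="./engine_output",   # hypothetical local build output
        repo_id="user/llama-70b-trt",    # hypothetical repo id
        repo_type="model",
    )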
.gitattributes CHANGED
@@ -33,3 +33,17 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ llama_float16_tp1_rank0.engine-part_aa filter=lfs diff=lfs merge=lfs -text
+ llama_float16_tp1_rank0.engine-part_ab filter=lfs diff=lfs merge=lfs -text
+ llama_float16_tp1_rank0.engine-part_ac filter=lfs diff=lfs merge=lfs -text
+ llama_float16_tp1_rank0.engine-part_ad filter=lfs diff=lfs merge=lfs -text
+ llama_float16_tp1_rank0.engine-part_ae filter=lfs diff=lfs merge=lfs -text
+ llama_float16_tp1_rank0.engine-part_af filter=lfs diff=lfs merge=lfs -text
+ llama_float16_tp1_rank0.engine-part_ag filter=lfs diff=lfs merge=lfs -text
+ llama_float16_tp1_rank0.engine-part_ah filter=lfs diff=lfs merge=lfs -text
+ llama_float16_tp1_rank0.engine-part_ai filter=lfs diff=lfs merge=lfs -text
+ llama_float16_tp1_rank0.engine-part_aj filter=lfs diff=lfs merge=lfs -text
+ llama_float16_tp1_rank0.engine-part_ak filter=lfs diff=lfs merge=lfs -text
+ llama_float16_tp1_rank0.engine-part_al filter=lfs diff=lfs merge=lfs -text
+ llama_float16_tp1_rank0.engine-part_am filter=lfs diff=lfs merge=lfs -text
+ model.cache filter=lfs diff=lfs merge=lfs -text
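All fourteen new patterns route the uploaded files through Git LFS. The engine ships as thirteen chunks, twelve of exactly 5 GiB plus a shorter tail, with aa..am suffixes in the style of `split -b 5G`, presumably to stay under per-file upload limits. A minimal Python equivalent of that split, assuming the original single-file engine existed locally (the split itself is an inference, not stated in the commit):

    import itertools
    import string

    CHUNK = 5 * 1024**3  # 5,368,709,120 bytes; matches the part sizes in this commit

    def split_file(path: str, chunk: int = CHUNK, block: int = 64 * 1024**2) -> None:
        # Two-letter suffixes aa, ab, ac, ... in the order `split` uses.
        suffixes = ("".join(p) for p in itertools.product(string.ascii_lowercase, repeat=2))
        with open(path, "rb") as src:
            for suffix in suffixes:
                remaining = chunk
                data = src.read(min(block, remaining))
                if not data:
                    break  # source exhausted; no more parts to write
                with open(f"{path}-part_{suffix}", "wb") as dst:
                    while data:
                        dst.write(data)
                        remaining -= len(data)
                        if remaining == 0:
                            break  # this part reached the full 5 GiB
                        data = src.read(min(block, remaining))

    split_file("llama_float16_tp1_rank0.engine")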
config.json ADDED
@@ -0,0 +1,51 @@
+ {
+   "builder_config": {
+     "gather_all_token_logits": false,
+     "hidden_act": "silu",
+     "hidden_size": 8192,
+     "int8": false,
+     "max_batch_size": 8,
+     "max_beam_width": 1,
+     "max_input_len": 2048,
+     "max_num_tokens": null,
+     "max_output_len": 512,
+     "max_position_embeddings": 4096,
+     "max_prompt_embedding_table_size": 0,
+     "name": "llama",
+     "num_heads": 64,
+     "num_kv_heads": 8,
+     "num_layers": 80,
+     "parallel_build": false,
+     "pipeline_parallel": 1,
+     "precision": "float16",
+     "quant_mode": 384,
+     "tensor_parallel": 1,
+     "use_refit": false,
+     "vocab_size": 32000
+   },
+   "plugin_config": {
+     "attention_qk_half_accumulation": false,
+     "bert_attention_plugin": false,
+     "context_fmha_type": 1,
+     "gemm_plugin": false,
+     "gpt_attention_plugin": "float16",
+     "identity_plugin": false,
+     "layernorm_plugin": false,
+     "layernorm_quantization_plugin": false,
+     "lookup_plugin": false,
+     "lora_plugin": false,
+     "multi_block_mode": false,
+     "nccl_plugin": false,
+     "paged_kv_cache": true,
+     "quantize_per_token_plugin": false,
+     "quantize_tensor_plugin": false,
+     "remove_input_padding": true,
+     "rmsnorm_plugin": false,
+     "rmsnorm_quantization_plugin": false,
+     "smooth_quant_gemm_plugin": false,
+     "tokens_per_block": 64,
+     "use_custom_all_reduce": false,
+     "weight_only_groupwise_quant_matmul_plugin": false,
+     "weight_only_quant_matmul_plugin": false
+   }
+ }
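For orientation, builder_config describes a 70B-class LLaMA engine: 80 layers, hidden size 8192, and 64 query heads sharing 8 KV heads (grouped-query attention), built in float16 with tensor parallelism 1. A small sketch of the sanity checks a consumer can run against this file as committed (the derived numbers follow directly from the fields above):

    import json

    with open("config.json") as f:
        cfg = json.load(f)["builder_config"]

    head_dim = cfg["hidden_size"] // cfg["num_heads"]       # 8192 // 64 = 128
    gqa_groups = cfg["num_heads"] // cfg["num_kv_heads"]    # 64 // 8 = 8 query heads per KV head
    max_seq = cfg["max_input_len"] + cfg["max_output_len"]  # 2048 + 512 = 2560
    assert max_seq <= cfg["max_position_embeddings"]        # 2560 <= 4096
    print(head_dim, gqa_groups, max_seq)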
llama_float16_tp1_rank0.engine-part_aa ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76bc11e1d0e1cd03037b45ec3a29277946ad554791fd994c9b86b7e4586a58d4
+ size 5368709120
llama_float16_tp1_rank0.engine-part_ab ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b43cfe277ee4b87f1d13ccd15ca54ae210d9a095d567dcf83c91886bafa2a0ca
+ size 5368709120
llama_float16_tp1_rank0.engine-part_ac ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dcd7d4782cafc1c0f52a51c3e8c54a80de9baf4fc5ee76db71cb96473bd08d18
+ size 5368709120
llama_float16_tp1_rank0.engine-part_ad ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1b1ac3e215050e20296e2f85e3889bf1495168e91b32806f650db85e78c589f1
+ size 5368709120
llama_float16_tp1_rank0.engine-part_ae ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8aad0f007c94d8bdc7f70f791af28fbba735e4dfa71bc8486e46da7024839106
+ size 5368709120
llama_float16_tp1_rank0.engine-part_af ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:df66ec8e367ef10352630f10a6285bfbd02387517ab435d67b765dfbabbd1600
+ size 5368709120
llama_float16_tp1_rank0.engine-part_ag ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:22abc681b10a88515db7c10c5340ac5436e81b7ef8170963d945fea8809805e6
+ size 5368709120
llama_float16_tp1_rank0.engine-part_ah ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:20448b41660bcbc5c36f6729ba561f33e0d787b914d4a93e1bec70bced3dc477
+ size 5368709120
llama_float16_tp1_rank0.engine-part_ai ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ae70db0bcb79acea024674c7d61802ecfd2fe2f9abe1ba1c2b0c72e69f412ae
+ size 5368709120
llama_float16_tp1_rank0.engine-part_aj ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:66c50f43221410fe70803935857d425a1d7a2e5cdc8fdfa1fcbfbba920eb885d
+ size 5368709120
llama_float16_tp1_rank0.engine-part_ak ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:684292af31364a47b604e7995859b0ca74bbe06e6b696216af1e5e03fc976cb9
+ size 5368709120
llama_float16_tp1_rank0.engine-part_al ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:130bde5376f5847b371c0ac2d2fb75800508ff407244351ef91d1a862a07aabc
+ size 5368709120
llama_float16_tp1_rank0.engine-part_am ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da15afac251834d2e36ccf1774c1a75f57175b46f36b771a6406a8006b22e7bd
+ size 5101690284
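Each ADDED entry above is a Git LFS pointer: a version line, the sha256 oid of the payload, and its byte size; the actual data is fetched from LFS at download time. The twelve full 5 GiB parts plus the 5,101,690,284-byte tail total 69,526,199,724 bytes (about 64.8 GiB), and the engine is recovered by concatenating the parts in suffix order. A reassembly sketch, assuming the downloaded parts sit in the current directory under their committed names:

    import glob
    import os

    parts = sorted(glob.glob("llama_float16_tp1_rank0.engine-part_*"))
    # 12 * 5,368,709,120 + 5,101,690,284 = 69,526,199,724 bytes (~64.8 GiB)
    assert sum(os.path.getsize(p) for p in parts) == 69_526_199_724

    with open("llama_float16_tp1_rank0.engine", "wb") as out:
        for part in parts:
            with open(part, "rb") as src:
                while block := src.read(64 * 1024**2):  # stream; parts are too large to slurp
                    out.write(block)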
model.cache ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3cb6d983697b6dc1a2ae730d906a7b70add339620bdcff0a499f8c2a6086ab48
+ size 5089224
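model.cache is plausibly the TensorRT timing cache saved alongside the engine build (an assumption; the commit does not say). Because every pointer in this commit records a sha256 oid, a downloaded blob can be verified against its pointer; a minimal sketch using the model.cache oid above:

    import hashlib

    def sha256_of(path: str, block: int = 1 << 20) -> str:
        h = hashlib.sha256()
        with open(path, "rb") as f:
            while chunk := f.read(block):
                h.update(chunk)
        return h.hexdigest()

    # oid copied from the LFS pointer committed above
    assert sha256_of("model.cache") == "3cb6d983697b6dc1a2ae730d906a7b70add339620bdcff0a499f8c2a6086ab48"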