dacorvo HF staff committed on
Commit
02b3502
1 Parent(s): 9921fe9

Delete neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0

Browse files
This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50) hide show
  1. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/4b3ca94bb4445bc28bc8.json +0 -1
  2. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/f9a5439ee67f962284df.json +0 -1
  3. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/2b51ed50a475725e6b3e.json +0 -1
  4. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/335f8d1c7218c7410000.json +0 -1
  5. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/36e1657c052cd92f031f.json +0 -1
  6. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/65cb8c493bb52d550958.json +0 -1
  7. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/73b7467bea092a4ea612.json +0 -1
  8. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/d5e93094d604b84cb59a.json +0 -1
  9. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/2713954a7f357d88b849.json +0 -1
  10. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/2d94b70ec1b5a0628be0.json +0 -1
  11. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/HuggingFaceTB/cosmo-1b/83c64ad31c0699e3053e.json +0 -1
  12. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/0b3e037f44d96a1e7239.json +0 -1
  13. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/8d31ca1db6445239b6cc.json +0 -1
  14. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/c5422e38f76211f16ecf.json +0 -1
  15. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/f6a36646c9688804bab7.json +0 -1
  16. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/f9468c010d2a222046f8.json +0 -1
  17. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/dacorvo/tiny-random-llama/62a76db84304b34ae305.json +0 -1
  18. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/dacorvo/tiny-random-llama/fb4b47ea0eea68e736b2.json +0 -1
  19. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/1ab4fec65fe40b15d8fc.json +0 -1
  20. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/55d02a833c68f65f88c9.json +0 -1
  21. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/5d4688c06ca1eea9cd2d.json +0 -1
  22. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/642817569926cd4530c8.json +0 -1
  23. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/a4fb9901e6811328a109.json +0 -1
  24. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/a8c0b413e79c91496630.json +0 -1
  25. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/c29571197f94c8546e45.json +0 -1
  26. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/d68250e23574798328a7.json +0 -1
  27. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/d9b1eaeab430d169db57.json +0 -1
  28. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/0c05f781075e377b3fe0.json +0 -1
  29. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/4ff3c41f54b65e2012a7.json +0 -1
  30. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/57d08ed2158d3bc4efd8.json +0 -1
  31. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/5a8294b279e725cf8542.json +0 -1
  32. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/735e2b4b3a5019f203a7.json +0 -1
  33. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/b4d9aa50677e27d82b4f.json +0 -1
  34. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/b57ff0785cfe94aa0718.json +0 -1
  35. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/be28f38f27aeb510e1c8.json +0 -1
  36. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/c9527c01253d9424170a.json +0 -1
  37. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/d6a72bcc14cde1f0364f.json +0 -1
  38. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/f6a3964311a50e56da2e.json +0 -1
  39. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B-Instruct/8991f74549273593691d.json +0 -1
  40. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/09e30e2742ba4beafc57.json +0 -1
  41. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/26575c75a97054312245.json +0 -1
  42. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/43bdf99803590a3bbce8.json +0 -1
  43. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/48cfba5e4ff5369b6e51.json +0 -1
  44. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/62340426bd628112eec5.json +0 -1
  45. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/638108a35a53ccc460bd.json +0 -1
  46. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/75d01b56a778419c897d.json +0 -1
  47. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/77bf56a610a467c3b01c.json +0 -1
  48. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/7b660ccd0835c30eeb1e.json +0 -1
  49. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/7cf5159a98ef6aa54442.json +0 -1
  50. neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/922a2f19b0e3177426d7.json +0 -1
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/4b3ca94bb4445bc28bc8.json DELETED
@@ -1 +0,0 @@
1
- {"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/f9a5439ee67f962284df.json DELETED
@@ -1 +0,0 @@
1
- {"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/2b51ed50a475725e6b3e.json DELETED
@@ -1 +0,0 @@
1
- {"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/335f8d1c7218c7410000.json DELETED
@@ -1 +0,0 @@
1
- {"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/36e1657c052cd92f031f.json DELETED
@@ -1 +0,0 @@
1
- {"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp32", "batch_size": 4, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/65cb8c493bb52d550958.json DELETED
@@ -1 +0,0 @@
1
- {"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/73b7467bea092a4ea612.json DELETED
@@ -1 +0,0 @@
1
- {"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/d5e93094d604b84cb59a.json DELETED
@@ -1 +0,0 @@
1
- {"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/2713954a7f357d88b849.json DELETED
@@ -1 +0,0 @@
1
- {"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/2d94b70ec1b5a0628be0.json DELETED
@@ -1 +0,0 @@
1
- {"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/HuggingFaceTB/cosmo-1b/83c64ad31c0699e3053e.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 2048, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "HuggingFaceTB/cosmo-1b", "checkpoint_revision": "0d5e341cfe835dffc81b6186f9715c094889f8ce", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 2048, "task": "text-generation"}, "num_attention_heads": 16, "num_hidden_layers": 24, "num_key_value_heads": 16, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/0b3e037f44d96a1e7239.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "NousResearch/Llama-2-7b-chat-hf", "checkpoint_revision": "37892f30c23786c0d5367d80481fa0d9fba93cf8", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pad_token_id": 0, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/8d31ca1db6445239b6cc.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "f16", "batch_size": 4, "checkpoint_id": "NousResearch/Llama-2-7b-chat-hf", "checkpoint_revision": "37892f30c23786c0d5367d80481fa0d9fba93cf8", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pad_token_id": 0, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/c5422e38f76211f16ecf.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 32, "checkpoint_id": "NousResearch/Llama-2-7b-chat-hf", "checkpoint_revision": "37892f30c23786c0d5367d80481fa0d9fba93cf8", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pad_token_id": 0, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/f6a36646c9688804bab7.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "NousResearch/Llama-2-7b-chat-hf", "checkpoint_revision": "37892f30c23786c0d5367d80481fa0d9fba93cf8", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pad_token_id": 0, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/f9468c010d2a222046f8.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "f16", "batch_size": 2, "checkpoint_id": "NousResearch/Llama-2-7b-chat-hf", "checkpoint_revision": "37892f30c23786c0d5367d80481fa0d9fba93cf8", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pad_token_id": 0, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/dacorvo/tiny-random-llama/62a76db84304b34ae305.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "model_type": "llama", "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/dacorvo/tiny-random-llama/fb4b47ea0eea68e736b2.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "model_type": "llama", "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/1ab4fec65fe40b15d8fc.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 5120, "initializer_range": 0.02, "intermediate_size": 13824, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "meta-llama/Llama-2-13b-chat-hf", "checkpoint_revision": "a2cb7a712bb6e5e736ca7f8cd98167f81a0b5bd8", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 24, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 40, "num_hidden_layers": 40, "num_key_value_heads": 40, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/55d02a833c68f65f88c9.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 5120, "initializer_range": 0.02, "intermediate_size": 13824, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 8, "checkpoint_id": "meta-llama/Llama-2-13b-chat-hf", "checkpoint_revision": "a2cb7a712bb6e5e736ca7f8cd98167f81a0b5bd8", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 40, "num_hidden_layers": 40, "num_key_value_heads": 40, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/5d4688c06ca1eea9cd2d.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 5120, "initializer_range": 0.02, "intermediate_size": 13824, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "meta-llama/Llama-2-13b-chat-hf", "checkpoint_revision": "a2cb7a712bb6e5e736ca7f8cd98167f81a0b5bd8", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 24, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 40, "num_hidden_layers": 40, "num_key_value_heads": 40, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/642817569926cd4530c8.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 5120, "initializer_range": 0.02, "intermediate_size": 13824, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 16, "checkpoint_id": "meta-llama/Llama-2-13b-chat-hf", "checkpoint_revision": "a2cb7a712bb6e5e736ca7f8cd98167f81a0b5bd8", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 24, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 40, "num_hidden_layers": 40, "num_key_value_heads": 40, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/a4fb9901e6811328a109.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 5120, "initializer_range": 0.02, "intermediate_size": 13824, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 16, "checkpoint_id": "meta-llama/Llama-2-13b-chat-hf", "checkpoint_revision": "a2cb7a712bb6e5e736ca7f8cd98167f81a0b5bd8", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 40, "num_hidden_layers": 40, "num_key_value_heads": 40, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/a8c0b413e79c91496630.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 5120, "initializer_range": 0.02, "intermediate_size": 13824, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "meta-llama/Llama-2-13b-chat-hf", "checkpoint_revision": "a2cb7a712bb6e5e736ca7f8cd98167f81a0b5bd8", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 40, "num_hidden_layers": 40, "num_key_value_heads": 40, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/c29571197f94c8546e45.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 5120, "initializer_range": 0.02, "intermediate_size": 13824, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 8, "checkpoint_id": "meta-llama/Llama-2-13b-chat-hf", "checkpoint_revision": "a2cb7a712bb6e5e736ca7f8cd98167f81a0b5bd8", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 24, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 40, "num_hidden_layers": 40, "num_key_value_heads": 40, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/d68250e23574798328a7.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 5120, "initializer_range": 0.02, "intermediate_size": 13824, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "meta-llama/Llama-2-13b-chat-hf", "checkpoint_revision": "a2cb7a712bb6e5e736ca7f8cd98167f81a0b5bd8", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 40, "num_hidden_layers": 40, "num_key_value_heads": 40, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-13b-chat-hf/d9b1eaeab430d169db57.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 5120, "initializer_range": 0.02, "intermediate_size": 13824, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 32, "checkpoint_id": "meta-llama/Llama-2-13b-chat-hf", "checkpoint_revision": "a2cb7a712bb6e5e736ca7f8cd98167f81a0b5bd8", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 24, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 40, "num_hidden_layers": 40, "num_key_value_heads": 40, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/0c05f781075e377b3fe0.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 32, "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "f5db02db724555f92da89c216ac04704f23d4590", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 24, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/4ff3c41f54b65e2012a7.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "92011f62d7604e261f748ec0cfe6329f31193e33", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/57d08ed2158d3bc4efd8.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 16, "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "f5db02db724555f92da89c216ac04704f23d4590", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/5a8294b279e725cf8542.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "f5db02db724555f92da89c216ac04704f23d4590", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 24, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/735e2b4b3a5019f203a7.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "f5db02db724555f92da89c216ac04704f23d4590", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 24, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/b4d9aa50677e27d82b4f.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 8, "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "f5db02db724555f92da89c216ac04704f23d4590", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 24, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/b57ff0785cfe94aa0718.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 16, "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "f5db02db724555f92da89c216ac04704f23d4590", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 24, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/be28f38f27aeb510e1c8.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "f5db02db724555f92da89c216ac04704f23d4590", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/c9527c01253d9424170a.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "f5db02db724555f92da89c216ac04704f23d4590", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/d6a72bcc14cde1f0364f.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 32, "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "f5db02db724555f92da89c216ac04704f23d4590", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/f6a3964311a50e56da2e.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 8, "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "f5db02db724555f92da89c216ac04704f23d4590", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B-Instruct/8991f74549273593691d.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "meta-llama/Meta-Llama-3-8B-Instruct", "checkpoint_revision": "339ce92d052f002cdbac4a4bd551d1c61dd8345e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/09e30e2742ba4beafc57.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "1460c22666392e470910ce3d44ffeb2ab7dbd4df", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/26575c75a97054312245.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 48, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "1460c22666392e470910ce3d44ffeb2ab7dbd4df", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 12, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/43bdf99803590a3bbce8.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 8, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "1460c22666392e470910ce3d44ffeb2ab7dbd4df", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/48cfba5e4ff5369b6e51.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 32, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "1460c22666392e470910ce3d44ffeb2ab7dbd4df", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/62340426bd628112eec5.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "1460c22666392e470910ce3d44ffeb2ab7dbd4df", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/638108a35a53ccc460bd.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "1460c22666392e470910ce3d44ffeb2ab7dbd4df", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/75d01b56a778419c897d.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "cd892e8f4da1043d4b01d5ea182a2e8412bf658f", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/77bf56a610a467c3b01c.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "1460c22666392e470910ce3d44ffeb2ab7dbd4df", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/7b660ccd0835c30eeb1e.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 32, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "1460c22666392e470910ce3d44ffeb2ab7dbd4df", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/7cf5159a98ef6aa54442.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "561487d18c41c76bcb5fc6cfb73a324982f04f47", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
 
 
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/922a2f19b0e3177426d7.json DELETED
@@ -1 +0,0 @@
1
- {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "1460c22666392e470910ce3d44ffeb2ab7dbd4df", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}