Delete neuronxcc-2.16.345.0+69131dd3
This view is limited to 50 files because it contains too many changes; see the raw diff for the full change set.
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/diffusion-transformer/hf-internal-testing/tiny-pixart-alpha-pipe/44b00dfe26235af3373e.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/gpt2/gpt2/3892f63e7f690f4188da.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/granite/ibm-granite/granite-3.1-2b-instruct/0ddbb732227781dfd94e.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/NousResearch/Hermes-2-Theta-Llama-3-8B/3f28317b31b832f9e0a5.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/NousResearch/Hermes-2-Theta-Llama-3-8B/6747313734a381250e06.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Llama-3.1-8B-Instruct/265c1d1225333cc0849e.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Llama-3.1-8B-Instruct/b028b4d610cf98561509.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/dff07cb8f2225021b799.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/e25a1be56f2179c58881.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/e5d618ed67d77f02ccc7.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/fc7db36812dad445f1ce.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/ff2847a6365be5997689.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/mistral/optimum/mistral-1.1b-testing/6c7f4839115cfbc538b6.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-0.5B/7c39734d49aec5838764.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-0.5B/92847d25a28c7f96643c.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/7785fd32dc5cc6e67e73.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/9cacb6bedffe5e244f12.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/stable-diffusion/stabilityai/stable-diffusion-2-1-base/c8f06b213d2820fe0b10.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/t5/hf-internal-testing/tiny-random-t5/6209f30b3b7385142fb6.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/t5/hf-internal-testing/tiny-random-t5/f36b9ceaed4475e45dbe.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/MODULE_01c540090ff8bced389b+613edded/compile_flags.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/MODULE_01c540090ff8bced389b+613edded/model.done +0 -0
- neuronxcc-2.16.345.0+69131dd3/MODULE_01c540090ff8bced389b+613edded/model.hlo_module.pb +0 -3
- neuronxcc-2.16.345.0+69131dd3/MODULE_01c540090ff8bced389b+613edded/model.neff +0 -3
- neuronxcc-2.16.345.0+69131dd3/MODULE_0b08b19b4706a50917ba+613edded/compile_flags.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/MODULE_0b08b19b4706a50917ba+613edded/model.done +0 -0
- neuronxcc-2.16.345.0+69131dd3/MODULE_0b08b19b4706a50917ba+613edded/model.hlo_module.pb +0 -3
- neuronxcc-2.16.345.0+69131dd3/MODULE_0b08b19b4706a50917ba+613edded/model.neff +0 -3
- neuronxcc-2.16.345.0+69131dd3/MODULE_0e5d2d7a05382ecb734b+68bc9deb/compile_flags.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/MODULE_0e5d2d7a05382ecb734b+68bc9deb/model.done +0 -0
- neuronxcc-2.16.345.0+69131dd3/MODULE_0e5d2d7a05382ecb734b+68bc9deb/model.hlo_module.pb +0 -3
- neuronxcc-2.16.345.0+69131dd3/MODULE_0e5d2d7a05382ecb734b+68bc9deb/model.neff +0 -3
- neuronxcc-2.16.345.0+69131dd3/MODULE_10065576681553525797+e30acd3a/compile_flags.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/MODULE_10065576681553525797+e30acd3a/model.hlo_module.pb +0 -3
- neuronxcc-2.16.345.0+69131dd3/MODULE_10ff4d6a928b94472f16+613edded/compile_flags.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/MODULE_10ff4d6a928b94472f16+613edded/model.done +0 -0
- neuronxcc-2.16.345.0+69131dd3/MODULE_10ff4d6a928b94472f16+613edded/model.hlo_module.pb +0 -3
- neuronxcc-2.16.345.0+69131dd3/MODULE_10ff4d6a928b94472f16+613edded/model.neff +0 -3
- neuronxcc-2.16.345.0+69131dd3/MODULE_11228616321528259762+e30acd3a/compile_flags.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/MODULE_11228616321528259762+e30acd3a/model.done +0 -0
- neuronxcc-2.16.345.0+69131dd3/MODULE_11228616321528259762+e30acd3a/model.hlo_module.pb +0 -3
- neuronxcc-2.16.345.0+69131dd3/MODULE_11228616321528259762+e30acd3a/model.neff +0 -3
- neuronxcc-2.16.345.0+69131dd3/MODULE_12221176946427745622+e30acd3a/compile_flags.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/MODULE_12221176946427745622+e30acd3a/model.done +0 -0
- neuronxcc-2.16.345.0+69131dd3/MODULE_12221176946427745622+e30acd3a/model.hlo_module.pb +0 -3
- neuronxcc-2.16.345.0+69131dd3/MODULE_12221176946427745622+e30acd3a/model.neff +0 -0
- neuronxcc-2.16.345.0+69131dd3/MODULE_12273156641748218648+e30acd3a/compile_flags.json +0 -1
- neuronxcc-2.16.345.0+69131dd3/MODULE_12273156641748218648+e30acd3a/model.done +0 -0
- neuronxcc-2.16.345.0+69131dd3/MODULE_12273156641748218648+e30acd3a/model.hlo_module.pb +0 -3
- neuronxcc-2.16.345.0+69131dd3/MODULE_12273156641748218648+e30acd3a/model.neff +0 -3
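For context, the 0_REGISTRY entries listed above are the export configurations that optimum-neuron records in this compilation cache, and the MODULE_* directories hold the corresponding compiled graphs. A minimal sketch of how a registry entry such as the gpt2 one below is typically produced, assuming optimum-neuron's documented Python export API (the parameter values mirror that entry's "neuron" section; nothing here is part of this diff):

```python
# Hedged sketch: exporting a decoder model with optimum-neuron compiles it with
# neuronx-cc and records a cache/registry entry similar to those deleted here.
# batch_size, sequence_length, num_cores and auto_cast_type are copied from the
# gpt2 registry entry in this diff; the API call itself is an assumption based on
# optimum-neuron's export flow, not something this change performs.
from optimum.neuron import NeuronModelForCausalLM

model = NeuronModelForCausalLM.from_pretrained(
    "gpt2",
    export=True,             # compile for Neuron instead of loading precompiled artifacts
    batch_size=4,
    sequence_length=1024,
    num_cores=2,
    auto_cast_type="fp16",
)
model.save_pretrained("gpt2-neuron")  # writes the compiled model and its config locally
```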
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/diffusion-transformer/hf-internal-testing/tiny-pixart-alpha-pipe/44b00dfe26235af3373e.json
DELETED
@@ -1 +0,0 @@
{"model_type": "diffusion-transformer", "text_encoder": {"_attn_implementation_autoset": true, "architectures": ["T5EncoderModel"], "classifier_dropout": 0.0, "d_ff": 37, "d_kv": 8, "d_model": 32, "decoder_start_token_id": 0, "dense_act_fn": "relu", "dropout_rate": 0.1, "feed_forward_proj": "relu", "gradient_checkpointing": false, "initializer_factor": 0.002, "is_encoder_decoder": true, "is_gated_act": false, "layer_norm_epsilon": 1e-06, "model_type": "t5", "neuron": {"auto_cast": null, "auto_cast_type": null, "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "dynamic_batch_size": false, "inline_weights_to_neff": false, "optlevel": "2", "output_attentions": false, "output_hidden_states": false, "static_batch_size": 1, "static_sequence_length": 16, "tensor_parallel_size": 1}, "num_decoder_layers": 5, "num_heads": 4, "num_layers": 5, "relative_attention_max_distance": 128, "relative_attention_num_buckets": 8, "task": "feature-extraction", "use_cache": true, "vocab_size": 1103}, "transformer": {"_class_name": "PixArtTransformer2DModel", "activation_fn": "gelu-approximate", "attention_bias": true, "attention_head_dim": 8, "attention_type": "default", "caption_channels": 32, "cross_attention_dim": 24, "dropout": 0.0, "in_channels": 4, "interpolation_scale": null, "neuron": {"auto_cast": null, "auto_cast_type": null, "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "dynamic_batch_size": false, "inline_weights_to_neff": false, "optlevel": "2", "output_attentions": false, "output_hidden_states": false, "static_batch_size": 1, "static_encoder_hidden_size": 32, "static_height": 8, "static_num_channels": 4, "static_patch_size": 2, "static_sequence_length": 16, "static_vae_scale_factor": 1, "static_width": 8, "tensor_parallel_size": 1}, "norm_elementwise_affine": false, "norm_eps": 1e-06, "norm_num_groups": 32, "norm_type": "ada_norm_single", "num_attention_heads": 3, "num_embeds_ada_norm": 1000, "num_layers": 2, "out_channels": 8, "patch_size": 2, "task": "semantic-segmentation", "upcast_attention": false, "use_additional_conditions": null}}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/gpt2/gpt2/3892f63e7f690f4188da.json
DELETED
@@ -1 +0,0 @@
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/granite/ibm-granite/granite-3.1-2b-instruct/0ddbb732227781dfd94e.json
DELETED
@@ -1 +0,0 @@
{"architectures": ["GraniteForCausalLM"], "attention_bias": false, "attention_dropout": 0.1, "attention_multiplier": 0.015625, "bos_token_id": 0, "embedding_multiplier": 12.0, "eos_token_id": 0, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "logits_scaling": 8.0, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "granite", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "ibm-granite/granite-3.1-2b-instruct", "checkpoint_revision": "a06c9a0fef05d8111ca1f77b60f477443c526043", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 40, "num_key_value_heads": 8, "pad_token_id": 0, "residual_multiplier": 0.22, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 5000000.0, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 49155}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/NousResearch/Hermes-2-Theta-Llama-3-8B/3f28317b31b832f9e0a5.json
DELETED
@@ -1 +0,0 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128003, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "NousResearch/Hermes-2-Theta-Llama-3-8B", "checkpoint_revision": "57a73110702e7b05ba3f39fef36297454c680725", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "num_cores": 2, "sequence_length": 2048, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/NousResearch/Hermes-2-Theta-Llama-3-8B/6747313734a381250e06.json
DELETED
@@ -1 +0,0 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128003, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "NousResearch/Hermes-2-Theta-Llama-3-8B", "checkpoint_revision": "57a73110702e7b05ba3f39fef36297454c680725", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "num_cores": 2, "sequence_length": 2048, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Llama-3.1-8B-Instruct/265c1d1225333cc0849e.json
DELETED
@@ -1 +0,0 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": [128001, 128008, 128009], "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 32, "checkpoint_id": "meta-llama/Llama-3.1-8B-Instruct", "checkpoint_revision": "0e9e39f249a16976918f6564b8830bc894c89659", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Llama-3.1-8B-Instruct/b028b4d610cf98561509.json
DELETED
@@ -1 +0,0 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": [128001, 128008, 128009], "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 32, "checkpoint_id": "meta-llama/Llama-3.1-8B-Instruct", "checkpoint_revision": "0e9e39f249a16976918f6564b8830bc894c89659", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "num_cores": 4, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/dff07cb8f2225021b799.json
DELETED
@@ -1 +0,0 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 8, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/e25a1be56f2179c58881.json
DELETED
@@ -1 +0,0 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 16, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/e5d618ed67d77f02ccc7.json
DELETED
@@ -1 +0,0 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/fc7db36812dad445f1ce.json
DELETED
@@ -1 +0,0 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/ff2847a6365be5997689.json
DELETED
@@ -1 +0,0 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 32, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/mistral/optimum/mistral-1.1b-testing/6c7f4839115cfbc538b6.json
DELETED
@@ -1 +0,0 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 64, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 5632, "max_position_embeddings": 32768, "model_type": "mistral", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "optimum/mistral-1.1b-testing", "checkpoint_revision": "ce03bc8d47dbd2c173ff65f3a8de1325ba724195", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 22, "num_key_value_heads": 4, "rms_norm_eps": 1e-05, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-0.5B/7c39734d49aec5838764.json
DELETED
@@ -1 +0,0 @@
{"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151643, "hidden_act": "silu", "hidden_size": 896, "initializer_range": 0.02, "intermediate_size": 4864, "max_position_embeddings": 32768, "max_window_layers": 24, "model_type": "qwen2", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "Qwen/Qwen2.5-0.5B", "checkpoint_revision": "060db6499f32faf8b98477b0a26969ef7d8b9987", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 14, "num_hidden_layers": 24, "num_key_value_heads": 2, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "use_mrope": false, "use_sliding_window": false, "vocab_size": 151936}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-0.5B/92847d25a28c7f96643c.json
DELETED
@@ -1 +0,0 @@
{"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151643, "hidden_act": "silu", "hidden_size": 896, "initializer_range": 0.02, "intermediate_size": 4864, "max_position_embeddings": 32768, "max_window_layers": 24, "model_type": "qwen2", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "Qwen/Qwen2.5-0.5B", "checkpoint_revision": "060db6499f32faf8b98477b0a26969ef7d8b9987", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 14, "num_hidden_layers": 24, "num_key_value_heads": 2, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "use_mrope": false, "use_sliding_window": false, "vocab_size": 151936}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/7785fd32dc5cc6e67e73.json
DELETED
@@ -1 +0,0 @@
{"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151645, "hidden_act": "silu", "hidden_size": 3584, "initializer_range": 0.02, "intermediate_size": 18944, "max_position_embeddings": 32768, "max_window_layers": 28, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 8, "checkpoint_id": "Qwen/Qwen2.5-7B-Instruct", "checkpoint_revision": "bb46c15ee4bb56c5b63245ef50fd7637234d6f75", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "num_cores": 1, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 28, "num_hidden_layers": 28, "num_key_value_heads": 4, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "use_sliding_window": false, "vocab_size": 152064}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/9cacb6bedffe5e244f12.json
DELETED
@@ -1 +0,0 @@
{"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151645, "hidden_act": "silu", "hidden_size": 3584, "initializer_range": 0.02, "intermediate_size": 18944, "max_position_embeddings": 32768, "max_window_layers": 28, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 32, "checkpoint_id": "Qwen/Qwen2.5-7B-Instruct", "checkpoint_revision": "a09a35458c702b33eeacc393d103063234e8bc28", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 28, "num_hidden_layers": 28, "num_key_value_heads": 4, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "use_sliding_window": false, "vocab_size": 152064}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/stable-diffusion/stabilityai/stable-diffusion-2-1-base/c8f06b213d2820fe0b10.json
DELETED
@@ -1 +0,0 @@
{"model_type": "stable-diffusion", "text_encoder": {"_attn_implementation_autoset": true, "architectures": ["CLIPTextModel"], "attention_dropout": 0.0, "dropout": 0.0, "hidden_act": "gelu", "hidden_size": 1024, "initializer_factor": 1.0, "initializer_range": 0.02, "intermediate_size": 4096, "layer_norm_eps": 1e-05, "max_position_embeddings": 77, "model_type": "clip_text_model", "neuron": {"auto_cast": "matmul", "auto_cast_type": "bf16", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "dynamic_batch_size": false, "inline_weights_to_neff": false, "optlevel": "2", "output_attentions": false, "output_hidden_states": false, "static_batch_size": 1, "static_sequence_length": 77, "tensor_parallel_size": 1}, "num_attention_heads": 16, "num_hidden_layers": 23, "task": "feature-extraction", "vocab_size": 49408}, "unet": {"_class_name": "UNet2DConditionModel", "act_fn": "silu", "addition_embed_type": null, "addition_embed_type_num_heads": 64, "addition_time_embed_dim": null, "attention_head_dim": [5, 10, 20, 20], "attention_type": "default", "block_out_channels": [320, 640, 1280, 1280], "center_input_sample": false, "class_embed_type": null, "class_embeddings_concat": false, "conv_in_kernel": 3, "conv_out_kernel": 3, "cross_attention_dim": 1024, "cross_attention_norm": null, "down_block_types": ["CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "DownBlock2D"], "downsample_padding": 1, "dropout": 0.0, "dual_cross_attention": false, "encoder_hid_dim": null, "encoder_hid_dim_type": null, "flip_sin_to_cos": true, "freq_shift": 0, "in_channels": 4, "layers_per_block": 2, "mid_block_only_cross_attention": null, "mid_block_scale_factor": 1, "mid_block_type": "UNetMidBlock2DCrossAttn", "neuron": {"auto_cast": "matmul", "auto_cast_type": "bf16", "compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "dynamic_batch_size": false, "inline_weights_to_neff": false, "optlevel": "2", "output_attentions": false, "output_hidden_states": false, "static_batch_size": 1, "static_height": 64, "static_num_channels": 4, "static_sequence_length": 77, "static_vae_scale_factor": 8, "static_width": 64, "tensor_parallel_size": 1}, "norm_eps": 1e-05, "norm_num_groups": 32, "num_attention_heads": null, "num_class_embeds": null, "only_cross_attention": false, "out_channels": 4, "projection_class_embeddings_input_dim": null, "resnet_out_scale_factor": 1.0, "resnet_skip_time_act": false, "resnet_time_scale_shift": "default", "reverse_transformer_layers_per_block": null, "task": "semantic-segmentation", "time_cond_proj_dim": null, "time_embedding_act_fn": null, "time_embedding_dim": null, "time_embedding_type": "positional", "timestep_post_act": null, "transformer_layers_per_block": 1, "up_block_types": ["UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D"], "upcast_attention": false, "use_linear_projection": true}}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/t5/hf-internal-testing/tiny-random-t5/6209f30b3b7385142fb6.json
DELETED
@@ -1 +0,0 @@
{"decoder": {"_attn_implementation_autoset": true, "classifier_dropout": 0.0, "d_ff": 37, "d_kv": 8, "d_model": 32, "decoder_start_token_id": 0, "dense_act_fn": "relu", "dropout_rate": 0.1, "feed_forward_proj": "relu", "gradient_checkpointing": false, "initializer_factor": 0.002, "is_encoder_decoder": true, "is_gated_act": false, "layer_norm_epsilon": 1e-06, "model_type": "t5", "neuron": {"compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "dynamic_batch_size": false, "inline_weights_to_neff": true, "optlevel": "2", "output_attentions": true, "output_hidden_states": true, "static_batch_size": 1, "static_num_beams": 4, "static_sequence_length": 18, "tensor_parallel_size": 1}, "num_decoder_layers": 5, "num_heads": 4, "num_layers": 5, "relative_attention_max_distance": 128, "relative_attention_num_buckets": 8, "task": "text2text-generation", "use_cache": true, "vocab_size": 1103}, "encoder": {"_attn_implementation_autoset": true, "classifier_dropout": 0.0, "d_ff": 37, "d_kv": 8, "d_model": 32, "decoder_start_token_id": 0, "dense_act_fn": "relu", "dropout_rate": 0.1, "feed_forward_proj": "relu", "gradient_checkpointing": false, "initializer_factor": 0.002, "is_encoder_decoder": true, "is_gated_act": false, "layer_norm_epsilon": 1e-06, "model_type": "t5", "neuron": {"compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "dynamic_batch_size": false, "inline_weights_to_neff": true, "optlevel": "2", "output_attentions": true, "output_hidden_states": true, "static_batch_size": 1, "static_num_beams": 4, "static_sequence_length": 18, "tensor_parallel_size": 1}, "num_decoder_layers": 5, "num_heads": 4, "num_layers": 5, "relative_attention_max_distance": 128, "relative_attention_num_buckets": 8, "task": "text2text-generation", "use_cache": true, "vocab_size": 1103}, "model_type": "t5"}
neuronxcc-2.16.345.0+69131dd3/0_REGISTRY/0.0.28.dev0/inference/t5/hf-internal-testing/tiny-random-t5/f36b9ceaed4475e45dbe.json
DELETED
@@ -1 +0,0 @@
{"decoder": {"classifier_dropout": 0.0, "d_ff": 37, "d_kv": 8, "d_model": 32, "decoder_start_token_id": 0, "dense_act_fn": "relu", "dropout_rate": 0.1, "feed_forward_proj": "relu", "gradient_checkpointing": false, "initializer_factor": 0.002, "is_encoder_decoder": true, "is_gated_act": false, "layer_norm_epsilon": 1e-06, "model_type": "t5", "neuron": {"compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "dynamic_batch_size": false, "inline_weights_to_neff": true, "optlevel": "2", "output_attentions": true, "output_hidden_states": true, "static_batch_size": 1, "static_num_beams": 4, "static_sequence_length": 18, "tensor_parallel_size": 1}, "num_decoder_layers": 5, "num_heads": 4, "num_layers": 5, "relative_attention_max_distance": 128, "relative_attention_num_buckets": 8, "task": "text2text-generation", "use_cache": true, "vocab_size": 1103}, "encoder": {"classifier_dropout": 0.0, "d_ff": 37, "d_kv": 8, "d_model": 32, "decoder_start_token_id": 0, "dense_act_fn": "relu", "dropout_rate": 0.1, "feed_forward_proj": "relu", "gradient_checkpointing": false, "initializer_factor": 0.002, "is_encoder_decoder": true, "is_gated_act": false, "layer_norm_epsilon": 1e-06, "model_type": "t5", "neuron": {"compiler_type": "neuronx-cc", "compiler_version": "2.16.345.0+69131dd3", "dynamic_batch_size": false, "inline_weights_to_neff": true, "optlevel": "2", "output_attentions": true, "output_hidden_states": true, "static_batch_size": 1, "static_num_beams": 4, "static_sequence_length": 18, "tensor_parallel_size": 1}, "num_decoder_layers": 5, "num_heads": 4, "num_layers": 5, "relative_attention_max_distance": 128, "relative_attention_num_buckets": 8, "task": "text2text-generation", "use_cache": true, "vocab_size": 1103}, "model_type": "t5"}
neuronxcc-2.16.345.0+69131dd3/MODULE_01c540090ff8bced389b+613edded/compile_flags.json
DELETED
@@ -1 +0,0 @@
["--target=trn1", "--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.16.345.0+69131dd3/MODULE_01c540090ff8bced389b+613edded/model.done
DELETED
File without changes
neuronxcc-2.16.345.0+69131dd3/MODULE_01c540090ff8bced389b+613edded/model.hlo_module.pb
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fae6bba5e76799bce81338669eab39b6f67989cebf3e30d0ed3bff2a98afc556
size 331842
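model.hlo_module.pb and model.neff are stored through Git LFS, so the hunks above and below show three-line LFS pointer files (version, oid, size) rather than the binaries themselves. A small sketch for reading such a pointer, assuming the repository was cloned with GIT_LFS_SKIP_SMUDGE=1 so the pointer text is still on disk (the helper name is hypothetical):

```python
from pathlib import Path

def parse_lfs_pointer(text: str) -> dict:
    # Hypothetical helper: turn the "key value" lines of a Git LFS pointer
    # (version / oid / size, as shown in this diff) into a dict.
    return dict(line.split(" ", 1) for line in text.strip().splitlines())

ptr = parse_lfs_pointer(
    Path(
        "neuronxcc-2.16.345.0+69131dd3/MODULE_01c540090ff8bced389b+613edded/model.hlo_module.pb"
    ).read_text()
)
print(ptr["oid"], ptr["size"])  # e.g. "sha256:..." and "331842"
```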
neuronxcc-2.16.345.0+69131dd3/MODULE_01c540090ff8bced389b+613edded/model.neff
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9c416e8b172b8f2fb1053971923f9c10e0a96c582a4e3c46469e1a0a23109a05
size 2335744
neuronxcc-2.16.345.0+69131dd3/MODULE_0b08b19b4706a50917ba+613edded/compile_flags.json
DELETED
@@ -1 +0,0 @@
["--target=trn1", "--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.16.345.0+69131dd3/MODULE_0b08b19b4706a50917ba+613edded/model.done
DELETED
File without changes
neuronxcc-2.16.345.0+69131dd3/MODULE_0b08b19b4706a50917ba+613edded/model.hlo_module.pb
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:376bbe53219bb85008e5cd4702ed2cb62dc11a22ce499d582735a227184c8101
size 331842
neuronxcc-2.16.345.0+69131dd3/MODULE_0b08b19b4706a50917ba+613edded/model.neff
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0f4e5a2b606196da86b74ff6d93a8e4450c9be55600291c3a8a8f872c8df0543
size 2284544
neuronxcc-2.16.345.0+69131dd3/MODULE_0e5d2d7a05382ecb734b+68bc9deb/compile_flags.json
DELETED
@@ -1 +0,0 @@
["--target=trn1", "--model-type=transformer", "--auto-cast=none", "--execute-repetition=32"]
neuronxcc-2.16.345.0+69131dd3/MODULE_0e5d2d7a05382ecb734b+68bc9deb/model.done
DELETED
File without changes
neuronxcc-2.16.345.0+69131dd3/MODULE_0e5d2d7a05382ecb734b+68bc9deb/model.hlo_module.pb
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:93c0d29f8420d41e5f30c86965baa348ded69ac492eb6c4d56c4efd8003d09e5
size 15084
neuronxcc-2.16.345.0+69131dd3/MODULE_0e5d2d7a05382ecb734b+68bc9deb/model.neff
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ff347ba701a8ca63edaf75f6d34a7eb4c04d95290afbb857ecbc74f8040f6728
size 984064
neuronxcc-2.16.345.0+69131dd3/MODULE_10065576681553525797+e30acd3a/compile_flags.json
DELETED
@@ -1 +0,0 @@
["--target=trn1"]
neuronxcc-2.16.345.0+69131dd3/MODULE_10065576681553525797+e30acd3a/model.hlo_module.pb
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:031e53ef263598eee89ea4b5ce6dcd8725a49663f097f2d07f6eefa95dc9b7d8
size 33895
neuronxcc-2.16.345.0+69131dd3/MODULE_10ff4d6a928b94472f16+613edded/compile_flags.json
DELETED
@@ -1 +0,0 @@
["--target=trn1", "--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.16.345.0+69131dd3/MODULE_10ff4d6a928b94472f16+613edded/model.done
DELETED
File without changes
neuronxcc-2.16.345.0+69131dd3/MODULE_10ff4d6a928b94472f16+613edded/model.hlo_module.pb
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:167ab8586ac7af80a539d34957a02160350fbb10eed815e0256c0e862969e0ef
size 446383
neuronxcc-2.16.345.0+69131dd3/MODULE_10ff4d6a928b94472f16+613edded/model.neff
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:025c53dec2b0154645276a3cdf5c8cfe2a174e0a9e01667d9bfc44b71b3e2d5c
size 29225984
neuronxcc-2.16.345.0+69131dd3/MODULE_11228616321528259762+e30acd3a/compile_flags.json
DELETED
@@ -1 +0,0 @@
["--target=trn1"]
neuronxcc-2.16.345.0+69131dd3/MODULE_11228616321528259762+e30acd3a/model.done
DELETED
File without changes
neuronxcc-2.16.345.0+69131dd3/MODULE_11228616321528259762+e30acd3a/model.hlo_module.pb
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d5edb6ab9054f4d8771fe4935fb2142213812d11c9cf6417ffe8dd2920c438bd
size 58293
neuronxcc-2.16.345.0+69131dd3/MODULE_11228616321528259762+e30acd3a/model.neff
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e8931b0bf4f316b48938c67cc237e67424881e154b613d6285531895a031c927
size 154624
neuronxcc-2.16.345.0+69131dd3/MODULE_12221176946427745622+e30acd3a/compile_flags.json
DELETED
@@ -1 +0,0 @@
["--target=trn1"]
neuronxcc-2.16.345.0+69131dd3/MODULE_12221176946427745622+e30acd3a/model.done
DELETED
File without changes
neuronxcc-2.16.345.0+69131dd3/MODULE_12221176946427745622+e30acd3a/model.hlo_module.pb
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8951be8a3f481871084ea3d98976fc9d70f69c941eb941515f05aaac59197b27
size 700
neuronxcc-2.16.345.0+69131dd3/MODULE_12221176946427745622+e30acd3a/model.neff
DELETED
Binary file (31.7 kB)
neuronxcc-2.16.345.0+69131dd3/MODULE_12273156641748218648+e30acd3a/compile_flags.json
DELETED
@@ -1 +0,0 @@
["--target=trn1"]
neuronxcc-2.16.345.0+69131dd3/MODULE_12273156641748218648+e30acd3a/model.done
DELETED
File without changes
neuronxcc-2.16.345.0+69131dd3/MODULE_12273156641748218648+e30acd3a/model.hlo_module.pb
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3959ac9e6bfda49d9b997c5c3b30d57fcf43ae8b94074b09c1d427c1438ca2fb
size 58691
neuronxcc-2.16.345.0+69131dd3/MODULE_12273156641748218648+e30acd3a/model.neff
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6d64f4598deb33864fd93d4d9349691aa3f5487fe8b42e9b77d764ba01e5e64d
size 216064