dacorvo (HF Staff) committed
Commit 7c2d9b8 · verified · 1 Parent(s): 67f1e9d

Synchronizing local compiler cache.

This view is limited to 50 files because it contains too many changes. See raw diff.
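The commit itself was produced by pushing a locally built Neuron compilation cache (registry entries, HLO modules and NEFF binaries) to this Hub repository. Below is a minimal Python sketch of how such a push can be scripted; the "optimum-cli neuron cache synchronize" sub-command and the CUSTOM_CACHE_REPO variable are assumptions about the optimum-neuron tooling, not a record of the exact command used for this commit.

import os
import subprocess

# Point the cache tooling at the target Hub repository (assumed variable name and value).
os.environ["CUSTOM_CACHE_REPO"] = "<namespace>/<cache-repo>"

# Upload locally compiled artifacts that are missing from the Hub cache
# (assumed optimum-neuron CLI sub-command).
subprocess.run(["optimum-cli", "neuron", "cache", "synchronize"], check=True)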
Files changed (50)
  1. .gitattributes +28 -0
  2. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/llamafactory/tiny-random-Llama-3/7bee60b8f8f792546a54.json +62 -0
  3. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/llamafactory/tiny-random-Llama-3/858f4e0d7893e2c9daf0.json +62 -0
  4. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/llamafactory/tiny-random-Llama-3/90868f09ac5f3109b4c0.json +62 -0
  5. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/llamafactory/tiny-random-Llama-3/c54eb40c84d4cd599524.json +62 -0
  6. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/unsloth/Llama-3.2-1B-Instruct/35bbdc163c6f0f272ac5.json +63 -0
  7. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/unsloth/Llama-3.2-1B-Instruct/5971425e59d052eef4db.json +63 -0
  8. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/unsloth/Llama-3.2-1B-Instruct/8e82c1b287f80ac1c5c5.json +63 -0
  9. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/unsloth/Llama-3.2-1B-Instruct/91d64006319bc202bb12.json +64 -0
  10. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/unsloth/Llama-3.2-1B-Instruct/b97c760ba759013a2971.json +64 -0
  11. neuronxcc-2.21.18209.0+043b1bf7/MODULE_04a651791e36c2cfd4d1+a9d440f5/compile_flags.json +1 -0
  12. neuronxcc-2.21.18209.0+043b1bf7/MODULE_04a651791e36c2cfd4d1+a9d440f5/model.hlo_module.pb +3 -0
  13. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0d48bb2409b9fd56d719+877608f3/compile_flags.json +1 -0
  14. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0d48bb2409b9fd56d719+877608f3/model.done +0 -0
  15. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0d48bb2409b9fd56d719+877608f3/model.hlo_module.pb +3 -0
  16. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0d48bb2409b9fd56d719+877608f3/model.neff +3 -0
  17. neuronxcc-2.21.18209.0+043b1bf7/MODULE_144cc55e8b0792820aa7+877608f3/compile_flags.json +1 -0
  18. neuronxcc-2.21.18209.0+043b1bf7/MODULE_144cc55e8b0792820aa7+877608f3/model.done +0 -0
  19. neuronxcc-2.21.18209.0+043b1bf7/MODULE_144cc55e8b0792820aa7+877608f3/model.hlo_module.pb +3 -0
  20. neuronxcc-2.21.18209.0+043b1bf7/MODULE_144cc55e8b0792820aa7+877608f3/model.neff +3 -0
  21. neuronxcc-2.21.18209.0+043b1bf7/MODULE_4293af2f997e6a59e530+ed72d204/compile_flags.json +1 -0
  22. neuronxcc-2.21.18209.0+043b1bf7/MODULE_4293af2f997e6a59e530+ed72d204/model.done +0 -0
  23. neuronxcc-2.21.18209.0+043b1bf7/MODULE_4293af2f997e6a59e530+ed72d204/model.hlo_module.pb +3 -0
  24. neuronxcc-2.21.18209.0+043b1bf7/MODULE_4293af2f997e6a59e530+ed72d204/model.neff +3 -0
  25. neuronxcc-2.21.18209.0+043b1bf7/MODULE_4cba6c2d85f5d93856b6+690e2d39/compile_flags.json +1 -0
  26. neuronxcc-2.21.18209.0+043b1bf7/MODULE_4cba6c2d85f5d93856b6+690e2d39/model.done +0 -0
  27. neuronxcc-2.21.18209.0+043b1bf7/MODULE_4cba6c2d85f5d93856b6+690e2d39/model.hlo_module.pb +3 -0
  28. neuronxcc-2.21.18209.0+043b1bf7/MODULE_4cba6c2d85f5d93856b6+690e2d39/model.neff +3 -0
  29. neuronxcc-2.21.18209.0+043b1bf7/MODULE_4cba6c2d85f5d93856b6+690e2d39/wrapped_neff.hlo +3 -0
  30. neuronxcc-2.21.18209.0+043b1bf7/MODULE_596fadcd279603bd8b65+b6452646/compile_flags.json +1 -0
  31. neuronxcc-2.21.18209.0+043b1bf7/MODULE_596fadcd279603bd8b65+b6452646/model.done +0 -0
  32. neuronxcc-2.21.18209.0+043b1bf7/MODULE_596fadcd279603bd8b65+b6452646/model.hlo_module.pb +3 -0
  33. neuronxcc-2.21.18209.0+043b1bf7/MODULE_596fadcd279603bd8b65+b6452646/model.neff +3 -0
  34. neuronxcc-2.21.18209.0+043b1bf7/MODULE_5b10f7bb486873b498b4+ed72d204/compile_flags.json +1 -0
  35. neuronxcc-2.21.18209.0+043b1bf7/MODULE_5b10f7bb486873b498b4+ed72d204/model.done +0 -0
  36. neuronxcc-2.21.18209.0+043b1bf7/MODULE_5b10f7bb486873b498b4+ed72d204/model.hlo_module.pb +3 -0
  37. neuronxcc-2.21.18209.0+043b1bf7/MODULE_5b10f7bb486873b498b4+ed72d204/model.neff +3 -0
  38. neuronxcc-2.21.18209.0+043b1bf7/MODULE_5e068a10d22bfaff89f6+747527b0/compile_flags.json +1 -0
  39. neuronxcc-2.21.18209.0+043b1bf7/MODULE_5e068a10d22bfaff89f6+747527b0/model.done +0 -0
  40. neuronxcc-2.21.18209.0+043b1bf7/MODULE_5e068a10d22bfaff89f6+747527b0/model.hlo_module.pb +3 -0
  41. neuronxcc-2.21.18209.0+043b1bf7/MODULE_5e068a10d22bfaff89f6+747527b0/model.neff +3 -0
  42. neuronxcc-2.21.18209.0+043b1bf7/MODULE_5e068a10d22bfaff89f6+747527b0/wrapped_neff.hlo +3 -0
  43. neuronxcc-2.21.18209.0+043b1bf7/MODULE_65712ebe0144fc2b3197+747527b0/compile_flags.json +1 -0
  44. neuronxcc-2.21.18209.0+043b1bf7/MODULE_65712ebe0144fc2b3197+747527b0/model.done +0 -0
  45. neuronxcc-2.21.18209.0+043b1bf7/MODULE_65712ebe0144fc2b3197+747527b0/model.hlo_module.pb +3 -0
  46. neuronxcc-2.21.18209.0+043b1bf7/MODULE_65712ebe0144fc2b3197+747527b0/model.neff +3 -0
  47. neuronxcc-2.21.18209.0+043b1bf7/MODULE_65712ebe0144fc2b3197+747527b0/wrapped_neff.hlo +3 -0
  48. neuronxcc-2.21.18209.0+043b1bf7/MODULE_6c6ffc55476bfb49f4a5+747527b0/compile_flags.json +1 -0
  49. neuronxcc-2.21.18209.0+043b1bf7/MODULE_6c6ffc55476bfb49f4a5+747527b0/model.done +0 -0
  50. neuronxcc-2.21.18209.0+043b1bf7/MODULE_6c6ffc55476bfb49f4a5+747527b0/model.hlo_module.pb +3 -0
.gitattributes CHANGED
@@ -5082,3 +5082,31 @@ neuronxcc-2.21.18209.0+043b1bf7/MODULE_f06c3b7135d9ee55cb8e+a9d440f5/wrapped_nef
  neuronxcc-2.21.18209.0+043b1bf7/MODULE_0858a4338346f514c1c3+a9d440f5/model.neff filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.21.18209.0+043b1bf7/MODULE_0858a4338346f514c1c3+a9d440f5/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.21.18209.0+043b1bf7/MODULE_8368c27a60ca92409518+ed72d204/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_0d48bb2409b9fd56d719+877608f3/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_144cc55e8b0792820aa7+877608f3/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_4293af2f997e6a59e530+ed72d204/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_4cba6c2d85f5d93856b6+690e2d39/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_4cba6c2d85f5d93856b6+690e2d39/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_596fadcd279603bd8b65+b6452646/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_5b10f7bb486873b498b4+ed72d204/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_5e068a10d22bfaff89f6+747527b0/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_5e068a10d22bfaff89f6+747527b0/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_65712ebe0144fc2b3197+747527b0/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_65712ebe0144fc2b3197+747527b0/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_6c6ffc55476bfb49f4a5+747527b0/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_6c6ffc55476bfb49f4a5+747527b0/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_7d2a5ede35410975fb54+a9d440f5/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_7d2a5ede35410975fb54+a9d440f5/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_88ba8ae419c66c430faa+ed72d204/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_9af2a07cb223b67762e0+a9d440f5/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_9af2a07cb223b67762e0+a9d440f5/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_a31ae0e56d2e6654c6f5+747527b0/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_a31ae0e56d2e6654c6f5+747527b0/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_b8bacfafd74d5ae5ba7b+ed72d204/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_c10b4f4702736f1b2f66+877608f3/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_c72e1741e3ec7f054e8f+877608f3/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_c96c9145e08ca93adf22+a9d440f5/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_c96c9145e08ca93adf22+a9d440f5/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_cbf256b61a3a1c5f1f7a+677eeb9d/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_f753c5295db2f87f81a1+a9d440f5/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_f753c5295db2f87f81a1+a9d440f5/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/llamafactory/tiny-random-Llama-3/7bee60b8f8f792546a54.json ADDED
@@ -0,0 +1,62 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "llamafactory/tiny-random-Llama-3",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 4,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 2,
+ "capacity_factor": null,
+ "checkpoint_id": "llamafactory/tiny-random-Llama-3",
+ "checkpoint_revision": "bf2a2e3bf199ad2ee96f02a3c00246c608db22a8",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 2,
+ "max_context_length": 128,
+ "max_topk": 256,
+ "n_active_tokens": 128,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.0.dev0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 128,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 8.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
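A registry entry like the one above pairs the model's architecture hyper-parameters with the NxDNeuronConfig used at compilation time (batch size, sequence length, tp_degree, target chip, dtype, and so on), so a later export with matching parameters can reuse the cached NEFFs instead of recompiling. The sketch below shows one way to scan the 0_REGISTRY tree of a locally cloned copy of this repository for matching entries; the find_cache_entries helper and the exact matching criteria are illustrative assumptions, not the actual optimum-neuron lookup code.

import json
from pathlib import Path

# Registry root as laid out in this repository (cloned locally).
REGISTRY = Path("neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0")

def find_cache_entries(model_id, **neuron_params):
    # Yield (path, neuron config) for entries of model_id whose "neuron" section
    # contains all of the requested key/value pairs.
    for entry_file in REGISTRY.glob(f"*/{model_id}/*.json"):
        entry = json.loads(entry_file.read_text())
        neuron = entry.get("neuron", {})
        if all(neuron.get(k) == v for k, v in neuron_params.items()):
            yield entry_file, neuron

# Example: entries compiled for batch_size=2, sequence_length=128 on trn1.
for path, cfg in find_cache_entries(
    "llamafactory/tiny-random-Llama-3",
    batch_size=2,
    sequence_length=128,
    target="trn1",
):
    print(path.name, cfg["torch_dtype"], cfg["tp_degree"])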
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/llamafactory/tiny-random-Llama-3/858f4e0d7893e2c9daf0.json ADDED
@@ -0,0 +1,62 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "llamafactory/tiny-random-Llama-3",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 4,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 1,
+ "capacity_factor": null,
+ "checkpoint_id": "llamafactory/tiny-random-Llama-3",
+ "checkpoint_revision": "bf2a2e3bf199ad2ee96f02a3c00246c608db22a8",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 1,
+ "max_context_length": 512,
+ "max_topk": 256,
+ "n_active_tokens": 512,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.0.dev0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 512,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "float16",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 8.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/llamafactory/tiny-random-Llama-3/90868f09ac5f3109b4c0.json ADDED
@@ -0,0 +1,62 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "llamafactory/tiny-random-Llama-3",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 4,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 1,
+ "capacity_factor": null,
+ "checkpoint_id": "llamafactory/tiny-random-Llama-3",
+ "checkpoint_revision": "bf2a2e3bf199ad2ee96f02a3c00246c608db22a8",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 1,
+ "max_context_length": 512,
+ "max_topk": 256,
+ "n_active_tokens": 512,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.0.dev0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 512,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn2",
+ "torch_dtype": "float16",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 8.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/llamafactory/tiny-random-Llama-3/c54eb40c84d4cd599524.json ADDED
@@ -0,0 +1,62 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "llamafactory/tiny-random-Llama-3",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 4,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 2,
+ "capacity_factor": null,
+ "checkpoint_id": "llamafactory/tiny-random-Llama-3",
+ "checkpoint_revision": "bf2a2e3bf199ad2ee96f02a3c00246c608db22a8",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 2,
+ "max_context_length": 128,
+ "max_topk": 256,
+ "n_active_tokens": 128,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.0.dev0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 128,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn2",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 8.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/unsloth/Llama-3.2-1B-Instruct/35bbdc163c6f0f272ac5.json ADDED
@@ -0,0 +1,63 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "unsloth/Llama-3.2-1B-Instruct",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 64,
+ "hidden_act": "silu",
+ "hidden_size": 2048,
+ "initializer_range": 0.02,
+ "intermediate_size": 8192,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 4,
+ "capacity_factor": null,
+ "checkpoint_id": "unsloth/Llama-3.2-1B-Instruct",
+ "checkpoint_revision": "5a8abab4a5d6f164389b1079fb721cfab8d7126c",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 4,
+ "max_context_length": 4096,
+ "max_topk": 256,
+ "n_active_tokens": 4096,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.0.dev0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 4096,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn2",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 32,
+ "num_hidden_layers": 16,
+ "num_key_value_heads": 8,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 32.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": true,
+ "unsloth_fixed": true,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/unsloth/Llama-3.2-1B-Instruct/5971425e59d052eef4db.json ADDED
@@ -0,0 +1,63 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "unsloth/Llama-3.2-1B-Instruct",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 64,
+ "hidden_act": "silu",
+ "hidden_size": 2048,
+ "initializer_range": 0.02,
+ "intermediate_size": 8192,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 4,
+ "capacity_factor": null,
+ "checkpoint_id": "unsloth/Llama-3.2-1B-Instruct",
+ "checkpoint_revision": "5a8abab4a5d6f164389b1079fb721cfab8d7126c",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 4,
+ "max_context_length": 4096,
+ "max_topk": 256,
+ "n_active_tokens": 4096,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.0.dev0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 4096,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 32,
+ "num_hidden_layers": 16,
+ "num_key_value_heads": 8,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 32.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": true,
+ "unsloth_fixed": true,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/unsloth/Llama-3.2-1B-Instruct/8e82c1b287f80ac1c5c5.json ADDED
@@ -0,0 +1,63 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "unsloth/Llama-3.2-1B-Instruct",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 64,
+ "hidden_act": "silu",
+ "hidden_size": 2048,
+ "initializer_range": 0.02,
+ "intermediate_size": 8192,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 1,
+ "capacity_factor": null,
+ "checkpoint_id": "unsloth/Llama-3.2-1B-Instruct",
+ "checkpoint_revision": null,
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 1,
+ "max_context_length": 4096,
+ "max_topk": 256,
+ "n_active_tokens": 4096,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.4.0.dev0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 4096,
+ "speculation_length": 5,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 32,
+ "num_hidden_layers": 16,
+ "num_key_value_heads": 8,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 32.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": true,
+ "unsloth_fixed": true,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/unsloth/Llama-3.2-1B-Instruct/91d64006319bc202bb12.json ADDED
@@ -0,0 +1,64 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "unsloth/Llama-3.2-1B-Instruct",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 64,
+ "hidden_act": "silu",
+ "hidden_size": 2048,
+ "initializer_range": 0.02,
+ "intermediate_size": 8192,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 4,
+ "capacity_factor": null,
+ "checkpoint_id": "unsloth/Llama-3.2-1B-Instruct",
+ "checkpoint_revision": "5a8abab4a5d6f164389b1079fb721cfab8d7126c",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 4,
+ "max_context_length": 4096,
+ "max_topk": 256,
+ "n_active_tokens": 4096,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.0.dev0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 4096,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 32,
+ "num_hidden_layers": 16,
+ "num_key_value_heads": 8,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 32.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": true,
+ "unsloth_fixed": true,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama/unsloth/Llama-3.2-1B-Instruct/b97c760ba759013a2971.json ADDED
@@ -0,0 +1,64 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "unsloth/Llama-3.2-1B-Instruct",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "head_dim": 64,
+ "hidden_act": "silu",
+ "hidden_size": 2048,
+ "initializer_range": 0.02,
+ "intermediate_size": 8192,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 4,
+ "capacity_factor": null,
+ "checkpoint_id": "unsloth/Llama-3.2-1B-Instruct",
+ "checkpoint_revision": "5a8abab4a5d6f164389b1079fb721cfab8d7126c",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "logical_nc_config": 1,
+ "max_batch_size": 4,
+ "max_context_length": 4096,
+ "max_topk": 256,
+ "n_active_tokens": 4096,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.0.dev0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 4096,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn2",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 32,
+ "num_hidden_layers": 16,
+ "num_key_value_heads": 8,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 32.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": true,
+ "unsloth_fixed": true,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.21.18209.0+043b1bf7/MODULE_04a651791e36c2cfd4d1+a9d440f5/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_04a651791e36c2cfd4d1+a9d440f5/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:911e6e37d08b0e38efd539ec20caddc0e77d717bcaa800bcb88bacbd150d1e95
+ size 691139
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0d48bb2409b9fd56d719+877608f3/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=2", "--logfile=/tmp/nxd_model/context_encoding_model/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0d48bb2409b9fd56d719+877608f3/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0d48bb2409b9fd56d719+877608f3/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68e9e600acec93fb8d4fa9742de5648b0f2c7eeb24ba072fb3410f6f445bf47e
+ size 1284980
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0d48bb2409b9fd56d719+877608f3/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:65d0969600f67a5bdf9d23c2323ace4ccf6edb77cd4330e2d90552ea98311423
+ size 605184
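Each MODULE_<hash> directory in this cache holds the artifacts of one compiled graph: compile_flags.json with the neuronx-cc options, model.hlo_module.pb with the serialized HLO input, the compiled model.neff (stored through Git LFS, hence the pointer files in this diff), an optional wrapped_neff.hlo, and an empty model.done marker. The following sketch summarizes those artifacts for a locally cloned copy of the cache; it relies only on the directory layout visible in this commit, and reading model.done as a completion marker is an assumption based on its name.

import json
from pathlib import Path

# Cache root as named in this repository (cloned locally).
CACHE_ROOT = Path("neuronxcc-2.21.18209.0+043b1bf7")

for module_dir in sorted(CACHE_ROOT.glob("MODULE_*")):
    flags_file = module_dir / "compile_flags.json"
    flags = json.loads(flags_file.read_text()) if flags_file.exists() else []
    # Pull the --target=... option out of the stored compiler flags, if present.
    target = next((f.split("=", 1)[1] for f in flags if f.startswith("--target=")), "?")
    status = "done" if (module_dir / "model.done").exists() else "pending"
    artifacts = sorted(p.name for p in module_dir.iterdir() if p.is_file())
    print(f"{module_dir.name}: target={target} [{status}] {artifacts}")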
neuronxcc-2.21.18209.0+043b1bf7/MODULE_144cc55e8b0792820aa7+877608f3/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=2", "--logfile=/tmp/nxd_model/context_encoding_model/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_144cc55e8b0792820aa7+877608f3/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_144cc55e8b0792820aa7+877608f3/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c1f8405558ed131fe9548c773e9b915c6dc96f676107cedda65b6a9cf969fba
+ size 1501213
neuronxcc-2.21.18209.0+043b1bf7/MODULE_144cc55e8b0792820aa7+877608f3/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1162607b2a3d5bc5dc6618cdc90bc8fdf46ee7e6179cba7e020de64bec652957
+ size 7363584
neuronxcc-2.21.18209.0+043b1bf7/MODULE_4293af2f997e6a59e530+ed72d204/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/context_encoding_model/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_4293af2f997e6a59e530+ed72d204/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_4293af2f997e6a59e530+ed72d204/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:400c878c86e4fa6f5b5ae47c51668cf1bc947006e193d941e55b68316247ad63
+ size 87854
neuronxcc-2.21.18209.0+043b1bf7/MODULE_4293af2f997e6a59e530+ed72d204/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:31fad8ae71aab9ec9352f98b5c014f6608a43d6d828142ae2d5b79309f981303
+ size 400384
neuronxcc-2.21.18209.0+043b1bf7/MODULE_4cba6c2d85f5d93856b6+690e2d39/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_4cba6c2d85f5d93856b6+690e2d39/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_4cba6c2d85f5d93856b6+690e2d39/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6820471cc3742b4adf59a1b20142e67c937c1514499868f53f2b80d1077c6f62
+ size 418223
neuronxcc-2.21.18209.0+043b1bf7/MODULE_4cba6c2d85f5d93856b6+690e2d39/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d8710ff580957991adbd175d321e516ded7ea97cccfc5dcd75cb1a9b0e64d227
+ size 3083264
neuronxcc-2.21.18209.0+043b1bf7/MODULE_4cba6c2d85f5d93856b6+690e2d39/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b6a556cd6fe4560e978200ecdd64beaa35ab3ee2614b2c5de61c387b997171e6
+ size 3157508
neuronxcc-2.21.18209.0+043b1bf7/MODULE_596fadcd279603bd8b65+b6452646/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/context_encoding_model/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_596fadcd279603bd8b65+b6452646/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_596fadcd279603bd8b65+b6452646/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:114e6541a2ea809b0dfc0aef23148d2ac33eaf99d7fdca3c6d6336946926504c
+ size 509380
neuronxcc-2.21.18209.0+043b1bf7/MODULE_596fadcd279603bd8b65+b6452646/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8ceeb73e1d39f204f2d5b6fe1d0f5f5ecea5529ce10edde0bdb5ee788508fd80
+ size 34100224
neuronxcc-2.21.18209.0+043b1bf7/MODULE_5b10f7bb486873b498b4+ed72d204/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/context_encoding_model/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_5b10f7bb486873b498b4+ed72d204/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_5b10f7bb486873b498b4+ed72d204/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:01b3cfd2da3b26f46400eedc55c66740742be739481d71c13d69f1a6c8a5caca
+ size 509380
neuronxcc-2.21.18209.0+043b1bf7/MODULE_5b10f7bb486873b498b4+ed72d204/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ebe59132f4cfa1e51306f00b90dd3e4d964c2b52799eaccfec5bbf0ae04c7ac
+ size 41585664
neuronxcc-2.21.18209.0+043b1bf7/MODULE_5e068a10d22bfaff89f6+747527b0/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=2", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_5e068a10d22bfaff89f6+747527b0/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_5e068a10d22bfaff89f6+747527b0/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:72a89b81cdfd1f7ecc8ac622f9b63727994d156bcc48d3352e607e6e8e511cfd
+ size 1574268
neuronxcc-2.21.18209.0+043b1bf7/MODULE_5e068a10d22bfaff89f6+747527b0/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a0d066ef761ee9860297dad1e6d7e44f91927f243e87667f945a482e737f8c27
+ size 779264
neuronxcc-2.21.18209.0+043b1bf7/MODULE_5e068a10d22bfaff89f6+747527b0/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9fe197d03460730cc644166215fa75f68171db2f467c181e9d126a9d8bca3a33
+ size 787211
neuronxcc-2.21.18209.0+043b1bf7/MODULE_65712ebe0144fc2b3197+747527b0/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=2", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_65712ebe0144fc2b3197+747527b0/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_65712ebe0144fc2b3197+747527b0/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:581d703b845bd5403f4104e5e4e05a8b83c0664434bf290de96203b8d392bcbb
+ size 1284382
neuronxcc-2.21.18209.0+043b1bf7/MODULE_65712ebe0144fc2b3197+747527b0/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4be13705b296fbd2c3e864682960468372ba221c9a9d0d63280894e8c96a0e2a
+ size 513024
neuronxcc-2.21.18209.0+043b1bf7/MODULE_65712ebe0144fc2b3197+747527b0/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c3b5870b78a4bd9c000f9624508f3dc0c14ef9b907893634f93c3d0dcdf9fbab
+ size 520943
neuronxcc-2.21.18209.0+043b1bf7/MODULE_6c6ffc55476bfb49f4a5+747527b0/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=2", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_6c6ffc55476bfb49f4a5+747527b0/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_6c6ffc55476bfb49f4a5+747527b0/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:317710548b5edeacc5f3f03a392a524bad93de31e8aad242ce4566c00177ff67
+ size 2531606