Commit 142cac1 (verified) · committed by dacorvo (HF Staff) · 1 parent: 561c64d

Synchronizing local compiler cache.
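As context for the files below: each registry entry added by this sync is a plain JSON description of a compiled configuration, so it can be fetched and inspected straight from the Hub. A minimal sketch, assuming the `huggingface_hub` client and a placeholder repository id (the target cache repo is not named in this commit message):

import json

from huggingface_hub import hf_hub_download

# Placeholder: substitute the actual Hub repository id that hosts this compiler cache.
REPO_ID = "<org>/<compiler-cache-repo>"
ENTRY = (
    "neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/"
    "meta-llama/Llama-4-Scout-17B-16E-Instruct/10a1c1ce29aad9ca9b5c.json"
)

# Download only the registry entry and print the key export parameters it records.
entry_path = hf_hub_download(repo_id=REPO_ID, filename=ENTRY)
with open(entry_path) as f:
    entry = json.load(f)
print(entry["_model_id"], entry["neuron"]["batch_size"], entry["neuron"]["tp_degree"])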

Files changed (36):
  1. .gitattributes +9 -0
  2. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/10a1c1ce29aad9ca9b5c.json +220 -0
  3. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/1f457e6566a58a85f853.json +220 -0
  4. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/de08f599efc64352109e.json +220 -0
  5. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/e02d974ce330732de102.json +220 -0
  6. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E/93c0d83dcabd882421c2.json +220 -0
  7. neuronxcc-2.21.18209.0+043b1bf7/MODULE_24aaef3b233504b9e5bb+747527b0/compile_flags.json +1 -0
  8. neuronxcc-2.21.18209.0+043b1bf7/MODULE_24aaef3b233504b9e5bb+747527b0/model.done +0 -0
  9. neuronxcc-2.21.18209.0+043b1bf7/MODULE_24aaef3b233504b9e5bb+747527b0/model.hlo_module.pb +3 -0
  10. neuronxcc-2.21.18209.0+043b1bf7/MODULE_24aaef3b233504b9e5bb+747527b0/model.neff +3 -0
  11. neuronxcc-2.21.18209.0+043b1bf7/MODULE_24aaef3b233504b9e5bb+747527b0/wrapped_neff.hlo +3 -0
  12. neuronxcc-2.21.18209.0+043b1bf7/MODULE_28ec0a0d6855a563f4c7+877608f3/compile_flags.json +1 -0
  13. neuronxcc-2.21.18209.0+043b1bf7/MODULE_28ec0a0d6855a563f4c7+877608f3/model.done +0 -0
  14. neuronxcc-2.21.18209.0+043b1bf7/MODULE_28ec0a0d6855a563f4c7+877608f3/model.hlo_module.pb +3 -0
  15. neuronxcc-2.21.18209.0+043b1bf7/MODULE_28ec0a0d6855a563f4c7+877608f3/model.neff +3 -0
  16. neuronxcc-2.21.18209.0+043b1bf7/MODULE_885e8321983058041589+747527b0/compile_flags.json +1 -0
  17. neuronxcc-2.21.18209.0+043b1bf7/MODULE_885e8321983058041589+747527b0/model.hlo_module.pb +3 -0
  18. neuronxcc-2.21.18209.0+043b1bf7/MODULE_885e8321983058041589+747527b0/model.hlo_module.pb.lock +0 -0
  19. neuronxcc-2.21.18209.0+043b1bf7/MODULE_bf1787a568857a366d85+747527b0/compile_flags.json +1 -0
  20. neuronxcc-2.21.18209.0+043b1bf7/MODULE_bf1787a568857a366d85+747527b0/model.done +0 -0
  21. neuronxcc-2.21.18209.0+043b1bf7/MODULE_bf1787a568857a366d85+747527b0/model.hlo_module.pb +3 -0
  22. neuronxcc-2.21.18209.0+043b1bf7/MODULE_bf1787a568857a366d85+747527b0/model.neff +3 -0
  23. neuronxcc-2.21.18209.0+043b1bf7/MODULE_bf1787a568857a366d85+747527b0/wrapped_neff.hlo +3 -0
  24. neuronxcc-2.21.18209.0+043b1bf7/MODULE_cd7cf2688a87e8564211+747527b0/compile_flags.json +1 -0
  25. neuronxcc-2.21.18209.0+043b1bf7/MODULE_cd7cf2688a87e8564211+747527b0/model.done +0 -0
  26. neuronxcc-2.21.18209.0+043b1bf7/MODULE_cd7cf2688a87e8564211+747527b0/model.hlo_module.pb +3 -0
  27. neuronxcc-2.21.18209.0+043b1bf7/MODULE_cd7cf2688a87e8564211+747527b0/model.neff +3 -0
  28. neuronxcc-2.21.18209.0+043b1bf7/MODULE_cd7cf2688a87e8564211+747527b0/wrapped_neff.hlo +3 -0
  29. neuronxcc-2.21.18209.0+043b1bf7/MODULE_e8986cc6bab02dbec28c+877608f3/compile_flags.json +1 -0
  30. neuronxcc-2.21.18209.0+043b1bf7/MODULE_e8986cc6bab02dbec28c+877608f3/model.done +0 -0
  31. neuronxcc-2.21.18209.0+043b1bf7/MODULE_e8986cc6bab02dbec28c+877608f3/model.hlo_module.pb +3 -0
  32. neuronxcc-2.21.18209.0+043b1bf7/MODULE_e8986cc6bab02dbec28c+877608f3/model.neff +3 -0
  33. neuronxcc-2.21.18209.0+043b1bf7/MODULE_e90deecb7cf108fcddbb+877608f3/compile_flags.json +1 -0
  34. neuronxcc-2.21.18209.0+043b1bf7/MODULE_e90deecb7cf108fcddbb+877608f3/model.done +0 -0
  35. neuronxcc-2.21.18209.0+043b1bf7/MODULE_e90deecb7cf108fcddbb+877608f3/model.hlo_module.pb +3 -0
  36. neuronxcc-2.21.18209.0+043b1bf7/MODULE_e90deecb7cf108fcddbb+877608f3/model.neff +3 -0
.gitattributes CHANGED
@@ -11728,3 +11728,12 @@ neuronxcc-2.21.18209.0+043b1bf7/MODULE_2353929736538704055+a3455b04/model.neff f
  neuronxcc-2.21.18209.0+043b1bf7/MODULE_3855315285394483972+a3455b04/model.neff filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.21.18209.0+043b1bf7/MODULE_5434430890383489368+a3455b04/model.neff filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.21.18209.0+043b1bf7/MODULE_6274406549387831778+a3455b04/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_24aaef3b233504b9e5bb+747527b0/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_24aaef3b233504b9e5bb+747527b0/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_28ec0a0d6855a563f4c7+877608f3/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_bf1787a568857a366d85+747527b0/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_bf1787a568857a366d85+747527b0/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_cd7cf2688a87e8564211+747527b0/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_cd7cf2688a87e8564211+747527b0/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_e8986cc6bab02dbec28c+877608f3/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_e90deecb7cf108fcddbb+877608f3/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/10a1c1ce29aad9ca9b5c.json ADDED
@@ -0,0 +1,220 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
  "_task": "text-generation",
  "attention_bias": false,
  "attention_chunk_size": 8192,
  "attention_dropout": 0.0,
  "attn_scale": 0.1,
  "attn_temperature_tuning": true,
  "floor_scale": 8192,
  "for_llm_compressor": false,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "interleave_moe_layer_step": 1,
  "intermediate_size": 8192,
  "intermediate_size_mlp": 16384,
  "layer_types": [
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention"
  ],
  "max_position_embeddings": 10485760,
  "model_type": "llama4_text",
  "moe_layers": [
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
    24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47
  ],
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 4,
    "capacity_factor": null,
    "checkpoint_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
    "checkpoint_revision": "92f3b1597a195b523d8d9e5700e57e4fbb8f20d3",
    "continuous_batching": true,
    "enable_bucketing": false,
    "ep_degree": 1,
    "fused_qkv": false,
    "glu_mlp": true,
    "local_ranks_size": 64,
    "max_batch_size": 4,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.18209.0+043b1bf7",
    "on_device_sampling": true,
    "optimum_neuron_version": "0.4.0.dev0",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn2",
    "torch_dtype": "bfloat16",
    "tp_degree": 64
  },
  "no_rope_layers": [
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0,
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0
  ],
  "num_attention_heads": 40,
  "num_experts_per_tok": 1,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "num_local_experts": 16,
  "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 16.0,
    "high_freq_factor": 1.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "router_aux_loss_coef": 0.001,
  "router_jitter_noise": 0.0,
  "tie_word_embeddings": false,
  "use_cache": true,
  "use_qk_norm": true,
  "vocab_size": 202048
}
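The entry above pairs the model's architectural configuration with the `neuron` export settings (batch size 4, sequence length 4096, tp_degree 64, trn2 target). A minimal, illustrative sketch of how such an entry could be matched against a local config and a set of export arguments to decide whether a cached compilation applies; this is not optimum-neuron's actual lookup code:

import json

def entry_matches(entry_path: str, model_config: dict, export_args: dict) -> bool:
    """Return True if a registry entry describes the same architecture and export settings."""
    with open(entry_path) as f:
        entry = json.load(f)
    neuron = entry.pop("neuron")
    # Bookkeeping keys are not part of the architecture comparison.
    for key in ("_entry_class", "_model_id", "_task"):
        entry.pop(key, None)
    arch_ok = all(model_config.get(k) == v for k, v in entry.items())
    export_ok = all(neuron.get(k) == v for k, v in export_args.items())
    return arch_ok and export_ok

# Example: the entry above only applies to batch_size=4, tp_degree=64 exports targeting trn2.
# entry_matches("10a1c1ce29aad9ca9b5c.json", config, {"batch_size": 4, "tp_degree": 64, "target": "trn2"})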
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/1f457e6566a58a85f853.json ADDED
@@ -0,0 +1,220 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
  "_task": "text-generation",
  "attention_bias": false,
  "attention_chunk_size": 8192,
  "attention_dropout": 0.0,
  "attn_scale": 0.1,
  "attn_temperature_tuning": true,
  "floor_scale": 8192,
  "for_llm_compressor": false,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "interleave_moe_layer_step": 1,
  "intermediate_size": 8192,
  "intermediate_size_mlp": 16384,
  "layer_types": [
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention"
  ],
  "max_position_embeddings": 10485760,
  "model_type": "llama4_text",
  "moe_layers": [
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
    24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47
  ],
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 32,
    "capacity_factor": null,
    "checkpoint_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
    "checkpoint_revision": "92f3b1597a195b523d8d9e5700e57e4fbb8f20d3",
    "continuous_batching": true,
    "enable_bucketing": false,
    "ep_degree": 1,
    "fused_qkv": false,
    "glu_mlp": true,
    "local_ranks_size": 64,
    "max_batch_size": 32,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.18209.0+043b1bf7",
    "on_device_sampling": true,
    "optimum_neuron_version": "0.4.0.dev0",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn2",
    "torch_dtype": "bfloat16",
    "tp_degree": 64
  },
  "no_rope_layers": [
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0,
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0
  ],
  "num_attention_heads": 40,
  "num_experts_per_tok": 1,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "num_local_experts": 16,
  "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 16.0,
    "high_freq_factor": 1.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "router_aux_loss_coef": 0.001,
  "router_jitter_noise": 0.0,
  "tie_word_embeddings": false,
  "use_cache": true,
  "use_qk_norm": true,
  "vocab_size": 202048
}
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/de08f599efc64352109e.json ADDED
@@ -0,0 +1,220 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
  "_task": "text-generation",
  "attention_bias": false,
  "attention_chunk_size": 8192,
  "attention_dropout": 0.0,
  "attn_scale": 0.1,
  "attn_temperature_tuning": true,
  "floor_scale": 8192,
  "for_llm_compressor": false,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "interleave_moe_layer_step": 1,
  "intermediate_size": 8192,
  "intermediate_size_mlp": 16384,
  "layer_types": [
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention"
  ],
  "max_position_embeddings": 10485760,
  "model_type": "llama4_text",
  "moe_layers": [
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
    24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47
  ],
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 8,
    "capacity_factor": null,
    "checkpoint_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
    "checkpoint_revision": "92f3b1597a195b523d8d9e5700e57e4fbb8f20d3",
    "continuous_batching": true,
    "enable_bucketing": false,
    "ep_degree": 1,
    "fused_qkv": false,
    "glu_mlp": true,
    "local_ranks_size": 64,
    "max_batch_size": 8,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.18209.0+043b1bf7",
    "on_device_sampling": true,
    "optimum_neuron_version": "0.4.0.dev0",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn2",
    "torch_dtype": "bfloat16",
    "tp_degree": 64
  },
  "no_rope_layers": [
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0,
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0
  ],
  "num_attention_heads": 40,
  "num_experts_per_tok": 1,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "num_local_experts": 16,
  "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 16.0,
    "high_freq_factor": 1.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "router_aux_loss_coef": 0.001,
  "router_jitter_noise": 0.0,
  "tie_word_embeddings": false,
  "use_cache": true,
  "use_qk_norm": true,
  "vocab_size": 202048
}
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/e02d974ce330732de102.json ADDED
@@ -0,0 +1,220 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
  "_task": "text-generation",
  "attention_bias": false,
  "attention_chunk_size": 8192,
  "attention_dropout": 0.0,
  "attn_scale": 0.1,
  "attn_temperature_tuning": true,
  "floor_scale": 8192,
  "for_llm_compressor": false,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "interleave_moe_layer_step": 1,
  "intermediate_size": 8192,
  "intermediate_size_mlp": 16384,
  "layer_types": [
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention"
  ],
  "max_position_embeddings": 10485760,
  "model_type": "llama4_text",
  "moe_layers": [
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
    24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47
  ],
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 16,
    "capacity_factor": null,
    "checkpoint_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
    "checkpoint_revision": "92f3b1597a195b523d8d9e5700e57e4fbb8f20d3",
    "continuous_batching": true,
    "enable_bucketing": false,
    "ep_degree": 1,
    "fused_qkv": false,
    "glu_mlp": true,
    "local_ranks_size": 64,
    "max_batch_size": 16,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.18209.0+043b1bf7",
    "on_device_sampling": true,
    "optimum_neuron_version": "0.4.0.dev0",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn2",
    "torch_dtype": "bfloat16",
    "tp_degree": 64
  },
  "no_rope_layers": [
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0,
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0
  ],
  "num_attention_heads": 40,
  "num_experts_per_tok": 1,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "num_local_experts": 16,
  "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 16.0,
    "high_freq_factor": 1.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "router_aux_loss_coef": 0.001,
  "router_jitter_noise": 0.0,
  "tie_word_embeddings": false,
  "use_cache": true,
  "use_qk_norm": true,
  "vocab_size": 202048
}
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E/93c0d83dcabd882421c2.json ADDED
@@ -0,0 +1,220 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "meta-llama/Llama-4-Scout-17B-16E",
  "_task": "text-generation",
  "attention_bias": false,
  "attention_chunk_size": 8192,
  "attention_dropout": 0.0,
  "attn_scale": 0.1,
  "attn_temperature_tuning": true,
  "floor_scale": 8192,
  "for_llm_compressor": false,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "interleave_moe_layer_step": 1,
  "intermediate_size": 8192,
  "intermediate_size_mlp": 16384,
  "layer_types": [
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention"
  ],
  "max_position_embeddings": 262144,
  "model_type": "llama4_text",
  "moe_layers": [
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
    24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47
  ],
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 8,
    "capacity_factor": null,
    "checkpoint_id": "meta-llama/Llama-4-Scout-17B-16E",
    "checkpoint_revision": "14d516bdff6ac06cec40678529222f193386189c",
    "continuous_batching": true,
    "enable_bucketing": false,
    "ep_degree": 1,
    "fused_qkv": false,
    "glu_mlp": true,
    "local_ranks_size": 64,
    "max_batch_size": 8,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.18209.0+043b1bf7",
    "on_device_sampling": true,
    "optimum_neuron_version": "0.4.0.dev0",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn2",
    "torch_dtype": "bfloat16",
    "tp_degree": 64
  },
  "no_rope_layers": [
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0,
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0
  ],
  "num_attention_heads": 40,
  "num_experts_per_tok": 1,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "num_local_experts": 16,
  "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 16.0,
    "high_freq_factor": 1.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "router_aux_loss_coef": 0.001,
  "router_jitter_noise": 0.0,
  "tie_word_embeddings": false,
  "use_cache": true,
  "use_qk_norm": true,
  "vocab_size": 202048
}
neuronxcc-2.21.18209.0+043b1bf7/MODULE_24aaef3b233504b9e5bb+747527b0/compile_flags.json ADDED
@@ -0,0 +1 @@
["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=2", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
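Each MODULE_* directory stores, alongside the compiled artifacts, the exact neuronx-cc flag list used for that module as a flat JSON array. A minimal sketch (assuming the cache has been materialized locally at the path shown above) of reading it back:

import json

# compile_flags.json is a flat JSON array of neuronx-cc command-line flags.
flags_path = (
    "neuronxcc-2.21.18209.0+043b1bf7/"
    "MODULE_24aaef3b233504b9e5bb+747527b0/compile_flags.json"
)
with open(flags_path) as f:
    flags = json.load(f)

print(flags[0])            # "--target=trn2"
print("--lnc=2" in flags)  # True for this module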
neuronxcc-2.21.18209.0+043b1bf7/MODULE_24aaef3b233504b9e5bb+747527b0/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_24aaef3b233504b9e5bb+747527b0/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4c03d04371e8662dcea794b1a740f645a2a9dfe0768478eacbed4e27172f8c56
size 105611491
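The *.pb, *.neff and wrapped_neff.hlo entries in this commit are Git LFS pointers: three text lines recording the spec version, the SHA-256 of the real blob, and its byte size. A short sketch of verifying a locally materialized blob against the pointer above (local path assumed):

import hashlib

def verify_lfs_object(blob_path: str, expected_oid: str, expected_size: int) -> bool:
    """Check a downloaded file against the oid/size recorded in its LFS pointer."""
    digest = hashlib.sha256()
    size = 0
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
            size += len(chunk)
    return digest.hexdigest() == expected_oid and size == expected_size

ok = verify_lfs_object(
    "neuronxcc-2.21.18209.0+043b1bf7/MODULE_24aaef3b233504b9e5bb+747527b0/model.hlo_module.pb",
    "4c03d04371e8662dcea794b1a740f645a2a9dfe0768478eacbed4e27172f8c56",
    105611491,
)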
neuronxcc-2.21.18209.0+043b1bf7/MODULE_24aaef3b233504b9e5bb+747527b0/model.neff ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:33cc845d5c2418f1bd21d6e8931b91b60cebcf088b298c2c34b17a0863523e17
size 11971584
neuronxcc-2.21.18209.0+043b1bf7/MODULE_24aaef3b233504b9e5bb+747527b0/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:66aa30ecbfad84614abcc03888ab04c610b8c3305166b9a95a0c45816be9a55a
size 12310102
neuronxcc-2.21.18209.0+043b1bf7/MODULE_28ec0a0d6855a563f4c7+877608f3/compile_flags.json ADDED
@@ -0,0 +1 @@
["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=2", "--logfile=/tmp/nxd_model/context_encoding_model/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_28ec0a0d6855a563f4c7+877608f3/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_28ec0a0d6855a563f4c7+877608f3/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e0a9200b5b481968b5512f10d6a2bcf00ec3eee27a9e489b3c981caa193e8bda
size 104845521
neuronxcc-2.21.18209.0+043b1bf7/MODULE_28ec0a0d6855a563f4c7+877608f3/model.neff ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c02babfdcdd5d71e9b421b857dc57289505d7347c954d4b0553055b2bcea29ac
size 29656064
neuronxcc-2.21.18209.0+043b1bf7/MODULE_885e8321983058041589+747527b0/compile_flags.json ADDED
@@ -0,0 +1 @@
["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=2", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_885e8321983058041589+747527b0/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5e235082bc9e8642c49d0296feff2840f5c6126407caa2e7731ad9f90b4e7e73
size 103352274
neuronxcc-2.21.18209.0+043b1bf7/MODULE_885e8321983058041589+747527b0/model.hlo_module.pb.lock ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_bf1787a568857a366d85+747527b0/compile_flags.json ADDED
@@ -0,0 +1 @@
["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=2", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_bf1787a568857a366d85+747527b0/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_bf1787a568857a366d85+747527b0/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c3d451d085f2826a327fda873dec0257f84f308fc6df767a81c1db4837c3b982
size 105010693
neuronxcc-2.21.18209.0+043b1bf7/MODULE_bf1787a568857a366d85+747527b0/model.neff ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:157304c65ec7f7d483dfc76e7a7335347cc54323ac7a152d1f8b657eab2260d8
size 15729664
neuronxcc-2.21.18209.0+043b1bf7/MODULE_bf1787a568857a366d85+747527b0/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9283439fb1f5ec5e1db37fd8d852a1429b7b3f19611f2dad4db638f8582a712f
size 16067167
neuronxcc-2.21.18209.0+043b1bf7/MODULE_cd7cf2688a87e8564211+747527b0/compile_flags.json ADDED
@@ -0,0 +1 @@
["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=2", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_cd7cf2688a87e8564211+747527b0/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_cd7cf2688a87e8564211+747527b0/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:de1dbe66d66e7280ec8311690fb8dc7b52ecec4c692de7941e8edaac769fb2af
size 106181249
neuronxcc-2.21.18209.0+043b1bf7/MODULE_cd7cf2688a87e8564211+747527b0/model.neff ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:28338e597f92e53db3f1d19172619b837f5a5a7ab7ecff699d29592a160d2527
size 20921344
neuronxcc-2.21.18209.0+043b1bf7/MODULE_cd7cf2688a87e8564211+747527b0/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a586ab25f1b49ffef862d31a3df73be42035763e6e0215ff286a960aa7879bea
size 21258847
neuronxcc-2.21.18209.0+043b1bf7/MODULE_e8986cc6bab02dbec28c+877608f3/compile_flags.json ADDED
@@ -0,0 +1 @@
["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=2", "--logfile=/tmp/nxd_model/context_encoding_model/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_e8986cc6bab02dbec28c+877608f3/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_e8986cc6bab02dbec28c+877608f3/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4e8b11db15fc5be106b02e76ac948f6c74510ee16ffc6615ab7a45c1967f1844
size 104846536
neuronxcc-2.21.18209.0+043b1bf7/MODULE_e8986cc6bab02dbec28c+877608f3/model.neff ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0361f46d756fbbbd708e2bfa3f7b5a35d8cc7d042ef8e966deee0be4e8c87c64
size 29809664
neuronxcc-2.21.18209.0+043b1bf7/MODULE_e90deecb7cf108fcddbb+877608f3/compile_flags.json ADDED
@@ -0,0 +1 @@
["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=2", "--logfile=/tmp/nxd_model/context_encoding_model/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_e90deecb7cf108fcddbb+877608f3/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_e90deecb7cf108fcddbb+877608f3/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b792ab27a947714891fefbd86c148ba1694c000ec585ef2a0729524fbffc72d8
size 104845521
neuronxcc-2.21.18209.0+043b1bf7/MODULE_e90deecb7cf108fcddbb+877608f3/model.neff ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:89fd71916c2bf1daf9d6f3954606d68dd3b179c8369f0666be2bee6958b4425e
size 29656064