CompactAI committed · verified
Commit 3e128f4 · 1 Parent(s): 9378f13

Upload folder using huggingface_hub

README.md CHANGED
@@ -12,7 +12,7 @@ pipeline_tag: text-generation
 
 # Qwen3-0.6B-python-safe
 
-> 🎯 **PYTHON-optimized** | 📦 **Safe** pruning | ⚡ **1% weights pruned**
+> 🎯 **PYTHON-optimized** | 📦 **Safe** pruning | ⚡ **2% weights pruned**
 
 This model is a **conservatively pruned** version of [Qwen/Qwen3-0.6B](https://huggingface.co/Qwen/Qwen3-0.6B), specialized for **PYTHON** tasks using activation-aware weight pruning (Wanda-style).
 
@@ -20,7 +20,7 @@ This model is a **conservatively pruned** version of [Qwen/Qwen3-0.6B](https://h
 
 - **Specialization**: Optimized for Python tasks
 - **Pruning Method**: Wanda-style (|W| × |activation|) importance scoring
-- **Size Reduction**: 1% weights pruned
+- **Size Reduction**: 2% weights pruned
 - **Use Case**: High accuracy retention, ideal for production use
 
 ## 📊 Performance Comparison
@@ -30,13 +30,13 @@ This model is a **conservatively pruned** version of [Qwen/Qwen3-0.6B](https://h
 | **Python** | 30.0% | 30.0% ⭐ | → |
 | Html | 0.0% | 0.0% | → |
 | Trivia | 90.0% | 90.0% | → |
-| Math | 96.7% | 93.3% | ↓ 3.3% |
+| Math | 96.7% | 96.7% | → |
 | Reasoning | 36.7% | 36.7% | → |
-| Medical | 83.3% | 86.7% | ↑ 3.3% |
-| Linux | 93.3% | 93.3% | → |
-| Writing | 53.3% | 56.7% | ↑ 3.3% |
+| Medical | 83.3% | 83.3% | → |
+| Linux | 93.3% | 90.0% | ↓ 3.3% |
+| Writing | 53.3% | 53.3% | → |
 
-**Average**: 60.4% → 60.8% (+0.4%)
+**Average**: 60.4% → 60.0% (-0.4%)
 
 **Python Retention**: 100.0% of original performance
 
@@ -64,7 +64,7 @@ print(tokenizer.decode(outputs[0], skip_special_tokens=True))
 | Specialization | Python |
 | Prune Mode | Safe |
 | Pruning Method | Activation-based weight pruning (Wanda) |
-| Weight Reduction | 1% weights pruned |
+| Weight Reduction | 2% weights pruned |
 
 ## 🔗 Related Models
 
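The README above attributes the size reduction to Wanda-style (|W| × |activation|) importance scoring, but the pruning script itself is not part of this commit. The snippet below is only a minimal sketch of that scoring rule; the calibration-derived `act_norm` vector, the `wanda_prune_` helper name, and the 2% `sparsity` budget (taken from the updated card) are assumptions for illustration.

```python
import torch

def wanda_prune_(linear: torch.nn.Linear, act_norm: torch.Tensor, sparsity: float = 0.02) -> None:
    """Zero the least-important weights of a linear layer, Wanda-style.

    act_norm: per-input-channel activation norm gathered on a calibration set
    (hypothetical input -- this commit does not ship the calibration code).
    """
    W = linear.weight.data                        # (out_features, in_features)
    score = W.abs() * act_norm.unsqueeze(0)       # |W| x |activation|, broadcast per row
    k = int(sparsity * W.shape[1])                # number of weights to drop per output row
    if k == 0:
        return
    _, drop = torch.topk(score, k, dim=1, largest=False)  # lowest-scoring positions per row
    W.scatter_(1, drop, 0.0)                      # prune in place
```

Comparing weights within each output row is the usual Wanda convention; a "safe" 2% budget leaves nearly all weights intact, which is consistent with the small metric shifts in the table above.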
comparison_graph.png CHANGED
config.json CHANGED
@@ -48,12 +48,15 @@
   "num_attention_heads": 16,
   "num_hidden_layers": 28,
   "num_key_value_heads": 8,
+  "pad_token_id": null,
   "rms_norm_eps": 1e-06,
-  "rope_scaling": null,
-  "rope_theta": 1000000,
+  "rope_parameters": {
+    "rope_theta": 1000000,
+    "rope_type": "default"
+  },
   "sliding_window": null,
   "tie_word_embeddings": true,
-  "transformers_version": "4.57.6",
+  "transformers_version": "5.0.0",
   "use_cache": true,
   "use_sliding_window": false,
   "vocab_size": 151936
generation_config.json CHANGED
@@ -1,9 +1,10 @@
 {
   "bos_token_id": 151643,
+  "do_sample": false,
   "eos_token_id": [
     151645,
     151643
   ],
   "pad_token_id": 151643,
-  "transformers_version": "4.57.6"
+  "transformers_version": "5.0.0"
 }
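The updated generation_config.json now ships `do_sample: false`, so `generate()` decodes greedily out of the box. A short sketch of overriding that default at call time; the repo id and prompt are assumptions for illustration.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "CompactAI/Qwen3-0.6B-python-safe"  # assumed repo id
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo)

prompt = "Write a Python function that checks whether a string is a palindrome."
inputs = tokenizer(prompt, return_tensors="pt")

# do_sample=False is now the shipped default; pass sampling flags explicitly to override it.
outputs = model.generate(**inputs, max_new_tokens=128, do_sample=True, temperature=0.7, top_p=0.9)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```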
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d4e986e264eec11cb280bcab9c4592f081d35918421e069483071c316a5b6b20
-size 1192134784
+oid sha256:4850d9e7783a1ad308c0a8eafcbc093a2666f5403f153835a9a8d5ad668eb2a9
+size 1503300016
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:dab3ef598a088c1ffec65f944ad95060dd22f3de49a128a0e30dcd20364f22ef
-size 11422921
+oid sha256:fea4f89c198c65a418ebfd87d7480db83fe21f31c7f56cd2ecea1110b1dff53e
+size 11422917
tokenizer_config.json CHANGED
@@ -1,217 +1,11 @@
 {
-  "add_bos_token": false,
   "add_prefix_space": false,
-  "added_tokens_decoder": {
-    "151643": {
-      "content": "<|endoftext|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "151644": {
-      "content": "<|im_start|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "151645": {
-      "content": "<|im_end|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "151646": {
-      "content": "<|object_ref_start|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "151647": {
-      "content": "<|object_ref_end|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "151648": {
-      "content": "<|box_start|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "151649": {
-      "content": "<|box_end|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "151650": {
-      "content": "<|quad_start|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "151651": {
-      "content": "<|quad_end|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "151652": {
-      "content": "<|vision_start|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "151653": {
-      "content": "<|vision_end|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "151654": {
-      "content": "<|vision_pad|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "151655": {
-      "content": "<|image_pad|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "151656": {
-      "content": "<|video_pad|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "151657": {
-      "content": "<tool_call>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "151658": {
-      "content": "</tool_call>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "151659": {
-      "content": "<|fim_prefix|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "151660": {
-      "content": "<|fim_middle|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "151661": {
-      "content": "<|fim_suffix|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "151662": {
-      "content": "<|fim_pad|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "151663": {
-      "content": "<|repo_name|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "151664": {
-      "content": "<|file_sep|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "151665": {
-      "content": "<tool_response>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "151666": {
-      "content": "</tool_response>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "151667": {
-      "content": "<think>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "151668": {
-      "content": "</think>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    }
-  },
-  "additional_special_tokens": [
+  "backend": "tokenizers",
+  "bos_token": null,
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|im_end|>",
+  "errors": "replace",
+  "extra_special_tokens": [
     "<|im_start|>",
     "<|im_end|>",
     "<|object_ref_start|>",
@@ -226,11 +20,7 @@
     "<|image_pad|>",
     "<|video_pad|>"
   ],
-  "bos_token": null,
-  "clean_up_tokenization_spaces": false,
-  "eos_token": "<|im_end|>",
-  "errors": "replace",
-  "extra_special_tokens": {},
+  "is_local": false,
   "model_max_length": 131072,
   "pad_token": "<|endoftext|>",
   "split_special_tokens": false,