Upload batch1/summary.json with huggingface_hub
Browse files- batch1/summary.json +90 -0
batch1/summary.json
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
{
  "results": [
    {
      "model": "HuggingFaceTB/SmolLM2-1.7B",
      "size_gb": 3.42,
      "exp_entropy": 2.5748,
      "mlp_entropy": 2.5625,
      "attn_entropy": 2.6114,
      "emb_entropy": 2.5781,
      "mantissa_entropy": 6.9719,
      "huffman_pct": 33.71,
      "mlp_huffman_pct": 33.8,
      "attn_huffman_pct": 33.45,
      "palette_pct": 24.71,
      "top16_cov": 0.999873,
      "verbatim_rows": 0.007834,
      "mlp_verbatim": 0.007317,
      "attn_verbatim": 0.008444,
      "mlp_frac": 0.7058,
      "mlp_only_pct": 23.86,
      "time_s": 46.8
    },
    {
      "model": "Qwen/Qwen3-0.6B",
      "size_gb": 1.5,
      "exp_entropy": 2.6182,
      "mlp_entropy": 2.6192,
      "attn_entropy": 2.6736,
      "emb_entropy": 2.5863,
      "mantissa_entropy": 6.9729,
      "huffman_pct": 33.42,
      "mlp_huffman_pct": 33.4,
      "attn_huffman_pct": 33.06,
      "palette_pct": 24.69,
      "top16_cov": 0.999865,
      "verbatim_rows": 0.008355,
      "mlp_verbatim": 0.008413,
      "attn_verbatim": 0.010869,
      "mlp_frac": 0.3516,
      "mlp_only_pct": 11.74,
      "time_s": 20.3
    },
    {
      "model": "Qwen/Qwen3-4B",
      "size_gb": 8.04,
      "exp_entropy": 2.624,
      "mlp_entropy": 2.6293,
      "attn_entropy": 2.6217,
      "emb_entropy": 2.5931,
      "mantissa_entropy": 6.9725,
      "huffman_pct": 33.35,
      "mlp_huffman_pct": 33.31,
      "attn_huffman_pct": 33.36,
      "palette_pct": 24.69,
      "top16_cov": 0.999857,
      "verbatim_rows": 0.008671,
      "mlp_verbatim": 0.00909,
      "attn_verbatim": 0.009036,
      "mlp_frac": 0.6686,
      "mlp_only_pct": 22.28,
      "time_s": 108.6
    },
    {
      "model": "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
      "size_gb": 15.23,
      "exp_entropy": 2.6299,
      "mlp_entropy": 2.6077,
      "attn_entropy": 2.6297,
      "emb_entropy": 2.7463,
      "mantissa_entropy": 6.9708,
      "huffman_pct": 33.36,
      "mlp_huffman_pct": 33.5,
      "attn_huffman_pct": 33.33,
      "palette_pct": 24.61,
      "top16_cov": 0.999684,
      "verbatim_rows": 0.01153,
      "mlp_verbatim": 0.008837,
      "attn_verbatim": 0.009992,
      "mlp_frac": 0.7489,
      "mlp_only_pct": 25.09,
      "time_s": 201.6
    }
  ],
  "failures": [
    {
      "model": "google/gemma-3-4b-pt",
      "error": "403 Client Error. (Request ID: Root=1-69e795ba-449280e969a231201e6a48ee;a2aebf99-596b-4757-b64c-5e8f7cead780)\n\nCannot access gated repo for url https://huggingface.co/google/gemma-3-4b-pt/resolve/main/model-00001-of-00002.safetensors.\nAccess to model google/gemma-3-4b-pt is restricted and you are not in the authorized list. Visit https://huggingface.co/google/gemma-3-4b-pt to ask for access."
    }
  ]
}