Updated with commit 67080e163add50e278dce1407538e5cd03ef9d6d. See: https://github.com/huggingface/tokenizers/commit/67080e163add50e278dce1407538e5cd03ef9d6d
- accelerate
- api-inference
- course
- datasets-server
- datasets
- diffusers
- evaluate
- hub
- huggingface_hub
- inference-endpoints
- optimum
- safetensors
- sagemaker
- simenv
- simulate
- timm
- tokenizers
- transformers