fp16 and fp32 model weights
Files changed:
- config.json +3 -3
- generation_config.json +1 -1
- model.fp32.safetensors +3 -0
- model.safetensors +2 -2
config.json
CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "
+  "_name_or_path": "eustlb/distil-large-v3-fr",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "apply_spec_augment": false,
@@ -42,8 +42,8 @@
   "num_mel_bins": 128,
   "pad_token_id": 50256,
   "scale_embedding": false,
-  "torch_dtype": "
-  "transformers_version": "4.
+  "torch_dtype": "float16",
+  "transformers_version": "4.42.0.dev0",
   "use_cache": true,
   "use_weighted_layer_sum": false,
   "vocab_size": 51866
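With "torch_dtype": "float16" now recorded in config.json, Transformers can materialize the weights in half precision directly. A minimal loading sketch, using the checkpoint id from _name_or_path (torch_dtype="auto" defers to the dtype stored in the config):

import torch
from transformers import AutoModelForSpeechSeq2Seq

# "auto" picks up the "torch_dtype": "float16" recorded in config.json;
# passing torch.float32 instead would upcast the weights at load time.
model = AutoModelForSpeechSeq2Seq.from_pretrained(
    "eustlb/distil-large-v3-fr",
    torch_dtype="auto",
)
print(model.dtype)  # torch.float16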
generation_config.json
CHANGED
@@ -293,5 +293,5 @@
     "transcribe": 50360,
     "translate": 50359
   },
-  "transformers_version": "4.
+  "transformers_version": "4.42.0.dev0"
 }
model.fp32.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2f42f648b5c9864ad3ed0323b0ca72eb0ad251888e5298061d51d688698ce845
+size 3025686376
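Both safetensors entries are Git LFS pointer files: the oid is the SHA-256 of the file's actual content and size is its byte length, so a completed download can be checked against the pointer. A small verification sketch (the local path is an assumption about where the file was downloaded):

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # stream the file in 1 MiB chunks so the ~3 GB checkpoint
    # never has to sit in memory all at once
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "2f42f648b5c9864ad3ed0323b0ca72eb0ad251888e5298061d51d688698ce845"
assert sha256_of("model.fp32.safetensors") == expected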
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:3127829c28bfa127e4e18522deb8f1637ff1d9714ee0466b7784dd09d488266d
+size 1512874472
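At 3,025,686,376 bytes, model.fp32.safetensors is roughly twice the 1,512,874,472-byte fp16 model.safetensors, as expected for full- versus half-precision weights. The added file also follows the model.<variant>.safetensors naming that the variant argument of from_pretrained resolves, so the fp32 weights should be loadable as below (a sketch, assuming that convention is intended here):

import torch
from transformers import AutoModelForSpeechSeq2Seq

# variant="fp32" resolves model.fp32.safetensors
# instead of the default model.safetensors
model = AutoModelForSpeechSeq2Seq.from_pretrained(
    "eustlb/distil-large-v3-fr",
    variant="fp32",
    torch_dtype=torch.float32,
)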