File size: 91 Bytes
{
  "_from_model_config": true,
  "transformers_version": "4.41.1",
  "use_cache": false
}
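The fields above (`_from_model_config`, `transformers_version`, `use_cache`) match the layout of a Hugging Face `generation_config.json`. As a minimal sketch assuming that is the case, the file can be loaded, inspected, and rewritten with `transformers.GenerationConfig`; the repository id and output directory below are placeholders, not taken from the original.

    # Minimal sketch, assuming this is a standard transformers generation_config.json.
    from transformers import GenerationConfig

    # "my-org/my-model" is a placeholder model repo or local directory.
    config = GenerationConfig.from_pretrained("my-org/my-model")
    print(config.use_cache)             # False, per the file above
    print(config.transformers_version)  # "4.41.1"

    # Changing a field and saving writes an updated generation_config.json
    # to the given directory (placeholder path).
    config.use_cache = True
    config.save_pretrained("./my-model-local")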