Runtime error
Exit code: 1. Reason: in [False] + ([True] if CUDA_AVAILABLE else [])}
  File "/home/user/app/app.py", line 12, in <dictcomp>
    models = {gpu: KModel().to('cuda' if gpu else 'cpu').eval() for gpu in [False] + ([True] if CUDA_AVAILABLE else [])}
  File "/usr/local/lib/python3.10/site-packages/kokoro/model.py", line 67, in __init__
    model = hf_hub_download(repo_id=repo_id, filename=KModel.MODEL_NAMES[repo_id])
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 114, in _inner_fn
    return fn(*args, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/file_download.py", line 862, in hf_hub_download
    return _hf_hub_download_to_cache_dir(
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/file_download.py", line 1011, in _hf_hub_download_to_cache_dir
    _download_to_tmp_and_move(
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/file_download.py", line 1547, in _download_to_tmp_and_move
    http_get(
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/file_download.py", line 371, in http_get
    r = _request_wrapper(
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/file_download.py", line 303, in _request_wrapper
    response = get_session().request(method=method, url=url, **params)
  File "/usr/local/lib/python3.10/site-packages/requests/sessions.py", line 589, in request
    resp = self.send(prep, **send_kwargs)
  File "/usr/local/lib/python3.10/site-packages/requests/sessions.py", line 703, in send
    r = adapter.send(request, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_http.py", line 96, in send
    return super().send(request, *args, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/requests/adapters.py", line 682, in send
    raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: (ProtocolError('Connection aborted.', RemoteDisconnected('Remote end closed connection without response')), '(Request ID: 268b8f1c-0b2a-46fe-b14e-c9044c50472c)')
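The crash happens at startup: building the models dict calls KModel(), which downloads the model weights via hf_hub_download, and the connection to the Hub was dropped mid-request, so the container exits before the app ever serves. Below is a minimal sketch of one way to tolerate that: retry the model construction a few times before giving up. It assumes KModel can be imported from the kokoro package (as the traceback suggests) and that CUDA_AVAILABLE comes from torch.cuda.is_available(); the load_model helper and its retry/delay parameters are hypothetical, not part of the original app.

```python
import time

import requests
import torch
from kokoro import KModel  # assumption: KModel is exported by the kokoro package

CUDA_AVAILABLE = torch.cuda.is_available()


def load_model(retries: int = 3, delay: float = 5.0) -> KModel:
    """Construct KModel, retrying on transient Hub connection errors."""
    for attempt in range(1, retries + 1):
        try:
            # KModel() downloads its weights via hf_hub_download on first use
            return KModel()
        except requests.exceptions.ConnectionError:
            if attempt == retries:
                raise  # give up after the last attempt
            time.sleep(delay)  # brief back-off before retrying


# Mirrors the comprehension from the traceback, but with the retrying loader.
models = {
    gpu: load_model().to('cuda' if gpu else 'cpu').eval()
    for gpu in [False] + ([True] if CUDA_AVAILABLE else [])
}
```

A retry like this only helps when the disconnect is transient; a sustained Hub outage will still surface the same requests.exceptions.ConnectionError after the final attempt.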