runtime error

e-packages/fsspec/asyn.py", line 61, in _runner
    result[0] = await coro
  File "/home/user/.local/lib/python3.10/site-packages/gradio/examples.py", line 258, in create
    await self.cache()
  File "/home/user/.local/lib/python3.10/site-packages/gradio/examples.py", line 292, in cache
    prediction = await Context.root_block.process_api(
  File "/home/user/.local/lib/python3.10/site-packages/gradio/blocks.py", line 991, in process_api
    result = await self.call_function(fn_index, inputs, iterator)
  File "/home/user/.local/lib/python3.10/site-packages/gradio/blocks.py", line 833, in call_function
    prediction = await anyio.to_thread.run_sync(
  File "/home/user/.local/lib/python3.10/site-packages/anyio/to_thread.py", line 33, in run_sync
    return await get_asynclib().run_sync_in_worker_thread(
  File "/home/user/.local/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 877, in run_sync_in_worker_thread
    return await future
  File "/home/user/.local/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 807, in run
    result = context.run(func, *args)
  File "/home/user/.local/lib/python3.10/site-packages/gradio/external.py", line 279, in query_huggingface_api
    raise ValueError(
ValueError: Could not complete request to HuggingFace API, Status Code: 500, Error: unknown error, Warnings: ['CUDA out of memory. Tried to allocate 26.00 MiB (GPU 0; 14.76 GiB total capacity; 868.52 MiB already allocated; 6.75 MiB free; 880.00 MiB reserved in total by PyTorch) If reserved memory is >> allocated memory try setting max_split_size_mb to avoid fragmentation. See documentation for Memory Management and PYTORCH_CUDA_ALLOC_CONF', 'There was an inference error: CUDA out of memory. Tried to allocate 26.00 MiB (GPU 0; 14.76 GiB total capacity; 868.52 MiB already allocated; 6.75 MiB free; 880.00 MiB reserved in total by PyTorch) If reserved memory is >> allocated memory try setting max_split_size_mb to avoid fragmentation. See documentation for Memory Management and PYTORCH_CUDA_ALLOC_CONF']
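The warnings in the ValueError point at the remedy PyTorch itself suggests: setting max_split_size_mb via PYTORCH_CUDA_ALLOC_CONF to reduce allocator fragmentation. A minimal sketch of how that could be applied in the Space's own code, assuming the variable is set before torch initializes CUDA; the 128 MiB value is an illustrative assumption, not taken from the log:

    import os

    # PYTORCH_CUDA_ALLOC_CONF is the knob named in the error message above.
    # It must be in the environment before CUDA is initialized, so it is set
    # before torch is imported. max_split_size_mb:128 is an assumed example
    # value; tune it for the actual workload.
    os.environ.setdefault("PYTORCH_CUDA_ALLOC_CONF", "max_split_size_mb:128")

    import torch  # noqa: E402  (import deliberately placed after the env var)

Note that in this traceback the OOM is raised on the hosted inference side (gradio's query_huggingface_api returns a 500 while the Space caches its examples), so the allocator setting only helps when the model runs inside the Space itself rather than behind the HuggingFace API.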
