runtime error

.py", line 223, in get_config_dict config_file = hf_hub_download( File "/home/user/.local/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 118, in _inner_fn return fn(*args, **kwargs) File "/home/user/.local/lib/python3.10/site-packages/huggingface_hub/file_download.py", line 1217, in hf_hub_download headers = build_hf_headers( File "/home/user/.local/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 118, in _inner_fn return fn(*args, **kwargs) File "/home/user/.local/lib/python3.10/site-packages/huggingface_hub/utils/_headers.py", line 121, in build_hf_headers token_to_send = get_token_to_send(token) File "/home/user/.local/lib/python3.10/site-packages/huggingface_hub/utils/_headers.py", line 153, in get_token_to_send raise LocalTokenNotFoundError( huggingface_hub.utils._headers.LocalTokenNotFoundError: Token is required (`token=True`), but no token found. You need to provide a token or be logged in to Hugging Face with `huggingface-cli login` or `huggingface_hub.login`. See https://huggingface.co/settings/tokens. During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/home/user/app/app.py", line 19, in <module> pipe = StableDiffusionPipeline.from_pretrained("CompVis/stable-diffusion-v1-4", use_auth_token=True, revision="fp16", torch_dtype=torch.float16).to("cuda") File "/home/user/.local/lib/python3.10/site-packages/diffusers/pipeline_utils.py", line 345, in from_pretrained config_dict = cls.get_config_dict( File "/home/user/.local/lib/python3.10/site-packages/diffusers/configuration_utils.py", line 268, in get_config_dict raise EnvironmentError( OSError: Can't load config for 'CompVis/stable-diffusion-v1-4'. If you were trying to load it from 'https://huggingface.co/models', make sure you don't have a local directory with the same name. Otherwise, make sure 'CompVis/stable-diffusion-v1-4' is the correct path to a directory containing a model_index.json file
