Runtime error

    ctx.invoke(self.callback, **ctx.params)
  File "/home/user/.local/lib/python3.9/site-packages/click/core.py", line 783, in invoke
    return __callback(*args, **kwargs)
  File "/home/user/.local/lib/python3.9/site-packages/chainlit/cli/__init__.py", line 153, in chainlit_run
    run_chainlit(target)
  File "/home/user/.local/lib/python3.9/site-packages/chainlit/cli/__init__.py", line 47, in run_chainlit
    load_module(config.run.module_name)
  File "/home/user/.local/lib/python3.9/site-packages/chainlit/config.py", line 242, in load_module
    spec.loader.exec_module(module)
  File "<frozen importlib._bootstrap_external>", line 850, in exec_module
  File "<frozen importlib._bootstrap>", line 228, in _call_with_frames_removed
  File "app.py", line 33, in <module>
    index = GPTVectorStoreIndex.from_documents(documents)
  File "/home/user/.local/lib/python3.9/site-packages/llama_index/indices/base.py", line 92, in from_documents
    service_context = service_context or ServiceContext.from_defaults()
  File "/home/user/.local/lib/python3.9/site-packages/llama_index/indices/service_context.py", line 140, in from_defaults
    llm_predictor = llm_predictor or LLMPredictor(llm=llm)
  File "/home/user/.local/lib/python3.9/site-packages/llama_index/llm_predictor/base.py", line 91, in __init__
    self._llm = resolve_llm(llm)
  File "/home/user/.local/lib/python3.9/site-packages/llama_index/llms/utils.py", line 40, in resolve_llm
    llm = LlamaCPP(
  File "/home/user/.local/lib/python3.9/site-packages/llama_index/llms/llama_cpp.py", line 110, in __init__
    self._model = Llama(model_path=model_path, **model_kwargs)
  File "/home/user/.local/lib/python3.9/site-packages/llama_cpp/llama.py", line 318, in __init__
    self._n_vocab = self.n_vocab()
  File "/home/user/.local/lib/python3.9/site-packages/llama_cpp/llama.py", line 1636, in n_vocab
    return self._model.n_vocab()
  File "/home/user/.local/lib/python3.9/site-packages/llama_cpp/_internals.py", line 67, in n_vocab
    assert self.model is not None
AssertionError

Container logs:

Fetching error logs...