import os

from langchain_community.llms import HuggingFaceEndpoint
from pydantic import ValidationError


def load_llm(repo_id="mistralai/Mistral-7B-Instruct-v0.2",
             token=os.getenv("HUGGINGFACE_API_TOKEN")):
    '''
    Load the LLM from the HuggingFace model hub.

    Args:
        repo_id (str): The HuggingFace model ID
        token (str): The HuggingFace API token; defaults to the
            HUGGINGFACE_API_TOKEN environment variable

    Returns:
        llm (HuggingFaceEndpoint): The LLM model, or None if loading fails
    '''
    try:
        # Pass the token directly to the endpoint rather than through
        # model_kwargs, which is reserved for generation parameters.
        llm = HuggingFaceEndpoint(
            repo_id=repo_id,
            temperature=0.2,
            huggingfacehub_api_token=token,
            model_kwargs={"max_length": 128},
        )
        return llm
    except ValidationError as e:
        print("Validation Error:", e)  # Log or handle the validation error appropriately
        return None
    except Exception as e:
        print("Error:", e)  # Log or handle other exceptions
        return None


def guardrails():
    return None
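
# A minimal usage sketch, assuming HUGGINGFACE_API_TOKEN is set in the
# environment and the endpoint for the default repo_id is reachable.
# The prompt string is illustrative only.
if __name__ == "__main__":
    llm = load_llm()
    if llm is not None:
        print(llm.invoke("Explain what a large language model is in one sentence."))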