import os

from langchain_community.llms import HuggingFaceEndpoint
from pydantic import ValidationError


def load_llm(repo_id="mistralai/Mistral-7B-Instruct-v0.2", token=None):
    '''
    Load the LLM from the HuggingFace model hub.

    Args:
        repo_id (str): The HuggingFace model ID
        token (str, optional): HuggingFace API token; falls back to the
            HUGGINGFACE_API_TOKEN environment variable when not provided

    Returns:
        llm (HuggingFaceEndpoint): The LLM model, or None if loading fails
    '''
    try:
        # Prefer an explicitly passed token, otherwise read it from the environment
        api_token = token or os.getenv("HUGGINGFACE_API_TOKEN")
        llm = HuggingFaceEndpoint(
            repo_id=repo_id,
            max_length=128,
            temperature=0.2,
            huggingfacehub_api_token=api_token,
        )
        return llm
    except ValidationError as e:
        print("Validation Error:", e)
        # Log or handle the validation error appropriately
        return None
    except Exception as e:
        print("Error:", e)
        # Log or handle other exceptions
        return None


def guardrails():
    # Placeholder: guardrail logic is not implemented yet
    return None
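

# Illustrative usage sketch (not part of the original module): running the file
# directly loads the default endpoint and sends it a sample prompt. This assumes
# HUGGINGFACE_API_TOKEN is set in the environment; the prompt text is arbitrary.
if __name__ == "__main__":
    llm = load_llm()
    if llm is not None:
        # invoke() sends a single prompt string to the endpoint and returns the completion
        print(llm.invoke("What is retrieval-augmented generation?"))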