from langchain_core.language_models.llms import LLM
from langchain_core.callbacks.manager import CallbackManagerForLLMRun
from typing import Any, List, Optional


class FakeLLM(LLM):
    """A fake LLM that echoes the prompt back and returns nothing of value;
    a temporary class designed to work as a stand-in in LangChain.
    """

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        # Stop sequences are not supported by this fake model.
        if stop is not None:
            raise ValueError("stop kwargs are not permitted.")
        # Simply echo the prompt back as the "completion".
        return prompt

    @property
    def _llm_type(self) -> str:
        """Get the type of language model used by this chat model.
        Used for logging purposes only."""
        return "custom"
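

# A minimal usage sketch (an addition, not part of the original snippet):
# because _call echoes the prompt, FakeLLM can be dropped into a chain to
# verify wiring without calling a real model. `invoke` is inherited from the
# Runnable interface that LLM implements.
if __name__ == "__main__":
    llm = FakeLLM()
    print(llm.invoke("Hello, world!"))  # prints the prompt back: "Hello, world!"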