# Scraped from a Hugging Face Space (page header read "Spaces: Runtime error").
from llama_index.llms.openai_like import OpenAILike
from llama_index.llms.openai import OpenAI
from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI

from args import LLMInterface, Args
class LLMFactory:
    """Factory that builds a llama-index LLM client for a configured backend.

    All connection settings (model name, API base/key, HF token) are read from
    the module-level ``Args`` object; only the system prompt and optional
    sampling knobs are passed per call.
    """

    @classmethod
    def create(cls, interface: LLMInterface, system_prompt, temperature=None, max_tokens=None):
        """Return an LLM instance for *interface*.

        Args:
            interface: Backend selector (OPENAILIKE, OPENAI or HUGGINGFACE).
            system_prompt: System prompt forwarded to the client.
            temperature: Optional sampling temperature; backend default if None.
            max_tokens: Optional completion-length cap; backend default if None.

        Raises:
            ValueError: If *interface* is not a supported value.
        """
        # BUG FIX: `create` used `cls` without @classmethod and the helpers
        # below were plain methods without @staticmethod, so calling this
        # factory as LLMFactory.create(interface, prompt) mis-bound arguments
        # and crashed at runtime. Decorators restore the intended call shape.
        if interface == LLMInterface.OPENAILIKE:
            return cls._openailike_create(system_prompt, temperature, max_tokens)
        elif interface == LLMInterface.OPENAI:
            return cls._openai_create(system_prompt, temperature, max_tokens)
        elif interface == LLMInterface.HUGGINGFACE:
            return cls._hf_create(system_prompt, temperature, max_tokens)
        else:
            raise ValueError(f"Interface '{interface}' is not supported !")

    @staticmethod
    def _openailike_create(system_prompt, temperature=None, max_tokens=None):
        """Build an OpenAILike client (OpenAI-compatible server at Args.api_base)."""
        kwargs = {
            "model": Args.model_name,
            "api_base": Args.api_base,
            "api_key": Args.api_key,
            "system_prompt": system_prompt,
        }
        # Forward optional knobs only when explicitly set, so the client's
        # own defaults apply otherwise.
        if temperature is not None:
            kwargs["temperature"] = temperature
        if max_tokens is not None:
            kwargs["max_tokens"] = max_tokens
        return OpenAILike(**kwargs)

    @staticmethod
    def _openai_create(system_prompt, temperature=None, max_tokens=None):
        """Build an official OpenAI client from Args settings."""
        kwargs = {
            "model": Args.model_name,
            "api_key": Args.api_key,
            "system_prompt": system_prompt,
        }
        if temperature is not None:
            kwargs["temperature"] = temperature
        if max_tokens is not None:
            kwargs["max_tokens"] = max_tokens
        return OpenAI(**kwargs)

    @staticmethod
    def _hf_create(system_prompt, temperature=None, max_tokens=None):
        """Build a Hugging Face Inference API client from Args settings."""
        kwargs = {
            "model_name": Args.model_name,
            "system_prompt": system_prompt,
            "token": Args.token,
        }
        if temperature is not None:
            kwargs["temperature"] = temperature
        if max_tokens is not None:
            kwargs["max_tokens"] = max_tokens
        return HuggingFaceInferenceAPI(**kwargs)