import sys
sys.path.append("../llm")
from llm.wenxin_llm import Wenxin_LLM
from llm.spark_llm import Spark_LLM
from llm.zhipuai_llm import ZhipuAILLM
from langchain.chat_models import ChatOpenAI
from llm.call_llm import parse_llm_api_key


def model_to_llm(model: str = None, temperature: float = 0.0, appid: str = None, api_key: str = None, Spark_api_secret: str = None, Wenxin_secret_key: str = None):
    """
    Build an LLM instance from a model name.
    Spark (星火): model, temperature, appid, api_key, Spark_api_secret
    Baidu Wenxin (百度文心): model, temperature, api_key, Wenxin_secret_key
    ZhipuAI (智谱): model, temperature, api_key
    OpenAI: model, temperature, api_key
    """
if model in ["gpt-3.5-turbo", "gpt-3.5-turbo-16k-0613", "gpt-3.5-turbo-0613", "gpt-4", "gpt-4-32k"]:
if api_key == None:
api_key = parse_llm_api_key("openai")
llm = ChatOpenAI(model_name = model, temperature = temperature , openai_api_key = api_key)
elif model in ["ERNIE-Bot", "ERNIE-Bot-4", "ERNIE-Bot-turbo"]:
if api_key == None or Wenxin_secret_key == None:
api_key, Wenxin_secret_key = parse_llm_api_key("wenxin")
llm = Wenxin_LLM(model=model, temperature = temperature, api_key=api_key, secret_key=Wenxin_secret_key)
elif model in ["Spark-1.5", "Spark-2.0"]:
if api_key == None or appid == None and Spark_api_secret == None:
api_key, appid, Spark_api_secret = parse_llm_api_key("spark")
llm = Spark_LLM(model=model, temperature = temperature, appid=appid, api_secret=Spark_api_secret, api_key=api_key)
elif model in ["chatglm_pro", "chatglm_std", "chatglm_lite"]:
if api_key == None:
api_key = parse_llm_api_key("zhipuai")
llm = ZhipuAILLM(model=model, zhipuai_api_key=api_key, temperature = temperature)
else:
raise ValueError(f"model{model} not support!!!")
return llm |
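

# Minimal usage sketch, assuming the provider credentials can be resolved by
# parse_llm_api_key (e.g. from environment variables or a local .env file);
# the chosen model name and the available accounts are assumptions of this
# example, not part of the module's contract.
if __name__ == "__main__":
    # With no api_key passed, the key is looked up via parse_llm_api_key("openai").
    llm = model_to_llm(model="gpt-3.5-turbo", temperature=0.0)
    print(type(llm).__name__)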