Spaces:
Sleeping
Sleeping
File size: 2,125 Bytes
d20eb01 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 |
import os
from datetime import datetime
from langchain import OpenAI
from langchain.chains import load_chain
from langchain.chains.question_answering import load_qa_chain
from langchain.embeddings import OpenAIEmbeddings
from langchain.llms import OpenAIChat
from config.config import MAX_TOKENS, TOOLS_DEFAULT_LIST
def set_openai_api_key(api_key, use_gpt4):
    """Validate the OpenAI API key and build the LLM, chains, and embeddings.

    Parameters:
        api_key: Candidate OpenAI key; must start with "sk-" and be longer
            than 50 characters to be accepted.
        use_gpt4: When True, back both the main chain and the QA chain with
            the "gpt-4" chat model; otherwise use "text-davinci-003".

    Returns:
        A 7-tuple (chain, express_chain, llm, embeddings, qa_chain, memory,
        use_gpt4), or (None, None, None, None, None, None, None) when the
        key fails validation.

    Side effects:
        Temporarily exports OPENAI_API_KEY into the environment while the
        chains are constructed, then clears it before returning.
    """
    # Guard clause: reject malformed keys before touching the environment.
    if not (api_key and api_key.startswith("sk-") and len(api_key) > 50):
        return None, None, None, None, None, None, None

    os.environ["OPENAI_API_KEY"] = api_key
    print("\n\n ++++++++++++++ Setting OpenAI API key ++++++++++++++ \n\n")
    print(str(datetime.now()) + ": Before OpenAI, OPENAI_API_KEY length: " + str(
        len(os.environ["OPENAI_API_KEY"])))

    if use_gpt4:
        llm = OpenAIChat(temperature=0, max_tokens=MAX_TOKENS, model_name="gpt-4")
        print("Trying to use llm OpenAIChat with gpt-4")
    else:
        print("Trying to use llm OpenAI with text-davinci-003")
        llm = OpenAI(temperature=0, max_tokens=MAX_TOKENS, model_name="text-davinci-003")

    print(str(datetime.now()) + ": After OpenAI, OPENAI_API_KEY length: " + str(
        len(os.environ["OPENAI_API_KEY"])))

    chain, express_chain, memory = load_chain(TOOLS_DEFAULT_LIST, llm)

    # Pertains to question answering functionality
    embeddings = OpenAIEmbeddings()
    if use_gpt4:
        qa_chain = load_qa_chain(OpenAIChat(temperature=0, model_name="gpt-4"), chain_type="stuff")
        print("Trying to use qa_chain OpenAIChat with gpt-4")
    else:
        print("Trying to use qa_chain OpenAI with text-davinci-003")
        # BUG FIX: this branch previously assigned a bare OpenAI LLM to
        # qa_chain (not a chain), unlike the gpt-4 branch above. Wrap the
        # LLM in load_qa_chain so both branches yield a real QA chain.
        qa_chain = load_qa_chain(
            OpenAI(temperature=0, max_tokens=MAX_TOKENS, model_name="text-davinci-003"),
            chain_type="stuff")

    print(str(datetime.now()) + ": After load_chain, OPENAI_API_KEY length: " + str(
        len(os.environ["OPENAI_API_KEY"])))
    # Scrub the key from the environment now that the chains hold it.
    os.environ["OPENAI_API_KEY"] = ""

    return chain, express_chain, llm, embeddings, qa_chain, memory, use_gpt4
|