Added gpt-4 support
app.py
CHANGED
@@ -131,12 +131,12 @@ def get_response(query, chat_history, CRqa):
     return result['answer'], result['source_documents']
 
 
-def setup_em_llm(OPENAI_API_KEY, temperature):
+def setup_em_llm(OPENAI_API_KEY, temperature, r_llm):
     # Set up OpenAI embeddings
     embeddings = OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY)
-    # Use Open AI LLM with gpt-3.5-turbo.
+    # Use Open AI LLM with gpt-3.5-turbo or gpt-4.
     # Set the temperature to be 0 if you do not want it to make up things
-    llm = ChatOpenAI(temperature=temperature, model_name=
+    llm = ChatOpenAI(temperature=temperature, model_name=r_llm, streaming=True,
                      openai_api_key=OPENAI_API_KEY)
     return embeddings, llm
 
@@ -168,6 +168,7 @@ def main(pinecone_index_name, chroma_collection_name, persist_directory, docsear
         r_pinecone = st.radio('Use Pinecone?', ('Yes', 'No'))
         r_ingest = st.radio(
             'Ingest file(s)?', ('Yes', 'No'))
+        r_llm = st.multiselect('LLM:', ['gpt-3.5-turbo', 'gpt-4'], 'gpt-3.5-turbo')
     with col2:
         OPENAI_API_KEY = st.text_input(
             "OpenAI API key:", type="password")
@@ -175,7 +176,7 @@ def main(pinecone_index_name, chroma_collection_name, persist_directory, docsear
         k_sources = st.slider('# source(s) to print out', 0, 20, 2)
     with col3:
         if OPENAI_API_KEY:
-            embeddings, llm = setup_em_llm(OPENAI_API_KEY, temperature)
+            embeddings, llm = setup_em_llm(OPENAI_API_KEY, temperature, r_llm)
            if r_pinecone.lower() == 'yes':
                use_pinecone = True
                PINECONE_API_KEY = st.text_input(
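For context, a minimal self-contained sketch of how the changed pieces fit together, assuming LangChain's OpenAIEmbeddings/ChatOpenAI and Streamlit. The model picker is written here as a single-choice st.selectbox so that model_name stays a plain string; the commit itself uses st.multiselect, and the surrounding layout below is illustrative only, not the app's actual main():

import streamlit as st
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.chat_models import ChatOpenAI

def setup_em_llm(OPENAI_API_KEY, temperature, r_llm):
    # Set up OpenAI embeddings.
    embeddings = OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY)
    # Use the model chosen in the UI (gpt-3.5-turbo or gpt-4);
    # temperature 0 keeps the answers from making things up.
    llm = ChatOpenAI(temperature=temperature, model_name=r_llm, streaming=True,
                     openai_api_key=OPENAI_API_KEY)
    return embeddings, llm

# Hypothetical single-column layout for illustration.
r_llm = st.selectbox('LLM:', ['gpt-3.5-turbo', 'gpt-4'])
OPENAI_API_KEY = st.text_input("OpenAI API key:", type="password")
temperature = st.slider('Temperature', 0.0, 1.0, 0.0)
if OPENAI_API_KEY:
    embeddings, llm = setup_em_llm(OPENAI_API_KEY, temperature, r_llm)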