firqaaa committed on
Commit
2f17205
1 Parent(s): ed150f8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -68,7 +68,7 @@ class Journal:
68
  def __repr__(self):
69
  return f"Journal(name='{self.name}', bytes='{self.bytes}')"
70
 
71
- llm = ChatOpenAI(temperature=0, model="gpt-3.5-turbo-1106")
72
 
73
  textex_chain = create_extraction_chain(textex_schema, llm)
74
  tablex_chain = create_extraction_chain(tablex_schema, llm)
@@ -198,7 +198,7 @@ if uploaded_files:
198
  embeddings = OpenAIEmbeddings()
199
 
200
  db = Chroma.from_documents(docs, embeddings)
201
- llm_table = ChatOpenAI(model_name="gpt-3.5-turbo-1106", temperature=0)
202
  qa_chain = RetrievalQA.from_chain_type(llm_table, retriever=db.as_retriever())
203
 
204
  # List of questions
@@ -652,7 +652,7 @@ if uploaded_files:
652
  embeddings = OpenAIEmbeddings()
653
 
654
  db = Chroma.from_documents(docs, embeddings)
655
- llm_table = ChatOpenAI(model_name="gpt-3.5-turbo-1106", temperature=0)
656
  qa_chain = RetrievalQA.from_chain_type(llm_table, retriever=db.as_retriever())
657
 
658
  # List of questions
 
68
  def __repr__(self):
69
  return f"Journal(name='{self.name}', bytes='{self.bytes}')"
70
 
71
+ llm = ChatOpenAI(temperature=0, model="gpt-3.5-turbo-16k")
72
 
73
  textex_chain = create_extraction_chain(textex_schema, llm)
74
  tablex_chain = create_extraction_chain(tablex_schema, llm)
 
198
  embeddings = OpenAIEmbeddings()
199
 
200
  db = Chroma.from_documents(docs, embeddings)
201
+ llm_table = ChatOpenAI(model_name="gpt-3.5-turbo-16k", temperature=0)
202
  qa_chain = RetrievalQA.from_chain_type(llm_table, retriever=db.as_retriever())
203
 
204
  # List of questions
 
652
  embeddings = OpenAIEmbeddings()
653
 
654
  db = Chroma.from_documents(docs, embeddings)
655
+ llm_table = ChatOpenAI(model_name="gpt-3.5-turbo-16k", temperature=0)
656
  qa_chain = RetrievalQA.from_chain_type(llm_table, retriever=db.as_retriever())
657
 
658
  # List of questions