ProfessorLeVesseur committed
Commit c00ac5b · verified · 1 Parent(s): 5cb4ba3

Update app.py

Files changed (1)
  1. app.py +7 -3
app.py CHANGED
@@ -47,15 +47,19 @@ def calculate_embedding_cost(text):
     return total_tokens, total_tokens / 1000 * 0.0004


+# Define the metadata for filtering
+metadata = {'source': 'https://mimtsstac.org/sites/default/files/session-documents/Intensifying%20Literacy%20Instruction%20-%20Essential%20Practices%20%28NATIONAL%29.pdf'}
+
+metadata_filter = {"source": {"$eq": metadata['source']}}
+
 def ask_with_memory(vector_store, query, chat_history=[]):
     from langchain.chains import ConversationalRetrievalChain
     from langchain.chat_models import ChatOpenAI

     llm = ChatOpenAI(model_name='gpt-3.5-turbo', temperature=1, openai_api_key=openai_api_key)
-
     # The retriever is created with metadata filter directly in search_kwargs
-    retriever = vector_store.as_retriever(search_type='similarity', search_kwargs={'k': 3, 'filter': {'source': {'$eq': 'https://mimtsstac.org/sites/default/files/session-documents/Intensifying%20Literacy%20Instruction%20-%20Essential%20Practices%20%28NATIONAL%29.pdf'}}})
-
+    retriever = vector_store.as_retriever(search_type='similarity', search_kwargs={'k': 3, 'metadata': metadata_filter})
+
     chain= ConversationalRetrievalChain.from_llm(llm, retriever)
     result = chain({'question': query, 'chat_history': st.session_state['history']})
     # Append to chat history as a dictionary
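For reference, below is a minimal sketch of what the new module-level `metadata_filter` expresses: an `$eq` restriction on the `source` metadata field, shown here applied directly to a similarity search. The store construction, the query text, and the `similarity_search` call are illustrative assumptions (this hunk only shows the retriever setup); the sketch presumes a Pinecone- or Chroma-style `vector_store` whose chunks carry the `source` metadata recorded at ingestion time.

```python
# Hedged sketch, not part of the commit: how the same '$eq' metadata filter
# restricts a similarity search when applied directly to the store.
# Assumptions: `vector_store` is a LangChain Pinecone or Chroma store built
# elsewhere in app.py, and each chunk's metadata includes the 'source' URL.
source_url = (
    'https://mimtsstac.org/sites/default/files/session-documents/'
    'Intensifying%20Literacy%20Instruction%20-%20Essential%20Practices%20%28NATIONAL%29.pdf'
)

# Same operator shape as the module-level metadata_filter added in this commit.
metadata_filter = {'source': {'$eq': source_url}}

# Only chunks whose 'source' metadata equals the URL are considered;
# the query text is invented for illustration.
docs = vector_store.similarity_search(
    'What are the essential practices for intensifying literacy instruction?',
    k=3,
    filter=metadata_filter,
)
for doc in docs:
    print(doc.metadata.get('source'), doc.page_content[:80])
```

Defining `metadata` and `metadata_filter` once at module level, as this commit does, keeps the long source URL out of the retriever call inside `ask_with_memory` and makes the same restriction reusable by any other retrieval path in app.py.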