ryanrwatkins committed on
Commit
53b5315
1 Parent(s): c61a6f8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +30 -10
app.py CHANGED
@@ -842,30 +842,50 @@ questions = ["what does DTC stands for?",
842
 
843
  # Instantiate the retriever and the ConversationalRetrievalChain :
844
 
 
845
  retriever_HF = retrieval_blocks(
846
  create_vectorstore=False,
847
- LLM_service="Huggingface",
848
  vectorstore_name="Vit_All_HF_Embeddings",
849
  retriever_type="Cohere_reranker",
850
- base_retriever_search_type="similarity", base_retriever_k=12,
851
- compression_retriever_k=16,
852
  cohere_api_key=cohere_api_key,cohere_top_n=10,
853
  )
854
 
855
- chain_gemini,memory_gemini = ConversationalRetrievalChain(
856
  llm = instantiate_LLM(
857
- LLM_provider="Google",api_key=google_api_key,temperature=0.5,model_name="gemini-pro"
 
858
  ),
859
  condense_question_llm = instantiate_LLM(
860
- LLM_provider="Google",api_key=google_api_key,temperature=0.1,model_name="gemini-pro"),
861
- retriever=base_retriever_HF,
 
 
862
  language="english",
863
- llm_provider="Google",
864
- model_name="gemini-pro"
865
  )
866
 
867
 
868
- memory_gemini.clear()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
869
 
870
  for i,question in enumerate(questions):
871
  response = chain_gemini.invoke({"question":question})
 
842
 
843
  # Instantiate the retriever and the ConversationalRetrievalChain :
844
 
845
+
846
  retriever_HF = retrieval_blocks(
847
  create_vectorstore=False,
848
+ LLM_service="HuggingFace",
849
  vectorstore_name="Vit_All_HF_Embeddings",
850
  retriever_type="Cohere_reranker",
851
+ base_retriever_search_type="similarity", base_retriever_k=16,
852
+ compression_retriever_k=20,
853
  cohere_api_key=cohere_api_key,cohere_top_n=10,
854
  )
855
 
856
+ chain_HF,memory_HF = custom_ConversationalRetrievalChain(
857
  llm = instantiate_LLM(
858
+ LLM_provider="HuggingFace",api_key=HF_key,temperature=0.5,
859
+ model_name="mistralai/Mistral-7B-Instruct-v0.2"
860
  ),
861
  condense_question_llm = instantiate_LLM(
862
+ LLM_provider="HuggingFace",api_key=HF_key,temperature=0.5,
863
+ model_name="mistralai/Mistral-7B-Instruct-v0.2"
864
+ ),
865
+ retriever=retriever_HF,
866
  language="english",
867
+ llm_provider="HuggingFace",
868
+ model_name="Mistral-7B-Instruct-v0.2"
869
  )
870
 
871
 
872
+ memory_HF.clear()
873
+
874
+ response = chain_HF.invoke({"question":questions[0]})
875
+
876
+ answer = response['answer']
877
+ answer = answer[answer.find("\nAnswer: ")+len("\nAnswer: "):]
878
+
879
+ gr.markdown("**Question:** "+questions[0]+"\n\n"\
880
+ +"**Standalone_question:**"+response['standalone_question']+"\n\n"\
881
+ +"**Answer:** "+answer)
882
+
883
+
884
+
885
+
886
+
887
+
888
+
889
 
890
  for i,question in enumerate(questions):
891
  response = chain_gemini.invoke({"question":question})