ryanrwatkins committed
Commit: 98fd77d
Parent: 7a61c37

Update app.py

Files changed (1):
  app.py  +3 −3
app.py CHANGED
@@ -549,7 +549,7 @@ and has {vector_store._collection.count()} chunks.")
 # Can use any of these LLMs for responses, for now I am Gemini-Pro for the bot (this is for responses now, not embeddings)
 
 
-def instantiate_LLM(LLM_provider,api_key,temperature=0.7,top_p=0.95,model_name=None):
+def instantiate_LLM(LLM_provider,api_key,temperature=0.8,top_p=0.95,model_name=None):
     """Instantiate LLM in Langchain.
     Parameters:
         LLM_provider (str): the LLM provider; in ["OpenAI","Google","HuggingFace"]
@@ -688,13 +688,13 @@ chain = ConversationalRetrievalChain.from_llm(
         template=standalone_question_template),
     combine_docs_chain_kwargs={'prompt': ChatPromptTemplate.from_template(answer_template())},
     condense_question_llm=instantiate_LLM(
-        LLM_provider="Google",api_key=google_api_key,temperature=0.1,
+        LLM_provider="Google",api_key=google_api_key,temperature=0.3,
         model_name="gemini-pro"),
     memory=create_memory("gemini-pro"),
     retriever = compression_retriever_HF,
     #retriever = base_retriever_HF, #base_retriever_HF
     llm=instantiate_LLM(
-        LLM_provider="Google",api_key=google_api_key,temperature=0.7,
+        LLM_provider="Google",api_key=google_api_key,temperature=0.8,
         model_name="gemini-pro"),
     chain_type= "stuff",
     verbose= True,
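
The body of instantiate_LLM is not included in these hunks; as a rough sketch only (the import path, branch logic, and error handling below are assumptions, not taken from this commit), the Google branch of such a helper typically wraps LangChain's ChatGoogleGenerativeAI, using the new default temperature of 0.8 introduced here:

# Sketch (assumption): a possible Google branch for instantiate_LLM.
# Only the signature and the temperature/top_p defaults come from the diff above.
from langchain_google_genai import ChatGoogleGenerativeAI

def instantiate_LLM(LLM_provider, api_key, temperature=0.8, top_p=0.95, model_name=None):
    """Instantiate LLM in Langchain."""
    if LLM_provider == "Google":
        # Wrap Gemini-Pro as a LangChain chat model; temperature and top_p
        # are passed straight through from the caller.
        return ChatGoogleGenerativeAI(
            model=model_name or "gemini-pro",
            google_api_key=api_key,
            temperature=temperature,
            top_p=top_p,
        )
    # "OpenAI" and "HuggingFace" branches omitted in this sketch.
    raise ValueError(f"Unsupported LLM_provider: {LLM_provider}")

Under this reading, the commit makes the answering llm slightly more creative (0.7 → 0.8), while the condense_question_llm that rewrites the standalone question runs warmer than before but still relatively deterministic (0.1 → 0.3).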