fsal committed
Commit 60ad63d
1 Parent(s): ef29135

standardize llm in retriever

langchain-streamlit-demo/app.py CHANGED
@@ -148,6 +148,7 @@ def get_texts_and_retriever_cacheable_wrapper(
         chunk_size=chunk_size,
         chunk_overlap=chunk_overlap,
         k=k,
+        model=model,
         azure_kwargs=azure_kwargs,
         use_azure=use_azure,
     )
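
For orientation, here is a minimal sketch of the call site this hunk lives in: a cached Streamlit wrapper that now forwards the user-selected model name to the retriever builder. The decorator, the parameter list, and the llm_resources import path are assumptions; the diff only shows the call gaining model=model.

    from typing import Dict, List, Optional, Tuple

    import streamlit as st
    from langchain.schema import Document
    from langchain.schema.retriever import BaseRetriever

    from llm_resources import get_texts_and_multiretriever  # assumed import path


    @st.cache_resource  # decorator choice is an assumption, not shown in the diff
    def get_texts_and_retriever_cacheable_wrapper(
        uploaded_file_bytes: bytes,  # hypothetical parameter, for illustration only
        chunk_size: int,
        chunk_overlap: int,
        k: int,
        model: str,
        azure_kwargs: Optional[Dict[str, str]] = None,
        use_azure: bool = False,
    ) -> Tuple[List[Document], BaseRetriever]:
        # Forward every knob, including the newly threaded model name,
        # to the uncached builder in llm_resources.py.
        return get_texts_and_multiretriever(
            uploaded_file_bytes=uploaded_file_bytes,
            chunk_size=chunk_size,
            chunk_overlap=chunk_overlap,
            k=k,
            model=model,
            azure_kwargs=azure_kwargs,
            use_azure=use_azure,
        )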
langchain-streamlit-demo/defaults.py CHANGED
@@ -21,9 +21,7 @@ MODEL_DICT = {
 
 SUPPORTED_MODELS = list(MODEL_DICT.keys())
 
-DEFAULT_MODEL = os.environ.get(
-    "DEFAULT_MODEL", "gpt-3.5-turbo"
-)  # "gpt-4-turbo-preview")
+DEFAULT_MODEL = os.environ.get("DEFAULT_MODEL", "gpt-4-turbo-preview")
 
 DEFAULT_SYSTEM_PROMPT = os.environ.get(
     "DEFAULT_SYSTEM_PROMPT",
langchain-streamlit-demo/llm_resources.py CHANGED
@@ -5,6 +5,7 @@ from typing import Dict, List, Optional, Tuple
 from defaults import (
     DEFAULT_CHUNK_OVERLAP,
     DEFAULT_CHUNK_SIZE,
+    DEFAULT_MODEL,
     DEFAULT_RETRIEVER_K,
     DEFAULT_SYSTEM_PROMPT,
 )
@@ -243,6 +244,7 @@ def get_texts_and_multiretriever(
     chunk_size: int = DEFAULT_CHUNK_SIZE,
     chunk_overlap: int = DEFAULT_CHUNK_OVERLAP,
     k: int = DEFAULT_RETRIEVER_K,
+    model: str = DEFAULT_MODEL,
     azure_kwargs: Optional[Dict[str, str]] = None,
     use_azure: bool = False,
 ) -> Tuple[List[Document], BaseRetriever]:
@@ -301,7 +303,7 @@ def get_texts_and_multiretriever(
     multiquerystore = FAISS.from_documents(multiquery_texts, embeddings)
     multiquery_retriever = MultiQueryRetriever.from_llm(
         retriever=multiquerystore.as_retriever(search_kwargs={"k": k}),
-        llm=ChatOpenAI(),
+        llm=ChatOpenAI(model=model, temperature=0.0),
     )
 
     ensemble_retriever = EnsembleRetriever(
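
Taken together, these three hunks thread a single model name from defaults.py into the query-generation LLM used by the multi-query retriever, instead of an unconfigured ChatOpenAI(). Below is a hedged sketch of the resulting wiring; the import paths and the embeddings class follow a recent langchain layout and may differ from what this repo pins, and temperature=0.0 mirrors the diff's choice of deterministic query rewrites.

    from typing import List

    from langchain.retrievers.multi_query import MultiQueryRetriever
    from langchain.schema import Document
    from langchain_community.vectorstores import FAISS
    from langchain_openai import ChatOpenAI, OpenAIEmbeddings

    from defaults import DEFAULT_MODEL, DEFAULT_RETRIEVER_K


    def build_multiquery_retriever(
        multiquery_texts: List[Document],
        model: str = DEFAULT_MODEL,
        k: int = DEFAULT_RETRIEVER_K,
    ) -> MultiQueryRetriever:
        # Index the chunks, then let an LLM rewrite each user question into several
        # variants; that LLM now uses the same model name as the rest of the app.
        embeddings = OpenAIEmbeddings()  # assumed; the repo may use Azure embeddings instead
        multiquerystore = FAISS.from_documents(multiquery_texts, embeddings)
        return MultiQueryRetriever.from_llm(
            retriever=multiquerystore.as_retriever(search_kwargs={"k": k}),
            llm=ChatOpenAI(model=model, temperature=0.0),
        )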