fsal committed on
Commit
9986d52
1 Parent(s): f44052b

fix license and remove azure

Browse files
.env-example DELETED
@@ -1,22 +0,0 @@
1
- APP_PORT=7860
2
-
3
- LANGCHAIN_ENDPOINT="https://api.smith.langchain.com"
4
- LANGCHAIN_API_KEY="ls__..."
5
- LANGCHAIN_TRACING_V2="true"
6
- LANGCHAIN_PROJECT="streamlit_test"
7
-
8
- ANYSCALE_API_KEY="secret_..."
9
- OPENAI_API_KEY="sk-..."
10
- ANTHROPIC_API_KEY="sk-ant-..."
11
-
12
- DEFAULT_SYSTEM_PROMPT="You are a helpful chatbot."
13
-
14
- DEFAULT_MODEL="gpt-3.5-turbo"
15
-
16
- DEFAULT_TEMPERATURE=0.7
17
- MIN_TEMPERATURE=0.0
18
- MAX_TEMPERATURE=1.0
19
-
20
- DEFAULT_MAX_TOKENS=1000
21
- MIN_MAX_TOKENS=1
22
- MAX_MAX_TOKENS=100000
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
LICENSE CHANGED
@@ -1,6 +1,6 @@
1
  MIT License
2
 
3
- Copyright (c) 2023 Federico Sallemi
4
 
5
  Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
6
 
 
1
  MIT License
2
 
3
+ Copyright (c) 2023 Joshua Sundance Bailey
4
 
5
  Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
6
 
langchain-streamlit-demo/app.py CHANGED
@@ -1,13 +1,13 @@
1
  from datetime import datetime
2
- from typing import Tuple, List, Dict, Any, Union, Optional
3
 
4
  import anthropic
5
  import langsmith.utils
6
  import openai
7
  import streamlit as st
 
8
  from langchain.agents import load_tools
9
  from langchain.agents.tools import tool
10
- from langchain.callbacks import StreamlitCallbackHandler
11
  from langchain.callbacks.base import BaseCallbackHandler
12
  from langchain.callbacks.manager import Callbacks
13
  from langchain.callbacks.tracers.langchain import LangChainTracer, wait_for_all_tracers
@@ -16,12 +16,10 @@ from langchain.memory import ConversationBufferMemory, StreamlitChatMessageHisto
16
  from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
17
  from langchain.schema.document import Document
18
  from langchain.schema.retriever import BaseRetriever
19
- from langchain.tools import DuckDuckGoSearchRun, WikipediaQueryRun
20
- from langchain.utilities import WikipediaAPIWrapper
 
21
  from langsmith.client import Client
22
- from streamlit_feedback import streamlit_feedback
23
-
24
- from defaults import default_values
25
  from llm_resources import (
26
  get_agent,
27
  get_doc_agent,
@@ -31,6 +29,7 @@ from llm_resources import (
31
  )
32
  from python_coder import get_agent as get_python_agent
33
  from research_assistant.chain import get_chain as get_research_assistant_chain
 
34
 
35
  __version__ = "2.1.4"
36
 
@@ -99,41 +98,41 @@ def azure_state_or_default(*args):
99
  )
100
 
101
 
102
- azure_state_or_default(
103
- "AZURE_OPENAI_BASE_URL",
104
- "AZURE_OPENAI_API_VERSION",
105
- "AZURE_OPENAI_DEPLOYMENT_NAME",
106
- "AZURE_OPENAI_EMB_DEPLOYMENT_NAME",
107
- "AZURE_OPENAI_API_KEY",
108
- "AZURE_OPENAI_MODEL_VERSION",
109
- )
110
-
111
- st.session_state.AZURE_AVAILABLE = all(
112
- [
113
- st.session_state.AZURE_OPENAI_BASE_URL,
114
- st.session_state.AZURE_OPENAI_API_VERSION,
115
- st.session_state.AZURE_OPENAI_DEPLOYMENT_NAME,
116
- st.session_state.AZURE_OPENAI_API_KEY,
117
- st.session_state.AZURE_OPENAI_MODEL_VERSION,
118
- ],
119
- )
120
-
121
- st.session_state.AZURE_EMB_AVAILABLE = (
122
- st.session_state.AZURE_AVAILABLE
123
- and st.session_state.AZURE_OPENAI_EMB_DEPLOYMENT_NAME
124
- )
125
-
126
- AZURE_KWARGS = (
127
- None
128
- if not st.session_state.AZURE_EMB_AVAILABLE
129
- else {
130
- "openai_api_base": st.session_state.AZURE_OPENAI_BASE_URL,
131
- "openai_api_version": st.session_state.AZURE_OPENAI_API_VERSION,
132
- "deployment": st.session_state.AZURE_OPENAI_EMB_DEPLOYMENT_NAME,
133
- "openai_api_key": st.session_state.AZURE_OPENAI_API_KEY,
134
- "openai_api_type": "azure",
135
- }
136
- )
137
 
138
 
139
  @st.cache_resource
@@ -209,7 +208,7 @@ with sidebar:
209
  help="How many document chunks will be used for context?",
210
  value=default_values.DEFAULT_RETRIEVER_K,
211
  min_value=1,
212
- max_value=10,
213
  )
214
 
215
  chunk_size = st.slider(
@@ -255,14 +254,14 @@ with sidebar:
255
  index=0,
256
  help=chain_type_help,
257
  )
258
- use_azure = st.toggle(
259
- label="Use Azure OpenAI",
260
- value=st.session_state.AZURE_EMB_AVAILABLE,
261
- help="Use Azure for embeddings instead of using OpenAI directly.",
262
- )
263
 
264
  if uploaded_file:
265
- if st.session_state.AZURE_EMB_AVAILABLE or openai_api_key:
266
  (
267
  st.session_state.texts,
268
  st.session_state.retriever,
@@ -272,8 +271,8 @@ with sidebar:
272
  chunk_size=chunk_size,
273
  chunk_overlap=chunk_overlap,
274
  k=k,
275
- azure_kwargs=AZURE_KWARGS,
276
- use_azure=use_azure,
277
  )
278
  else:
279
  st.error("Please enter a valid OpenAI API key.", icon="❌")
@@ -313,11 +312,15 @@ with sidebar:
313
 
314
  # --- LangSmith Options ---
315
  if default_values.SHOW_LANGSMITH_OPTIONS:
316
- with st.expander("LangSmith Options", expanded=False):
 
 
 
317
  st.session_state.LANGSMITH_API_KEY = st.text_input(
318
  "LangSmith API Key (optional)",
319
  value=st.session_state.LANGSMITH_API_KEY,
320
  type="password",
 
321
  )
322
 
323
  st.session_state.LANGSMITH_PROJECT = st.text_input(
@@ -336,38 +339,38 @@ with sidebar:
336
  )
337
 
338
  # --- Azure Options ---
339
- if default_values.SHOW_AZURE_OPTIONS:
340
- with st.expander("Azure Options", expanded=False):
341
- st.session_state.AZURE_OPENAI_BASE_URL = st.text_input(
342
- "AZURE_OPENAI_BASE_URL",
343
- value=st.session_state.AZURE_OPENAI_BASE_URL,
344
- )
345
-
346
- st.session_state.AZURE_OPENAI_API_VERSION = st.text_input(
347
- "AZURE_OPENAI_API_VERSION",
348
- value=st.session_state.AZURE_OPENAI_API_VERSION,
349
- )
350
-
351
- st.session_state.AZURE_OPENAI_DEPLOYMENT_NAME = st.text_input(
352
- "AZURE_OPENAI_DEPLOYMENT_NAME",
353
- value=st.session_state.AZURE_OPENAI_DEPLOYMENT_NAME,
354
- )
355
-
356
- st.session_state.AZURE_OPENAI_EMB_DEPLOYMENT_NAME = st.text_input(
357
- "AZURE_OPENAI_EMB_DEPLOYMENT_NAME",
358
- value=st.session_state.AZURE_OPENAI_EMB_DEPLOYMENT_NAME,
359
- )
360
-
361
- st.session_state.AZURE_OPENAI_API_KEY = st.text_input(
362
- "AZURE_OPENAI_API_KEY",
363
- value=st.session_state.AZURE_OPENAI_API_KEY,
364
- type="password",
365
- )
366
-
367
- st.session_state.AZURE_OPENAI_MODEL_VERSION = st.text_input(
368
- "AZURE_OPENAI_MODEL_VERSION",
369
- value=st.session_state.AZURE_OPENAI_MODEL_VERSION,
370
- )
371
 
372
 
373
  # --- LLM Instantiation ---
@@ -378,13 +381,13 @@ get_llm_args = dict(
378
  temperature=temperature,
379
  max_tokens=max_tokens,
380
  azure_available=st.session_state.AZURE_AVAILABLE,
381
- azure_dict={
382
- "AZURE_OPENAI_BASE_URL": st.session_state.AZURE_OPENAI_BASE_URL,
383
- "AZURE_OPENAI_API_VERSION": st.session_state.AZURE_OPENAI_API_VERSION,
384
- "AZURE_OPENAI_DEPLOYMENT_NAME": st.session_state.AZURE_OPENAI_DEPLOYMENT_NAME,
385
- "AZURE_OPENAI_API_KEY": st.session_state.AZURE_OPENAI_API_KEY,
386
- "AZURE_OPENAI_MODEL_VERSION": st.session_state.AZURE_OPENAI_MODEL_VERSION,
387
- },
388
  )
389
  get_llm_args_temp_zero = get_llm_args | {"temperature": 0.0}
390
  st.session_state.llm = get_llm(**get_llm_args)
 
1
  from datetime import datetime
2
+ from typing import Any, Dict, List, Optional, Tuple, Union
3
 
4
  import anthropic
5
  import langsmith.utils
6
  import openai
7
  import streamlit as st
8
+ from defaults import default_values
9
  from langchain.agents import load_tools
10
  from langchain.agents.tools import tool
 
11
  from langchain.callbacks.base import BaseCallbackHandler
12
  from langchain.callbacks.manager import Callbacks
13
  from langchain.callbacks.tracers.langchain import LangChainTracer, wait_for_all_tracers
 
16
  from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
17
  from langchain.schema.document import Document
18
  from langchain.schema.retriever import BaseRetriever
19
+ from langchain_community.callbacks import StreamlitCallbackHandler
20
+ from langchain_community.tools import DuckDuckGoSearchRun, WikipediaQueryRun
21
+ from langchain_community.utilities import WikipediaAPIWrapper
22
  from langsmith.client import Client
 
 
 
23
  from llm_resources import (
24
  get_agent,
25
  get_doc_agent,
 
29
  )
30
  from python_coder import get_agent as get_python_agent
31
  from research_assistant.chain import get_chain as get_research_assistant_chain
32
+ from streamlit_feedback import streamlit_feedback
33
 
34
  __version__ = "2.1.4"
35
 
 
98
  )
99
 
100
 
101
+ # azure_state_or_default(
102
+ # "AZURE_OPENAI_BASE_URL",
103
+ # "AZURE_OPENAI_API_VERSION",
104
+ # "AZURE_OPENAI_DEPLOYMENT_NAME",
105
+ # "AZURE_OPENAI_EMB_DEPLOYMENT_NAME",
106
+ # "AZURE_OPENAI_API_KEY",
107
+ # "AZURE_OPENAI_MODEL_VERSION",
108
+ # )
109
+
110
+ # st.session_state.AZURE_AVAILABLE = all(
111
+ # [
112
+ # st.session_state.AZURE_OPENAI_BASE_URL,
113
+ # st.session_state.AZURE_OPENAI_API_VERSION,
114
+ # st.session_state.AZURE_OPENAI_DEPLOYMENT_NAME,
115
+ # st.session_state.AZURE_OPENAI_API_KEY,
116
+ # st.session_state.AZURE_OPENAI_MODEL_VERSION,
117
+ # ],
118
+ # )
119
+
120
+ # st.session_state.AZURE_EMB_AVAILABLE = (
121
+ # st.session_state.AZURE_AVAILABLE
122
+ # and st.session_state.AZURE_OPENAI_EMB_DEPLOYMENT_NAME
123
+ # )
124
+
125
+ # AZURE_KWARGS = (
126
+ # None
127
+ # if not st.session_state.AZURE_EMB_AVAILABLE
128
+ # else {
129
+ # "openai_api_base": st.session_state.AZURE_OPENAI_BASE_URL,
130
+ # "openai_api_version": st.session_state.AZURE_OPENAI_API_VERSION,
131
+ # "deployment": st.session_state.AZURE_OPENAI_EMB_DEPLOYMENT_NAME,
132
+ # "openai_api_key": st.session_state.AZURE_OPENAI_API_KEY,
133
+ # "openai_api_type": "azure",
134
+ # }
135
+ # )
136
 
137
 
138
  @st.cache_resource
 
208
  help="How many document chunks will be used for context?",
209
  value=default_values.DEFAULT_RETRIEVER_K,
210
  min_value=1,
211
+ max_value=1000,
212
  )
213
 
214
  chunk_size = st.slider(
 
254
  index=0,
255
  help=chain_type_help,
256
  )
257
+ # use_azure = st.toggle(
258
+ # label="Use Azure OpenAI",
259
+ # value=st.session_state.AZURE_EMB_AVAILABLE,
260
+ # help="Use Azure for embeddings instead of using OpenAI directly.",
261
+ # )
262
 
263
  if uploaded_file:
264
+ if openai_api_key:
265
  (
266
  st.session_state.texts,
267
  st.session_state.retriever,
 
271
  chunk_size=chunk_size,
272
  chunk_overlap=chunk_overlap,
273
  k=k,
274
+ # azure_kwargs=AZURE_KWARGS,
275
+ use_azure=False,
276
  )
277
  else:
278
  st.error("Please enter a valid OpenAI API key.", icon="❌")
 
312
 
313
  # --- LangSmith Options ---
314
  if default_values.SHOW_LANGSMITH_OPTIONS:
315
+ with st.expander(
316
+ "LangSmith Options",
317
+ expanded=False,
318
+ ):
319
  st.session_state.LANGSMITH_API_KEY = st.text_input(
320
  "LangSmith API Key (optional)",
321
  value=st.session_state.LANGSMITH_API_KEY,
322
  type="password",
323
+ disabled=True,
324
  )
325
 
326
  st.session_state.LANGSMITH_PROJECT = st.text_input(
 
339
  )
340
 
341
  # --- Azure Options ---
342
+ # if default_values.SHOW_AZURE_OPTIONS:
343
+ # with st.expander("Azure Options", expanded=False):
344
+ # st.session_state.AZURE_OPENAI_BASE_URL = st.text_input(
345
+ # "AZURE_OPENAI_BASE_URL",
346
+ # value=st.session_state.AZURE_OPENAI_BASE_URL,
347
+ # )
348
+
349
+ # st.session_state.AZURE_OPENAI_API_VERSION = st.text_input(
350
+ # "AZURE_OPENAI_API_VERSION",
351
+ # value=st.session_state.AZURE_OPENAI_API_VERSION,
352
+ # )
353
+
354
+ # st.session_state.AZURE_OPENAI_DEPLOYMENT_NAME = st.text_input(
355
+ # "AZURE_OPENAI_DEPLOYMENT_NAME",
356
+ # value=st.session_state.AZURE_OPENAI_DEPLOYMENT_NAME,
357
+ # )
358
+
359
+ # st.session_state.AZURE_OPENAI_EMB_DEPLOYMENT_NAME = st.text_input(
360
+ # "AZURE_OPENAI_EMB_DEPLOYMENT_NAME",
361
+ # value=st.session_state.AZURE_OPENAI_EMB_DEPLOYMENT_NAME,
362
+ # )
363
+
364
+ # st.session_state.AZURE_OPENAI_API_KEY = st.text_input(
365
+ # "AZURE_OPENAI_API_KEY",
366
+ # value=st.session_state.AZURE_OPENAI_API_KEY,
367
+ # type="password",
368
+ # )
369
+
370
+ # st.session_state.AZURE_OPENAI_MODEL_VERSION = st.text_input(
371
+ # "AZURE_OPENAI_MODEL_VERSION",
372
+ # value=st.session_state.AZURE_OPENAI_MODEL_VERSION,
373
+ # )
374
 
375
 
376
  # --- LLM Instantiation ---
 
381
  temperature=temperature,
382
  max_tokens=max_tokens,
383
  azure_available=st.session_state.AZURE_AVAILABLE,
384
+ # azure_dict={
385
+ # "AZURE_OPENAI_BASE_URL": st.session_state.AZURE_OPENAI_BASE_URL,
386
+ # "AZURE_OPENAI_API_VERSION": st.session_state.AZURE_OPENAI_API_VERSION,
387
+ # "AZURE_OPENAI_DEPLOYMENT_NAME": st.session_state.AZURE_OPENAI_DEPLOYMENT_NAME,
388
+ # "AZURE_OPENAI_API_KEY": st.session_state.AZURE_OPENAI_API_KEY,
389
+ # "AZURE_OPENAI_MODEL_VERSION": st.session_state.AZURE_OPENAI_MODEL_VERSION,
390
+ # },
391
  )
392
  get_llm_args_temp_zero = get_llm_args | {"temperature": 0.0}
393
  st.session_state.llm = get_llm(**get_llm_args)
langchain-streamlit-demo/defaults.py CHANGED
@@ -1,10 +1,10 @@
1
  import os
2
  from collections import namedtuple
3
 
4
-
5
  MODEL_DICT = {
6
  "gpt-3.5-turbo": "OpenAI",
7
  "gpt-4": "OpenAI",
 
8
  "gpt-4-1106-preview": "OpenAI",
9
  "claude-instant-1": "Anthropic",
10
  "claude-instant-1.2": "Anthropic",
@@ -21,7 +21,7 @@ MODEL_DICT = {
21
 
22
  SUPPORTED_MODELS = list(MODEL_DICT.keys())
23
 
24
- DEFAULT_MODEL = os.environ.get("DEFAULT_MODEL", "gpt-3.5-turbo")
25
 
26
  DEFAULT_SYSTEM_PROMPT = os.environ.get(
27
  "DEFAULT_SYSTEM_PROMPT",
@@ -38,16 +38,16 @@ DEFAULT_MAX_TOKENS = int(os.environ.get("DEFAULT_MAX_TOKENS", 1000))
38
 
39
  DEFAULT_LANGSMITH_PROJECT = os.environ.get("LANGCHAIN_PROJECT")
40
 
41
- AZURE_VARS = [
42
- "AZURE_OPENAI_BASE_URL",
43
- "AZURE_OPENAI_API_VERSION",
44
- "AZURE_OPENAI_DEPLOYMENT_NAME",
45
- "AZURE_OPENAI_EMB_DEPLOYMENT_NAME",
46
- "AZURE_OPENAI_API_KEY",
47
- "AZURE_OPENAI_MODEL_VERSION",
48
- ]
49
 
50
- AZURE_DICT = {v: os.environ.get(v, "") for v in AZURE_VARS}
51
 
52
 
53
  SHOW_LANGSMITH_OPTIONS = (
@@ -89,8 +89,8 @@ DEFAULT_VALUES = namedtuple(
89
  "MAX_MAX_TOKENS",
90
  "DEFAULT_MAX_TOKENS",
91
  "DEFAULT_LANGSMITH_PROJECT",
92
- "AZURE_VARS",
93
- "AZURE_DICT",
94
  "PROVIDER_KEY_DICT",
95
  "OPENAI_API_KEY",
96
  "MIN_CHUNK_SIZE",
@@ -118,8 +118,8 @@ default_values = DEFAULT_VALUES(
118
  MAX_MAX_TOKENS,
119
  DEFAULT_MAX_TOKENS,
120
  DEFAULT_LANGSMITH_PROJECT,
121
- AZURE_VARS,
122
- AZURE_DICT,
123
  PROVIDER_KEY_DICT,
124
  OPENAI_API_KEY,
125
  MIN_CHUNK_SIZE,
 
1
  import os
2
  from collections import namedtuple
3
 
 
4
  MODEL_DICT = {
5
  "gpt-3.5-turbo": "OpenAI",
6
  "gpt-4": "OpenAI",
7
+ "gpt-4-turbo-preview": "OpenAI",
8
  "gpt-4-1106-preview": "OpenAI",
9
  "claude-instant-1": "Anthropic",
10
  "claude-instant-1.2": "Anthropic",
 
21
 
22
  SUPPORTED_MODELS = list(MODEL_DICT.keys())
23
 
24
+ DEFAULT_MODEL = os.environ.get("DEFAULT_MODEL", "gpt-4-turbo-preview")
25
 
26
  DEFAULT_SYSTEM_PROMPT = os.environ.get(
27
  "DEFAULT_SYSTEM_PROMPT",
 
38
 
39
  DEFAULT_LANGSMITH_PROJECT = os.environ.get("LANGCHAIN_PROJECT")
40
 
41
+ # AZURE_VARS = [
42
+ # "AZURE_OPENAI_BASE_URL",
43
+ # "AZURE_OPENAI_API_VERSION",
44
+ # "AZURE_OPENAI_DEPLOYMENT_NAME",
45
+ # "AZURE_OPENAI_EMB_DEPLOYMENT_NAME",
46
+ # "AZURE_OPENAI_API_KEY",
47
+ # "AZURE_OPENAI_MODEL_VERSION",
48
+ # ]
49
 
50
+ # AZURE_DICT = {v: os.environ.get(v, "") for v in AZURE_VARS}
51
 
52
 
53
  SHOW_LANGSMITH_OPTIONS = (
 
89
  "MAX_MAX_TOKENS",
90
  "DEFAULT_MAX_TOKENS",
91
  "DEFAULT_LANGSMITH_PROJECT",
92
+ # "AZURE_VARS",
93
+ # "AZURE_DICT",
94
  "PROVIDER_KEY_DICT",
95
  "OPENAI_API_KEY",
96
  "MIN_CHUNK_SIZE",
 
118
  MAX_MAX_TOKENS,
119
  DEFAULT_MAX_TOKENS,
120
  DEFAULT_LANGSMITH_PROJECT,
121
+ # AZURE_VARS,
122
+ # AZURE_DICT,
123
  PROVIDER_KEY_DICT,
124
  OPENAI_API_KEY,
125
  MIN_CHUNK_SIZE,
langchain-streamlit-demo/llm_resources.py CHANGED
@@ -1,27 +1,27 @@
1
  import uuid
2
  from tempfile import NamedTemporaryFile
3
- from typing import Tuple, List, Optional, Dict
4
 
 
5
  from langchain.agents import AgentExecutor, AgentType, initialize_agent
6
  from langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent
7
  from langchain.callbacks.base import BaseCallbackHandler
8
- from langchain.chains import LLMChain
9
- from langchain.chains import RetrievalQA
10
  from langchain.chat_models import (
11
  AzureChatOpenAI,
12
- ChatOpenAI,
13
  ChatAnthropic,
14
  ChatAnyscale,
 
15
  )
16
  from langchain.document_loaders import PyPDFLoader
17
  from langchain.embeddings import AzureOpenAIEmbeddings, OpenAIEmbeddings
18
  from langchain.llms.base import BaseLLM
19
  from langchain.memory import ConversationBufferMemory
20
- from langchain.prompts import MessagesPlaceholder, ChatPromptTemplate
21
  from langchain.retrievers import EnsembleRetriever
22
  from langchain.retrievers.multi_query import MultiQueryRetriever
23
  from langchain.retrievers.multi_vector import MultiVectorRetriever
24
- from langchain.schema import Document, BaseRetriever
25
  from langchain.schema.chat_history import BaseChatMessageHistory
26
  from langchain.schema.runnable import RunnablePassthrough
27
  from langchain.storage import InMemoryStore
@@ -29,8 +29,6 @@ from langchain.text_splitter import RecursiveCharacterTextSplitter
29
  from langchain.tools.base import BaseTool
30
  from langchain.vectorstores import FAISS
31
  from langchain_core.messages import SystemMessage
32
-
33
- from defaults import DEFAULT_CHUNK_SIZE, DEFAULT_CHUNK_OVERLAP, DEFAULT_RETRIEVER_K
34
  from qagen import get_rag_qa_gen_chain
35
  from summarize import get_rag_summarization_chain
36
 
@@ -166,8 +164,8 @@ def get_llm(
166
  provider_api_key: str,
167
  temperature: float,
168
  max_tokens: int,
169
- azure_available: bool,
170
- azure_dict: dict[str, str],
171
  ):
172
  if azure_available and provider == "Azure OpenAI":
173
  return AzureChatOpenAI(
 
1
  import uuid
2
  from tempfile import NamedTemporaryFile
3
+ from typing import Dict, List, Optional, Tuple
4
 
5
+ from defaults import DEFAULT_CHUNK_OVERLAP, DEFAULT_CHUNK_SIZE, DEFAULT_RETRIEVER_K
6
  from langchain.agents import AgentExecutor, AgentType, initialize_agent
7
  from langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent
8
  from langchain.callbacks.base import BaseCallbackHandler
9
+ from langchain.chains import LLMChain, RetrievalQA
 
10
  from langchain.chat_models import (
11
  AzureChatOpenAI,
 
12
  ChatAnthropic,
13
  ChatAnyscale,
14
+ ChatOpenAI,
15
  )
16
  from langchain.document_loaders import PyPDFLoader
17
  from langchain.embeddings import AzureOpenAIEmbeddings, OpenAIEmbeddings
18
  from langchain.llms.base import BaseLLM
19
  from langchain.memory import ConversationBufferMemory
20
+ from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
21
  from langchain.retrievers import EnsembleRetriever
22
  from langchain.retrievers.multi_query import MultiQueryRetriever
23
  from langchain.retrievers.multi_vector import MultiVectorRetriever
24
+ from langchain.schema import BaseRetriever, Document
25
  from langchain.schema.chat_history import BaseChatMessageHistory
26
  from langchain.schema.runnable import RunnablePassthrough
27
  from langchain.storage import InMemoryStore
 
29
  from langchain.tools.base import BaseTool
30
  from langchain.vectorstores import FAISS
31
  from langchain_core.messages import SystemMessage
 
 
32
  from qagen import get_rag_qa_gen_chain
33
  from summarize import get_rag_summarization_chain
34
 
 
164
  provider_api_key: str,
165
  temperature: float,
166
  max_tokens: int,
167
+ azure_available: bool = False,
168
+ azure_dict: dict[str, str] = {},
169
  ):
170
  if azure_available and provider == "Azure OpenAI":
171
  return AzureChatOpenAI(
requirements.txt CHANGED
@@ -5,8 +5,9 @@ certifi>=2023.7.22 # not directly required, pinned by Snyk to avoid a vulnerabil
5
  duckduckgo-search==4.3.1
6
  faiss-cpu==1.7.4
7
  langchain==0.1.4
 
8
  langsmith==0.0.84
9
- mypy==1.8.0
10
  numexpr==2.9.0
11
  numpy>=1.22.2 # not directly required, pinned by Snyk to avoid a vulnerability
12
  openai==1.10.0
 
5
  duckduckgo-search==4.3.1
6
  faiss-cpu==1.7.4
7
  langchain==0.1.4
8
+ langchain-community==0.0.16
9
  langsmith==0.0.84
10
+ # mypy==1.8.0
11
  numexpr==2.9.0
12
  numpy>=1.22.2 # not directly required, pinned by Snyk to avoid a vulnerability
13
  openai==1.10.0