fix(chat): fix context (commit 6570b59, LOUIS SANNA)
import os

from langchain.chat_models import AzureChatOpenAI, ChatOpenAI

# Load environment variables from a .env file if python-dotenv is installed.
try:
    from dotenv import load_dotenv

    load_dotenv()
except ImportError:
    pass


def get_llm(max_tokens=1000, temperature=0.0, verbose=True, streaming=False, **kwargs):
    """Return an Azure OpenAI chat model if Azure is configured, otherwise a standard OpenAI one."""
    if has_azure_openai_config():
        return get_azure_llm(
            max_tokens=max_tokens,
            temperature=temperature,
            verbose=verbose,
            streaming=streaming,
            **kwargs,
        )
    return get_open_ai_llm(
        max_tokens=max_tokens,
        temperature=temperature,
        verbose=verbose,
        streaming=streaming,
        **kwargs,
    )


def has_azure_openai_config():
    """
    Checks if the necessary environment variables for Azure OpenAI are set.
    Returns True if they are all set, False otherwise.
    """
    return all(
        key in os.environ
        for key in [
            "AZURE_OPENAI_API_BASE_URL",
            "AZURE_OPENAI_API_VERSION",
            "AZURE_OPENAI_API_DEPLOYMENT_NAME",
            "AZURE_OPENAI_API_KEY",
        ]
    )


def get_open_ai_llm(**kwargs):
    """Build a standard OpenAI chat model from the given keyword arguments."""
    return ChatOpenAI(**kwargs)


def get_azure_llm(**kwargs):
    """Build an Azure OpenAI chat model configured from the AZURE_OPENAI_* environment variables."""
    llm = AzureChatOpenAI(
        openai_api_base=os.environ["AZURE_OPENAI_API_BASE_URL"],
        openai_api_version=os.environ["AZURE_OPENAI_API_VERSION"],
        deployment_name=os.environ["AZURE_OPENAI_API_DEPLOYMENT_NAME"],
        openai_api_key=os.environ["AZURE_OPENAI_API_KEY"],
        openai_api_type="azure",
        **kwargs,
    )
    return llm
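

# Usage sketch: assumes either OPENAI_API_KEY or the four AZURE_OPENAI_*
# variables checked above are exported in the environment; the selected
# backend depends on which configuration is present.
if __name__ == "__main__":
    llm = get_llm(max_tokens=256, temperature=0.2)
    # llm is either a ChatOpenAI or an AzureChatOpenAI instance.
    print(type(llm).__name__)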