# Source: HuggingFace Space file viewer metadata (preserved as a comment)
# Author: TheoLvs — "Updated app 1.2.0" — commit f0fc5f8 — 720 Bytes
from langchain.chat_models import AzureChatOpenAI
# LOAD ENVIRONMENT VARIABLES
# python-dotenv reads a local .env file and merges its keys into os.environ,
# so the AZURE_OPENAI_* variables used below can be supplied via .env.
from dotenv import load_dotenv
import os
load_dotenv()
def get_llm(max_tokens = 1024,temperature = 0.0,verbose = True,streaming = False, **kwargs):
    """Build an Azure-hosted chat LLM client configured from environment variables.

    Reads the Azure OpenAI connection settings from the environment
    (AZURE_OPENAI_API_BASE_URL, AZURE_OPENAI_API_VERSION,
    AZURE_OPENAI_API_DEPLOYMENT_NAME, AZURE_OPENAI_API_KEY) — these are
    typically loaded from a .env file at import time via load_dotenv().

    Args:
        max_tokens (int): Maximum tokens the model may generate per reply.
        temperature (float): Sampling temperature; 0.0 gives deterministic output.
        verbose (bool): Forwarded to LangChain's verbose logging flag.
        streaming (bool): Whether responses are streamed token-by-token.
        **kwargs: Any extra keyword arguments forwarded to AzureChatOpenAI.

    Returns:
        AzureChatOpenAI: A configured LangChain chat-model client.

    Raises:
        KeyError: If any of the required AZURE_OPENAI_* variables is unset.
    """
    llm = AzureChatOpenAI(
        openai_api_base=os.environ["AZURE_OPENAI_API_BASE_URL"],
        openai_api_version=os.environ["AZURE_OPENAI_API_VERSION"],
        deployment_name=os.environ["AZURE_OPENAI_API_DEPLOYMENT_NAME"],
        openai_api_key=os.environ["AZURE_OPENAI_API_KEY"],
        openai_api_type="azure",  # required marker for the Azure (vs. public) OpenAI API
        max_tokens=max_tokens,
        temperature=temperature,
        verbose=verbose,
        streaming=streaming,
        **kwargs,
    )
    return llm