#=============
# gedit /etc/environment
# OPENAI_API_TYPE="azure"
# OPENAI_API_VERSION="2023-06-01-preview"
# OPENAI_API_BASE="https://ufgtb24.openai.azure.com/"
# OPENAI_API_KEY="<your-azure-openai-api-key>"   # never commit a real key; rotate any key that was committed
# re-log-in (or reboot) so the environment variables take effect
#=============
# no need for proxy

#===== openai method
import os

import openai
# Configure the (pre-1.0) openai SDK for Azure OpenAI.
# NOTE(review): openai.ChatCompletion was removed in openai>=1.0; this script
# requires openai<1.0 (or a migration to the AzureOpenAI client class).
openai.api_type = "azure"
openai.api_version = "2023-05-15"
openai.api_base = os.getenv("OPENAI_API_BASE")  # Your Azure OpenAI resource's endpoint value.
openai.api_key = os.getenv("OPENAI_API_KEY")

# Fail fast with a clear message instead of an opaque SDK error mid-request.
if not openai.api_base or not openai.api_key:
    raise RuntimeError(
        "OPENAI_API_BASE and OPENAI_API_KEY must be set in the environment"
    )

print(openai.api_version)

# Stream a single chat completion from the Azure deployment named 'gpt35'.
response = openai.ChatCompletion.create(
    deployment_id='gpt35',
    messages=[
        {"role": "user", "content": "What would be a good company name for a company that makes colorful socks?"},
    ],
    temperature=0.9,
    n=1,
    stream=True,
)

# Accumulate streamed fragments. In the streaming protocol the first chunk's
# delta typically carries only the "role" key and the final delta is empty,
# so .get() with a "" default covers chunks without "content".
chat = []
for chunk in response:
    delta = chunk["choices"][0]["delta"]  # type: ignore
    chat.append(delta.get("content", ""))


# Join the collected pieces directly; wrapping them in a list comprehension
# was redundant.
full_reply_content = ''.join(chat)
print(full_reply_content)

# # ====== langchain method
# from langchain.llms import AzureOpenAI
# llm = AzureOpenAI(
#     engine='gpt35',
#     model_name="gpt-4",
#     # model_name="gpt-3.5-turbo",
#     temperature=0.7,
#     max_tokens=100,
# )
# t1=llm("What would be a good company name for a company that makes colorful socks?")
# print(t1)