import os
import gradio as gr
import openai
#from dotenv import load_dotenv
#load_dotenv()
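# Configuration is read from environment variables:
#   OPENAI_API_KEY                - key for the OpenAI API
#   AZURE_OPENAI_KEY              - key for the Azure OpenAI resource
#   AZURE_OPENAI_ENDPOINT         - Azure resource name, used to build https://<name>.openai.azure.com
#   AZURE_OPENAI_DEPLOYMENT_NAME  - name of the Azure OpenAI chat deployment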
llm_api_options = ["OpenAI API", "Azure OpenAI API", "Google PaLM API", "Llama 2"]
TEST_MESSAGE = "Write an introductory paragraph to explain Generative AI to the reader of this content."
openai_models = ["gpt-4", "gpt-4-0613", "gpt-4-32k", "gpt-4-32k-0613", "gpt-3.5-turbo",
                 "gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k", "gpt-3.5-turbo-16k-0613", "text-davinci-003",
                 "text-davinci-002", "text-curie-001", "text-babbage-001", "text-ada-001"]
azure_endpoint = os.getenv("AZURE_OPENAI_ENDPOINT")
azure_deployment_name = os.getenv("AZURE_OPENAI_DEPLOYMENT_NAME")
def openai_text_completion(prompt: str, model: str):
    """Send the prompt to the OpenAI Chat Completions API and return (error_message, response)."""
    try:
        system_prompt = "Explain in detail to help student understand the concept."
        assistant_prompt = None
        messages = [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": prompt}
        ]
        # Only include an assistant turn if one is actually provided.
        if assistant_prompt:
            messages.append({"role": "assistant", "content": assistant_prompt})
        openai.api_key = os.getenv("OPENAI_API_KEY")
        # Reset the module-level Azure settings in case the Azure handler ran first.
        openai.api_type = "open_ai"
        openai.api_version = None
        completion = openai.ChatCompletion.create(
            model=model,
            messages=messages,
            temperature=0.7
        )
        response = completion["choices"][0]["message"]["content"]
        return "", response
    except openai.error.OpenAIError as exception:
        print(f"Exception Name: {type(exception).__name__}")
        print(exception)
        return f"OpenAI API Error - {exception}", ""
def azure_openai_text_completion(prompt: str, model: str):
    """Send the prompt to an Azure OpenAI deployment and return (error_message, response)."""
    try:
        system_prompt = "Explain in detail to help student understand the concept."
        assistant_prompt = None
        messages = [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": prompt}
        ]
        if assistant_prompt:
            messages.append({"role": "assistant", "content": assistant_prompt})
        openai.api_key = os.getenv("AZURE_OPENAI_KEY")
        openai.api_type = "azure"
        openai.api_version = "2023-05-15"
        # AZURE_OPENAI_ENDPOINT holds the resource name only; the full base URL is built here.
        openai.api_base = f"https://{azure_endpoint}.openai.azure.com"
        completion = openai.ChatCompletion.create(
            model=model,
            engine=azure_deployment_name,
            messages=messages,
            temperature=0.7
        )
        response = completion["choices"][0]["message"]["content"]
        return "", response
    except openai.error.OpenAIError as exception:
        print(f"Exception Name: {type(exception).__name__}")
        print(exception)
        return f"Azure OpenAI API Error - {exception}", ""
def test_handler(optionSelection, prompt: str = TEST_MESSAGE, model: str = "gpt-4"):
    """Route the prompt to the selected LLM backend and return (info, response)."""
    match optionSelection:
        case "OpenAI API":
            message, response = openai_text_completion(prompt, model)
            return message, response
        case "Azure OpenAI API":
            message, response = azure_openai_text_completion(prompt, model)
            return message, response
        case "Google PaLM API":
            # Not implemented yet.
            return "", ""
        case "Llama 2":
            # Not implemented yet.
            return "", ""
        case _:
            return f"Invalid choice: {optionSelection}", ""
with gr.Blocks() as LLMDemoTabbedScreen:
    with gr.Tab("Text-to-Text (Text Completion)"):
        llm_options = gr.Radio(llm_api_options, label="Select one", info="Which service do you want to use?", value="OpenAI API")
        with gr.Tab("Open AI"):
            openai_model = gr.Dropdown(openai_models, value="gpt-4", label="Model", info="Select one, for Natural language")
        with gr.Row():
            with gr.Column():
                test_string = gr.Textbox(label="Try String", value=TEST_MESSAGE, lines=2)
                test_string_response = gr.Textbox(label="Response")
                test_string_output_info = gr.Label(value="Output Info", label="Info")
                test_button = gr.Button("Try it")

    test_button.click(
        fn=test_handler,
        inputs=[llm_options, test_string, openai_model],
        outputs=[test_string_output_info, test_string_response]
    )

if __name__ == "__main__":
    LLMDemoTabbedScreen.launch()