|
import requests |
|
import os |
|
from openai import OpenAI |
|
from fastapi import FastAPI, Body |
|
import gradio as gr |
|
|
|
|
|
# Path at which the Gradio UI is mounted onto the FastAPI app.
CUSTOM_PATH = "/"

# NOTE(review): the env var is named OPEN_API_KEY here; the conventional
# name is OPENAI_API_KEY. Check the original name first so existing
# deployments keep working, then fall back to the standard one.
client = OpenAI(
    api_key=os.environ.get("OPEN_API_KEY") or os.environ.get("OPENAI_API_KEY")
)

# The original code issued a throwaway `client.completions.create(...)`
# call here ("Write a tagline for an ice cream shop."). It ran on every
# import/startup, cost money, blocked startup on the network, and its
# result was never used — removed.

app = FastAPI()
|
|
|
|
|
|
|
|
|
|
|
|
|
# System prompts per UI "context" persona. Unknown or missing contexts
# fall back to the generic assistant prompt.
# (Typo fix: original said "enganement"; corrected to "engagement".)
_SYSTEM_PROMPTS = {
    "Grammar Checker": (
        "You are a grammar expert. Grammar check and suggest with the "
        "corrected paragraph. Also calculate a confidence score for the "
        "generated content considering the punctuations, correctness, "
        "clarity, engagement and delivery"
    ),
    "Gift Recommender": "You are a Gift Recommender. Suggest Gifts based on the age and gender.",
}
_DEFAULT_SYSTEM_PROMPT = "You are a helpful assistant."


@app.post("/chat")
async def chat(body: dict = Body(...)):
    """Chat endpoint backing the Gradio UI.

    Expects a JSON body with:
      - "prompt" (required): the user's message.
      - "context" (optional): persona name ("Grammar Checker",
        "Gift Recommender", or anything else for the default assistant).

    Returns the raw OpenAI chat-completion response object, which FastAPI
    serializes to JSON for the caller.
    """
    # .get() instead of body["context"] so a missing key falls back to the
    # default persona rather than raising KeyError (an HTTP 500).
    content = _SYSTEM_PROMPTS.get(body.get("context"), _DEFAULT_SYSTEM_PROMPT)

    response = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": content + " The output should be in more natural tone."},
            {"role": "user", "content": body["prompt"]},
        ],
    )
    return response
|
|
|
|
|
def converse(inp, history, context):
    """Gradio chat handler: forward the user's message to the local /chat API.

    Args:
        inp: the user's latest message.
        history: conversation history supplied by gr.ChatInterface
            (unused — the backend endpoint is stateless per request).
        context: persona name selected in the Context dropdown.

    Returns:
        The assistant's reply text, extracted from the OpenAI-style
        response JSON.
    """
    response = requests.post(
        "http://localhost:8000/chat",
        json={"prompt": inp, "context": context},
        # requests has NO default timeout; without one a stuck backend
        # hangs the Gradio UI forever.
        timeout=60,
    )
    # Surface HTTP errors explicitly instead of a confusing KeyError
    # on 'choices' when the backend returns an error payload.
    response.raise_for_status()
    return response.json()["choices"][0]["message"]["content"]
|
|
|
|
|
|
|
# Copy shown above the chat widget.
title = "Welcome to the Chatbot Interface!"
description = "Feel free to start a conversation with the chatbot."

# Persona choices; the selected value reaches `converse` as `context`.
_CONTEXT_CHOICES = ["Assistant", "Grammar Checker", "Gift Recommender"]

dropdown = gr.Dropdown(
    _CONTEXT_CHOICES,
    label="Context",
    value=_CONTEXT_CHOICES[0],
)

# Chat UI wired to `converse`; the dropdown rides along as an extra input.
chatapp = gr.ChatInterface(
    fn=converse,
    title=title,
    description=description,
    additional_inputs=[dropdown],
)

# Serve the Gradio app from the FastAPI instance at CUSTOM_PATH ("/").
app = gr.mount_gradio_app(app, chatapp, path=CUSTOM_PATH)
|
|
|
|
|
|