import os

from openai import AsyncOpenAI

import chainlit as cl
from chainlit.prompt import Prompt, PromptMessage
from chainlit.playground.providers import ChatOpenAI

from dotenv import load_dotenv

# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file.
load_dotenv()


system_template = """\
###Instruction###
You are an expert assistant answering technical questions on machine learning and deep learning topics.
Ensure that your response is unbiased and generic; you will be 'AWARDED' for giving clear and correct answers.
"""

user_template = """{input}
Think through the question and include code, links, or steps along with your response.
"""


@cl.on_chat_start
async def start_chat():
    # Default OpenAI chat-completion settings, stored in the user session.
    settings = {
        "model": "gpt-3.5-turbo",
        "temperature": 0,
        "max_tokens": 500,
        "top_p": 1,
        "frequency_penalty": 0,
        "presence_penalty": 0,
    }

    cl.user_session.set("settings", settings)


@cl.on_message
async def main(message: cl.Message):
    settings = cl.user_session.get("settings")

    client = AsyncOpenAI()

    # Build a Chainlit Prompt so the exchange is visible in the Prompt Playground.
    prompt = Prompt(
        provider=ChatOpenAI.id,
        messages=[
            PromptMessage(
                role="system",
                template=system_template,
                formatted=system_template,
            ),
            PromptMessage(
                role="user",
                template=user_template,
                formatted=user_template.format(input=message.content),
            ),
        ],
        inputs={"input": message.content},
        settings=settings,
    )

    # Send an empty message first, then stream tokens into it as they arrive.
    msg = cl.Message(content="")
    await msg.send()

    async for stream_resp in await client.chat.completions.create(
        messages=[m.to_openai() for m in prompt.messages], stream=True, **settings
    ):
        token = stream_resp.choices[0].delta.content
        if not token:
            token = ""
        await msg.stream_token(token)

    # Attach the final completion to the prompt so it can be inspected in the playground.
    prompt.completion = msg.content
    msg.prompt = prompt

    await msg.send()
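
# Usage sketch: assuming this listing is saved as app.py (hypothetical filename) and the
# OPENAI_API_KEY is available via .env, the chat app can be started with:
#
#   chainlit run app.py -w
#
# Note that Prompt/PromptMessage above come from the older (pre-1.0) Chainlit API, so a
# Chainlit release that still ships chainlit.prompt is assumed.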