|
|
|
|
|
|
|
import os |
|
from openai import AsyncOpenAI |
|
import chainlit as cl |
|
from chainlit.prompt import Prompt, PromptMessage |
|
from chainlit.playground.providers import ChatOpenAI |
|
from dotenv import load_dotenv |
|
|
|
# Load environment variables (e.g. OPENAI_API_KEY read by AsyncOpenAI) from a local .env file.
load_dotenv()
|
|
|
|
|
# System prompt: fixed persona for every conversation — an advanced alien
# civilization observing Earth, answering with scientific terminology.
system_template = """You are a helpful assistant with the unique perspective of advanced alien civilization passionate about exoplanets and observing Earth. Your knowledge spans across cosmic distances, and you speak with a blend of scientific acumen and extraterrestrial insight. Your explanations bridge the gap between alien technology and human advancements, shedding light on the universal principles that govern the cosmos. All your responses should provide insights as if you're comparing or relating human technological and scientific endeavors to those of the aliens, using scientific terminology.

"""


# User prompt template: `{input}` is substituted with the incoming message
# text (see `main`), followed by fixed response requirements for the model.
user_template = """{input}

###Requirements###

- Use complex scientific jargon unless the user complains.

- Frame each response from the perspective of an advanced alien civilization observing Earth. Provide comparisons or relations between the alien and human technologies, scientific advancements, and cultural observations.

- Include a short in-context story or example to illustrate points, focusing on the alien experience and their interactions with or observations of Earth.

###End###

"""
|
|
|
|
|
|
|
@cl.on_chat_start
async def start_chat():
    """Initialize a new chat session with the default OpenAI model settings.

    Stores the settings dict in the Chainlit user session under the key
    "settings" so that `main` can reuse it for every completion request.
    """
    model_settings = dict(
        model="gpt-3.5-turbo",
        temperature=0,
        max_tokens=500,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0,
    )

    # Persist per-session so each message handler call sees the same config.
    cl.user_session.set("settings", model_settings)
|
|
|
|
|
@cl.on_message
async def main(message: cl.Message):
    """Answer one user message with a streamed OpenAI chat completion.

    Builds a Chainlit `Prompt` (static system persona + the user's message
    formatted into `user_template`), streams the completion token-by-token
    into a Chainlit message, then attaches the prompt for Playground replay.
    """
    # Model settings stored by `start_chat` for this session.
    settings = cl.user_session.get("settings")

    client = AsyncOpenAI()

    print(message.content)

    system_msg = PromptMessage(
        role="system",
        template=system_template,
        formatted=system_template,
    )
    user_msg = PromptMessage(
        role="user",
        template=user_template,
        formatted=user_template.format(input=message.content),
    )
    prompt = Prompt(
        provider=ChatOpenAI.id,
        messages=[system_msg, user_msg],
        inputs={"input": message.content},
        settings=settings,
    )

    print([m.to_openai() for m in prompt.messages])

    # Empty message that will be filled incrementally as tokens arrive.
    msg = cl.Message(content="")

    stream = await client.chat.completions.create(
        messages=[m.to_openai() for m in prompt.messages], stream=True, **settings
    )
    async for chunk in stream:
        # delta.content may be None on some chunks (e.g. the final one).
        await msg.stream_token(chunk.choices[0].delta.content or "")

    # Attach the full completion and prompt so the Playground can replay it.
    prompt.completion = msg.content
    msg.prompt = prompt

    await msg.send()
|
|