Spaces:
Sleeping
Sleeping
lubomirovna
committed on
Commit
•
7489bfe
1
Parent(s):
4207a6f
Add chat history
Browse files
- .chainlit/config.toml +1 -1
- app.py +48 -53
- chainlit.md +1 -1
.chainlit/config.toml
CHANGED
@@ -25,7 +25,7 @@ unsafe_allow_html = false
|
|
25 |
latex = false
|
26 |
|
27 |
# Authorize users to upload files with messages
|
28 |
-
multi_modal =
|
29 |
|
30 |
# Allows user to use speech to text
|
31 |
[features.speech_to_text]
|
|
|
25 |
latex = false
|
26 |
|
27 |
# Authorize users to upload files with messages
|
28 |
+
multi_modal = false
|
29 |
|
30 |
# Allows user to use speech to text
|
31 |
[features.speech_to_text]
|
app.py
CHANGED
@@ -6,84 +6,79 @@ from dotenv import load_dotenv
|
|
6 |
|
7 |
load_dotenv()
|
8 |
|
9 |
-
|
10 |
-
|
11 |
-
|
12 |
-
|
13 |
-
Thirdlane
|
14 |
-
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
|
19 |
-
|
20 |
-
|
21 |
-
|
22 |
-
|
23 |
-
|
24 |
-
|
25 |
-
|
|
|
|
|
26 |
|
27 |
@cl.on_chat_start
|
28 |
async def start_chat():
|
29 |
settings = {
|
30 |
-
"model":
|
31 |
-
|
32 |
-
|
33 |
-
|
34 |
-
|
35 |
-
# "presence_penalty": 0,
|
36 |
}
|
37 |
|
|
|
|
|
|
|
|
|
|
|
38 |
cl.user_session.set("settings", settings)
|
|
|
39 |
|
40 |
|
41 |
@cl.on_message # marks a function that should be run each time the chatbot receives a message from a user
|
42 |
async def main(message: cl.Message):
|
43 |
settings = cl.user_session.get("settings")
|
|
|
|
|
44 |
|
45 |
client = AsyncOpenAI()
|
46 |
|
47 |
-
|
48 |
-
|
49 |
-
|
50 |
-
|
51 |
-
|
52 |
-
|
53 |
-
|
54 |
-
template=system_template,
|
55 |
-
formatted=system_template,
|
56 |
-
),
|
57 |
-
PromptMessage(
|
58 |
-
role="assistant",
|
59 |
-
template=assistant_template,
|
60 |
-
formatted=assistant_template,
|
61 |
-
),
|
62 |
-
PromptMessage(
|
63 |
-
role="user",
|
64 |
-
formatted=message.content,
|
65 |
-
)
|
66 |
-
],
|
67 |
-
inputs={"input": message.content},
|
68 |
-
settings=settings,
|
69 |
-
)
|
70 |
-
|
71 |
-
print([m.to_openai() for m in prompt.messages])
|
72 |
|
73 |
msg = cl.Message(content="")
|
|
|
74 |
|
75 |
# Call OpenAI
|
76 |
async for stream_resp in await client.chat.completions.create(
|
77 |
-
messages=
|
78 |
):
|
79 |
token = stream_resp.choices[0].delta.content
|
80 |
if not token:
|
81 |
token = ""
|
|
|
82 |
await msg.stream_token(token)
|
83 |
|
84 |
-
# Update the prompt object with the completion
|
85 |
-
prompt.completion = msg.content
|
86 |
-
msg.prompt = prompt
|
87 |
-
|
88 |
# Send and close the message stream
|
89 |
await msg.send()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
6 |
|
7 |
load_dotenv()
|
8 |
|
9 |
+
CONTEXT = """Third Lane Technologies offers Multi Tenant PBX and other Unified Communications solutions to SMBs, enterprises, public organizations, MSPs and UCaaS providers since 2003.
|
10 |
+
Whether deployed as hosted in public or private cloud or on-premises, Thirdlane supported solutions make advanced business communications simple and affordable.
|
11 |
+
For carriers, hosted telephony and UCaaS providers and MSPs demanding reliable, scalable and highly-customizable virtual PBX for their customers, Thirdlane created Thirdlane Multi Tenant PBX, Thirdlane Scale, and Thirdlane Connect - advanced multi-tenant UCaaS platforms and applications deployed globally since 2006.
|
12 |
+
Thirdlane's solutions are recognized in the industry for being highly customizable and reseller/integrator friendly. Resellers are key to our business, that's why our Thirdlane Business Phone System is delivered exclusively through the worldwide Reseller Partner network.
|
13 |
+
Thirdlane open architecture allows Reseller Partners deploy solutions in the cloud, or on the hardware of their choice, increasing margins, and offering true value-added services for business customers who frequently demand customization or integration beyond what's possible with the other PBXs.
|
14 |
+
Reliability, advanced features, open architecture and market-leading prices made Thirdlane products the clear choice for thousands of customers and partners worldwide.
|
15 |
+
Don't miss an opportunity to get ahead of the competition - contact us today!
|
16 |
+
"""
|
17 |
+
|
18 |
+
INSTRUCTIONS = """You are a helpful representative of Thirdlane company, a unified communications platform for UCaaS providers.
|
19 |
+
Company background: {CONTEXT}.
|
20 |
+
You have a youthful and cheery personality. Keep your responses as concise as possible."""
|
21 |
+
|
22 |
+
MODEL = "gpt-3.5-turbo"
|
23 |
+
TEMPERATURE = 0.5
|
24 |
+
MAX_TOKENS = 500
|
25 |
+
FREQUENCY_PENALTY = 0
|
26 |
+
PRESENCE_PENALTY = 0.6
|
27 |
+
MAX_CONTEXT_QUESTIONS = 10
|
28 |
|
29 |
@cl.on_chat_start
|
30 |
async def start_chat():
|
31 |
settings = {
|
32 |
+
"model": MODEL,
|
33 |
+
"temperature": TEMPERATURE,
|
34 |
+
"max_tokens": MAX_TOKENS,
|
35 |
+
"frequency_penalty": FREQUENCY_PENALTY,
|
36 |
+
"presence_penalty": PRESENCE_PENALTY
|
|
|
37 |
}
|
38 |
|
39 |
+
messages=[
|
40 |
+
{ "role": "system", "content": f"You are a helpful representative of Thirdlane company, a unified communications platform for UCaaS providers. Company background: {CONTEXT}. You have a youthful and cheery personality. Keep your responses as concise as possible." },
|
41 |
+
{ "role": "assistant", "content": "Hello! How can I assist you today?" },
|
42 |
+
]
|
43 |
+
|
44 |
cl.user_session.set("settings", settings)
|
45 |
+
cl.user_session.set("messages", messages)
|
46 |
|
47 |
|
48 |
@cl.on_message # marks a function that should be run each time the chatbot receives a message from a user
|
49 |
async def main(message: cl.Message):
|
50 |
settings = cl.user_session.get("settings")
|
51 |
+
messages = cl.user_session.get("messages")
|
52 |
+
chat_history = cl.user_session.get("chat_history", [])
|
53 |
|
54 |
client = AsyncOpenAI()
|
55 |
|
56 |
+
# Add the previous questions and answers
|
57 |
+
for question, answer in chat_history[-MAX_CONTEXT_QUESTIONS:]:
|
58 |
+
messages.append({"role": "user", "content": question})
|
59 |
+
messages.append({"role": "assistant", "content": answer})
|
60 |
+
|
61 |
+
# Add the new question
|
62 |
+
messages.append({"role": "user", "content": message.content})
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
63 |
|
64 |
msg = cl.Message(content="")
|
65 |
+
full_response = ""
|
66 |
|
67 |
# Call OpenAI
|
68 |
async for stream_resp in await client.chat.completions.create(
|
69 |
+
messages=messages, stream=True, **settings
|
70 |
):
|
71 |
token = stream_resp.choices[0].delta.content
|
72 |
if not token:
|
73 |
token = ""
|
74 |
+
full_response += token
|
75 |
await msg.stream_token(token)
|
76 |
|
|
|
|
|
|
|
|
|
77 |
# Send and close the message stream
|
78 |
await msg.send()
|
79 |
+
|
80 |
+
# Update chat history
|
81 |
+
chat_history.append((message.content, full_response))
|
82 |
+
cl.user_session.set("chat_history", chat_history)
|
83 |
+
cl.user_session.set("messages", messages)
|
84 |
+
|
chainlit.md
CHANGED
@@ -1 +1 @@
|
|
1 |
-
##
|
|
|
1 |
+
## AI Representative
|