rkoushikroy2
committed on
Commit
β’
890e747
1
Parent(s):
64e66df
Upload 2 files
Browse files- app.py +33 -67
- helper_functions.py +0 -21
app.py
CHANGED
@@ -1,75 +1,41 @@
|
|
1 |
-
# Imports
|
2 |
import gradio as gr
|
3 |
from helper_functions import *
|
4 |
|
5 |
with gr.Blocks() as app:
|
6 |
-
gr.Markdown('#
|
7 |
-
session_data = gr.State([
|
8 |
-
[{"role": "system", "content": pre_text}],[]
|
9 |
-
])
|
10 |
-
def user(user_message, history):
|
11 |
-
return "", history + [[user_message, None]]
|
12 |
|
13 |
-
def
|
14 |
-
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
|
19 |
-
|
20 |
-
|
|
|
|
|
21 |
|
22 |
-
|
23 |
-
|
24 |
-
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
|
|
|
|
|
|
|
|
|
|
|
29 |
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
|
34 |
-
|
35 |
-
session_data_fn[0] = messages_archived
|
36 |
-
session_data_fn[1] = messages_current
|
37 |
-
return None, session_data_fn
|
38 |
-
|
39 |
-
def get_context_gr(session_data_fn):
|
40 |
-
messages_current = session_data_fn[1]
|
41 |
-
return str(messages_current)
|
42 |
-
|
43 |
-
with gr.Tab("Chat"):
|
44 |
-
with gr.Row():
|
45 |
-
with gr.Column():
|
46 |
-
msg = gr.Textbox()
|
47 |
-
with gr.Row():
|
48 |
-
submit = gr.Button("Submit")
|
49 |
-
clear = gr.Button("Clear")
|
50 |
-
with gr.Column():
|
51 |
-
chatbot = gr.Chatbot()
|
52 |
-
|
53 |
-
with gr.Tab("Prompt"):
|
54 |
-
context = gr.Textbox()
|
55 |
-
submit_p = gr.Button("Check Prompt")
|
56 |
-
# Tab Chat
|
57 |
-
msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
|
58 |
-
bot, [chatbot, session_data], [chatbot, session_data]
|
59 |
-
).then(
|
60 |
-
fn = reset_memory, inputs = session_data, outputs = session_data
|
61 |
-
)
|
62 |
-
submit.click(user, [msg, chatbot], [msg, chatbot], queue=False).then(
|
63 |
-
bot, [chatbot, session_data], [chatbot, session_data]
|
64 |
-
).then(
|
65 |
-
fn = reset_memory, inputs = session_data, outputs = session_data
|
66 |
)
|
67 |
-
|
68 |
-
|
69 |
-
|
70 |
-
|
71 |
-
queue = False
|
72 |
-
)
|
73 |
-
# Tab Prompt
|
74 |
-
submit_p.click(get_context_gr, session_data, context, queue=False)
|
75 |
-
app.launch(debug=True)
|
|
|
|
|
1 |
import gradio as gr
|
2 |
from helper_functions import *
|
3 |
|
4 |
with gr.Blocks() as app:
|
5 |
+
gr.Markdown('# Prompt Generator for FundedNext')
|
6 |
+
session_data = gr.State([{"current_system_prompt": ""}])
|
|
|
|
|
|
|
|
|
7 |
|
8 |
+
def get_prompt(user_message, session_data):
|
9 |
+
if(user_message == ""):
|
10 |
+
return_message = "Please enter your message"
|
11 |
+
return return_message
|
12 |
+
pre_text = session_data[0]["current_system_prompt"]
|
13 |
+
return_message = pre_text + "\n\n" + get_context(user_message)
|
14 |
+
return return_message
|
15 |
+
def set_pre_text(system_prompt, session_data):
    """Persist an edited system prompt into the per-session state.

    Mutates the single dict held in *session_data* in place and returns
    the same state object so Gradio writes it back to ``gr.State``.
    """
    state_entry = session_data[0]
    state_entry["current_system_prompt"] = system_prompt
    return session_data
|
18 |
|
19 |
+
with gr.Tab("Generate Prompt"):
|
20 |
+
user_message = gr.Textbox(label = "Enter your message")
|
21 |
+
prompt = gr.Textbox(label="Generated Prompt", interactive=True, lines=20)
|
22 |
+
with gr.Tab("Edit System Prompt"):
|
23 |
+
system_prompt = gr.Textbox(
|
24 |
+
label="System Prompt", interactive=True, lines=15
|
25 |
+
)
|
26 |
+
gr.Markdown("## System Prompt Examples")
|
27 |
+
gr.Examples(
|
28 |
+
examples = [[pre_text]],
|
29 |
+
inputs = [system_prompt]
|
30 |
+
)
|
31 |
|
32 |
+
user_message.submit(
|
33 |
+
fn = get_prompt, inputs = [user_message, session_data], outputs = prompt
|
34 |
+
).then(lambda:"", inputs=None, outputs=user_message)
|
35 |
+
system_prompt.change(
|
36 |
+
fn = set_pre_text, inputs = [system_prompt, session_data], outputs = session_data
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
37 |
)
|
38 |
+
|
39 |
+
|
40 |
+
app.launch(auth=(os.getenv("id"), os.getenv("password")), show_api=False)
|
41 |
+
# app.launch()
|
|
|
|
|
|
|
|
|
|
helper_functions.py
CHANGED
@@ -44,24 +44,3 @@ def get_context(query):
|
|
44 |
My question is: β{query}β
|
45 |
"""
|
46 |
|
47 |
-
def get_reply(message, messages_archived, messages_current):
    """Send *message* (wrapped in retrieval context) to the chat model.

    On a non-empty message: copies the archived history, appends the
    context-wrapped question, calls the OpenAI chat API at temperature 0,
    then records the raw question and the assistant reply in the archive.
    On an empty message: returns a placeholder and leaves both histories
    untouched.

    Returns (reply, messages_archived, messages_current).
    """
    if not message:
        # No input received — nothing to send, histories unchanged.
        return "No Message Received", messages_archived, messages_current

    messages_current = messages_archived.copy()
    messages_current.append({"role": "user", "content": get_context(message)})
    completion = openai.ChatCompletion.create(
        model="gpt-3.5-turbo", messages=messages_current, temperature=0
    )
    reply = completion.choices[0].message.content
    # Archive the *raw* user message (not the context-wrapped one).
    messages_archived.append({"role": "user", "content": message})
    messages_archived.append({"role": "assistant", "content": reply})
    return reply, messages_archived, messages_current
|
67 |
-
|
|
|
44 |
My question is: β{query}β
|
45 |
"""
|
46 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|