rkoushikroy2 committed
Commit 890e747 • 1 Parent(s): 64e66df

Upload 2 files

Files changed (2)
  1. app.py +33 -67
  2. helper_functions.py +0 -21
app.py CHANGED
@@ -1,75 +1,41 @@
-# Imports
 import gradio as gr
 from helper_functions import *
 
 with gr.Blocks() as app:
-    gr.Markdown('# FundedNext Customer Service Chatbot')
-    session_data = gr.State([
-        [{"role": "system", "content": pre_text}],[]
-    ])
-    def user(user_message, history):
-        return "", history + [[user_message, None]]
+    gr.Markdown('# Prompt Generator for FundedNext')
+    session_data = gr.State([{"current_system_prompt": ""}])
 
-    def bot(history, session_data_fn):
-        messages_archived = session_data_fn[0]
-        messages_current = session_data_fn[1]
-        bot_message, messages_archived, messages_current = get_reply(history[-1][0], messages_archived, messages_current)
-        history[-1][1] = bot_message
-        session_data_fn[0] = messages_archived
-        session_data_fn[1] = messages_current
-        return history, session_data_fn
+    def get_prompt(user_message, session_data):
+        if(user_message == ""):
+            return_message = "Please enter your message"
+            return return_message
+        pre_text = session_data[0]["current_system_prompt"]
+        return_message = pre_text + "\n\n" + get_context(user_message)
+        return return_message
+    def set_pre_text(system_prompt, session_data):
+        session_data[0]["current_system_prompt"] = system_prompt
+        return session_data
 
-    def reset_memory(session_data_fn):
-        messages_archived = session_data_fn[0]
-        # print("Message Archived Len=", len(messages_archived))
-        if(len(messages_archived)>=21):
-            messages_archived = messages_archived[0:1] + messages_archived[3:]
-        session_data_fn[0] = messages_archived
-        return session_data_fn
+    with gr.Tab("Generate Prompt"):
+        user_message = gr.Textbox(label = "Enter your message")
+        prompt = gr.Textbox(label="Generated Prompt", interactive=True, lines=20)
+    with gr.Tab("Edit System Prompt"):
+        system_prompt = gr.Textbox(
+            label="System Prompt", interactive=True, lines=15
+        )
+        gr.Markdown("## System Prompt Examples")
+        gr.Examples(
+            examples = [[pre_text]],
+            inputs = [system_prompt]
+        )
 
-    def clear_data(session_data_fn):
-        messages_archived = [
-            {"role": "system", "content": pre_text}
-        ]
-        messages_current = []
-        session_data_fn[0] = messages_archived
-        session_data_fn[1] = messages_current
-        return None, session_data_fn
-
-    def get_context_gr(session_data_fn):
-        messages_current = session_data_fn[1]
-        return str(messages_current)
-
-    with gr.Tab("Chat"):
-        with gr.Row():
-            with gr.Column():
-                msg = gr.Textbox()
-                with gr.Row():
-                    submit = gr.Button("Submit")
-                    clear = gr.Button("Clear")
-            with gr.Column():
-                chatbot = gr.Chatbot()
-
-    with gr.Tab("Prompt"):
-        context = gr.Textbox()
-        submit_p = gr.Button("Check Prompt")
-    # Tab Chat
-    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
-        bot, [chatbot, session_data], [chatbot, session_data]
-    ).then(
-        fn = reset_memory, inputs = session_data, outputs = session_data
-    )
-    submit.click(user, [msg, chatbot], [msg, chatbot], queue=False).then(
-        bot, [chatbot, session_data], [chatbot, session_data]
-    ).then(
-        fn = reset_memory, inputs = session_data, outputs = session_data
+    user_message.submit(
+        fn = get_prompt, inputs = [user_message, session_data], outputs = prompt
+    ).then(lambda:"", inputs=None, outputs=user_message)
+    system_prompt.change(
+        fn = set_pre_text, inputs = [system_prompt, session_data], outputs = session_data
     )
-    clear.click(
-        fn = clear_data,
-        inputs = session_data,
-        outputs = [chatbot, session_data],
-        queue = False
-    )
-    # Tab Prompt
-    submit_p.click(get_context_gr, session_data, context, queue=False)
-app.launch(debug=True)
+
+
+app.launch(auth=(os.getenv("id"), os.getenv("password")), show_api=False)
+# app.launch()
 
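Note on the new launch call: app.launch now reads basic-auth credentials with os.getenv, yet the diff adds no explicit import os to app.py, so it presumably relies on os reaching the module namespace through the wildcard from helper_functions import *. A minimal sketch with that dependency made explicit; the env var names "id" and "password" come from the commit, while the all(...) fallback is an added assumption:

import os

import gradio as gr

with gr.Blocks() as app:
    gr.Markdown('# Prompt Generator for FundedNext')

# Pass (username, password) read from Space secrets; fall back to an
# unauthenticated launch if they are unset (assumption, not part of the commit).
auth = (os.getenv("id"), os.getenv("password"))
app.launch(auth=auth if all(auth) else None, show_api=False)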
helper_functions.py CHANGED
@@ -44,24 +44,3 @@ def get_context(query):
 My question is: “{query}”
 """
 
-def get_reply(message, messages_archived, messages_current):
-
-    if message:
-        messages_current = messages_archived.copy()
-        context = get_context(message)
-        messages_current.append(
-            {"role": "user", "content": context}
-        )
-        chat = openai.ChatCompletion.create(
-            model="gpt-3.5-turbo", messages=messages_current, temperature=0
-        )
-
-        reply = chat.choices[0].message.content
-        messages_archived.append({"role": "user", "content": message})
-        messages_archived.append({"role": "assistant", "content": reply})
-    # If no message is provided, return a string that says "No Message Received"
-    else:
-        reply = "No Message Received"
-
-    return reply, messages_archived, messages_current
-
 
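For reference, only the tail of get_context(query) is visible in this hunk; it wraps the raw user question in a larger prompt template, and in the new app.py, get_prompt appends its output to the stored system prompt. A purely hypothetical sketch of such a template function (the real instruction text above the visible tail is not part of this diff):

def get_context(query):
    # Hypothetical sketch only: the actual instructions are not shown in the
    # diff; only the closing lines of the template appear above.
    return f"""
    <instructions and retrieved support context for the assistant go here>

    My question is: “{query}”
    """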