Wfafa committed on
Commit
bf0aeeb
·
verified ·
1 Parent(s): e2443b6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +82 -13
app.py CHANGED
@@ -37,11 +37,19 @@ def save_memory(memory):
37
 
38
  memory = load_memory()
39
 
40
- # 💬 Chat function
 
 
 
41
  def chat_with_model(message, history, context):
42
  if not isinstance(history, list):
43
  history = []
44
 
 
 
 
 
 
45
  if message.lower().startswith("search "):
46
  query = message[7:]
47
  search_result = search_web(query)
@@ -49,20 +57,27 @@ def chat_with_model(message, history, context):
49
  save_memory(history)
50
  return history, history
51
 
 
52
  conversation = [{"role": "system", "content": (
53
- "You are EduAI — an educational AI assistant created by Wafa Fazly "
54
- "from Fathima Muslim Ladies College. "
55
- "You help students learn subjects such as Math, Science, English, and IT. "
56
- "EduAI runs on the model 'Qwen/Qwen3-VL-8B-Instruct', which was originally "
57
- "trained by Alibaba. Always answer truthfully when asked about your creation."
58
  )}]
59
 
60
- for past_user, past_bot in history[-5:]:
61
- conversation.append({"role": "user", "content": past_user})
62
- conversation.append({"role": "assistant", "content": past_bot})
 
 
 
 
 
63
 
64
  conversation.append({"role": "user", "content": message})
65
 
 
66
  try:
67
  response = requests.post(
68
  "https://router.huggingface.co/v1/chat/completions",
@@ -73,12 +88,14 @@ def chat_with_model(message, history, context):
73
  json={
74
  "model": "Qwen/Qwen3-VL-8B-Instruct:novita",
75
  "messages": conversation
76
- }
 
77
  )
78
 
79
  data = response.json()
80
  reply = data["choices"][0]["message"]["content"]
81
 
 
82
  reply = reply.replace("Step", "\n\n**Step")
83
  reply = reply.replace(":", ":**")
84
  reply = reply.replace("\\[", "\n\n\\[")
@@ -89,6 +106,7 @@ def chat_with_model(message, history, context):
89
 
90
  history.append((message, reply))
91
  save_memory(history)
 
92
  return history, history
93
 
94
  except Exception as e:
@@ -108,7 +126,51 @@ def clear_memory():
108
  os.remove(MEMORY_FILE)
109
  return [], "🧹 Chat memory cleared! Start fresh."
110
 
111
- # 🎨 Gradio Interface (UI Improved)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
112
  with gr.Blocks(theme=gr.themes.Soft(primary_hue="violet")) as iface:
113
  gr.Markdown(
114
  """
@@ -165,7 +227,6 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="violet")) as iface:
165
  {"left": "\\[", "right": "\\]", "display": True}
166
  ]
167
  )
168
-
169
  msg = gr.Textbox(
170
  label="💭 Type your question here...",
171
  placeholder="Ask EduAI anything about your studies..."
@@ -174,12 +235,20 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="violet")) as iface:
174
  with gr.Row():
175
  send = gr.Button("✨ Send Message")
176
  pause = gr.Button("⏸ Pause", variant="secondary")
 
 
177
 
178
  # 🪄 Event handlers
179
  subj.change(update_context, inputs=subj, outputs=context_display)
180
  planner.change(update_context, inputs=planner, outputs=context_display)
181
  lang.change(update_context, inputs=lang, outputs=context_display)
182
- send.click(chat_with_model, inputs=[msg, chatbot, context_display], outputs=[chatbot, chatbot])
 
 
 
183
  clear_btn.click(clear_memory, outputs=[chatbot, context_display])
184
 
 
 
 
185
  iface.launch()
 
37
 
38
  memory = load_memory()
39
 
40
+ # -----------------------
41
+ # Chat function (original behavior)
42
+ # returns (history, history) to match previous usage
43
+ # -----------------------
44
  def chat_with_model(message, history, context):
45
  if not isinstance(history, list):
46
  history = []
47
 
48
+ # prevent empty messages
49
+ if not message:
50
+ return history, history
51
+
52
+ # 🌍 Web search mode
53
  if message.lower().startswith("search "):
54
  query = message[7:]
55
  search_result = search_web(query)
 
57
  save_memory(history)
58
  return history, history
59
 
60
+ # 🧠 Build conversation
61
  conversation = [{"role": "system", "content": (
62
+ "You are EduAI — an educational AI assistant created by Wafa Fazly "
63
+ "from Fathima Muslim Ladies College. "
64
+ "You help students learn subjects such as Math, Science, English, and IT. "
65
+ "EduAI runs on the model 'Qwen/Qwen3-VL-8B-Instruct', which was originally "
66
+ "trained by Alibaba. Always answer truthfully when asked about your creation."
67
  )}]
68
 
69
+ # convert tuples to messages if necessary (keeps old history format)
70
+ for past in history[-5:]:
71
+ # expect (user_message, bot_reply)
72
+ if isinstance(past, tuple) and len(past) == 2:
73
+ conversation.append({"role": "user", "content": past[0]})
74
+ conversation.append({"role": "assistant", "content": past[1]})
75
+ elif isinstance(past, dict):
76
+ conversation.append(past)
77
 
78
  conversation.append({"role": "user", "content": message})
79
 
80
+ # 🚀 Send to Hugging Face model
81
  try:
82
  response = requests.post(
83
  "https://router.huggingface.co/v1/chat/completions",
 
88
  json={
89
  "model": "Qwen/Qwen3-VL-8B-Instruct:novita",
90
  "messages": conversation
91
+ },
92
+ timeout=60
93
  )
94
 
95
  data = response.json()
96
  reply = data["choices"][0]["message"]["content"]
97
 
98
+ # 🧮 Clean up math formatting (keeps your original formatting code)
99
  reply = reply.replace("Step", "\n\n**Step")
100
  reply = reply.replace(":", ":**")
101
  reply = reply.replace("\\[", "\n\n\\[")
 
106
 
107
  history.append((message, reply))
108
  save_memory(history)
109
+ # IMPORTANT: return a pair (history, history) because other code expects two outputs
110
  return history, history
111
 
112
  except Exception as e:
 
126
  os.remove(MEMORY_FILE)
127
  return [], "🧹 Chat memory cleared! Start fresh."
128
 
129
# -----------------------
# Pause / Send wrappers
# -----------------------

# 'Send' click handler that honors the pause toggle.
def send_handler(message, history, context, paused_state):
    """Route a send-button click, skipping the model while paused.

    Always returns a pair ``(chat_history, textbox_value)`` so Gradio can
    refresh the chatbot component and clear the input textbox.
    """
    if paused_state:
        # Paused: never reach the model — just surface a hint in the chat.
        if not isinstance(history, list):
            history = []
        history.append((None, "⏸️ Chat is paused. Click Resume to continue."))
        return history, ""

    # Running normally: delegate to the original chat handler, which
    # returns (history, history); keep only one copy for the chatbot output.
    result = chat_with_model(message, history, context)
    if isinstance(result, tuple) and len(result) == 2:
        chat_history = result[0]
    else:
        chat_history = result
    return chat_history, ""
149
+
150
+
151
# toggle pause/resume and update UI (state + chat + button text + send button enabled/disabled)
def toggle_pause(paused_state, history):
    """Flip the pause flag and reflect the change in the UI.

    Returns a 4-tuple consumed by ``pause.click``:
    ``(new_state, history, pause_button_update, send_button_update)`` —
    the toggled boolean state, the chat history with a status line
    appended, a relabel for the pause button, and an enable/disable
    update for the send button.
    """
    new_state = not bool(paused_state)
    if not isinstance(history, list):
        history = []

    # NOTE: gr.Button.update() was removed in Gradio 4.x and raised
    # AttributeError at runtime there; gr.update() is the portable form.
    # Button has no "disabled" parameter — the property is "interactive".
    if new_state:
        # now paused
        history.append((None, "⏸️ Chat paused. Send is disabled."))
        pause_btn_update = gr.update(value="▶ Resume")
        send_btn_update = gr.update(interactive=False)
    else:
        # resumed
        history.append((None, "▶️ Chat resumed. You can send messages now."))
        pause_btn_update = gr.update(value="⏸ Pause")
        send_btn_update = gr.update(interactive=True)

    # return new pause state, updated chat history, and two UI updates (pause button & send button)
    return new_state, history, pause_btn_update, send_btn_update
170
+
171
+ # -----------------------
172
+ # Build UI (unchanged layout; pause added)
173
+ # -----------------------
174
  with gr.Blocks(theme=gr.themes.Soft(primary_hue="violet")) as iface:
175
  gr.Markdown(
176
  """
 
227
  {"left": "\\[", "right": "\\]", "display": True}
228
  ]
229
  )
 
230
  msg = gr.Textbox(
231
  label="💭 Type your question here...",
232
  placeholder="Ask EduAI anything about your studies..."
 
235
  with gr.Row():
236
  send = gr.Button("✨ Send Message")
237
  pause = gr.Button("⏸ Pause", variant="secondary")
238
+ # state to keep track of pause (False = running, True = paused)
239
+ pause_state = gr.State(False)
240
 
241
  # 🪄 Event handlers
242
  subj.change(update_context, inputs=subj, outputs=context_display)
243
  planner.change(update_context, inputs=planner, outputs=context_display)
244
  lang.change(update_context, inputs=lang, outputs=context_display)
245
+
246
+ # send now uses send_handler and respects pause_state; outputs: chatbot and clears textbox
247
+ send.click(send_handler, inputs=[msg, chatbot, context_display, pause_state], outputs=[chatbot, msg])
248
+
249
  clear_btn.click(clear_memory, outputs=[chatbot, context_display])
250
 
251
+ # pause toggles pause_state, updates chatbot with a message, updates pause button label and disables/enables send
252
+ pause.click(toggle_pause, inputs=[pause_state, chatbot], outputs=[pause_state, chatbot, pause, send])
253
+
254
  iface.launch()