Commit · 7c65f3f
1 Parent(s): 981cad3
workflow errors debugging V2

app.py CHANGED
@@ -140,8 +140,7 @@ def create_mobile_optimized_interface():
                 show_label=False,
                 height="60vh",
                 elem_classes="chatbot-container",
-                type="messages"
-                render=False # Improve mobile performance
+                type="messages"
             )
             interface_components['chatbot'] = chatbot
 
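The first hunk keeps type="messages" and drops render=False. In the removed lines type="messages" appears with no comma before render=False, which would be a syntax error in the keyword-argument list, and render=False would also keep the component from rendering inside the current Blocks context unless chatbot.render() were called later; dropping that line is a plausible fix for the startup errors this commit is debugging. A minimal sketch of how the chatbot is presumably declared around line 140 follows; the gr.Blocks wrapper and the creation of interface_components are assumptions, and only the keyword arguments plus the names chatbot and interface_components come from the diff:

import gradio as gr  # assumed; app.py presumably imports this already

with gr.Blocks() as demo:  # assumed surrounding context
    interface_components = {}  # assumed to be created earlier in app.py
    chatbot = gr.Chatbot(
        show_label=False,
        height="60vh",
        elem_classes="chatbot-container",
        type="messages",  # history is a list of {"role": ..., "content": ...} dicts
    )
    interface_components['chatbot'] = chatbot

With type="messages" the chatbot expects role/content dicts rather than [user, bot] pairs, which is exactly the shape the rewritten handler in the second hunk appends, and the reason the pair-based simple_message_handler is deleted there.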
@@ -265,31 +264,49 @@ def setup_event_handlers(demo, event_handlers):
 
     return demo
 
-def simple_message_handler(message, chat_history):
-    """Simple mock handler for testing UI without full backend"""
-    if not message.strip():
-        return chat_history, ""
-
-    # Simple echo response for MVP testing
-    response = f"I received your message: {message}. This is a placeholder response. The full agent system is ready to integrate!"
-
-    new_history = chat_history + [[message, response]]
-    return new_history, ""
-
 def process_message(message, history):
-    """
-
-    return history, ""
-
-    response = f"I received your message: {message}. This is a placeholder response. The full agent system is ready to integrate!"
+    """
+    Process message with messages format - returns immediately
 
-
-
-
-
-
-
-
+    NOTE: This is currently using a placeholder. For full orchestration:
+    - Make this function async if you need to call async orchestrator methods
+    - Or use threading/background tasks for long-running operations
+    - Or use Gradio's streaming capabilities for progressive responses
+    """
+    try:
+        # Handle empty messages
+        if not message or not message.strip():
+            return history if history else [], ""
+
+        # Initialize history if None
+        if history is None:
+            history = []
+
+        # Create a copy to avoid mutating the input
+        new_history = list(history) if isinstance(history, list) else []
+
+        # Add user message
+        new_history.append({"role": "user", "content": message.strip()})
+
+        # Generate immediate response (placeholder)
+        response = f"I received your message: {message}\n\nThis is a placeholder response. The full agent system is ready to integrate!"
+
+        # Add assistant response
+        new_history.append({"role": "assistant", "content": response})
+
+        # Return updated history and clear input
+        return new_history, ""
+
+    except Exception as e:
+        # Error handling - return error message to user
+        import traceback
+        print(f"ERROR in process_message: {e}")
+        traceback.print_exc()
+
+        error_history = list(history) if history else []
+        error_history.append({"role": "user", "content": message})
+        error_history.append({"role": "assistant", "content": f"I encountered an error: {str(e)}"})
+        return error_history, ""
 
 # Decorate the chat handler with GPU if available
 if SPACES_GPU_AVAILABLE and GPU is not None:
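The new docstring marks process_message as a placeholder and lists async calls, background threads, or streaming as routes to full orchestration. A minimal sketch of the streaming route, under the same messages format; run_orchestrator is a hypothetical stand-in for the real agent system, not a function that exists in this repo:

def process_message_streaming(message, history):
    """Hypothetical streaming variant: yields partial histories so the
    chatbot updates progressively instead of waiting for the full reply."""
    if not message or not message.strip():
        yield history or [], ""
        return

    new_history = list(history or [])
    new_history.append({"role": "user", "content": message.strip()})
    new_history.append({"role": "assistant", "content": ""})

    # run_orchestrator() is an assumed placeholder for the agent backend;
    # it is expected to yield text chunks as they are produced.
    for chunk in run_orchestrator(message):
        new_history[-1]["content"] += chunk
        yield new_history, ""

Either variant is attached the same way in setup_event_handlers, along the lines of msg.submit(process_message, [msg, chatbot], [chatbot, msg]); the msg textbox name is an assumption, only chatbot appears in the diff.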
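The trailing context shows the handler being wrapped in a GPU decorator only when SPACES_GPU_AVAILABLE is set. The wrapping itself falls outside this hunk; a sketch of the usual Hugging Face Spaces ZeroGPU pattern that the GPU and SPACES_GPU_AVAILABLE names suggest, where the import/fallback block is an assumption about what app.py does near the top of the file:

try:
    import spaces              # ZeroGPU helper available on HF Spaces hardware
    GPU = spaces.GPU
    SPACES_GPU_AVAILABLE = True
except ImportError:            # running locally without the spaces package
    GPU = None
    SPACES_GPU_AVAILABLE = False

# Decorate the chat handler with GPU if available
if SPACES_GPU_AVAILABLE and GPU is not None:
    process_message = GPU(process_message)  # request a GPU slice per call

Wrapping the handler this way keeps local runs working unchanged while letting the Space allocate a GPU for each call when ZeroGPU is available.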