pythonprincess committed on
Commit
afc0da4
·
verified ·
1 Parent(s): eca4264

Upload gradio_app.py

Browse files
Files changed (1) hide show
  1. gradio_app.py +493 -0
gradio_app.py ADDED
@@ -0,0 +1,493 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""
πŸ€– PENNY V2.2 Gradio Interface
Hugging Face Space Entry Point

This file connects PENNY's backend to a Gradio chat interface,
allowing users to interact with PENNY through a web UI on Hugging Face Spaces.
"""

# Imports grouped per PEP 8: standard library first, third-party after.
import asyncio
import logging
import sys
from datetime import datetime
from typing import Any, Dict, List, Tuple

import gradio as gr

# Setup logging: everything at INFO+ goes to stdout so Hugging Face
# Spaces captures it in the container log stream.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[logging.StreamHandler(sys.stdout)]
)
logger = logging.getLogger(__name__)
# ============================================================
# IMPORT PENNY MODULES - FIXED FOR ACTUAL FILE STRUCTURE
# ============================================================

try:
    # Core orchestration and routing
    from app.orchestrator import run_orchestrator
    from app.router import route_query

    # Utilities
    from app.location_utils import geocode_address, get_user_location
    from app.logging_utils import setup_logger

    # Event and weather handling
    from app.event_weather import (
        get_weather_info,
        search_events,
        format_weather_response,
        format_event_response
    )

    # Tool agent for officials and resources
    from app.tool_agent import (
        search_officials,
        search_resources,
        format_official_response,
        format_resource_response
    )

    # Model loader (if needed for initialization)
    from app.model_loader import initialize_models

    # Intent classification
    from app.intents import classify_intent, IntentType

    logger.info("βœ… Successfully imported PENNY modules from app/")

except ImportError as e:
    logger.error(f"❌ Failed to import PENNY modules: {e}")
    logger.error(f" Make sure all files exist in app/ folder")
    logger.error(f" Current error: {str(e)}")

    # BUGFIX: Python unbinds the `except ... as e` name when the except
    # block exits (PEP 3110), so a closure that reads `e` at call time
    # raises NameError. Capture the message now, while `e` still exists.
    _import_error_msg = str(e)

    # Create fallback functions so the interface can still load
    async def run_orchestrator(message: str, context: Dict[str, Any]) -> Dict[str, Any]:
        """Fallback orchestrator: returns a static 'initializing' reply."""
        return {
            "reply": f"⚠️ PENNY is initializing. Please try again in a moment.\n\nError: {_import_error_msg}",
            "intent": "error",
            "confidence": 0.0
        }

    # NOTE(review): this fallback is unconditionally shadowed by the real
    # get_service_availability() defined later in this module; kept for
    # parity with the original structure.
    def get_service_availability() -> Dict[str, bool]:
        return {
            "orchestrator": False,
            "weather_service": False,
            "event_database": False,
            "resource_finder": False
        }
# ============================================================
# SERVICE AVAILABILITY CHECK
# ============================================================

def get_service_availability() -> Dict[str, bool]:
    """
    Check which PENNY services are available.

    Each probe is independent so that a failure in one service cannot
    mask the availability of the others.

    Returns:
        Dict of service_name -> availability status.
    """
    services = {}

    # NOTE: the original used bare `except:`, which also swallows
    # SystemExit/KeyboardInterrupt; `except Exception` is the widest
    # net we actually want for these best-effort probes.
    try:
        # Check if orchestrator is callable
        services["orchestrator"] = callable(run_orchestrator)
    except Exception:
        services["orchestrator"] = False

    try:
        # Check if event/weather module loaded
        from app.event_weather import get_weather_info
        services["weather_service"] = True
    except Exception:
        services["weather_service"] = False

    try:
        # Check if event database accessible
        from app.event_weather import search_events
        services["event_database"] = True
    except Exception:
        services["event_database"] = False

    try:
        # Check if tool agent loaded
        from app.tool_agent import search_resources
        services["resource_finder"] = True
    except Exception:
        services["resource_finder"] = False

    return services
# ============================================================
# SUPPORTED CITIES CONFIGURATION
# ============================================================

SUPPORTED_CITIES = [
    "Atlanta, GA",
    "Birmingham, AL",
    "Chesterfield, VA",
    "El Paso, TX",
    "Norfolk, VA",
    "Providence, RI",
    "Seattle, WA"
]

def get_city_choices() -> List[str]:
    """Return dropdown choices: a catch-all entry, then the supported cities alphabetically."""
    try:
        return ["Not sure / Other", *sorted(SUPPORTED_CITIES)]
    except Exception as exc:
        # Defensive fallback: keep the UI usable with a minimal choice list.
        logger.error(f"Error loading cities: {exc}")
        return ["Not sure / Other", "Norfolk, VA"]
# ============================================================
# CHAT HANDLER
# ============================================================

async def chat_with_penny(
    message: str,
    city: str,
    history: List[Tuple[str, str]]
) -> Tuple[List[Tuple[str, str]], str]:
    """
    Route one user turn through PENNY's orchestrator.

    Args:
        message: User's input text.
        city: Selected city/location from the dropdown.
        history: Chat history as (user_msg, bot_msg) tuples; appended to in place.

    Returns:
        Tuple of (updated history, empty string used to clear the input box).
    """
    # Guard clause: ignore blank / whitespace-only submissions.
    if not message.strip():
        return history, ""

    try:
        # Orchestrator context: timestamp plus a short tail of the
        # conversation (last 5 exchanges) for continuity.
        recent = history[-5:] if history else []
        context = {
            "timestamp": datetime.now().isoformat(),
            "conversation_history": recent
        }

        # Only attach location data when the user picked a concrete city.
        if city and city != "Not sure / Other":
            context["location"] = city
            context["tenant_id"] = city.split(",")[0].lower().replace(" ", "_")

        logger.info(f"πŸ“¨ Processing: '{message[:60]}...' | City: {city}")

        # Delegate the actual work to PENNY's orchestrator.
        outcome = await run_orchestrator(message, context)

        reply = outcome.get("reply", "I'm having trouble right now. Please try again! πŸ’›")
        intent = outcome.get("intent", "unknown")
        confidence = outcome.get("confidence", 0.0)

        history.append((message, reply))
        logger.info(f"βœ… Response generated | Intent: {intent} | Confidence: {confidence:.2f}")
        return history, ""

    except Exception as err:
        logger.error(f"❌ Error processing message: {err}", exc_info=True)
        fallback = (
            "I'm having trouble processing your request right now. "
            "Please try again in a moment! πŸ’›\n\n"
            f"_Error: {str(err)[:100]}_"
        )
        history.append((message, fallback))
        return history, ""
def chat_with_penny_sync(message: str, city: str, history: List[Tuple[str, str]]) -> Tuple[List[Tuple[str, str]], str]:
    """
    Synchronous wrapper for chat_with_penny to work with Gradio.

    Gradio calls this handler synchronously, so the coroutine is driven
    with asyncio.run(), which creates a fresh event loop, runs the
    coroutine, and always closes the loop — even on error. (The previous
    hand-rolled new_event_loop()/close() pair leaked the loop and left it
    installed as the current loop whenever run_until_complete raised.)

    Returns:
        Same (history, "") pair as chat_with_penny; on failure the error
        text is appended to the history instead.
    """
    try:
        return asyncio.run(chat_with_penny(message, city, history))
    except Exception as e:
        logger.error(f"Error in sync wrapper: {e}")
        error_msg = f"Error: {str(e)}"
        history.append((message, error_msg))
        return history, ""
# ============================================================
# SERVICE STATUS DISPLAY
# ============================================================

def get_service_status() -> str:
    """Render current service availability as a Markdown status panel."""
    try:
        # Human-readable labels for each service key.
        service_names = {
            "orchestrator": "🧠 Core Orchestrator",
            "weather_service": "🌀️ Weather Service",
            "event_database": "πŸ“… Event Database",
            "resource_finder": "πŸ›οΈ Resource Finder"
        }

        lines = ["**πŸ”§ PENNY Service Status:**\n"]
        for key, is_up in get_service_availability().items():
            icon = "βœ…" if is_up else "⚠️"
            state = "Online" if is_up else "Limited"
            label = service_names.get(key, key.replace('_', ' ').title())
            lines.append(f"{icon} **{label}**: {state}")

        return "\n".join(lines)
    except Exception as exc:
        logger.error(f"Error getting service status: {exc}")
        return "**⚠️ Status:** Unable to check service availability"
# ============================================================
# GRADIO UI DEFINITION
# ============================================================

# Custom CSS for enhanced styling.
# - #chatbot pins the chat pane height (matches height=500 on gr.Chatbot).
# - #status-panel styles the sidebar status card.
# - `footer { display: none }` hides Gradio's default footer.
# - .message-user / .message-bot tint the two chat bubble roles.
custom_css = """
#chatbot {
    height: 500px;
    overflow-y: auto;
    border-radius: 8px;
}
.gradio-container {
    font-family: 'Inter', -apple-system, BlinkMacSystemFont, sans-serif;
}
#status-panel {
    background: linear-gradient(135deg, #f5f7fa 0%, #c3cfe2 100%);
    padding: 15px;
    border-radius: 8px;
    margin: 10px 0;
}
footer {
    display: none !important;
}
.message-user {
    background-color: #e3f2fd !important;
}
.message-bot {
    background-color: #fff3e0 !important;
}
"""
# Build the Gradio interface.
# Layout: header Markdown, then a two-column row — left (scale=2) holds the
# city selector, chat pane, input row, clear button and examples; right
# (scale=1) holds the live service-status panel and static info Markdown.
with gr.Blocks(
    theme=gr.themes.Soft(primary_hue="amber", secondary_hue="blue"),
    css=custom_css,
    title="PENNY V2.2 - Civic Assistant"
) as demo:

    # Header
    gr.Markdown(
        """
        # πŸ€– PENNY V2.2 - People's Engagement Network Navigator for You

        **Your multilingual civic assistant connecting residents to local government services and community resources.**

        ### πŸ’¬ Ask me about:
        - 🌀️ **Weather conditions** and forecasts
        - πŸ“… **Community events** and activities
        - πŸ›οΈ **Local resources** (shelters, libraries, food banks, healthcare)
        - πŸ‘₯ **Elected officials** and government contacts
        - 🌍 **Translation** services (27+ languages)
        - πŸ“„ **Document assistance** and form help
        """
    )

    with gr.Row():
        with gr.Column(scale=2):
            # City selector — feeds the `city` argument of the chat handler.
            city_dropdown = gr.Dropdown(
                choices=get_city_choices(),
                value="Norfolk, VA",
                label="πŸ“ Select Your City",
                info="Choose your city for location-specific information",
                interactive=True
            )

            # Chat interface
            # NOTE(review): bubble_full_width is deprecated in newer Gradio
            # releases — confirm against the pinned gradio version.
            chatbot = gr.Chatbot(
                label="πŸ’¬ Chat with PENNY",
                elem_id="chatbot",
                avatar_images=(None, "πŸ€–"),
                show_label=True,
                height=500,
                bubble_full_width=False
            )

            # Input row: textbox plus send button side by side.
            with gr.Row():
                msg_input = gr.Textbox(
                    placeholder="Type your message here... (e.g., 'What's the weather today?')",
                    show_label=False,
                    scale=4,
                    container=False,
                    lines=1
                )
                submit_btn = gr.Button("Send πŸ“€", variant="primary", scale=1)

            # Clear button
            clear_btn = gr.Button("πŸ—‘οΈ Clear Chat", variant="secondary", size="sm")

            # Example queries — clicking one fills msg_input.
            gr.Examples(
                examples=[
                    ["What's the weather in Norfolk today?"],
                    ["Any community events this weekend?"],
                    ["I need help finding a food bank"],
                    ["Who is my city council representative?"],
                    ["Show me local libraries"],
                    ["Translate 'hello' to Spanish"],
                    ["Help me understand this document"]
                ],
                inputs=msg_input,
                label="πŸ’‘ Try asking:"
            )

        with gr.Column(scale=1):
            # Service status panel — initial value computed once at build time.
            status_display = gr.Markdown(
                value=get_service_status(),
                label="System Status",
                elem_id="status-panel"
            )

            # Refresh status button
            refresh_btn = gr.Button("πŸ”„ Refresh Status", size="sm", variant="secondary")

            gr.Markdown(
                """
                ### 🌟 Key Features

                - βœ… **27+ Languages** supported
                - βœ… **Real-time weather** via Azure Maps
                - βœ… **Community events** database
                - βœ… **Local resource** finder
                - βœ… **Government contact** lookup
                - βœ… **Document processing** help
                - βœ… **Multilingual** support

                ---

                ### πŸ“ Supported Cities

                - Atlanta, GA
                - Birmingham, AL
                - Chesterfield, VA
                - El Paso, TX
                - Norfolk, VA
                - Providence, RI
                - Seattle, WA

                ---

                ### πŸ†˜ Need Help?

                PENNY can assist with:
                - Finding emergency services
                - Locating government offices
                - Understanding civic processes
                - Accessing community programs

                ---

                πŸ’› *PENNY is here to help connect you with civic resources!*
                """
            )

    # Event handlers: button click and Enter-key submit share one handler;
    # both return (history, "") so the input box is cleared after each turn.
    submit_btn.click(
        fn=chat_with_penny_sync,
        inputs=[msg_input, city_dropdown, chatbot],
        outputs=[chatbot, msg_input]
    )

    msg_input.submit(
        fn=chat_with_penny_sync,
        inputs=[msg_input, city_dropdown, chatbot],
        outputs=[chatbot, msg_input]
    )

    # Reset both the chat history and the input box.
    clear_btn.click(
        fn=lambda: ([], ""),
        inputs=None,
        outputs=[chatbot, msg_input]
    )

    # Re-run the availability probes and rewrite the status panel.
    refresh_btn.click(
        fn=get_service_status,
        inputs=None,
        outputs=status_display
    )

    # Footer
    gr.Markdown(
        """
        ---
        **Built with:** Python β€’ FastAPI β€’ Gradio β€’ Azure ML β€’ Hugging Face Transformers

        **Version:** 2.2 | **Last Updated:** November 2025

        _PENNY is an open-source civic engagement platform designed to improve access to government services._
        """
    )
# ============================================================
# INITIALIZATION AND LAUNCH
# ============================================================

def initialize_penny():
    """Log a startup banner and report which PENNY services loaded."""
    banner = "=" * 70
    logger.info(banner)
    logger.info("πŸš€ Initializing PENNY V2.2 Gradio Interface")
    logger.info(banner)

    # Display service availability at startup
    logger.info("\nπŸ“Š Service Availability Check:")
    services = get_service_availability()

    any_missing = False
    for name, loaded in services.items():
        state = "βœ… Available" if loaded else "❌ Not loaded"
        logger.info(f" {name.ljust(20)}: {state}")
        if not loaded:
            any_missing = True

    if any_missing:
        logger.warning("\n⚠️ Some services are not available. PENNY will run with limited functionality.")
    else:
        logger.info("\nβœ… All services loaded successfully!")

    logger.info("\n" + banner)
    logger.info("πŸ€– PENNY is ready to help residents!")
    logger.info(banner + "\n")
if __name__ == "__main__":
    # Initialize services (logs the startup banner and availability report).
    initialize_penny()

    # Launch the Gradio app.
    # server_name="0.0.0.0" binds all interfaces and port 7860 is the
    # port Hugging Face Spaces routes to; share=False disables the
    # public tunnel, show_error=True surfaces tracebacks in the UI.
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        show_error=True
    )