| """ |
| MAGI System - Web Interface v2.0 |
| Neon Genesis Evangelion AI Simulation |
| |
| Gradio web interface for the MAGI multi-agent system |
| """ |
|
|
| import gradio as gr |
| import sys |
| import os |
| import io |
| import re |
| import threading |
| import queue |
| from contextlib import redirect_stdout, redirect_stderr |
| from pathlib import Path |
| from datetime import datetime |
| from typing import Tuple, Generator |
|
|
| |
| sys.path.insert(0, str(Path(__file__).parent)) |
|
|
| |
| from Main_core_002 import analyze_question |
|
|
| |
# CSS theme injected into the Gradio app: NERV/MAGI palette (red #d32f2f,
# orange #ff6f00, cyan #00bcd4, terminal-green text on near-black blue),
# Courier New everywhere, centered text by default.  The #live-logs override
# at the bottom restores left alignment for the streaming log pane.
EVANGELION_CSS = """
/* NERV/MAGI Theme - Evangelion Style */
.gradio-container {
    font-family: 'Courier New', monospace !important;
    background: linear-gradient(135deg, #0a0e1a 0%, #1a1f2e 100%) !important;
}

.contain {
    background: rgba(26, 31, 46, 0.95) !important;
    border: 2px solid #d32f2f !important;
    border-radius: 0px !important;
}

h1, h2, h3, h4, h5, h6, .centered-markdown {
    color: #ff6f00 !important;
    font-family: 'Courier New', monospace !important;
    text-transform: uppercase !important;
    letter-spacing: 2px !important;
    text-shadow: 0 0 10px rgba(211, 47, 47, 0.5) !important;
    text-align: center !important;
}

.output-markdown, .gr-textbox, .gradio-markdown, .gradio-label, .gradio-status {
    text-align: center !important;
}

.tab-nav button {
    background: #1a1f2e !important;
    color: #00bcd4 !important;
    border: 1px solid #d32f2f !important;
    font-weight: bold !important;
}

.tab-nav button.selected {
    background: #d32f2f !important;
    color: white !important;
    border: 2px solid #ff6f00 !important;
}

textarea, input {
    background: #0a0e1a !important;
    color: #00ff41 !important;
    border: 1px solid #00bcd4 !important;
    font-family: 'Courier New', monospace !important;
}

.output-markdown {
    background: #0a0e1a !important;
    color: #00ff41 !important;
    border: 1px solid #d32f2f !important;
    padding: 20px !important;
    font-family: 'Courier New', monospace !important;
    text-align: center !important;
}

button {
    background: linear-gradient(135deg, #d32f2f 0%, #ff6f00 100%) !important;
    color: white !important;
    border: none !important;
    font-weight: bold !important;
    text-transform: uppercase !important;
    letter-spacing: 1px !important;
    box-shadow: 0 0 20px rgba(211, 47, 47, 0.5) !important;
}

button:hover {
    box-shadow: 0 0 30px rgba(255, 111, 0, 0.8) !important;
}

.progress-bar {
    background: #d32f2f !important;
}

footer {
    color: #00bcd4 !important;
    text-align: center !important;
}

/* Override alignment for live logs for readability */
#live-logs textarea {
    text-align: left !important;
    font-family: 'Courier New', monospace !important;
    white-space: pre-wrap !important;
}
"""
|
|
|
|
def process_magi_query_stream(
    question: str,
    provider: str = "Groq",
    ollama_model: str = "",
    enable_search: bool = False,
    temperature: float = 0.5,
    clean_logs: bool = True,
) -> Generator[Tuple[str, str, str], None, None]:
    """
    Stream a MAGI analysis with live logs.

    Runs ``analyze_question`` on a daemon thread while redirecting its
    stdout/stderr into a queue, and yields successive
    ``(result_text, status_message, live_logs)`` tuples so the Gradio UI
    can refresh incrementally while the backend works.

    Args:
        question: Question to analyze; blank/whitespace input yields a
            single error tuple and returns immediately.
        provider: Provider label from the UI dropdown ("Groq", "OpenAI",
            "Ollama (local)").
        ollama_model: Model name, only forwarded when provider is Ollama.
        enable_search: Pass-through flag for internet search in the backend.
        temperature: Sampling temperature forwarded to the backend.
        clean_logs: When True, strip ANSI escape sequences and carriage
            returns from captured output before displaying it.

    Yields:
        Tuples of (result_text, status_message, live_logs).
    """
    result_text = ""
    status_text = ""
    log_text = ""

    # Guard clause: nothing to analyze.
    if not question or not question.strip():
        yield ("β ERROR: Please enter a question.", "β οΈ No input provided", "")
        return

    # Normalize the UI label to the backend's provider key.
    provider_lower = provider.lower()
    if provider_lower == "ollama (local)":
        provider_lower = "ollama"

    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    header = f"""
ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
β MAGI SYSTEM ANALYSIS β
β Multi-Agent General Intelligence β
ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ

β° Timestamp: {timestamp}
β Question: {question}
π€ Provider: {provider}
π Search: {"Enabled" if enable_search else "Disabled"}
π‘οΈ Temperature: {temperature}
π¦ Ollama Model: {ollama_model if provider_lower == "ollama" else "-"}

{'='*70}
EXECUTING THREE-PERSPECTIVE ANALYSIS...
{'='*70}

"""

    log_text += header
    yield (result_text, "π Analysis started...", log_text)

    # Queue carrying captured output chunks; None is the end-of-stream sentinel.
    q: queue.Queue[str | None] = queue.Queue()

    class QueueWriter(io.TextIOBase):
        """File-like sink that forwards every non-empty write into the queue."""

        def write(self, s: str) -> int:
            if s:
                q.put(s)
            return len(s)

    # Matches CSI-style ANSI escape sequences (colors, cursor movement).
    ansi_escape = re.compile(r"\x1b\[[0-?]*[ -/]*[@-~]")

    def sanitize(chunk: str) -> str:
        """Strip ANSI escapes and carriage returns for a readable log pane."""
        chunk = ansi_escape.sub("", chunk)
        chunk = chunk.replace("\r", "")
        return chunk

    # Shared slot for the worker's outcome (exactly one of the two is set).
    analysis_result_holder = {"result": None, "error": None}

    def worker():
        """Run the (blocking) analysis with its output redirected to the queue."""
        try:
            with redirect_stdout(QueueWriter()), redirect_stderr(QueueWriter()):
                res = analyze_question(
                    question=question,
                    provider=provider_lower,
                    ollama_model=ollama_model,
                    enable_search=enable_search,
                    temperature=temperature
                )
            analysis_result_holder["result"] = res
        except Exception as e:
            analysis_result_holder["error"] = e
        finally:
            # Always signal completion, even on failure.
            q.put(None)

    t = threading.Thread(target=worker, daemon=True)
    t.start()

    # Drain the queue until the sentinel arrives, yielding periodic UI updates
    # (the timeout keeps the stream alive even when the worker is silent).
    while True:
        try:
            item = q.get(timeout=0.2)
        except queue.Empty:
            yield (result_text, "β³ Running analysis...", log_text)
            continue

        if item is None:
            break
        chunk = item
        if clean_logs:
            chunk = sanitize(chunk)
        log_text += chunk
        # Cap the log buffer so the textbox never grows unboundedly.
        if len(log_text) > 200_000:
            log_text = log_text[-200_000:]
        yield (result_text, "β³ Running analysis...", log_text)

    # The sentinel is queued in the worker's `finally`, so the thread is
    # effectively done; join briefly to avoid leaking it.
    t.join(timeout=1.0)

    if analysis_result_holder["error"] is not None:
        e = analysis_result_holder["error"]
        error_msg = f"""
ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
β ERROR β
ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ

β An error occurred during MAGI analysis:

{str(e)}

Please check:
- Your API keys are configured in config/.env
- You have a stable internet connection (if using cloud providers)
- The question is not empty
"""
        result_text = error_msg
        status_text = f"β Error: {str(e)}"
        yield (result_text, status_text, log_text)
        return

    res = analysis_result_holder["result"]
    # Defensive fix: the worker could finish without setting either slot
    # (previously this crashed with a TypeError on `res["result"]`).
    if res is None:
        yield (result_text, "β Error: analysis returned no result", log_text)
        return

    result_text = header + "\n" + res["result"] + "\n\n" + "=" * 70
    # Bug fix: this f-string was split across two physical lines in the
    # original (a SyntaxError caused by a mangled emoji). "✅" is the
    # presumed intended glyph — confirm against the project's other UIs.
    status_text = f"✅ Analysis completed successfully at {timestamp}"
    yield (result_text, status_text, log_text)
|
|
|
|
def create_magi_interface():
    """Build and return the (un-launched) Gradio Blocks app for the MAGI system.

    Layout: a centered intro, a left column with the question box, advanced
    settings and examples, and a right column with live logs, the final
    result and a status line.  Event wiring: provider dropdown toggles the
    Ollama model textbox, the analyze button streams
    ``process_magi_query_stream``, and the clear button resets all fields.
    """
    with gr.Blocks(css=EVANGELION_CSS, title="MAGI System", theme=gr.themes.Base()) as interface:
        # Header / introduction (centered via the .centered-markdown CSS class).
        gr.Markdown("""
# πΊ MAGI SYSTEM πΊ
## Multi-Agent General Intelligence
### *Based on Neon Genesis Evangelion*

---

The MAGI system consists of three AI agents, each representing a different aspect of Dr. Naoko Akagi's personality:
- **MELCHIOR-1**: Scientific analysis (logic and data)
- **BALTHASAR-2**: Ethical evaluation (emotions and morals)
- **CASPER-3**: Practical assessment (social and real-world)

All three perspectives are synthesized to provide comprehensive analysis.
""", elem_classes="centered-markdown")

        with gr.Row():
            # Left column: inputs and configuration.
            with gr.Column(scale=2):
                question_input = gr.Textbox(
                    label="π― Enter Your Question",
                    placeholder="What question would you like the MAGI system to analyze?",
                    lines=3
                )

                with gr.Accordion("βοΈ Advanced Settings", open=False):
                    provider_dropdown = gr.Dropdown(
                        choices=["Groq", "OpenAI", "Ollama (local)"],
                        value="Groq",
                        label="LLM Provider",
                        info="Groq is free and fast, OpenAI requires paid API key, Ollama runs locally"
                    )
                    # Hidden unless the Ollama provider is selected (see
                    # update_ollama_visibility below).
                    ollama_model_input = gr.Textbox(
                        label="Ollama Model Name (if using Ollama)",
                        placeholder="e.g. llama3, phi3, mistral, ...",
                        visible=False
                    )
                    search_checkbox = gr.Checkbox(
                        label="Enable Internet Search",
                        value=False,
                        info="Requires SERPER_API_KEY in .env file"
                    )
                    temperature_slider = gr.Slider(
                        minimum=0.0,
                        maximum=1.0,
                        value=0.5,
                        step=0.1,
                        label="Temperature",
                        info="Higher = more creative, Lower = more focused"
                    )
                    clean_logs_checkbox = gr.Checkbox(
                        label="Clean colored logs (strip ANSI)",
                        value=True,
                        info="Recommended for readable logs"
                    )

                with gr.Row():
                    analyze_btn = gr.Button("π EXECUTE MAGI ANALYSIS", variant="primary", size="lg")
                    clear_btn = gr.Button("ποΈ Clear", variant="secondary")

                gr.Examples(
                    examples=[
                        ["Should we deploy EVA Unit-01 against the approaching Angel despite Shinji's unstable sync ratio?"],
                        ["Is it ethical to proceed with the Human Instrumentality Project to eliminate individual suffering?"],
                        ["Should NERV prioritize civilian evacuation or Angel neutralization during an active attack on Tokyo-3?"],
                        ["What is the acceptable risk threshold for activating a Dummy Plug system in combat operations?"],
                        ["Should we collaborate with SEELE's directives or maintain autonomous control over NERV operations?"]
                    ],
                    inputs=question_input,
                    label="π‘ Example Questions"
                )

            # Right column: outputs (streamed by process_magi_query_stream).
            with gr.Column(scale=3):
                logs_output = gr.Textbox(
                    # Bug fix: the original label contained a U+FFFD mojibake
                    # sequence ("οΏ½οΈ") shown verbatim to users; replaced
                    # with a readable glyph — confirm the intended emoji.
                    label="🖥️ Live Logs",
                    lines=18,
                    max_lines=40,
                    interactive=False,
                    show_copy_button=True,
                    value="",
                    elem_id="live-logs",
                )
                result_output = gr.Textbox(
                    label="π MAGI Analysis Result",
                    lines=16,
                    max_lines=30,
                    show_copy_button=True,
                    elem_classes="centered-markdown"
                )
                status_output = gr.Textbox(
                    label="βΉοΈ Status",
                    lines=1,
                    interactive=False,
                    elem_classes="centered-markdown"
                )

        # Footer.
        gr.Markdown("""
---

**MAGI System v2.0** | Powered by CrewAI & Groq
*"The truth lies in the synthesis of three perspectives"*

π΄ NERV Systems Division | π MAGI Supercomputer Array
""")

        # Show the Ollama model textbox only when the Ollama provider is chosen.
        def update_ollama_visibility(provider):
            return gr.update(visible=(provider == "Ollama (local)"))

        provider_dropdown.change(
            fn=update_ollama_visibility,
            inputs=provider_dropdown,
            outputs=ollama_model_input
        )
        # Streaming handler: each yielded tuple updates (result, status, logs).
        analyze_btn.click(
            fn=process_magi_query_stream,
            inputs=[question_input, provider_dropdown, ollama_model_input, search_checkbox, temperature_slider, clean_logs_checkbox],
            outputs=[result_output, status_output, logs_output]
        )
        # Reset question, result, status and logs in one shot.
        clear_btn.click(
            fn=lambda: ("", "", "", ""),
            inputs=None,
            outputs=[question_input, result_output, status_output, logs_output]
        )

    return interface
|
|
|
|
def main():
    """Launch the MAGI web interface.

    Prints a startup banner, builds the Gradio app and launches it on port
    7862, bound to all interfaces, with a public Gradio share link.
    """
    print("="*70)
    print("MAGI SYSTEM - WEB INTERFACE STARTING")
    print("="*70)
    print("\nπΊ Initializing NERV MAGI Supercomputer Array...")
    print("πΈ Loading: MELCHIOR-1 (Scientific)")
    print("πΈ Loading: BALTHASAR-2 (Ethical)")
    print("πΈ Loading: CASPER-3 (Practical)")
    # Bug fix: this string literal was split across two physical lines in the
    # original (a SyntaxError caused by a mangled emoji). "✅" is the
    # presumed intended glyph — confirm against the rest of the project.
    print("\n✅ All systems operational")
    print("π Launching web interface...\n")

    interface = create_magi_interface()

    # NOTE(review): share=True exposes a public tunnel and 0.0.0.0 binds all
    # interfaces — intentional per the original code, but worth confirming
    # for production deployments.
    interface.launch(
        server_name="0.0.0.0",
        server_port=7862,
        share=True,
        inbrowser=True,
        show_error=True
    )
|
|
|
|
| if __name__ == "__main__": |
| main() |
|
|