"""Gradio demo UI for the Master Emotional Core (MEC) pipeline."""
import gradio as gr
from mec_api import run_mec_pipeline, get_demo_scenarios
# ─── LOAD DEMO SCENARIOS ─────────────────────────────────────────────────────────
# Module-level mapping loaded once at import time; presumably scenario label ->
# scenario text (used below both for the Radio choices and the text lookup).
scenario_map = get_demo_scenarios()
# ─── PROCESS SCENARIO FUNCTION ────────────────────────────────────────────────────
def process_scenario(selected_label):
    """Run the MEC pipeline for the chosen demo scenario.

    Args:
        selected_label: Key into ``scenario_map`` (the selected radio label),
            forwarded to ``run_mec_pipeline``.

    Returns:
        A ``(emotion_summary, empathic_response_section)`` tuple of display
        strings for the two output textboxes.
    """
    # fusion_prompt is produced by the pipeline but not displayed here.
    _fusion_prompt, final_uesp, empathic_response, emid = run_mec_pipeline(selected_label)

    # Pull the display fields out of the final UESP dict, falling back to
    # placeholders so a partial pipeline result still renders.
    # (Dropped previously-extracted-but-unused fields: tone, blend_weights,
    # intervention_strategy — none of them appeared in the output.)
    primary_emotion = final_uesp.get('primary_emotion', 'TBD')
    emotion_arc = final_uesp.get('emotion_arc_trajectory', 'TBD')
    resonance = final_uesp.get('resonance_pattern', 'TBD')

    # Construct the Contextual Emotional State section
    emotion_summary = f"""
Contextual Emotional State:
- Primary Emotion: {primary_emotion}
- Emotional Arc: {emotion_arc}
- Resonance: {resonance}
- Emotion ID (EmID): {emid}
"""

    # Construct the Empathic Response section
    empathic_response_section = f"""
Empathic Response:
{empathic_response}
"""
    return emotion_summary, empathic_response_section
# ─── BUILD GRADIO UI ───────────────────────────────────────────────────────────────
def create_ui():
    """Assemble and return the Gradio Blocks app for the MEC demo.

    Wires a scenario picker to a read-only preview box, and a "Run MEC"
    button to ``process_scenario``, whose two result strings land in the
    summary and response textboxes.
    """
    with gr.Blocks(title="MEC MVP DEMO") as demo:
        gr.Markdown("## Master Emotional Core (MEC™)")

        # Scenario picker + read-only echo of the chosen scenario's text.
        picker = gr.Radio(
            choices=list(scenario_map.keys()),
            label="Choose a Demo Scenario",
        )
        scenario_text = gr.Textbox(
            label="Scenario Text",
            lines=4,
            interactive=False,
        )
        picker.change(
            fn=lambda choice: scenario_map.get(choice, ""),
            inputs=picker,
            outputs=scenario_text,
        )

        # Run button and the two result panes filled by process_scenario.
        run_button = gr.Button("Run MEC")
        summary_out = gr.Textbox(
            label="MEC Emotion Summary",
            lines=6,
            interactive=False,
        )
        response_out = gr.Textbox(
            label="Empathic Response",
            lines=6,
            interactive=False,
        )
        run_button.click(
            fn=process_scenario,
            inputs=picker,
            outputs=[summary_out, response_out],
        )
    return demo
if __name__ == "__main__":
    # Build the UI and serve it with a public share link (demo mode).
    create_ui().launch(share=True)