| |
| |
| |
| |
| |
| |
|
|
| use crate::calculate::{self, ToolParams}; |
| use crate::voice::spf_voice; |
| use crate::config::SpfConfig; |
| use crate::config_db::SpfConfigDb; |
| use crate::paths::{spf_root}; |
| use crate::tmp_db::SpfTmpDb; |
| use crate::agent_state::AgentStateDb; |
| use crate::fs::SpfFs; |
| use crate::gate; |
| use crate::session::Session; |
| use crate::storage::SpfStorage; |
| use crate::web::WebClient; |
| use crate::utf8_safe; |
| use serde_json::{json, Value}; |
| use std::io::{self, BufRead, Write}; |
| use std::sync::{Arc, Mutex}; |
| use crate::http::ServerState; |
| use std::process::Command; |
| use chrono::{DateTime, Local, Utc}; |
| use std::fs::OpenOptions; |
|
|
| |
|
|
| |
| |
// Process-wide chat engine, lazily initialised behind a Mutex.
// NOTE(review): starts as None — presumably populated on the first chat tool call; confirm where it is set.
static CHAT_ENGINE: std::sync::Mutex<Option<crate::chat::ChatEngine>> = std::sync::Mutex::new(None);
|
|
|
|
// Wire-protocol revision string this server reports (MCP date-versioned scheme).
// NOTE(review): presumably echoed in the initialize handshake — confirm against the handler.
const PROTOCOL_VERSION: &str = "2024-11-05";
|
|
| |
| fn format_timestamp(ts: u64) -> String { |
| if ts == 0 { |
| return "Never".to_string(); |
| } |
| DateTime::<Utc>::from_timestamp(ts as i64, 0) |
| .map(|dt| dt.format("%Y-%m-%d %H:%M:%S UTC").to_string()) |
| .unwrap_or_else(|| ts.to_string()) |
| } |
// Identity reported by this gateway server (name + semantic version).
const SERVER_NAME: &str = "spf-smart-gate";
const SERVER_VERSION: &str = "3.0.0";
|
|
| |
| |
| |
|
|
| |
/// Emit a diagnostic line to stderr, tagged with the server name.
/// Stderr is used so log output never corrupts the JSON-RPC stdout stream.
fn log(msg: &str) {
    let line = format!("[spf-smart-gate] {}", msg);
    eprintln!("{}", line);
}
|
|
| |
| fn cmd_log(msg: &str) { |
| let log_path = spf_root().join("LIVE/SESSION/cmd.log"); |
| if let Ok(mut f) = OpenOptions::new().create(true).append(true).open(&log_path) { |
| let ts = Local::now().format("%Y-%m-%d %H:%M:%S"); |
| let _ = writeln!(f, "[{}] {}", ts, msg); |
| } |
| } |
|
|
| |
| fn param_summary(name: &str, args: &Value) -> String { |
| match name { |
| n if n.contains("bash") => { |
| let cmd = args.get("command").and_then(|v| v.as_str()).unwrap_or("?"); |
| if cmd.len() > 200 { format!("cmd={}…", utf8_safe::safe_truncate(cmd, 200)) } else { format!("cmd={}", cmd) } |
| } |
| n if n.contains("read") || n.contains("edit") || n.contains("glob") => { |
| let path = args.get("file_path") |
| .or_else(|| args.get("path")) |
| .or_else(|| args.get("absolute_path")) |
| .and_then(|v| v.as_str()) |
| .unwrap_or("?"); |
| let pattern = args.get("pattern").and_then(|v| v.as_str()); |
| match pattern { |
| Some(pat) => format!("path={} pattern={}", path, pat), |
| None => format!("path={}", path), |
| } |
| } |
| n if n.contains("write") => { |
| let path = args.get("file_path") |
| .or_else(|| args.get("path")) |
| .and_then(|v| v.as_str()) |
| .unwrap_or("?"); |
| let size = args.get("content").and_then(|v| v.as_str()).map(|s| s.len()).unwrap_or(0); |
| format!("path={} content_len={}", path, size) |
| } |
| n if n.contains("grep") => { |
| let pattern = args.get("pattern").and_then(|v| v.as_str()).unwrap_or("?"); |
| let path = args.get("path").and_then(|v| v.as_str()).unwrap_or("."); |
| format!("pattern={} path={}", pattern, path) |
| } |
| n if n.contains("web") => { |
| let url = args.get("url").and_then(|v| v.as_str()).unwrap_or("?"); |
| let query = args.get("query").and_then(|v| v.as_str()); |
| match query { |
| Some(q) => format!("query={}", q), |
| None => format!("url={}", url), |
| } |
| } |
| n if n.contains("brain") || n.contains("rag") => { |
| let query = args.get("query") |
| .or_else(|| args.get("text")) |
| .or_else(|| args.get("path")) |
| .and_then(|v| v.as_str()) |
| .unwrap_or("?"); |
| let truncated = utf8_safe::safe_truncate(query, 150); |
| format!("q={}", truncated) |
| } |
| _ => { |
| let s = args.to_string(); |
| if s.len() > 300 { format!("{}…", utf8_safe::safe_truncate(&s, 300)) } else { s } |
| } |
| } |
| } |
|
|
| |
| fn send_response(id: &Value, result: Value) { |
| let response = json!({ |
| "jsonrpc": "2.0", |
| "id": id, |
| "result": result, |
| }); |
| let msg = serde_json::to_string(&response).unwrap(); |
| let stdout = io::stdout(); |
| let mut out = stdout.lock(); |
| let _ = out.write_all(msg.as_bytes()); |
| let _ = out.write_all(b"\n"); |
| let _ = out.flush(); |
| } |
|
|
| |
| fn send_error(id: &Value, code: i64, message: &str) { |
| let response = json!({ |
| "jsonrpc": "2.0", |
| "id": id, |
| "error": { "code": code, "message": message }, |
| }); |
| let msg = serde_json::to_string(&response).unwrap(); |
| let stdout = io::stdout(); |
| let mut out = stdout.lock(); |
| let _ = out.write_all(msg.as_bytes()); |
| let _ = out.write_all(b"\n"); |
| let _ = out.flush(); |
| } |
|
|
| |
| fn tool_def(name: &str, description: &str, properties: Value, required: Vec<&str>) -> Value { |
| json!({ |
| "name": name, |
| "description": description, |
| "inputSchema": { |
| "type": "object", |
| "properties": properties, |
| "required": required, |
| } |
| }) |
| } |
|
|
| |
/// Build the full list of tool descriptors this server advertises.
///
/// Pure data: each entry is produced by `tool_def` (name, description,
/// JSON-Schema input). Descriptions ending in "SPFsmartGATE" mark tools that
/// shadow the host agent's built-ins and route through the SPF gate.
/// Transformer tools are appended from `crate::transformer_tools` at the end.
pub fn tool_definitions() -> Vec<Value> {
    let mut tools = vec![
        // --- SPF introspection: complexity scoring and session state ---
        tool_def(
            "spf_calculate",
            "Calculate complexity score for a tool call without executing. Returns C value, tier, and allocation.",
            json!({
                "tool": {"type": "string", "description": "Tool name"},
                "params": {"type": "object", "description": "Tool parameters"}
            }),
            vec!["tool", "params"],
        ),
        tool_def(
            "spf_status",
            "Get current SPF gateway status: session metrics, enforcement mode, complexity budget.",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_session",
            "Get full session state: files read/written, action history, anchor ratio, complexity history.",
            json!({
                "mode": {"type": "string", "description": "Output mode: 'full' (default) or 'summary' (counts only, ~200 tokens)", "default": "full"},
                "limit": {"type": "integer", "description": "Max manifest/complexity entries to return (default: all)", "default": 0}
            }),
            vec![],
        ),

        // --- Gated file tools (capitalized: shadow the host agent's built-ins) ---
        tool_def(
            "Read",
            "Read a file through SPF gateway. Tracks read for Build Anchor Protocol. SPFsmartGATE",
            json!({
                "file_path": {"type": "string", "description": "Absolute path to file"},
                "limit": {"type": "integer", "description": "Max lines to read (optional)"},
                "offset": {"type": "integer", "description": "Line offset to start from (optional)"}
            }),
            vec!["file_path"],
        ),
        tool_def(
            "Write",
            "Write a file through SPF gateway. Validates: Build Anchor, blocked paths, file size. SPFsmartGATE",
            json!({
                "file_path": {"type": "string", "description": "Absolute path to file"},
                "content": {"type": "string", "description": "File content to write"}
            }),
            vec!["file_path", "content"],
        ),
        tool_def(
            "Edit",
            "Edit a file through SPF gateway. Validates: Build Anchor, blocked paths, change size. SPFsmartGATE",
            json!({
                "file_path": {"type": "string", "description": "Absolute path to file"},
                "old_string": {"type": "string", "description": "Text to replace"},
                "new_string": {"type": "string", "description": "Replacement text"},
                "replace_all": {"type": "boolean", "description": "Replace all occurrences", "default": false}
            }),
            vec!["file_path", "old_string", "new_string"],
        ),
        tool_def(
            "Bash",
            "Execute a bash command through SPF gateway. Validates: dangerous commands, /tmp access, git force. SPFsmartGATE",
            json!({
                "command": {"type": "string", "description": "Bash command to execute"},
                "timeout": {"type": "integer", "description": "Timeout in seconds (default: 30)", "default": 30}
            }),
            vec!["command"],
        ),

        // --- Gated search tools ---
        tool_def(
            "Glob",
            "Fast file pattern matching. Supports glob patterns like **/*.rs or src/**/*.ts. SPFsmartGATE",
            json!({
                "pattern": {"type": "string", "description": "Glob pattern to match files"},
                "path": {"type": "string", "description": "Directory to search in (default: current dir)"}
            }),
            vec!["pattern"],
        ),
        tool_def(
            "Grep",
            "Search file contents using regex. Built on ripgrep. SPFsmartGATE",
            json!({
                "pattern": {"type": "string", "description": "Regex pattern to search for"},
                "path": {"type": "string", "description": "File or directory to search"},
                "glob": {"type": "string", "description": "Glob filter (e.g. *.rs)"},
                // NOTE(review): default true inverts the usual grep convention — confirm intended.
                "case_insensitive": {"type": "boolean", "description": "Case insensitive search", "default": true},
                "context_lines": {"type": "integer", "description": "Lines of context around matches", "default": 0}
            }),
            vec!["pattern"],
        ),

        // --- Web fetch/search/API tools ---
        tool_def(
            "spf_web_search",
            "Search the web for information. Uses Brave API if BRAVE_API_KEY set, otherwise DuckDuckGo.",
            json!({
                "query": {"type": "string", "description": "Search query"},
                "count": {"type": "integer", "description": "Max results (default: 10)", "default": 10}
            }),
            vec!["query"],
        ),
        tool_def(
            "spf_web_fetch",
            "Fetch a URL and return clean readable text. HTML is converted to plain text, JSON is pretty-printed.",
            json!({
                "url": {"type": "string", "description": "URL to fetch"},
                "prompt": {"type": "string", "description": "Prompt to run on fetched content"}
            }),
            vec!["url", "prompt"],
        ),
        tool_def(
            "spf_web_download",
            "Download a file from URL and save to disk.",
            json!({
                "url": {"type": "string", "description": "URL to download"},
                "save_path": {"type": "string", "description": "Local path to save file"}
            }),
            vec!["url", "save_path"],
        ),
        tool_def(
            "spf_web_api",
            "Make an API request. Returns status, headers, and response body.",
            json!({
                "method": {"type": "string", "description": "HTTP method (GET, POST, PUT, DELETE, PATCH)"},
                "url": {"type": "string", "description": "API endpoint URL"},
                "headers": {"type": "string", "description": "JSON object of headers (optional)", "default": ""},
                "body": {"type": "string", "description": "Request body JSON (optional)", "default": ""}
            }),
            vec!["method", "url"],
        ),

        // --- Reverse-proxy browser control (connect first, then navigate/interact) ---
        tool_def(
            "spf_web_connect",
            "Initialise the reverse proxy browser engine. Call once before any other spf_web_* tools. Opens a proxy on the SPF HTTP port — browser navigates through SPF.",
            json!({"port": {"type": "integer", "description": "Proxy port (default: SPF HTTP port)", "default": 0}}),
            vec![],
        ),
        tool_def(
            "spf_web_navigate",
            "Navigate the browser to a URL. SPF fetches the page, injects control JS, serves it locally. Browser connects back via WebSocket.",
            json!({"url": {"type": "string", "description": "URL to navigate to (SSRF-validated)"}}),
            vec!["url"],
        ),
        tool_def(
            "spf_web_click",
            "Click a page element by CSS selector. Routes through injected control JS.",
            json!({"selector": {"type": "string", "description": "CSS selector (e.g. '#submit', '.btn', 'a[href=\"/login\"]')"}}),
            vec!["selector"],
        ),
        tool_def(
            "spf_web_fill",
            "Type text into a form field by CSS selector.",
            json!({
                "selector": {"type": "string", "description": "CSS selector for the input field"},
                "text": {"type": "string", "description": "Text to type into the field"}
            }),
            vec!["selector", "text"],
        ),
        tool_def(
            "spf_web_select",
            "Query page elements by CSS selector. Returns matching elements with tag, id, class, and text.",
            json!({"selector": {"type": "string", "description": "CSS selector"}}),
            vec!["selector"],
        ),
        tool_def(
            "spf_web_eval",
            "Execute JavaScript on the current page and return the result.",
            json!({"expression": {"type": "string", "description": "JavaScript expression to evaluate in the page context"}}),
            vec!["expression"],
        ),
        tool_def(
            "spf_web_screenshot",
            "Capture a screenshot of the current page. Returns file path — use Read tool to view.",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_web_design",
            "Extract design brief from current page: colours, fonts, spacing, components, images, forms — all computed values via injected JS.",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_web_page",
            "Get structured overview of current page: title, URL, headings, links, forms, images, buttons.",
            json!({}),
            vec![],
        ),

        // --- Jupyter notebook editing ---
        tool_def(
            "spf_notebook_edit",
            "Edit a Jupyter notebook cell.",
            json!({
                "notebook_path": {"type": "string", "description": "Absolute path to .ipynb file"},
                "cell_number": {"type": "integer", "description": "Cell index (0-based)"},
                "new_source": {"type": "string", "description": "New cell content"},
                "cell_type": {"type": "string", "description": "Cell type: code or markdown"},
                "edit_mode": {"type": "string", "description": "Mode: replace, insert, or delete", "default": "replace"}
            }),
            vec!["notebook_path", "new_source"],
        ),

        // --- Brain (vector memory) access ---
        tool_def(
            "spf_brain_search",
            "Search brain through SPF gateway. All brain access is logged and tracked.",
            json!({
                "query": {"type": "string", "description": "Search query"},
                "collection": {"type": "string", "description": "Collection (default: default)", "default": "default"},
                "limit": {"type": "integer", "description": "Max results (default: 5)", "default": 5}
            }),
            vec!["query"],
        ),
        tool_def(
            "spf_brain_store",
            "Store document in brain through SPF gateway.",
            json!({
                "text": {"type": "string", "description": "Text to store"},
                "title": {"type": "string", "description": "Document title", "default": "untitled"},
                "collection": {"type": "string", "description": "Collection", "default": "default"},
                "tags": {"type": "string", "description": "Comma-separated tags", "default": ""}
            }),
            vec!["text"],
        ),
        tool_def(
            "spf_flint_store",
            "Agent memory store through FLINT pipeline. Bypasses brain write gate. Stores in brain vectors + FLINT Working tier for memory lifecycle.",
            json!({
                "text": {"type": "string", "description": "Text to store"},
                "title": {"type": "string", "description": "Document title", "default": "untitled"},
                "collection": {"type": "string", "description": "Collection (default: default)", "default": "default"},
                "tags": {"type": "string", "description": "Comma-separated tags", "default": ""}
            }),
            vec!["text"],
        ),

        // --- Brain context / indexing / retrieval ---
        tool_def(
            "spf_brain_context",
            "Get relevant context for a query. Returns formatted context for prompt injection.",
            json!({
                "query": {"type": "string", "description": "Query to get context for"},
                "max_tokens": {"type": "integer", "description": "Max tokens (default: 2000)", "default": 2000}
            }),
            vec!["query"],
        ),
        tool_def(
            "spf_brain_index",
            "Index a file or directory into the brain.",
            json!({
                "path": {"type": "string", "description": "File or directory to index"}
            }),
            vec!["path"],
        ),
        tool_def(
            "spf_brain_list",
            "List all indexed collections and document counts.",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_brain_status",
            "Get brain system status.",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_brain_recall",
            "Search and return full parent documents. Searches vectors then resolves to complete stored document.",
            json!({
                "query": {"type": "string", "description": "Natural language search query"},
                "collection": {"type": "string", "description": "Collection to search (default: default)", "default": "default"}
            }),
            vec!["query"],
        ),
        tool_def(
            "spf_brain_list_docs",
            "List all stored documents in a collection.",
            json!({
                "collection": {"type": "string", "description": "Collection name (default: default)", "default": "default"}
            }),
            vec![],
        ),
        tool_def(
            "spf_brain_get_doc",
            "Retrieve a specific document by its ID.",
            json!({
                "doc_id": {"type": "string", "description": "Document ID to retrieve"},
                "collection": {"type": "string", "description": "Collection name (default: default)", "default": "default"}
            }),
            vec!["doc_id"],
        ),

        // --- RAG collector pipeline ---
        tool_def(
            "spf_rag_collect_web",
            "Search web and collect documents. Optional topic filter.",
            json!({
                "topic": {"type": "string", "description": "Topic to search (optional)"},
                "auto_index": {"type": "boolean", "description": "Auto-index collected docs", "default": true}
            }),
            vec![],
        ),
        tool_def(
            "spf_rag_collect_file",
            "Process a local file.",
            json!({
                "path": {"type": "string", "description": "File path"},
                "category": {"type": "string", "description": "Category (default: auto)", "default": "auto"}
            }),
            vec!["path"],
        ),
        tool_def(
            "spf_rag_collect_folder",
            "Process all files in a folder.",
            json!({
                "path": {"type": "string", "description": "Folder path"},
                "extensions": {"type": "array", "items": {"type": "string"}, "description": "File extensions to include"}
            }),
            vec!["path"],
        ),
        tool_def(
            "spf_rag_collect_drop",
            "Process files in DROP_HERE folder.",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_rag_index_gathered",
            "Index all documents in GATHERED to brain.",
            json!({
                "category": {"type": "string", "description": "Category to index (optional)"}
            }),
            vec![],
        ),
        tool_def(
            "spf_rag_dedupe",
            "Deduplicate brain collection.",
            json!({
                "category": {"type": "string", "description": "Category to dedupe"}
            }),
            vec!["category"],
        ),
        tool_def(
            "spf_rag_status",
            "Get collector status and stats.",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_rag_list_gathered",
            "List documents in GATHERED folder.",
            json!({
                "category": {"type": "string", "description": "Filter by category"}
            }),
            vec![],
        ),
        tool_def(
            "spf_rag_bandwidth_status",
            "Get bandwidth usage stats and limits.",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_rag_fetch_url",
            "Fetch a single URL with bandwidth limiting.",
            json!({
                "url": {"type": "string", "description": "URL to fetch"},
                "auto_index": {"type": "boolean", "description": "Auto-index after fetch", "default": true}
            }),
            vec!["url"],
        ),
        tool_def(
            "spf_rag_collect_rss",
            "Collect from RSS/Atom feeds.",
            json!({
                "feed_name": {"type": "string", "description": "Specific feed name (optional)"},
                "auto_index": {"type": "boolean", "description": "Auto-index collected", "default": true}
            }),
            vec![],
        ),
        tool_def(
            "spf_rag_list_feeds",
            "List configured RSS feeds.",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_rag_pending_searches",
            "Get pending SearchSeeker vectors from brain (gaps needing fetch).",
            json!({
                "collection": {"type": "string", "description": "Collection to check", "default": "default"}
            }),
            vec![],
        ),
        tool_def(
            "spf_rag_fulfill_search",
            "Mark a SearchSeeker as fulfilled after RAG fetch.",
            json!({
                "seeker_id": {"type": "string", "description": "SearchSeeker ID to fulfill"},
                "collection": {"type": "string", "description": "Collection name", "default": "default"}
            }),
            vec!["seeker_id"],
        ),
        tool_def(
            "spf_rag_smart_search",
            "Run smart search with completeness check - triggers SearchSeeker if <80%.",
            json!({
                "query": {"type": "string", "description": "Search query"},
                "collection": {"type": "string", "description": "Collection to search", "default": "default"}
            }),
            vec!["query"],
        ),
        tool_def(
            "spf_rag_auto_fetch_gaps",
            "Automatically fetch data for all pending SearchSeekers.",
            json!({
                "collection": {"type": "string", "description": "Collection to check", "default": "default"},
                "max_fetches": {"type": "integer", "description": "Max URLs to fetch", "default": 5}
            }),
            vec![],
        ),

        // --- SPF_CONFIG LMDB introspection ---
        tool_def(
            "spf_config_paths",
            "List all path rules (allowed/blocked) from SPF_CONFIG LMDB.",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_config_stats",
            "Get SPF_CONFIG LMDB statistics.",
            json!({}),
            vec![],
        ),

        // --- TMP_DB project registry ---
        tool_def(
            "spf_tmp_list",
            "List all registered projects with trust levels.",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_tmp_stats",
            "Get TMP_DB LMDB statistics (project count, access log count, resource count).",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_tmp_get",
            "Get project info by path.",
            json!({
                "path": {"type": "string", "description": "Project path to look up"}
            }),
            vec!["path"],
        ),
        tool_def(
            "spf_tmp_active",
            "Get the currently active project.",
            json!({}),
            vec![],
        ),

        // --- AGENT_STATE memory/session introspection ---
        tool_def(
            "spf_agent_stats",
            "Get AGENT_STATE LMDB statistics (memory count, sessions, state keys, tags).",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_agent_memory_search",
            "Search agent memories by content.",
            json!({
                "query": {"type": "string", "description": "Search query"},
                "limit": {"type": "integer", "description": "Max results (default: 10)"}
            }),
            vec!["query"],
        ),
        tool_def(
            "spf_agent_memory_by_tag",
            "Get agent memories by tag.",
            json!({
                "tag": {"type": "string", "description": "Tag to filter by"}
            }),
            vec!["tag"],
        ),
        tool_def(
            "spf_agent_session_info",
            "Get the most recent session info.",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_agent_context",
            "Get context summary for session continuity.",
            json!({}),
            vec![],
        ),

        // --- Mesh networking ---
        tool_def(
            "spf_mesh_status",
            "Get mesh network status, role, team, and identity",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_mesh_peers",
            "List known/trusted mesh peers",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_mesh_call",
            "Call a peer agent's tool via mesh network",
            json!({
                "peer_key": {"type": "string", "description": "Peer's Ed25519 public key (hex)"},
                "tool": {"type": "string", "description": "Tool name to call on peer"},
                "arguments": {"type": "object", "description": "Tool arguments (optional)"}
            }),
            vec!["peer_key", "tool"],
        ),

        // --- Voice pipeline / calls / team channels ---
        tool_def(
            "spf_voice_mode",
            "Control voice pipeline: start/stop audio, TTS speech synthesis, session status, and mic capture. Actions: start, stop, status, speak, listen, stream_on, stream_off, stream_read. SPFsmartGATE",
            json!({
                "action": {"type": "string", "description": "Action: start, stop, status, speak, listen, stream_on, stream_off, or stream_read"},
                "text": {"type": "string", "description": "Text to synthesize (required for 'speak' action)"},
                "sample_rate": {"type": "integer", "description": "Sample rate Hz (optional, default: 16000)"},
                "duration": {"type": "integer", "description": "Capture duration in seconds for 'listen' action (default: 5, max: 60)"}
            }),
            vec!["action"],
        ),
        tool_def(
            "spf_voice_call",
            "Initiate or manage a peer-to-peer voice call. Actions: start (ring peer), accept (answer incoming), reject (decline incoming), end (hang up), status.",
            json!({
                "action": {"type": "string", "description": "Action: start, accept, reject, end, or status"},
                "peer_key": {"type": "string", "description": "Target peer Ed25519 public key hex — required for start"},
                "peer_name": {"type": "string", "description": "Human-readable name for this peer (optional, used in display)"}
            }),
            vec!["action"],
        ),
        tool_def(
            "spf_voice_team",
            "Manage voice team channels for group audio. Actions: create, join, leave, add (invite peer), list, status.",
            json!({
                "action": {"type": "string", "description": "Action: create, join, leave, add, list, or status"},
                "team_id": {"type": "string", "description": "Team channel ID — defaults to 'default'"},
                "name": {"type": "string", "description": "Channel display name — required for create"},
                "peer_key": {"type": "string", "description": "Peer Ed25519 public key hex — required for add"},
                "display_name": {"type": "string", "description": "Display name for yourself when joining (optional)"}
            }),
            vec!["action"],
        ),

        // --- Peer text chat over the mesh ---
        tool_def(
            "spf_chat_send",
            "Send a text message to a mesh peer. Creates conversation if needed. Message delivered via QUIC mesh.",
            json!({
                "peer_key": {"type": "string", "description": "Peer's Ed25519 public key (hex)"},
                "text": {"type": "string", "description": "Message text to send"},
                "conversation_id": {"type": "string", "description": "Conversation ID (optional, auto-generated from peer keys)"}
            }),
            vec!["peer_key", "text"],
        ),
        tool_def(
            "spf_chat_history",
            "Get chat message history. Without conversation_id lists all conversations. With conversation_id shows messages.",
            json!({
                "conversation_id": {"type": "string", "description": "Conversation ID (optional, omit to list all)"},
                "limit": {"type": "integer", "description": "Max messages to return (default: 20)"}
            }),
            vec![],
        ),
        tool_def(
            "spf_chat_rooms",
            "List all active chat conversations with participant info and message counts.",
            json!({}),
            vec![],
        ),

        // --- FLINT worker-mode tool execution ---
        tool_def(
            "spf_flint_execute",
            "Execute any SPF tool through FLINT worker mode. FLINT acts as your hands — tell it which tool to run and why. The inner tool also passes through the gate.",
            json!({
                "tool_name": {"type": "string", "description": "Name of the SPF tool to execute (e.g. spf_brain_search, spf_mesh_call)"},
                "arguments": {"type": "object", "description": "Arguments to pass to the tool"},
                "reason": {"type": "string", "description": "Why FLINT should execute this tool (logged for training)"}
            }),
            vec!["tool_name"],
        ),

        // --- Worker pool orchestration (NetAdmin/Worker roles) ---
        tool_def(
            "spf_pool_status",
            "Get network pool status: worker roles, idle/busy counts, active tasks. NetAdmin shows full pool; Worker shows own status.",
            json!({}),
            vec![],
        ),
        tool_def(
            "spf_pool_assign",
            "Assign a task to an idle worker in the pool. NetAdmin only. Returns task_id and worker info.",
            json!({
                "tool": {"type": "string", "description": "Tool name to execute on the worker"},
                "worker_name": {"type": "string", "description": "Target worker name (optional — auto-selects idle if omitted)"}
            }),
            vec!["tool"],
        ),
        tool_def(
            "spf_pool_release",
            "Release a worker from a task and record proof of work receipt. NetAdmin only.",
            json!({
                "task_id": {"type": "string", "description": "Task ID returned by spf_pool_assign"},
                "result": {"type": "object", "description": "Task result JSON for proof of work hash (optional)"}
            }),
            vec!["task_id"],
        ),

        // --- Universal agent communication channel (hub + WS clients) ---
        tool_def(
            "spf_channel",
            "Universal agent communication channel. Actions: create (new channel), join (enter channel), leave (exit channel), send (post message), listen (drain inbound buffer), history (get messages), list (show all channels), connect (WS client to remote hub), disconnect (close WS client), status (connection info). Hub runs on orchestrator; agents connect via WebSocket for instant full-duplex push messaging.",
            json!({
                "action": {"type": "string", "description": "Action: create, join, leave, send, listen, history, list, connect, disconnect, status"},
                "channel_id": {"type": "string", "description": "Channel ID (e.g. ch-1) — required for join, leave, send, listen, history, connect"},
                "name": {"type": "string", "description": "Channel name (for create) or display name (for join/connect)"},
                "text": {"type": "string", "description": "Message text (for send action)"},
                "msg_type": {"type": "string", "description": "Message type: text, tool_result, system (default: text)"},
                "hub_url": {"type": "string", "description": "Hub HTTP URL (for connect action, e.g. http://192.168.1.10:8080)"},
                "limit": {"type": "integer", "description": "Max messages to return (for history/listen, default: 50)"}
            }),
            vec!["action"],
        ),
    ];
    // Transformer tool definitions live in their own module; append them last.
    tools.extend(crate::transformer_tools::tool_definitions());
    tools
}
|
|
| |
| |
| |
|
|
| |
| |
| |
| |
/// Normalize a short tool alias to its canonical registered name.
///
/// Lowercase aliases map to either the capitalized built-in tools
/// ("read" → "Read") or the `spf_`-prefixed gateway tools
/// ("brain_search" → "spf_brain_search"). Anything unrecognized is
/// returned unchanged, so canonical names pass straight through.
pub fn tool_alias_map(name: &str) -> &str {
    match name {
        // Gated built-ins.
        "read" => "Read",
        "write" => "Write",
        "edit" => "Edit",
        "bash" => "Bash",
        "glob" => "Glob",
        "grep" => "Grep",
        // Core SPF introspection.
        "calculate" => "spf_calculate",
        "status" => "spf_status",
        "session" => "spf_session",
        // Brain / FLINT memory.
        "brain_search" => "spf_brain_search",
        "brain_store" => "spf_brain_store",
        "flint_store" => "spf_flint_store",
        "brain_context" => "spf_brain_context",
        "brain_index" => "spf_brain_index",
        "brain_list" => "spf_brain_list",
        "brain_status" => "spf_brain_status",
        "brain_recall" => "spf_brain_recall",
        "brain_list_docs" => "spf_brain_list_docs",
        "brain_get_doc" => "spf_brain_get_doc",
        // Web fetch + browser control.
        "web_search" => "spf_web_search",
        "web_fetch" => "spf_web_fetch",
        "web_download" => "spf_web_download",
        "web_api" => "spf_web_api",
        "web_connect" => "spf_web_connect",
        "web_navigate" => "spf_web_navigate",
        "web_click" => "spf_web_click",
        "web_fill" => "spf_web_fill",
        "web_select" => "spf_web_select",
        "web_eval" => "spf_web_eval",
        "web_screenshot" => "spf_web_screenshot",
        "web_design" => "spf_web_design",
        "web_page" => "spf_web_page",
        // Mesh.
        "mesh_status" => "spf_mesh_status",
        "mesh_peers" => "spf_mesh_peers",
        "mesh_call" => "spf_mesh_call",
        // Transformer + FLINT training.
        "transformer_status" => "spf_transformer_status",
        "transformer_infer" => "spf_transformer_infer",
        "transformer_chat" => "spf_transformer_chat",
        "transformer_train" => "spf_transformer_train",
        "transformer_metrics" => "spf_transformer_metrics",
        "flint_train_evil" => "spf_flint_train_evil",
        "flint_train_good" => "spf_flint_train_good",
        // Worker pool.
        "pool_status" => "spf_pool_status",
        "pool_assign" => "spf_pool_assign",
        "pool_release" => "spf_pool_release",
        // Voice.
        "voice_mode" => "spf_voice_mode",
        "voice_call" => "spf_voice_call",
        "voice_team" => "spf_voice_team",
        // Chat.
        "chat_send" => "spf_chat_send",
        "chat_history" => "spf_chat_history",
        "chat_rooms" => "spf_chat_rooms",
        // FLINT execution.
        "flint_execute" => "spf_flint_execute",
        // Agent state.
        "agent_stats" => "spf_agent_stats",
        "agent_memory_search" => "spf_agent_memory_search",
        "agent_memory_by_tag" => "spf_agent_memory_by_tag",
        "agent_session_info" => "spf_agent_session_info",
        "agent_context" => "spf_agent_context",
        // Config / TMP DB.
        "config_paths" => "spf_config_paths",
        "config_stats" => "spf_config_stats",
        "tmp_list" => "spf_tmp_list",
        "tmp_stats" => "spf_tmp_stats",
        "tmp_get" => "spf_tmp_get",
        "tmp_active" => "spf_tmp_active",
        // Misc.
        "notebook_edit" => "spf_notebook_edit",
        "channel" => "spf_channel",
        // RAG collector.
        "rag_collect_web" => "spf_rag_collect_web",
        "rag_collect_file" => "spf_rag_collect_file",
        "rag_collect_folder" => "spf_rag_collect_folder",
        "rag_collect_drop" => "spf_rag_collect_drop",
        "rag_index_gathered" => "spf_rag_index_gathered",
        "rag_dedupe" => "spf_rag_dedupe",
        "rag_status" => "spf_rag_status",
        "rag_list_gathered" => "spf_rag_list_gathered",
        "rag_bandwidth_status" => "spf_rag_bandwidth_status",
        "rag_fetch_url" => "spf_rag_fetch_url",
        "rag_collect_rss" => "spf_rag_collect_rss",
        "rag_list_feeds" => "spf_rag_list_feeds",
        "rag_pending_searches" => "spf_rag_pending_searches",
        "rag_fulfill_search" => "spf_rag_fulfill_search",
        "rag_smart_search" => "spf_rag_smart_search",
        "rag_auto_fetch_gaps" => "spf_rag_auto_fetch_gaps",
        // Unknown aliases (and canonical names) pass through untouched.
        _ => name,
    }
}
|
|
| |
| |
| |
|
|
| |
| |
| fn route_to_lmdb( |
| path: &str, |
| op: &str, |
| content: Option<&str>, |
| config_db: &Option<SpfConfigDb>, |
| tmp_db: &Option<SpfTmpDb>, |
| agent_db: &Option<AgentStateDb>, |
| ) -> Option<Value> { |
| let live_base = spf_root().join("LIVE").display().to_string(); |
|
|
| if path == "/config" || path.starts_with("/config/") { |
| return Some(route_config(path, op, config_db)); |
| } |
| |
| if path == "/tmp" || path.starts_with("/tmp/") { |
| let device_tmp = format!("{}/TMP/TMP", live_base); |
| return Some(route_device_dir(path, "/tmp", &device_tmp, op, content, tmp_db)); |
| } |
| |
| if path == "/projects" || path.starts_with("/projects/") { |
| let device_projects = format!("{}/PROJECTS/PROJECTS", live_base); |
| return Some(route_device_dir(path, "/projects", &device_projects, op, content, tmp_db)); |
| } |
| |
| if path == "/home/agent/tmp" || path.starts_with("/home/agent/tmp/") { |
| let redirected = path.replacen("/home/agent/tmp", "/tmp", 1); |
| let device_tmp = format!("{}/TMP/TMP", live_base); |
| return Some(route_device_dir(&redirected, "/tmp", &device_tmp, op, content, tmp_db)); |
| } |
| if path == "/home/agent" || path.starts_with("/home/agent/") { |
| |
| if matches!(op, "write" | "mkdir" | "rm" | "rename") { |
| return Some(json!({"type": "text", "text": format!("BLOCKED: {} is read-only in /home/agent/", path)})); |
| } |
| |
| return Some(route_agent(path, op, agent_db)); |
| } |
| None |
| } |
|
|
| |
| fn route_config(path: &str, op: &str, config_db: &Option<SpfConfigDb>) -> Value { |
| let db = match config_db { |
| Some(db) => db, |
| None => return json!({"type": "text", "text": "SPF_CONFIG LMDB not initialized"}), |
| }; |
|
|
| let relative = path.strip_prefix("/config").unwrap_or("").trim_start_matches('/'); |
|
|
| match op { |
| "ls" => { |
| if relative.is_empty() { |
| json!({"type": "text", "text": "/config:\n-644 0 version\n-644 0 mode\n-644 0 tiers\n-644 0 formula\n-644 0 weights\n-644 0 paths\n-644 0 patterns"}) |
| } else { |
| json!({"type": "text", "text": format!("/config/{}: not a directory", relative)}) |
| } |
| } |
| "read" => { |
| match relative { |
| "version" => match db.get("spf", "version") { |
| Ok(Some(v)) => json!({"type": "text", "text": v}), |
| Ok(None) => json!({"type": "text", "text": "not set"}), |
| Err(e) => json!({"type": "text", "text": format!("error: {}", e)}), |
| }, |
| "mode" => match db.get_enforce_mode() { |
| Ok(mode) => json!({"type": "text", "text": format!("{:?}", mode)}), |
| Err(e) => json!({"type": "text", "text": format!("error: {}", e)}), |
| }, |
| "tiers" => match db.get_tiers() { |
| Ok(tiers) => json!({"type": "text", "text": serde_json::to_string_pretty(&tiers).unwrap_or_else(|e| format!("error: {}", e))}), |
| Err(e) => json!({"type": "text", "text": format!("error: {}", e)}), |
| }, |
| "formula" => match db.get_formula() { |
| Ok(formula) => json!({"type": "text", "text": serde_json::to_string_pretty(&formula).unwrap_or_else(|e| format!("error: {}", e))}), |
| Err(e) => json!({"type": "text", "text": format!("error: {}", e)}), |
| }, |
| "weights" => match db.get_weights() { |
| Ok(weights) => json!({"type": "text", "text": serde_json::to_string_pretty(&weights).unwrap_or_else(|e| format!("error: {}", e))}), |
| Err(e) => json!({"type": "text", "text": format!("error: {}", e)}), |
| }, |
| "paths" => match db.list_path_rules() { |
| Ok(rules) => { |
| let text = rules.iter() |
| .map(|(t, p)| format!("{}: {}", t, p)) |
| .collect::<Vec<_>>() |
| .join("\n"); |
| json!({"type": "text", "text": if text.is_empty() { "No path rules".to_string() } else { text }}) |
| } |
| Err(e) => json!({"type": "text", "text": format!("error: {}", e)}), |
| }, |
| "patterns" => match db.list_dangerous_patterns() { |
| Ok(patterns) => { |
| let text = patterns.iter() |
| .map(|(p, s)| format!("{} (severity: {})", p, s)) |
| .collect::<Vec<_>>() |
| .join("\n"); |
| json!({"type": "text", "text": if text.is_empty() { "No patterns".to_string() } else { text }}) |
| } |
| Err(e) => json!({"type": "text", "text": format!("error: {}", e)}), |
| }, |
| "" => json!({"type": "text", "text": "/config is a directory (use ls)"}), |
| _ => json!({"type": "text", "text": format!("not found: /config/{}", relative)}), |
| } |
| } |
| "exists" => { |
| let exists = relative.is_empty() || matches!(relative, "version" | "mode" | "tiers" | "formula" | "weights" | "paths" | "patterns"); |
| json!({"type": "text", "text": format!("/config/{}: {}", relative, if exists { "EXISTS" } else { "NOT FOUND" })}) |
| } |
| "stat" => { |
| if relative.is_empty() { |
| json!({"type": "text", "text": "Path: /config\nType: Directory\nMount: CONFIG (CONFIG.DB)"}) |
| } else if matches!(relative, "version" | "mode" | "tiers" | "formula" | "weights" | "paths" | "patterns") { |
| json!({"type": "text", "text": format!("Path: /config/{}\nType: File\nMount: CONFIG (CONFIG.DB)\nSource: config_db.{}", relative, relative)}) |
| } else { |
| json!({"type": "text", "text": format!("Not found: /config/{}", relative)}) |
| } |
| } |
| "write" | "mkdir" | "rm" | "rename" => { |
| json!({"type": "text", "text": "BLOCKED: /config is a read-only mount (use spf_config_* tools)"}) |
| } |
| _ => json!({"type": "text", "text": format!("unsupported operation: {}", op)}), |
| } |
| } |
|
|
| |
| |
| fn route_device_dir( |
| virtual_path: &str, |
| mount_prefix: &str, |
| device_base: &str, |
| op: &str, |
| content: Option<&str>, |
| tmp_db: &Option<SpfTmpDb>, |
| ) -> Value { |
| let relative = virtual_path.strip_prefix(mount_prefix) |
| .unwrap_or("") |
| .trim_start_matches('/'); |
|
|
| |
| if relative.contains("..") { |
| return json!({"type": "text", "text": format!( |
| "BLOCKED: path traversal detected in {}", virtual_path |
| )}); |
| } |
|
|
| let device_path = if relative.is_empty() { |
| std::path::PathBuf::from(device_base) |
| } else { |
| std::path::PathBuf::from(device_base).join(relative) |
| }; |
|
|
| match op { |
| "ls" => { |
| match std::fs::read_dir(&device_path) { |
| Ok(entries) => { |
| let mut items: Vec<String> = Vec::new(); |
| for entry in entries.flatten() { |
| let name = entry.file_name().to_string_lossy().to_string(); |
| let meta = entry.metadata().ok(); |
| let (prefix, size) = match &meta { |
| Some(m) if m.is_dir() => ("d755", 0u64), |
| Some(m) => ("-644", m.len()), |
| None => ("-???", 0u64), |
| }; |
| items.push(format!("{} {:>8} {}", prefix, size, name)); |
| } |
| items.sort(); |
| if items.is_empty() { |
| json!({"type": "text", "text": format!("{}: empty", virtual_path)}) |
| } else { |
| json!({"type": "text", "text": format!("{}:\n{}", virtual_path, items.join("\n"))}) |
| } |
| } |
| Err(_) if !device_path.exists() => { |
| json!({"type": "text", "text": format!("{}: empty", virtual_path)}) |
| } |
| Err(e) => { |
| json!({"type": "text", "text": format!("error listing {}: {}", virtual_path, e)}) |
| } |
| } |
| } |
| "read" => { |
| if relative.is_empty() { |
| json!({"type": "text", "text": format!("{} is a directory (use ls)", virtual_path)}) |
| } else { |
| match std::fs::read(&device_path) { |
| Ok(bytes) => { |
| match String::from_utf8(bytes.clone()) { |
| Ok(data) => { |
| if let Some(db) = tmp_db { |
| let _ = db.log_access(virtual_path, device_base, "read", "device", data.len() as u64, true, None); |
| } |
| json!({"type": "text", "text": data}) |
| } |
| Err(_) => { |
| if let Some(db) = tmp_db { |
| let _ = db.log_access(virtual_path, device_base, "read", "device", bytes.len() as u64, true, None); |
| } |
| json!({"type": "text", "text": utf8_safe::binary_info(&bytes)}) |
| } |
| } |
| } |
| Err(e) => json!({"type": "text", "text": format!("error reading {}: {}", virtual_path, e)}), |
| } |
| } |
| } |
| "write" => { |
| if let Some(data) = content { |
| if let Some(parent) = device_path.parent() { |
| let _ = std::fs::create_dir_all(parent); |
| } |
| match std::fs::write(&device_path, data) { |
| Ok(()) => { |
| |
| if let Some(db) = tmp_db { |
| let _ = db.log_access(virtual_path, device_base, "write", "device", data.len() as u64, true, None); |
| } |
| json!({"type": "text", "text": format!("Written: {} ({} bytes)", virtual_path, data.len())}) |
| } |
| Err(e) => json!({"type": "text", "text": format!("write failed: {}", e)}), |
| } |
| } else { |
| json!({"type": "text", "text": "write requires content"}) |
| } |
| } |
| "exists" => { |
| let exists = device_path.exists(); |
| json!({"type": "text", "text": format!("{}: {}", virtual_path, if exists { "EXISTS" } else { "NOT FOUND" })}) |
| } |
| "stat" => { |
| match std::fs::metadata(&device_path) { |
| Ok(meta) => { |
| let file_type = if meta.is_dir() { "Directory" } else { "File" }; |
| json!({"type": "text", "text": format!( |
| "Path: {}\nType: {}\nSize: {}\nMount: device ({})\nAccess: read-write", |
| virtual_path, file_type, meta.len(), device_base |
| )}) |
| } |
| Err(_) => json!({"type": "text", "text": format!("{}: NOT FOUND", virtual_path)}), |
| } |
| } |
| "mkdir" => { |
| match std::fs::create_dir_all(&device_path) { |
| Ok(()) => json!({"type": "text", "text": format!("Directory created: {}", virtual_path)}), |
| Err(e) => json!({"type": "text", "text": format!("mkdir failed: {}", e)}), |
| } |
| } |
| "rm" => { |
| if device_path.is_dir() { |
| match std::fs::remove_dir(&device_path) { |
| Ok(()) => json!({"type": "text", "text": format!("Removed: {}", virtual_path)}), |
| Err(e) => json!({"type": "text", "text": format!("rm failed (not empty?): {}", e)}), |
| } |
| } else if device_path.exists() { |
| match std::fs::remove_file(&device_path) { |
| Ok(()) => json!({"type": "text", "text": format!("Removed: {}", virtual_path)}), |
| Err(e) => json!({"type": "text", "text": format!("rm failed: {}", e)}), |
| } |
| } else { |
| json!({"type": "text", "text": format!("{}: NOT FOUND", virtual_path)}) |
| } |
| } |
| "rename" => { |
| |
| json!({"type": "text", "text": "rename: use spf_fs_rename with full paths"}) |
| } |
| _ => json!({"type": "text", "text": format!("unsupported operation: {}", op)}), |
| } |
| } |
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| |
| |
| fn scan_state_dir(db: &AgentStateDb, dir_relative: &str) -> Vec<String> { |
| let prefix = if dir_relative.is_empty() { |
| "file:".to_string() |
| } else { |
| format!("file:{}/", dir_relative) |
| }; |
|
|
| match db.list_state_keys() { |
| Ok(keys) => { |
| let mut dirs = std::collections::BTreeSet::new(); |
| let mut files = std::collections::BTreeSet::new(); |
|
|
| for key in &keys { |
| if let Some(rest) = key.strip_prefix(&prefix) { |
| if rest.is_empty() { continue; } |
| match rest.find('/') { |
| Some(pos) => { dirs.insert(rest[..pos].to_string()); } |
| None => { files.insert(rest.to_string()); } |
| } |
| } |
| } |
|
|
| let mut entries = Vec::new(); |
| for d in dirs { |
| entries.push(format!("d755 0 {}", d)); |
| } |
| for f in files { |
| entries.push(format!("-644 0 {}", f)); |
| } |
| entries |
| } |
| Err(_) => Vec::new(), |
| } |
| } |
|
|
| |
| |
| |
| |
| |
| |
/// Serve operations for the read-only /home/agent virtual mount, backed by
/// the AGENT_STATE LMDB (LMDB5.DB).
///
/// The mount blends three sources:
///  - live DB views: memory/, sessions/, state/, preferences, context
///  - a hardcoded directory skeleton that makes the mount look like a
///    conventional home directory
///  - dynamic entries stored in the DB's state table under "file:<path>" keys
///
/// Mutating ops (write/mkdir/rm/rename) are always rejected; callers are
/// pointed at the spf_agent_* tools instead.
fn route_agent(path: &str, op: &str, agent_db: &Option<AgentStateDb>) -> Value {
    let db = match agent_db {
        Some(db) => db,
        None => return json!({"type": "text", "text": "AGENT_STATE LMDB not initialized"}),
    };

    // Path relative to the mount root, without a leading slash.
    let relative = path.strip_prefix("/home/agent").unwrap_or("").trim_start_matches('/');

    match op {
        "ls" => {
            // DB-backed directories first: these are listed live from LMDB.
            // Anything else falls through to the static skeleton below.
            match relative {
                "memory" => {
                    // Empty query matches everything here; capped at 100 —
                    // presumably search_memories("") is "list all"; TODO confirm.
                    return match db.search_memories("", 100) {
                        Ok(memories) => {
                            // "size" column is the memory's content length.
                            let text = memories.iter()
                                .map(|m| format!("-644 {:>8} {}", m.content.len(), m.id))
                                .collect::<Vec<_>>()
                                .join("\n");
                            json!({"type": "text", "text": if text.is_empty() { "/home/agent/memory: empty".to_string() } else { format!("/home/agent/memory:\n{}", text) }})
                        }
                        Err(e) => json!({"type": "text", "text": format!("error: {}", e)}),
                    };
                }
                "sessions" => {
                    // Only sessions reachable from the latest one's parent
                    // chain are listed, not every session in the DB.
                    return match db.get_latest_session() {
                        Ok(Some(latest)) => {
                            match db.get_session_chain(&latest.session_id) {
                                Ok(chain) => {
                                    // "size" column repurposed as the action count.
                                    let text = chain.iter()
                                        .map(|s| format!("-644 {:>8} {}", s.total_actions, s.session_id))
                                        .collect::<Vec<_>>()
                                        .join("\n");
                                    json!({"type": "text", "text": format!("/home/agent/sessions:\n{}", text)})
                                }
                                Err(e) => json!({"type": "text", "text": format!("error: {}", e)}),
                            }
                        }
                        Ok(None) => json!({"type": "text", "text": "/home/agent/sessions: empty"}),
                        Err(e) => json!({"type": "text", "text": format!("error: {}", e)}),
                    };
                }
                "state" => {
                    // Plain state keys only; "file:"-prefixed keys belong to
                    // the synthetic file tree and are listed via scan_state_dir.
                    return match db.list_state_keys() {
                        Ok(keys) => {
                            let text = keys.iter()
                                .filter(|k| !k.starts_with("file:"))
                                .map(|k| format!("-644 0 {}", k))
                                .collect::<Vec<_>>()
                                .join("\n");
                            json!({"type": "text", "text": if text.is_empty() { "/home/agent/state: empty".to_string() } else { format!("/home/agent/state:\n{}", text) }})
                        }
                        Err(e) => json!({"type": "text", "text": format!("error: {}", e)}),
                    };
                }
                _ => {}
            }

            // Static skeleton: presents /home/agent as a familiar home
            // directory layout even when the DB stores no files under it.
            let skeleton: Vec<&str> = match relative {
                "" => vec![
                    "-644 0 .claude.json",
                    "d755 0 .claude",
                    "d755 0 bin",
                    "d755 0 tmp",
                    "d755 0 .config",
                    "d755 0 .local",
                    "d755 0 .cache",
                    "d755 0 .memory",
                    "d755 0 .ssh",
                    "d755 0 Documents",
                    "d755 0 Projects",
                    "d755 0 workspace",
                    "-644 0 preferences",
                    "-644 0 context",
                ],
                ".claude" => vec![
                    "d755 0 projects",
                    "d755 0 file-history",
                    "d755 0 paste-cache",
                    "d755 0 session-env",
                    "d755 0 todos",
                    "d755 0 plans",
                    "d755 0 tasks",
                    "d755 0 shell-snapshots",
                    "d755 0 statsig",
                    "d755 0 telemetry",
                ],
                "bin" => vec![
                    "-755 0 spf-smart-gate",
                    "d755 0 claude-code",
                ],
                ".config" => vec!["d755 0 settings"],
                ".local" => vec![
                    "d755 0 bin",
                    "d755 0 share",
                    "d755 0 state",
                ],
                ".local/share" => vec![
                    "d755 0 history",
                    "d755 0 data",
                ],
                ".local/state" => vec!["d755 0 sessions"],
                ".cache" => vec![
                    "d755 0 context",
                    "d755 0 tmp",
                ],
                ".memory" => vec![
                    "d755 0 facts",
                    "d755 0 instructions",
                    "d755 0 preferences",
                    "d755 0 pinned",
                ],
                ".ssh" => vec![],
                "Documents" => vec![
                    "d755 0 notes",
                    "d755 0 templates",
                ],
                "Projects" => vec![],
                "workspace" => vec!["d755 0 current"],
                _ => vec![],
            };

            // Files/dirs actually stored in LMDB under "file:<relative>/...".
            let dynamic = scan_state_dir(db, relative);

            // Merge skeleton + dynamic entries, deduplicating by entry name
            // (the last whitespace-separated token); skeleton wins a collision
            // because it is inserted first.
            let mut seen = std::collections::HashSet::new();
            let mut entries = Vec::new();
            for entry in &skeleton {
                let name = entry.split_whitespace().last().unwrap_or("");
                if seen.insert(name.to_string()) {
                    entries.push(entry.to_string());
                }
            }
            for entry in &dynamic {
                let name = entry.split_whitespace().last().unwrap_or("");
                if seen.insert(name.to_string()) {
                    entries.push(entry.clone());
                }
            }

            // Empty skeleton + empty dynamic means the path is unknown,
            // except for the intentionally-empty directories listed here.
            let is_known_dir = !skeleton.is_empty() || !dynamic.is_empty()
                || matches!(relative, "" | ".ssh" | "Projects");

            if !is_known_dir {
                json!({"type": "text", "text": format!("/home/agent/{}: not a directory", relative)})
            } else {
                let dir = if relative.is_empty() {
                    "/home/agent".to_string()
                } else {
                    format!("/home/agent/{}", relative)
                };
                if entries.is_empty() {
                    json!({"type": "text", "text": format!("{}: empty", dir)})
                } else {
                    json!({"type": "text", "text": format!("{}:\n{}", dir, entries.join("\n"))})
                }
            }
        }
        "read" => {
            if relative.is_empty() {
                return json!({"type": "text", "text": "/home/agent is a directory (use ls)"});
            }

            // Special top-level files with live DB-backed content.
            if relative == "preferences" {
                return match db.get_preferences() {
                    Ok(prefs) => json!({"type": "text", "text": serde_json::to_string_pretty(&prefs).unwrap_or_else(|e| format!("error: {}", e))}),
                    Err(e) => json!({"type": "text", "text": format!("error: {}", e)}),
                };
            }
            if relative == "context" {
                return match db.get_context_summary() {
                    Ok(summary) => json!({"type": "text", "text": if summary.is_empty() { "No context available".to_string() } else { summary }}),
                    Err(e) => json!({"type": "text", "text": format!("error: {}", e)}),
                };
            }
            // memory/<id>: render a single memory record with its metadata.
            if let Some(mem_id) = relative.strip_prefix("memory/") {
                return match db.recall(mem_id) {
                    Ok(Some(entry)) => json!({"type": "text", "text": format!(
                        "ID: {}\nType: {:?}\nContent: {}\nTags: {}\nSource: {}\nCreated: {}\nAccessed: {} ({}x)\nRelevance: {:.2}",
                        entry.id, entry.memory_type, entry.content,
                        entry.tags.join(", "), entry.source,
                        format_timestamp(entry.created_at), format_timestamp(entry.last_accessed),
                        entry.access_count, entry.relevance
                    )}),
                    Ok(None) => json!({"type": "text", "text": format!("not found: /home/agent/memory/{}", mem_id)}),
                    Err(e) => json!({"type": "text", "text": format!("error: {}", e)}),
                };
            }
            // sessions/<id>: render a single session context record.
            if let Some(session_id) = relative.strip_prefix("sessions/") {
                return match db.get_session(session_id) {
                    Ok(Some(ctx)) => json!({"type": "text", "text": format!(
                        "Session: {}\nParent: {}\nStarted: {}\nEnded: {}\nDir: {}\nActions: {}\nComplexity: {}\nFiles modified: {}\nSummary: {}",
                        ctx.session_id,
                        ctx.parent_session.as_deref().unwrap_or("none"),
                        format_timestamp(ctx.started_at), format_timestamp(ctx.ended_at),
                        ctx.working_dir, ctx.total_actions, ctx.total_complexity,
                        ctx.files_modified.join(", "),
                        if ctx.summary.is_empty() { "none" } else { &ctx.summary }
                    )}),
                    Ok(None) => json!({"type": "text", "text": format!("not found: /home/agent/sessions/{}", session_id)}),
                    Err(e) => json!({"type": "text", "text": format!("error: {}", e)}),
                };
            }
            // state/<key>: raw value lookup.
            if let Some(key) = relative.strip_prefix("state/") {
                return match db.get_state(key) {
                    Ok(Some(value)) => json!({"type": "text", "text": value}),
                    Ok(None) => json!({"type": "text", "text": format!("not found: /home/agent/state/{}", key)}),
                    Err(e) => json!({"type": "text", "text": format!("error: {}", e)}),
                };
            }

            // Fallback: any other path reads a stored file under "file:<relative>".
            let file_key = format!("file:{}", relative);
            match db.get_state(&file_key) {
                Ok(Some(content)) => json!({"type": "text", "text": content}),
                Ok(None) => json!({"type": "text", "text": format!("not found: /home/agent/{}", relative)}),
                Err(e) => json!({"type": "text", "text": format!("error reading {}: {}", relative, e)}),
            }
        }
        "exists" => {
            // Well-known skeleton entries always report EXISTS, as does any
            // path under memory/, sessions/ or state/ (without checking the
            // record itself).
            // NOTE(review): deeper skeleton paths (e.g. ".local/share") are
            // not in this list, so `exists` can report NOT FOUND for a path
            // that `ls` presents — confirm whether that is intended.
            let hardcoded = matches!(relative,
                "" | "memory" | "sessions" | "state" | "preferences" | "context"
                | ".claude" | ".claude.json" | "bin" | "tmp" | ".config" | ".local"
                | ".cache" | ".memory" | ".ssh" | "Documents" | "Projects" | "workspace"
            )
                || relative.starts_with("memory/")
                || relative.starts_with("sessions/")
                || relative.starts_with("state/");

            if hardcoded {
                return json!({"type": "text", "text": format!("/home/agent/{}: EXISTS", relative)});
            }

            // Otherwise check for a stored file under "file:<relative>"...
            let file_key = format!("file:{}", relative);
            let is_file = db.get_state(&file_key).ok().flatten().is_some();

            // ...and, failing that, for any key that would place it as a
            // directory (i.e. something stored beneath it).
            let is_dir = if !is_file {
                let dir_prefix = format!("file:{}/", relative);
                db.list_state_keys().ok()
                    .map(|keys| keys.iter().any(|k| k.starts_with(&dir_prefix)))
                    .unwrap_or(false)
            } else {
                false
            };

            let exists = is_file || is_dir;
            json!({"type": "text", "text": format!("/home/agent/{}: {}",
                relative, if exists { "EXISTS" } else { "NOT FOUND" })})
        }
        "stat" => {
            if relative.is_empty() {
                json!({"type": "text", "text": "Path: /home/agent\nType: Directory\nMount: AGENT_STATE (LMDB5.DB)"})
            } else {
                // Note: children report mount info only — no Type line,
                // unlike the /config and device mounts.
                json!({"type": "text", "text": format!("Path: /home/agent/{}\nMount: AGENT_STATE (LMDB5.DB)", relative)})
            }
        }
        // The mount is read-only; mutation goes through spf_agent_* tools.
        "write" | "mkdir" | "rm" | "rename" => {
            json!({"type": "text", "text": "BLOCKED: /home/agent is a read-only mount (use spf_agent_* tools)"})
        }
        _ => json!({"type": "text", "text": format!("unsupported operation: {}", op)}),
    }
}
|
|
| |
| |
| pub fn extract_gate_params(args: &Value) -> ToolParams { |
| ToolParams { |
| file_path: args["file_path"].as_str() |
| .or_else(|| args["path"].as_str()) |
| .or_else(|| args["absolute_path"].as_str()) |
| .map(String::from), |
| content: args["content"].as_str().map(String::from), |
| command: args["command"].as_str() |
| .or_else(|| args["tool"].as_str()) |
| .or_else(|| args["pattern"].as_str()).map(String::from), |
| old_string: args["old_string"].as_str().map(String::from), |
| new_string: args["new_string"].as_str().map(String::from), |
| replace_all: args["replace_all"].as_bool(), |
| query: args["query"].as_str().map(String::from), |
| pattern: args["pattern"].as_str().map(String::from), |
| path: args["path"].as_str().map(String::from), |
| url: args["url"].as_str().map(String::from), |
| topic: args["topic"].as_str().map(String::from), |
| category: args["category"].as_str().map(String::from), |
| text: args["text"].as_str().map(String::from), |
| collection: args["collection"].as_str().map(String::from), |
| title: args["title"].as_str().map(String::from), |
| limit: args["limit"].as_u64(), |
| tool_name: args["tool_name"].as_str().map(String::from), |
| reason: args["reason"].as_str().map(String::from), |
| peer_key: args["peer_key"].as_str().map(String::from), |
| message: args["message"].as_str().map(String::from), |
| prompt: args["prompt"].as_str().map(String::from), |
| batch_size: args["batch_size"].as_u64(), |
| } |
| } |
|
|
| |
| pub fn handle_tool_call( |
| name: &str, |
| args: &Value, |
| source: crate::dispatch::Source, |
| gate_decision: &crate::gate::GateDecision, |
| config: &SpfConfig, |
| session: &mut Session, |
| storage: &SpfStorage, |
| config_db: &Option<SpfConfigDb>, |
| tmp_db: &Option<SpfTmpDb>, |
| _fs_db: &Option<SpfFs>, |
| agent_db: &Option<AgentStateDb>, |
| pub_key_hex: &str, |
| mesh_tx: &Option<std::sync::mpsc::Sender<crate::mesh::MeshRequest>>, |
| peers: &std::collections::HashMap<String, crate::identity::PeerInfo>, |
| transformer: &Option<std::sync::Arc<std::sync::RwLock<crate::transformer_tools::TransformerState>>>, |
| transformer_config: &crate::config::TransformerConfig, |
| network_config: &crate::config::NetworkConfig, |
| pool_state: &Option<std::sync::Arc<crate::network::PoolState>>, |
| pipeline: &std::sync::Arc<std::sync::Mutex<crate::pipeline::PipelineState>>, |
| browser: &std::sync::Mutex<crate::browser::BrowserSession>, |
| ws_browser_channels: &std::sync::Mutex<Option<crate::browser::WsBrowserChannels>>, |
| http_port: u16, |
| tracked_peers: &std::sync::Mutex<std::collections::HashMap<String, crate::http::MeshPeerStatus>>, |
| orchestrator_state: &std::sync::Mutex<Option<Arc<std::sync::Mutex<crate::orchestrator::OrchestratorState>>>>, |
| endpoint: &std::sync::Mutex<Option<iroh::Endpoint>>, |
| tokio_handle: &std::sync::Mutex<Option<tokio::runtime::Handle>>, |
| ) -> Value { |
| |
| let name = tool_alias_map(name); |
| match name { |
| |
| |
| "spf_gate" => { |
| json!({"type": "text", "text": "BLOCKED: spf_gate removed — gate is internal only", "_blocked": true}) |
| } |
|
|
| |
| "spf_calculate" => { |
| let tool = args["tool"].as_str().unwrap_or("unknown"); |
| let params: ToolParams = serde_json::from_value( |
| args.get("params").cloned().unwrap_or(json!({})) |
| ).unwrap_or_else(|_| ToolParams { |
| ..Default::default() |
| }); |
| let result = calculate::calculate(tool, ¶ms, config); |
| json!({"type": "text", "text": serde_json::to_string_pretty(&result).unwrap()}) |
| } |
|
|
| |
| "spf_status" => { |
| let status = format!( |
| "SPF Gateway v{}\nMode: {:?}\nSession: {}\nTiers: SIMPLE(<500) LIGHT(<2000) MEDIUM(<10000) CRITICAL(>10000)\nFormula: a_optimal(C) = {} × (1 - 1/ln(C + e))", |
| SERVER_VERSION, |
| config.enforce_mode, |
| session.status_summary(), |
| config.formula.w_eff, |
| ); |
| json!({"type": "text", "text": status}) |
| } |
|
|
| |
| "spf_session" => { |
| let mode = args["mode"].as_str().unwrap_or("full"); |
| let limit = args["limit"].as_u64().unwrap_or(0) as usize; |
|
|
| match mode { |
| "summary" => { |
| |
| let blocked = session.manifest.iter().filter(|m| m.action == "BLOCKED").count(); |
| let allowed = session.manifest.iter().filter(|m| m.action == "ALLOWED").count(); |
| let last_manifest = session.manifest.last().map(|m| { |
| format!("{} {} (C={}) {}", m.tool, m.action, |
| m.c, m.command.as_deref().unwrap_or("")) |
| }).unwrap_or_else(|| "none".to_string()); |
|
|
| json!({"type": "text", "text": format!( |
| "Session Summary:\n Actions: {}\n Files read: {}\n Files written: {}\n Anchor ratio: {}\n Manifest: {} allowed, {} blocked\n Failures: {}\n Last tool: {}\n Last manifest: {}\n Uptime: {}s", |
| session.action_count, |
| session.files_read.len(), |
| session.files_written.len(), |
| session.anchor_ratio(), |
| allowed, blocked, |
| session.failures.len(), |
| session.last_tool.as_deref().unwrap_or("none"), |
| last_manifest, |
| (chrono::Utc::now() - session.started).num_seconds(), |
| )}) |
| } |
| _ => { |
| |
| if limit > 0 { |
| let mut limited = session.clone(); |
| if limited.manifest.len() > limit { |
| let start = limited.manifest.len() - limit; |
| limited.manifest = limited.manifest[start..].to_vec(); |
| } |
| if limited.complexity_history.len() > limit { |
| let start = limited.complexity_history.len() - limit; |
| limited.complexity_history = limited.complexity_history[start..].to_vec(); |
| } |
| json!({"type": "text", "text": serde_json::to_string_pretty(&limited).unwrap()}) |
| } else { |
| json!({"type": "text", "text": serde_json::to_string_pretty(session).unwrap()}) |
| } |
| } |
| } |
| } |
|
|
| |
| "spf_read" | "Read" => { |
| let file_path = args["file_path"].as_str() |
| .or_else(|| args["absolute_path"].as_str()) |
| .unwrap_or(""); |
|
|
| |
| match std::fs::read(file_path) { |
| Ok(bytes) => { |
| session.track_read(file_path); |
| session.record_action("Read", "success", Some(file_path)); |
| let _ = storage.save_session(session); |
|
|
| match String::from_utf8(bytes.clone()) { |
| Ok(content) => { |
| |
| let offset = args.get("offset").and_then(|v| v.as_u64()).unwrap_or(0) as usize; |
| let limit = args.get("limit").and_then(|v| v.as_u64()).unwrap_or(0) as usize; |
|
|
| let lines: Vec<&str> = content.lines().collect(); |
| let total = lines.len(); |
| let start = offset.min(total); |
| let end = if limit > 0 { (start + limit).min(total) } else { total }; |
|
|
| let numbered: String = lines[start..end] |
| .iter() |
| .enumerate() |
| .map(|(i, line)| format!("{:>6}\t{}", start + i + 1, line)) |
| .collect::<Vec<_>>() |
| .join("\n"); |
|
|
| json!({"type": "text", "text": format!("File: {} ({} lines)\n{}", file_path, total, numbered)}) |
| } |
| Err(_) => { |
| |
| json!({"type": "text", "text": utf8_safe::binary_info(&bytes)}) |
| } |
| } |
| } |
| Err(e) => { |
| session.record_action("Read", "failed", Some(file_path)); |
| session.record_failure("Read", &e.to_string()); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": format!("Read failed: {}", e)}) |
| } |
| } |
| } |
|
|
| |
| "spf_write" | "Write" => { |
| let file_path = args["file_path"].as_str().unwrap_or(""); |
| let content = args["content"].as_str().unwrap_or(""); |
|
|
| |
| |
| if let Some(parent) = std::path::Path::new(file_path).parent() { |
| let _ = std::fs::create_dir_all(parent); |
| } |
|
|
| match std::fs::write(file_path, content) { |
| Ok(()) => { |
| session.track_write(file_path); |
| session.record_action("Write", "success", Some(file_path)); |
| session.record_manifest_detailed("Write", gate_decision.complexity.c, "ALLOWED", None, Some(file_path), &[file_path.to_string()]); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": format!( |
| "Written: {} ({} bytes) | C={} {}", |
| file_path, content.len(), gate_decision.complexity.c, gate_decision.complexity.tier |
| )}) |
| } |
| Err(e) => { |
| session.record_action("Write", "failed", Some(file_path)); |
| session.record_failure("Write", &e.to_string()); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": format!("Write failed: {}", e)}) |
| } |
| } |
| } |
|
|
| |
| "spf_edit" | "Edit" => { |
| let file_path = args["file_path"].as_str().unwrap_or(""); |
| let old_string = args["old_string"].as_str().unwrap_or(""); |
| let new_string = args["new_string"].as_str().unwrap_or(""); |
| let replace_all = args["replace_all"].as_bool().unwrap_or(false); |
|
|
| |
| let content = match std::fs::read(file_path) { |
| Ok(bytes) => match String::from_utf8(bytes) { |
| Ok(text) => text, |
| Err(_) => return json!({"type": "text", "text": |
| format!("Edit blocked: {} is a binary file (not editable as text)", file_path) |
| }), |
| }, |
| Err(e) => { |
| session.record_failure("Edit", &e.to_string()); |
| let _ = storage.save_session(session); |
| return json!({"type": "text", "text": format!("Edit read failed: {}", e)}); |
| } |
| }; |
|
|
| let new_content = if replace_all { |
| content.replace(old_string, new_string) |
| } else { |
| content.replacen(old_string, new_string, 1) |
| }; |
|
|
| if new_content == content { |
| json!({"type": "text", "text": format!("Edit: old_string not found in {}", file_path)}) |
| } else { |
| match std::fs::write(file_path, &new_content) { |
| Ok(()) => { |
| session.track_write(file_path); |
| session.record_action("Edit", "success", Some(file_path)); |
| session.record_manifest_detailed("Edit", gate_decision.complexity.c, "ALLOWED", None, Some(file_path), &[file_path.to_string()]); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": format!( |
| "Edited: {} | C={} {}", |
| file_path, gate_decision.complexity.c, gate_decision.complexity.tier |
| )}) |
| } |
| Err(e) => { |
| session.record_failure("Edit", &e.to_string()); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": format!("Edit write failed: {}", e)}) |
| } |
| } |
| } |
| } |
|
|
| |
| "spf_bash" | "Bash" => { |
| let command = args["command"].as_str().unwrap_or(""); |
| let timeout_secs = args["timeout"].as_u64().unwrap_or(30).min(300); |
|
|
| |
| let output_result = Command::new("timeout") |
| .arg("--signal=KILL") |
| .arg(format!("{}s", timeout_secs)) |
| .arg("bash") |
| .arg("-c") |
| .arg(command) |
| .output() |
| .or_else(|_| { |
| |
| Command::new("bash") |
| .arg("-c") |
| .arg(command) |
| .output() |
| }); |
| match output_result { |
| Ok(output) => { |
| let stdout = utf8_safe::process_output_safe(&output.stdout); |
| let stderr = utf8_safe::process_output_safe(&output.stderr); |
| let success = output.status.success(); |
|
|
| session.record_action("Bash", if success { "success" } else { "failed" }, None); |
| if !success { |
| session.record_failure("Bash", &stderr); |
| } |
| session.record_manifest_detailed("Bash", gate_decision.complexity.c, "ALLOWED", None, Some(command), &crate::validate::extract_bash_targets(command)); |
| let _ = storage.save_session(session); |
|
|
| let mut result = String::new(); |
| if !stdout.is_empty() { |
| result.push_str(&stdout); |
| } |
| if !stderr.is_empty() { |
| result.push_str("\nSTDERR: "); |
| result.push_str(&stderr); |
| } |
| if result.is_empty() { |
| result = format!("Exit code: {}", output.status.code().unwrap_or(-1)); |
| } |
|
|
| json!({"type": "text", "text": result}) |
| } |
| Err(e) => { |
| session.record_failure("Bash", &e.to_string()); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": format!("Bash failed: {}", e)}) |
| } |
| } |
| } |
|
|
| |
| "spf_glob" | "Glob" => { |
| let pattern = args["pattern"].as_str().unwrap_or(""); |
| let path = args["path"].as_str().unwrap_or("."); |
|
|
| session.record_action("Glob", "called", None); |
|
|
| |
| let search_path = match std::fs::canonicalize(path) { |
| Ok(p) => p.to_string_lossy().to_string(), |
| Err(_) => { |
| if path.contains("..") { |
| return json!({"type": "text", "text": "BLOCKED: path traversal detected in search path"}); |
| } |
| path.to_string() |
| } |
| }; |
|
|
| if !config.is_path_allowed(&search_path) { |
| session.record_manifest("spf_glob", gate_decision.complexity.c, "BLOCKED", |
| Some("Search path outside allowed boundaries")); |
| let _ = storage.save_session(session); |
| return json!({"type": "text", "text": format!( |
| "BLOCKED: glob search path '{}' is outside allowed paths", path |
| ), "_blocked": true}); |
| } |
|
|
| |
| |
| |
| |
| |
| |
| fn is_safe_glob_char(c: char) -> bool { |
| c.is_alphanumeric() |
| || matches!(c, '/' | '.' | '-' | '_' | '*' | '?' | '[' | ']' |
| | ' ' | '+' | '@' | ',' | ':' | '~' | '#') |
| } |
| if !pattern.chars().all(is_safe_glob_char) { |
| return json!({"type": "text", "text": "BLOCKED: glob pattern contains disallowed characters (whitelist: alphanumeric . / - _ * ? [ ] + @ , : ~ #)"}); |
| } |
| if !path.chars().all(is_safe_glob_char) { |
| return json!({"type": "text", "text": "BLOCKED: search path contains disallowed characters"}); |
| } |
| if pattern.starts_with('-') { |
| return json!({"type": "text", "text": "BLOCKED: pattern cannot start with '-' (flag injection)"}); |
| } |
| if path.contains("..") { |
| return json!({"type": "text", "text": "BLOCKED: path traversal not allowed in glob"}); |
| } |
|
|
| |
| match Command::new("find") |
| .arg(path) |
| .arg("-name") |
| .arg(pattern) |
| .stderr(std::process::Stdio::null()) |
| .output() |
| { |
| Ok(output) => { |
| let stdout = utf8_safe::process_output_safe(&output.stdout); |
| |
| let truncated: String = stdout.lines().take(100).collect::<Vec<_>>().join("\n"); |
| let _ = storage.save_session(session); |
| if truncated.is_empty() { |
| json!({"type": "text", "text": "No matches found"}) |
| } else { |
| json!({"type": "text", "text": truncated}) |
| } |
| } |
| Err(e) => { |
| session.record_failure("Glob", &e.to_string()); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": format!("Glob failed: {}", e)}) |
| } |
| } |
| } |
|
|
| |
// Content search via ripgrep. The pattern is passed after `--`, so it is
// never parsed as a flag and needs no character whitelist of its own.
"spf_grep" | "Grep" => {
    let pattern = args["pattern"].as_str().unwrap_or("");
    let path = args["path"].as_str().unwrap_or(".");
    let glob_filter = args["glob"].as_str().unwrap_or("");
    let case_insensitive = args["case_insensitive"].as_bool().unwrap_or(false);
    let context = args["context_lines"].as_u64().unwrap_or(0);

    session.record_action("Grep", "called", None);

    // Canonicalize for the boundary check; nonexistent paths fall back to
    // the raw string but are rejected outright on "..".
    let search_path = match std::fs::canonicalize(path) {
        Ok(p) => p.to_string_lossy().to_string(),
        Err(_) => {
            if path.contains("..") {
                return json!({"type": "text", "text": "BLOCKED: path traversal detected in search path"});
            }
            path.to_string()
        }
    };

    if !config.is_path_allowed(&search_path) {
        session.record_manifest("spf_grep", gate_decision.complexity.c, "BLOCKED",
            Some("Search path outside allowed boundaries"));
        let _ = storage.save_session(session);
        return json!({"type": "text", "text": format!(
            "BLOCKED: grep search path '{}' is outside allowed paths", path
        ), "_blocked": true});
    }

    // Whitelist applies to the path and optional --glob filter (both are
    // positioned where rg could interpret metacharacters); the pattern is
    // protected by the `--` separator below instead.
    fn is_safe_search_char(c: char) -> bool {
        c.is_alphanumeric()
            || matches!(c, '/' | '.' | '-' | '_' | '*' | '?' | '[' | ']'
                | ' ' | '+' | '@' | ',' | ':' | '~' | '#')
    }
    if !path.chars().all(is_safe_search_char) {
        return json!({"type": "text", "text": "BLOCKED: search path contains disallowed characters (whitelist: alphanumeric . / - _ * ? [ ] + @ , : ~ #)"});
    }
    if !glob_filter.is_empty() && !glob_filter.chars().all(is_safe_search_char) {
        return json!({"type": "text", "text": "BLOCKED: glob filter contains disallowed characters"});
    }
    if path.contains("..") {
        return json!({"type": "text", "text": "BLOCKED: path traversal not allowed in grep"});
    }

    // Build the rg invocation: optional flags first, then `--` so the
    // user-controlled pattern can never be read as an option.
    let mut rg = Command::new("rg");
    if case_insensitive {
        rg.arg("-i");
    }
    if context > 0 {
        rg.arg("-C").arg(context.to_string());
    }
    if !glob_filter.is_empty() {
        rg.arg("--glob").arg(glob_filter);
    }
    rg.arg("--").arg(pattern).arg(path);
    rg.stderr(std::process::Stdio::null());

    match rg.output() {
        Ok(output) => {
            let stdout = utf8_safe::process_output_safe(&output.stdout);
            // Cap tool output at 500 lines to keep responses bounded.
            let truncated: String = stdout.lines().take(500).collect::<Vec<_>>().join("\n");
            let _ = storage.save_session(session);
            if truncated.is_empty() {
                json!({"type": "text", "text": "No matches found"})
            } else {
                json!({"type": "text", "text": truncated})
            }
        }
        Err(e) => {
            session.record_failure("Grep", &e.to_string());
            let _ = storage.save_session(session);
            json!({"type": "text", "text": format!("Grep failed: {}", e)})
        }
    }
}
|
|
| |
// Fetch a URL and return its (truncated) text alongside the caller's prompt.
"spf_web_fetch" => {
    let url = args["url"].as_str().unwrap_or("");
    let prompt = args["prompt"].as_str().unwrap_or("Summarize this content");

    session.record_action("WebFetch", "called", None);
    match WebClient::new() {
        Ok(client) => {
            match client.read_page(url) {
                Ok((text, raw_len, content_type)) => {
                    session.record_manifest("web_fetch", gate_decision.complexity.c, "ALLOWED", None);
                    let _ = storage.save_session(session);
                    // Char-boundary-safe truncation keeps responses bounded.
                    let truncated = utf8_safe::safe_truncate(&text, 50000);
                    json!({"type": "text", "text": format!(
                        "Fetched {} ({} bytes, {})\nPrompt: {}\n\n{}",
                        url, raw_len, content_type, prompt, truncated
                    )})
                }
                Err(e) => {
                    session.record_failure("WebFetch", &e);
                    // NOTE(review): a failed fetch is still recorded as "ALLOWED"
                    // in the manifest (the gate allowed the attempt) — confirm intended.
                    session.record_manifest("web_fetch", gate_decision.complexity.c, "ALLOWED", None);
                    let _ = storage.save_session(session);
                    json!({"type": "text", "text": format!("WebFetch failed: {}", e)})
                }
            }
        }
        Err(e) => {
            session.record_failure("WebFetch", &e);
            let _ = storage.save_session(session);
            json!({"type": "text", "text": format!("WebClient init failed: {}", e)})
        }
    }
}

// Web search via the configured engine; returns a numbered result list.
"spf_web_search" => {
    let query = args["query"].as_str().unwrap_or("");
    let count = args["count"].as_u64().unwrap_or(10) as u32;

    session.record_action("WebSearch", "called", None);
    match WebClient::new() {
        Ok(client) => {
            match client.search(query, count) {
                Ok((engine, results)) => {
                    let mut output = format!("Search '{}' via {} ({} results):\n\n", query, engine, results.len());
                    for (i, r) in results.iter().enumerate() {
                        output.push_str(&format!("{}. {}\n   {}\n   {}\n\n", i + 1, r.title, r.url, r.description));
                    }
                    session.record_manifest("web_search", gate_decision.complexity.c, "ALLOWED", None);
                    let _ = storage.save_session(session);
                    json!({"type": "text", "text": output})
                }
                Err(e) => {
                    session.record_failure("WebSearch", &e);
                    session.record_manifest("web_search", gate_decision.complexity.c, "ALLOWED", None);
                    let _ = storage.save_session(session);
                    json!({"type": "text", "text": format!("WebSearch failed: {}", e)})
                }
            }
        }
        Err(e) => {
            session.record_failure("WebSearch", &e);
            let _ = storage.save_session(session);
            json!({"type": "text", "text": format!("WebClient init failed: {}", e)})
        }
    }
}

// Download a URL to a local path; the write is tracked against the session.
"spf_web_download" => {
    let url = args["url"].as_str().unwrap_or("");
    let save_path = args["save_path"].as_str().unwrap_or("");

    session.record_action("WebDownload", "called", Some(save_path));
    match WebClient::new() {
        Ok(client) => {
            match client.download(url, save_path) {
                Ok((size, content_type)) => {
                    // Record the filesystem write so session accounting sees it.
                    session.track_write(save_path);
                    session.record_manifest("web_download", gate_decision.complexity.c, "ALLOWED", None);
                    let _ = storage.save_session(session);
                    json!({"type": "text", "text": format!(
                        "Downloaded {} → {} ({} bytes, {})",
                        url, save_path, size, content_type
                    )})
                }
                Err(e) => {
                    session.record_failure("WebDownload", &e);
                    session.record_manifest("web_download", gate_decision.complexity.c, "ALLOWED", None);
                    let _ = storage.save_session(session);
                    json!({"type": "text", "text": format!("Download failed: {}", e)})
                }
            }
        }
        Err(e) => {
            session.record_failure("WebDownload", &e);
            let _ = storage.save_session(session);
            json!({"type": "text", "text": format!("WebClient init failed: {}", e)})
        }
    }
}
|
|
| |
| "spf_web_api" => { |
| let method = args["method"].as_str().unwrap_or("GET"); |
| let url = args["url"].as_str().unwrap_or(""); |
| let headers = args["headers"].as_str().unwrap_or(""); |
| let body = args["body"].as_str().unwrap_or(""); |
|
|
| session.record_action("WebAPI", "called", None); |
| match WebClient::new() { |
| Ok(client) => { |
| match client.api_request(method, url, headers, body) { |
| Ok((status, resp_headers, resp_body)) => { |
| session.record_manifest("web_api", gate_decision.complexity.c, "ALLOWED", None); |
| let _ = storage.save_session(session); |
| let truncated = if resp_body.len() > 50000 { &resp_body[..50000] } else { &resp_body }; |
| json!({"type": "text", "text": format!( |
| "API {} {} → HTTP {}\n\nHeaders:\n{}\n\nBody:\n{}", |
| method, url, status, resp_headers, truncated |
| )}) |
| } |
| Err(e) => { |
| session.record_failure("WebAPI", &e); |
| session.record_manifest("web_api", gate_decision.complexity.c, "ALLOWED", None); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": format!("API request failed: {}", e)}) |
| } |
| } |
| } |
| Err(e) => { |
| session.record_failure("WebAPI", &e); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": format!("WebClient init failed: {}", e)}) |
| } |
| } |
| } |
|
|
|
|
|
|
|
|
| |
|
|
// Attach to a local browser; defaults to the gate's own HTTP port when the
// caller does not supply one.
"spf_web_connect" => {
    let port = args.get("port").and_then(|v| v.as_u64())
        .unwrap_or(http_port as u64) as u16;
    match browser.lock().unwrap().connect(port) {
        Ok((msg, channels)) => {
            // Stash the channel pair so the /ws/browser endpoint can use it.
            *ws_browser_channels.lock().unwrap() = Some(channels);
            json!({"type": "text", "text": format!("{}\nProxy: http://localhost:{}/proxy?url=<url>\nWS: ws://localhost:{}/ws/browser", msg, port, port)})
        }
        Err(e) => json!({"type": "text", "text": format!("Connect failed: {}", e)}),
    }
}
|
|
| "spf_web_navigate" => { |
| let url = args.get("url").and_then(|v| v.as_str()).unwrap_or(""); |
| if url.is_empty() { |
| return json!({"type": "text", "text": "url required"}); |
| } |
| match browser.lock().unwrap().navigate(url) { |
| Ok(msg) => json!({"type": "text", "text": msg}), |
| Err(e) => json!({"type": "text", "text": format!("Navigate failed: {}", e)}), |
| } |
| } |
|
|
| "spf_web_click" => { |
| let selector = args.get("selector").and_then(|v| v.as_str()).unwrap_or(""); |
| if selector.is_empty() { |
| return json!({"type": "text", "text": "selector required"}); |
| } |
| match browser.lock().unwrap().click(selector) { |
| Ok(msg) => json!({"type": "text", "text": msg}), |
| Err(e) => json!({"type": "text", "text": format!("Click failed: {}", e)}), |
| } |
| } |
|
|
| "spf_web_fill" => { |
| let selector = args.get("selector").and_then(|v| v.as_str()).unwrap_or(""); |
| let text = args.get("text").and_then(|v| v.as_str()).unwrap_or(""); |
| if selector.is_empty() { |
| return json!({"type": "text", "text": "selector required"}); |
| } |
| match browser.lock().unwrap().fill(selector, text) { |
| Ok(msg) => json!({"type": "text", "text": msg}), |
| Err(e) => json!({"type": "text", "text": format!("Fill failed: {}", e)}), |
| } |
| } |
|
|
// Query DOM elements by CSS selector and return a compact text summary.
"spf_web_select" => {
    let selector = args.get("selector").and_then(|v| v.as_str()).unwrap_or("");
    if selector.is_empty() {
        return json!({"type": "text", "text": "selector required"});
    }
    match browser.lock().unwrap().select(selector) {
        Ok(elements) => {
            // One line per element: tag, optional id/class, first 80 chars of text.
            let summary: Vec<String> = elements.iter().map(|e| {
                format!("<{}{}{}> {}",
                    e.tag,
                    e.id.as_deref().map(|i| format!(" id={}", i)).unwrap_or_default(),
                    e.class.as_deref().map(|c| format!(" class={}", c)).unwrap_or_default(),
                    e.text.chars().take(80).collect::<String>())
            }).collect();
            json!({"type": "text", "text": format!("Found {} element(s):\n{}", elements.len(), summary.join("\n"))})
        }
        Err(e) => json!({"type": "text", "text": format!("Select failed: {}", e)}),
    }
}
|
|
| "spf_web_eval" => { |
| let expression = args.get("expression").and_then(|v| v.as_str()).unwrap_or(""); |
| if expression.is_empty() { |
| return json!({"type": "text", "text": "expression required"}); |
| } |
| match browser.lock().unwrap().eval(expression) { |
| Ok(result) => json!({"type": "text", "text": result.to_string()}), |
| Err(e) => json!({"type": "text", "text": format!("Eval failed: {}", e)}), |
| } |
| } |
|
|
| "spf_web_screenshot" => { |
| match browser.lock().unwrap().screenshot() { |
| Ok(path) => json!({"type": "text", "text": format!("Screenshot saved: {}\nUse Read tool to view.", path)}), |
| Err(e) => json!({"type": "text", "text": format!("Screenshot failed: {}", e)}), |
| } |
| } |
|
|
| "spf_web_design" => { |
| match browser.lock().unwrap().design() { |
| Ok(brief) => { |
| let out = serde_json::to_string_pretty(&brief).unwrap_or_else(|_| format!("{:?}", brief)); |
| json!({"type": "text", "text": out}) |
| } |
| Err(e) => json!({"type": "text", "text": format!("Design brief failed: {}", e)}), |
| } |
| } |
|
|
| "spf_web_page" => { |
| match browser.lock().unwrap().page_info() { |
| Ok(info) => json!({"type": "text", "text": info}), |
| Err(e) => json!({"type": "text", "text": format!("Page info failed: {}", e)}), |
| } |
| } |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| |
| "spf_notebook_edit" => { |
| let notebook_path = args["notebook_path"].as_str().unwrap_or(""); |
| let new_source = args["new_source"].as_str().unwrap_or(""); |
| let cell_number = args["cell_number"].as_u64().unwrap_or(0) as usize; |
| let cell_type = args["cell_type"].as_str().unwrap_or("code"); |
| let edit_mode = args["edit_mode"].as_str().unwrap_or("replace"); |
|
|
| |
| let params = ToolParams { |
| file_path: Some(notebook_path.to_string()), |
| content: Some(new_source.to_string()), |
| ..Default::default() |
| }; |
|
|
| let decision = gate::process("spf_notebook_edit", ¶ms, config, session, &source); |
| if !decision.allowed { |
| session.record_manifest("NotebookEdit", decision.complexity.c, "BLOCKED", |
| decision.errors.first().map(|s| s.as_str())); |
| let _ = storage.save_session(session); |
| return json!({"type": "text", "text": format!("BLOCKED: {}", decision.errors.join(", ")), "_blocked": true}); |
| } |
|
|
| session.record_action("NotebookEdit", "called", Some(notebook_path)); |
|
|
| |
| match std::fs::read_to_string(notebook_path) { |
| Ok(content) => { |
| match serde_json::from_str::<Value>(&content) { |
| Ok(mut notebook) => { |
| if let Some(cells) = notebook.get_mut("cells").and_then(|c| c.as_array_mut()) { |
| match edit_mode { |
| "replace" => { |
| if cell_number < cells.len() { |
| cells[cell_number]["source"] = json!([new_source]); |
| cells[cell_number]["cell_type"] = json!(cell_type); |
| } else { |
| return json!({"type": "text", "text": format!("Cell {} not found", cell_number)}); |
| } |
| } |
| "insert" => { |
| let new_cell = json!({ |
| "cell_type": cell_type, |
| "source": [new_source], |
| "metadata": {}, |
| "outputs": [] |
| }); |
| cells.insert(cell_number, new_cell); |
| } |
| "delete" => { |
| if cell_number < cells.len() { |
| cells.remove(cell_number); |
| } |
| } |
| _ => return json!({"type": "text", "text": "Invalid edit_mode"}) |
| } |
|
|
| |
| match std::fs::write(notebook_path, serde_json::to_string_pretty(¬ebook).unwrap()) { |
| Ok(()) => { |
| session.track_write(notebook_path); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": format!("Notebook edited: {} cell {} ({})", notebook_path, cell_number, edit_mode)}) |
| } |
| Err(e) => { |
| session.record_failure("NotebookEdit", &e.to_string()); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": format!("Write failed: {}", e)}) |
| } |
| } |
| } else { |
| json!({"type": "text", "text": "Invalid notebook: no cells array"}) |
| } |
| } |
| Err(e) => json!({"type": "text", "text": format!("JSON parse error: {}", e)}) |
| } |
| } |
| Err(e) => { |
| session.record_failure("NotebookEdit", &e.to_string()); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": format!("Read failed: {}", e)}) |
| } |
| } |
| } |
|
|
| |
| "spf_brain_search" => { |
| let query = args["query"].as_str().unwrap_or(""); |
| let collection = args["collection"].as_str().unwrap_or("default"); |
| let limit = args["limit"].as_u64().unwrap_or(5) as usize; |
|
|
| session.record_action("brain_search", "called", None); |
| let output = crate::brain_local::brain_search(query, collection, limit); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": output}) |
| } |
|
|
| |
| "spf_brain_store" => { |
| let text = args["text"].as_str().unwrap_or(""); |
| let title = args["title"].as_str().unwrap_or("untitled"); |
| let collection = args["collection"].as_str().unwrap_or("default"); |
|
|
| session.record_action("brain_store", "called", None); |
| let output = crate::brain_local::brain_store(text, title, collection); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": output}) |
| } |
|
|
| |
// Dual store: writes the full text to the brain collection AND creates a
// short "working" memory in the agent-state DB when that DB is available.
"spf_flint_store" => {
    let text = args["text"].as_str().unwrap_or("");
    let title = args["title"].as_str().unwrap_or("untitled");
    let collection = args["collection"].as_str().unwrap_or("default");
    let tags_str = args["tags"].as_str().unwrap_or("");

    session.record_action("flint_store", "called", None);

    // Full text goes to the brain store…
    let brain_result = crate::brain_local::brain_store(text, title, collection);

    // …while only a 500-char summary lands in working memory.
    let working_result = if let Some(ref db) = agent_db {
        let summary: String = text.chars().take(500).collect();
        // Provenance tags are always attached; user tags are appended to them.
        let tags: Vec<String> = if tags_str.is_empty() {
            vec![
                "tool:spf_flint_store".to_string(),
                "source:flint_store".to_string(),
                format!("collection:{}", collection),
            ]
        } else {
            // NOTE(review): this branch omits the "collection:…" tag that the
            // default branch adds — confirm the asymmetry is intended.
            let mut t: Vec<String> = tags_str.split(',')
                .map(|s| s.trim().to_string())
                .filter(|s| !s.is_empty())
                .collect();
            t.push("tool:spf_flint_store".to_string());
            t.push("source:flint_store".to_string());
            t
        };
        match db.create_memory(&summary, crate::agent_state::MemoryType::Working, tags, "flint_store") {
            Ok(id) => format!("Working memory created: {}", id),
            Err(e) => format!("Working memory error: {}", e),
        }
    } else {
        "Agent state DB not available — brain-only store".to_string()
    };

    let _ = storage.save_session(session);
    json!({"type": "text", "text": format!("{}\n{}", brain_result, working_result)})
}
|
|
| |
| "spf_brain_context" => { |
| let query = args["query"].as_str().unwrap_or(""); |
| let max_tokens = args["max_tokens"].as_u64().unwrap_or(2000) as usize; |
|
|
| session.record_action("brain_context", "called", None); |
| let output = crate::brain_local::brain_context(query, "default", max_tokens); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": output}) |
| } |
|
|
| |
| "spf_brain_index" => { |
| let path = args["path"].as_str().unwrap_or(""); |
|
|
| session.record_action("brain_index", "called", Some(path)); |
| let output = crate::brain_local::brain_index_path(path, "default"); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": output}) |
| } |
|
|
| |
| "spf_brain_list" => { |
| session.record_action("brain_list", "called", None); |
| let output = crate::brain_local::brain_list_collections(); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": output}) |
| } |
|
|
| |
| "spf_brain_status" => { |
| session.record_action("brain_status", "called", None); |
| let output = crate::brain_local::brain_status(); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": output}) |
| } |
|
|
| |
| "spf_brain_recall" => { |
| let query = args["query"].as_str().unwrap_or(""); |
| let collection = args["collection"].as_str().unwrap_or("default"); |
|
|
| session.record_action("brain_recall", "called", None); |
| let output = crate::brain_local::brain_recall(query, collection); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": output}) |
| } |
|
|
| |
| "spf_brain_list_docs" => { |
| let collection = args["collection"].as_str().unwrap_or("default"); |
|
|
| session.record_action("brain_list_docs", "called", None); |
| let output = crate::brain_local::brain_list_docs(collection); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": output}) |
| } |
|
|
| |
| "spf_brain_get_doc" => { |
| let doc_id = args["doc_id"].as_str().unwrap_or(""); |
| let collection = args["collection"].as_str().unwrap_or("default"); |
|
|
| session.record_action("brain_get_doc", "called", None); |
| let output = crate::brain_local::brain_get_doc(doc_id, collection); |
| let _ = storage.save_session(session); |
| json!({"type": "text", "text": output}) |
| } |
|
|
| |
| |
|
|
| |
// Legacy RAG tools, now thin shims over the in-process brain.
// NOTE(review): despite the name, this searches already-indexed brain
// content rather than collecting from the web — the comment text below
// confirms web collection moved to spf_web_search + spf_brain_store.
"spf_rag_collect_web" => {
    let topic = args["topic"].as_str().unwrap_or("");
    session.record_action("rag_collect_web", "called", None);
    let output = if topic.is_empty() {
        "collect_web requires a topic parameter. Use spf_web_search + spf_brain_store for manual collection.".to_string()
    } else {
        crate::brain_local::brain_search(topic, "default", 10)
    };
    let _ = storage.save_session(session);
    json!({"type": "text", "text": output})
}

// Index a single file into the collection named by "category".
"spf_rag_collect_file" => {
    let path = args["path"].as_str().unwrap_or("");
    session.record_action("rag_collect_file", "called", Some(path));
    let collection = args["category"].as_str().unwrap_or("default");
    let output = crate::brain_local::brain_index_path(path, collection);
    let _ = storage.save_session(session);
    json!({"type": "text", "text": output})
}

// Index a folder into the default collection.
"spf_rag_collect_folder" => {
    let path = args["path"].as_str().unwrap_or("");
    session.record_action("rag_collect_folder", "called", Some(path));
    let output = crate::brain_local::brain_index_path(path, "default");
    let _ = storage.save_session(session);
    json!({"type": "text", "text": output})
}

// Index whatever was dropped into LIVE/RAG/DROP_HERE.
"spf_rag_collect_drop" => {
    session.record_action("rag_collect_drop", "called", None);
    let drop_path = format!("{}/LIVE/RAG/DROP_HERE", crate::paths::spf_root().display());
    let output = crate::brain_local::brain_index_path(&drop_path, "default");
    let _ = storage.save_session(session);
    json!({"type": "text", "text": output})
}

// Index the LIVE/RAG/GATHERED folder into the given category.
"spf_rag_index_gathered" => {
    let category = args["category"].as_str().unwrap_or("default");
    session.record_action("rag_index_gathered", "called", None);
    let gathered_path = format!("{}/LIVE/RAG/GATHERED", crate::paths::spf_root().display());
    let output = crate::brain_local::brain_index_path(&gathered_path, category);
    let _ = storage.save_session(session);
    json!({"type": "text", "text": output})
}

// Deduplication removed; points the caller at manual search instead.
"spf_rag_dedupe" => {
    let category = args["category"].as_str().unwrap_or("");
    session.record_action("rag_dedupe", "called", None);
    let _ = storage.save_session(session);
    json!({"type": "text", "text": format!("Dedupe not available in-process. Use spf_brain_search to find duplicates in collection '{}'.", category)})
}

// Alias for brain status.
"spf_rag_status" => {
    session.record_action("rag_status", "called", None);
    let output = crate::brain_local::brain_status();
    let _ = storage.save_session(session);
    json!({"type": "text", "text": output})
}

// Lists documents in the given category (defaults to "default").
"spf_rag_list_gathered" => {
    let category = args["category"].as_str().unwrap_or("default");
    session.record_action("rag_list_gathered", "called", None);
    let output = crate::brain_local::brain_list_docs(category);
    let _ = storage.save_session(session);
    json!({"type": "text", "text": output})
}

// Bandwidth tracking was removed along with the external RAG service.
"spf_rag_bandwidth_status" => {
    session.record_action("rag_bandwidth_status", "called", None);
    let _ = storage.save_session(session);
    json!({"type": "text", "text": "Bandwidth tracking removed — brain operates in-process. Use spf_brain_status for system info."})
}
|
|
| |
| "spf_rag_fetch_url" => { |
| let url = args["url"].as_str().unwrap_or(""); |
|
|
| session.record_action("rag_fetch_url", "called", None); |
| |
| let output = format!("Use spf_web_fetch to get content from '{}', then spf_brain_store to index it.", url); |
| let success = true; |
| let _ = storage.save_session(session); |
| if success { |
| json!({"type": "text", "text": output}) |
| } else { |
| json!({"type": "text", "text": format!("RAG fetch-url failed: {}", output)}) |
| } |
| } |
|
|
| |
// Deprecated RAG arms: each records the call, persists the session, and
// returns a static redirection message (or a brain shim where noted).
"spf_rag_collect_rss" => {
    session.record_action("rag_collect_rss", "called", None);
    let _ = storage.save_session(session);
    json!({"type": "text", "text": "RSS collection deprecated. Use spf_web_fetch to fetch feed URLs, then spf_brain_store to index content."})
}

"spf_rag_list_feeds" => {
    session.record_action("rag_list_feeds", "called", None);
    let _ = storage.save_session(session);
    json!({"type": "text", "text": "RSS feeds deprecated. Use spf_web_fetch for URL content, spf_brain_store to index."})
}

// Shim: searches the collection for the literal term "pending".
"spf_rag_pending_searches" => {
    let collection = args["collection"].as_str().unwrap_or("default");
    session.record_action("rag_pending_searches", "called", None);
    let output = crate::brain_local::brain_search("pending", collection, 20);
    let _ = storage.save_session(session);
    json!({"type": "text", "text": output})
}

"spf_rag_fulfill_search" => {
    session.record_action("rag_fulfill_search", "called", None);
    let _ = storage.save_session(session);
    json!({"type": "text", "text": "SearchSeeker fulfillment deprecated. Brain handles search completeness automatically."})
}

// Shim: plain brain search with a fixed limit of 10.
"spf_rag_smart_search" => {
    let query = args["query"].as_str().unwrap_or("");
    let collection = args["collection"].as_str().unwrap_or("default");
    session.record_action("rag_smart_search", "called", None);
    let output = crate::brain_local::brain_search(query, collection, 10);
    let _ = storage.save_session(session);
    json!({"type": "text", "text": output})
}

"spf_rag_auto_fetch_gaps" => {
    session.record_action("rag_auto_fetch_gaps", "called", None);
    let _ = storage.save_session(session);
    json!({"type": "text", "text": "Auto-fetch gaps deprecated. Use spf_brain_search to find gaps, spf_web_fetch + spf_brain_store to fill them."})
}
|
|
| |
| |
// Config tools: reads of rules/stats are allowed; get/set are CLI-only.
"spf_config_get" | "spf_config_set" => {
    json!({"type": "text", "text": "BLOCKED: Config read/write is user-only (use CLI)"})
}

// List path rules from the SPF_CONFIG LMDB (one "type: path" line each).
"spf_config_paths" => {
    session.record_action("config_paths", "list", None);
    let _ = storage.save_session(session);

    match config_db {
        Some(db) => match db.list_path_rules() {
            Ok(rules) => {
                let text = rules.iter()
                    .map(|(t, p)| format!("{}: {}", t, p))
                    .collect::<Vec<_>>()
                    .join("\n");
                json!({"type": "text", "text": if text.is_empty() { "No path rules configured".to_string() } else { text }})
            }
            Err(e) => json!({"type": "text", "text": format!("list_path_rules failed: {}", e)}),
        },
        None => json!({"type": "text", "text": "SPF_CONFIG LMDB not initialized"}),
    }
}

// Entry counts for the three SPF_CONFIG sub-databases.
"spf_config_stats" => {
    session.record_action("config_stats", "get", None);
    let _ = storage.save_session(session);

    match config_db {
        Some(db) => match db.stats() {
            Ok((config_count, paths_count, patterns_count)) => {
                json!({"type": "text", "text": format!(
                    "SPF_CONFIG LMDB Stats:\n Config entries: {}\n Path rules: {}\n Dangerous patterns: {}",
                    config_count, paths_count, patterns_count
                )})
            }
            Err(e) => json!({"type": "text", "text": format!("config_stats failed: {}", e)}),
        },
        None => json!({"type": "text", "text": "SPF_CONFIG LMDB not initialized"}),
    }
}
|
|
| |
// TMP_DB project-registry tools (read-only views).
"spf_tmp_list" => {
    session.record_action("tmp_list", "list", None);
    let _ = storage.save_session(session);

    match tmp_db {
        Some(db) => match db.list_projects() {
            Ok(projects) => {
                // One line per registered project with trust + usage counters.
                let text = projects.iter()
                    .map(|p| format!("{}: {} | trust={:?} | reads={} writes={} | active={}",
                        p.name, p.path, p.trust_level,
                        p.total_reads, p.total_writes, p.is_active))
                    .collect::<Vec<_>>()
                    .join("\n");
                json!({"type": "text", "text": if text.is_empty() { "No projects registered".to_string() } else { text }})
            }
            Err(e) => json!({"type": "text", "text": format!("list_projects failed: {}", e)}),
        },
        None => json!({"type": "text", "text": "TMP_DB LMDB not initialized"}),
    }
}

// Aggregate record counts for the TMP_DB sub-databases.
"spf_tmp_stats" => {
    session.record_action("tmp_stats", "get", None);
    let _ = storage.save_session(session);

    match tmp_db {
        Some(db) => match db.db_stats() {
            Ok((projects_count, access_count, resources_count)) => {
                json!({"type": "text", "text": format!(
                    "TMP_DB LMDB Stats:\n Registered projects: {}\n Access log entries: {}\n Resource records: {}",
                    projects_count, access_count, resources_count
                )})
            }
            Err(e) => json!({"type": "text", "text": format!("tmp_stats failed: {}", e)}),
        },
        None => json!({"type": "text", "text": "TMP_DB LMDB not initialized"}),
    }
}

// Full detail dump for one project, keyed by its path.
"spf_tmp_get" => {
    let path_arg = args["path"].as_str().unwrap_or("");

    session.record_action("tmp_get", "get", Some(path_arg));
    let _ = storage.save_session(session);

    match tmp_db {
        Some(db) => match db.get_project(path_arg) {
            Ok(Some(proj)) => {
                json!({"type": "text", "text": format!(
                    "Project: {}\nPath: {}\nTrust: {:?}\nActive: {}\nReads: {} | Writes: {} | Session writes: {}/{}\nMax write size: {} | Total C: {}\nProtected: {:?}\nCreated: {} | Last accessed: {}\nNotes: {}",
                    proj.name, proj.path, proj.trust_level, proj.is_active,
                    proj.total_reads, proj.total_writes, proj.session_writes, proj.max_writes_per_session,
                    proj.max_write_size, proj.total_complexity,
                    proj.protected_paths,
                    format_timestamp(proj.created_at), format_timestamp(proj.last_accessed),
                    if proj.notes.is_empty() { "None" } else { &proj.notes }
                )})
            }
            Ok(None) => json!({"type": "text", "text": format!("Project not found: {}", path_arg)}),
            Err(e) => json!({"type": "text", "text": format!("get_project failed: {}", e)}),
        },
        None => json!({"type": "text", "text": "TMP_DB LMDB not initialized"}),
    }
}

// Resolve the currently-active project, then fetch its details.
"spf_tmp_active" => {
    session.record_action("tmp_active", "get", None);
    let _ = storage.save_session(session);

    match tmp_db {
        Some(db) => match db.get_active() {
            Ok(Some(path)) => {
                // Second lookup: active pointer stores only the path.
                match db.get_project(&path) {
                    Ok(Some(proj)) => {
                        json!({"type": "text", "text": format!(
                            "Active project: {} ({})\nTrust: {:?} | Reads: {} | Writes: {}",
                            proj.name, proj.path, proj.trust_level, proj.total_reads, proj.total_writes
                        )})
                    }
                    _ => json!({"type": "text", "text": format!("Active project path: {} (details unavailable)", path)}),
                }
            }
            Ok(None) => json!({"type": "text", "text": "No active project"}),
            Err(e) => json!({"type": "text", "text": format!("get_active failed: {}", e)}),
        },
        None => json!({"type": "text", "text": "TMP_DB LMDB not initialized"}),
    }
}
|
|
| |
| |
| "spf_agent_remember" | "spf_agent_forget" | "spf_agent_set_state" => { |
| json!({"type": "text", "text": "BLOCKED: Agent state writes are user-only (use CLI)"}) |
| } |
|
|
| "spf_agent_stats" => { |
|
|
| session.record_action("agent_stats", "get", None); |
| let _ = storage.save_session(session); |
|
|
| match agent_db { |
| Some(db) => match db.db_stats() { |
| Ok((memory_count, sessions_count, state_count, tags_count)) => { |
| json!({"type": "text", "text": format!( |
| "AGENT_STATE LMDB Stats:\n Memories: {}\n Sessions: {}\n State keys: {}\n Tags: {}", |
| memory_count, sessions_count, state_count, tags_count |
| )}) |
| } |
| Err(e) => json!({"type": "text", "text": format!("agent_stats failed: {}", e)}), |
| }, |
| None => json!({"type": "text", "text": "AGENT_STATE LMDB not initialized"}), |
| } |
| } |
|
|
        // Full-text search over stored agent memories, newest formatting first-class:
        // each hit is rendered as "[id] type | content" plus tags and creation time.
        "spf_agent_memory_search" => {
            // Missing/invalid params degrade to an empty query / limit of 10.
            let query = args["query"].as_str().unwrap_or("");
            let limit = args["limit"].as_u64().unwrap_or(10) as usize;

            // Audit the search (query text is recorded); save is best-effort.
            session.record_action("agent_memory_search", "search", Some(query));
            let _ = storage.save_session(session);

            match agent_db {
                Some(db) => match db.search_memories(query, limit) {
                    Ok(memories) => {
                        if memories.is_empty() {
                            json!({"type": "text", "text": format!("No memories found for: {}", query)})
                        } else {
                            // One blank line between hits for readability.
                            let text = memories.iter()
                                .map(|m| format!("[{}] {:?} | {}\n Tags: {:?} | Created: {}",
                                    m.id, m.memory_type, m.content,
                                    m.tags, format_timestamp(m.created_at)))
                                .collect::<Vec<_>>()
                                .join("\n\n");
                            json!({"type": "text", "text": text})
                        }
                    }
                    Err(e) => json!({"type": "text", "text": format!("search_memories failed: {}", e)}),
                },
                None => json!({"type": "text", "text": "AGENT_STATE LMDB not initialized"}),
            }
        }
|
|
        // Exact-tag lookup over agent memories; compact one-line-per-hit output.
        "spf_agent_memory_by_tag" => {
            let tag = args["tag"].as_str().unwrap_or("");

            // Audit the lookup with the tag as the recorded detail.
            session.record_action("agent_memory_by_tag", "search", Some(tag));
            let _ = storage.save_session(session);

            match agent_db {
                Some(db) => match db.get_by_tag(tag) {
                    Ok(memories) => {
                        if memories.is_empty() {
                            json!({"type": "text", "text": format!("No memories with tag: {}", tag)})
                        } else {
                            let text = memories.iter()
                                .map(|m| format!("[{}] {:?} | {}",
                                    m.id, m.memory_type, m.content))
                                .collect::<Vec<_>>()
                                .join("\n");
                            json!({"type": "text", "text": text})
                        }
                    }
                    Err(e) => json!({"type": "text", "text": format!("get_by_tag failed: {}", e)}),
                },
                None => json!({"type": "text", "text": "AGENT_STATE LMDB not initialized"}),
            }
        }
|
|
        // Human-readable dump of the most recently recorded agent session.
        "spf_agent_session_info" => {
            session.record_action("agent_session_info", "get", None);
            let _ = storage.save_session(session);

            match agent_db {
                Some(db) => match db.get_latest_session() {
                    Ok(Some(sess)) => {
                        json!({"type": "text", "text": format!(
                            "Session: {}\nParent: {}\nStarted: {} | Ended: {}\nWorking dir: {}\nProject: {}\nFiles modified: {}\nComplexity: {} | Actions: {}\nSummary: {}",
                            sess.session_id,
                            sess.parent_session.as_deref().unwrap_or("None"),
                            format_timestamp(sess.started_at),
                            // ended_at == 0 is the sentinel for a still-running session.
                            if sess.ended_at == 0 { "Ongoing".to_string() } else { format_timestamp(sess.ended_at) },
                            sess.working_dir,
                            sess.active_project.as_deref().unwrap_or("None"),
                            sess.files_modified.len(),
                            sess.total_complexity, sess.total_actions,
                            if sess.summary.is_empty() { "None" } else { &sess.summary }
                        )})
                    }
                    Ok(None) => json!({"type": "text", "text": "No sessions recorded"}),
                    Err(e) => json!({"type": "text", "text": format!("get_latest_session failed: {}", e)}),
                },
                None => json!({"type": "text", "text": "AGENT_STATE LMDB not initialized"}),
            }
        }
|
|
        // Pre-built context summary string from the AGENT_STATE DB; empty summary
        // is normalized to an explicit "No context available" message.
        "spf_agent_context" => {
            session.record_action("agent_context", "get", None);
            let _ = storage.save_session(session);

            match agent_db {
                Some(db) => match db.get_context_summary() {
                    Ok(summary) => {
                        json!({"type": "text", "text": if summary.is_empty() { "No context available".to_string() } else { summary }})
                    }
                    Err(e) => json!({"type": "text", "text": format!("get_context_summary failed: {}", e)}),
                },
                None => json!({"type": "text", "text": "AGENT_STATE LMDB not initialized"}),
            }
        }
|
|
| |
        // --- Virtual filesystem: read-only operations ---------------------------
        // All four arms share the same pipeline:
        //   1. gate::process() decides allow/deny; denials record a manifest entry
        //      and return early with "_blocked": true so the caller can detect it.
        //   2. Allowed calls are audited via record_action (best-effort save).
        //   3. route_to_lmdb() maps the virtual path to a backing LMDB; if no
        //      backend claims the path, the op is refused (no host-FS fallback).

        // Existence check. Default path "/" when the arg is missing.
        "spf_fs_exists" => {
            let path = args["path"].as_str().unwrap_or("/");

            let gate_params = ToolParams { file_path: Some(path.to_string()), ..Default::default() };
            let decision = gate::process("spf_fs_exists", &gate_params, config, session, &source);
            if !decision.allowed {
                // Denials are persisted with the gate's complexity score and first error.
                session.record_manifest("spf_fs_exists", decision.complexity.c,
                    "BLOCKED",
                    decision.errors.first().map(|s| s.as_str()));
                let _ = storage.save_session(session);
                return json!({"type": "text", "text": decision.message, "_blocked": true});
            }
            session.record_action("fs_exists", "check", Some(path));
            let _ = storage.save_session(session);

            if let Some(result) = route_to_lmdb(path, "exists", None, config_db, tmp_db, agent_db) {
                return result;
            }
            json!({"type": "text", "text": format!("BLOCKED: path {} not routable — no LMDB fallback", path)})
        }

        // Metadata lookup; same gate → audit → route pipeline.
        "spf_fs_stat" => {
            let path = args["path"].as_str().unwrap_or("/");

            let gate_params = ToolParams { file_path: Some(path.to_string()), ..Default::default() };
            let decision = gate::process("spf_fs_stat", &gate_params, config, session, &source);
            if !decision.allowed {
                session.record_manifest("spf_fs_stat", decision.complexity.c,
                    "BLOCKED",
                    decision.errors.first().map(|s| s.as_str()));
                let _ = storage.save_session(session);
                return json!({"type": "text", "text": decision.message, "_blocked": true});
            }
            session.record_action("fs_stat", "get", Some(path));
            let _ = storage.save_session(session);

            if let Some(result) = route_to_lmdb(path, "stat", None, config_db, tmp_db, agent_db) {
                return result;
            }
            json!({"type": "text", "text": format!("BLOCKED: path {} not routable — no LMDB fallback", path)})
        }

        // Directory listing; same pipeline.
        "spf_fs_ls" => {
            let path = args["path"].as_str().unwrap_or("/");

            let gate_params = ToolParams { file_path: Some(path.to_string()), ..Default::default() };
            let decision = gate::process("spf_fs_ls", &gate_params, config, session, &source);
            if !decision.allowed {
                session.record_manifest("spf_fs_ls", decision.complexity.c,
                    "BLOCKED",
                    decision.errors.first().map(|s| s.as_str()));
                let _ = storage.save_session(session);
                return json!({"type": "text", "text": decision.message, "_blocked": true});
            }
            session.record_action("fs_ls", "list", Some(path));
            let _ = storage.save_session(session);

            if let Some(result) = route_to_lmdb(path, "ls", None, config_db, tmp_db, agent_db) {
                return result;
            }
            json!({"type": "text", "text": format!("BLOCKED: path {} not routable — no LMDB fallback", path)})
        }

        // File read; note the default path here is "" (not "/"), so a missing
        // arg falls through to the not-routable refusal rather than reading root.
        "spf_fs_read" => {
            let path = args["path"].as_str().unwrap_or("");

            let gate_params = ToolParams { file_path: Some(path.to_string()), ..Default::default() };
            let decision = gate::process("spf_fs_read", &gate_params, config, session, &source);
            if !decision.allowed {
                session.record_manifest("spf_fs_read", decision.complexity.c,
                    "BLOCKED",
                    decision.errors.first().map(|s| s.as_str()));
                let _ = storage.save_session(session);
                return json!({"type": "text", "text": decision.message, "_blocked": true});
            }
            session.record_action("fs_read", "read", Some(path));
            let _ = storage.save_session(session);

            if let Some(result) = route_to_lmdb(path, "read", None, config_db, tmp_db, agent_db) {
                return result;
            }
            json!({"type": "text", "text": format!("BLOCKED: path {} not routable — no LMDB fallback", path)})
        }
|
|
        // --- Virtual filesystem: mutating operations ----------------------------
        // Same gate → audit → route_to_lmdb pipeline as the read ops; write
        // additionally passes the content payload through the gate so the
        // decision can inspect what would be written.

        "spf_fs_write" => {
            let path = args["path"].as_str().unwrap_or("");
            let content = args["content"].as_str().unwrap_or("");

            // Content is included in the gate params for inspection.
            let gate_params = ToolParams { file_path: Some(path.to_string()), content: Some(content.to_string()), ..Default::default() };
            let decision = gate::process("spf_fs_write", &gate_params, config, session, &source);
            if !decision.allowed {
                session.record_manifest("spf_fs_write", decision.complexity.c,
                    "BLOCKED",
                    decision.errors.first().map(|s| s.as_str()));
                let _ = storage.save_session(session);
                return json!({"type": "text", "text": decision.message, "_blocked": true});
            }
            session.record_action("fs_write", "write", Some(path));
            let _ = storage.save_session(session);

            if let Some(result) = route_to_lmdb(path, "write", Some(content), config_db, tmp_db, agent_db) {
                return result;
            }
            json!({"type": "text", "text": format!("BLOCKED: path {} not routable — no LMDB fallback", path)})
        }

        // Directory creation; path-only gate params.
        "spf_fs_mkdir" => {
            let path = args["path"].as_str().unwrap_or("");

            let gate_params = ToolParams { file_path: Some(path.to_string()), ..Default::default() };
            let decision = gate::process("spf_fs_mkdir", &gate_params, config, session, &source);
            if !decision.allowed {
                session.record_manifest("spf_fs_mkdir", decision.complexity.c,
                    "BLOCKED",
                    decision.errors.first().map(|s| s.as_str()));
                let _ = storage.save_session(session);
                return json!({"type": "text", "text": decision.message, "_blocked": true});
            }
            session.record_action("fs_mkdir", "create", Some(path));
            let _ = storage.save_session(session);

            if let Some(result) = route_to_lmdb(path, "mkdir", None, config_db, tmp_db, agent_db) {
                return result;
            }
            json!({"type": "text", "text": format!("BLOCKED: path {} not routable — no LMDB fallback", path)})
        }

        // Removal; path-only gate params.
        "spf_fs_rm" => {
            let path = args["path"].as_str().unwrap_or("");

            let gate_params = ToolParams { file_path: Some(path.to_string()), ..Default::default() };
            let decision = gate::process("spf_fs_rm", &gate_params, config, session, &source);
            if !decision.allowed {
                session.record_manifest("spf_fs_rm", decision.complexity.c,
                    "BLOCKED",
                    decision.errors.first().map(|s| s.as_str()));
                let _ = storage.save_session(session);
                return json!({"type": "text", "text": decision.message, "_blocked": true});
            }
            session.record_action("fs_rm", "remove", Some(path));
            let _ = storage.save_session(session);

            if let Some(result) = route_to_lmdb(path, "rm", None, config_db, tmp_db, agent_db) {
                return result;
            }
            json!({"type": "text", "text": format!("BLOCKED: path {} not routable — no LMDB fallback", path)})
        }
|
|
        // Rename/move within the virtual FS. /tmp/* and /projects/* paths are
        // renamed directly on the backing device directories under LIVE/;
        // everything else falls through to route_to_lmdb.
        "spf_fs_rename" => {
            let old_path = args["old_path"].as_str().unwrap_or("");
            let new_path = args["new_path"].as_str().unwrap_or("");

            // NOTE(review): only old_path is passed through the gate; new_path is
            // never gated, so a rename can land in a destination the gate would
            // have refused as a write target — confirm this is intended.
            let gate_params = ToolParams { file_path: Some(old_path.to_string()), ..Default::default() };
            let decision = gate::process("spf_fs_rename", &gate_params, config, session, &source);
            if !decision.allowed {
                session.record_manifest("spf_fs_rename", decision.complexity.c,
                    "BLOCKED",
                    decision.errors.first().map(|s| s.as_str()));
                let _ = storage.save_session(session);
                return json!({"type": "text", "text": decision.message, "_blocked": true});
            }
            session.record_action("fs_rename", "rename", Some(old_path));
            let _ = storage.save_session(session);

            // Device-backed branch: these virtual prefixes map to real directories.
            let is_device_rename = old_path.starts_with("/tmp/") || old_path.starts_with("/projects/");
            if is_device_rename {
                // Substring check catches ".." anywhere, which also rejects some
                // harmless names (e.g. "a..b") — intentionally conservative.
                if old_path.contains("..") || new_path.contains("..") {
                    return json!({"type": "text", "text": "BLOCKED: path traversal detected in rename paths"});
                }
                let live_base = spf_root().join("LIVE").display().to_string();
                // Map a virtual path to its on-disk location under LIVE/.
                let resolve = |vpath: &str| -> std::path::PathBuf {
                    if vpath.starts_with("/tmp/") {
                        std::path::PathBuf::from(format!("{}/TMP/TMP", live_base))
                            .join(vpath.strip_prefix("/tmp/").unwrap_or(""))
                    } else {
                        std::path::PathBuf::from(format!("{}/PROJECTS/PROJECTS", live_base))
                            .join(vpath.strip_prefix("/projects/").unwrap_or(""))
                    }
                };
                let old_device = resolve(old_path);
                let new_device = resolve(new_path);
                // Ensure destination directory exists; failure surfaces via rename below.
                if let Some(parent) = new_device.parent() {
                    let _ = std::fs::create_dir_all(parent);
                }
                return match std::fs::rename(&old_device, &new_device) {
                    Ok(()) => json!({"type": "text", "text": format!("Renamed: {} -> {}", old_path, new_path)}),
                    Err(e) => json!({"type": "text", "text": format!("rename failed: {}", e)}),
                };
            }
            if let Some(result) = route_to_lmdb(old_path, "rename", None, config_db, tmp_db, agent_db) {
                return result;
            }
            json!({"type": "text", "text": format!("BLOCKED: paths {}, {} not routable — no LMDB fallback", old_path, new_path)})
        }
|
|
| |
| |
| |
|
|
        // Mesh overview: config-derived status, tracked peers, and (if present)
        // a one-line orchestrator summary.
        "spf_mesh_status" => {
            session.record_action("mesh", "status", None);
            let _ = storage.save_session(session);

            // Config is re-read from disk on every call; missing/invalid file
            // falls back to MeshConfig::default().
            let mesh_json = crate::paths::spf_root().join("LIVE/CONFIG/mesh.json");
            let mesh_cfg = crate::config::MeshConfig::load(&mesh_json).unwrap_or_default();
            let status = if mesh_cfg.enabled { "online" } else { "disabled" };

            // Snapshot tracked peers; key is truncated to 16 chars for display.
            let peers_info = tracked_peers.lock().unwrap();
            let peer_count = peers_info.len();
            let lines: Vec<String> = if peers_info.is_empty() {
                vec![" (no tracked peers)".to_string()]
            } else {
                peers_info.iter().map(|(k, v)| {
                    format!(" {} | role={} name={} last_seen={}s ago",
                        &k[..16.min(k.len())],
                        v.role,
                        v.name,
                        v.last_seen.elapsed().as_secs())
                }).collect()
            };
            // Release the lock before taking the orchestrator lock.
            drop(peers_info);

            let orch_info = orchestrator_state.lock().unwrap();
            let orch_text = match orch_info.as_ref() {
                Some(orch_arc) => {
                    let orch = orch_arc.lock().unwrap();
                    format!("\nOrchestrator: {}", orch.summary())
                }
                None => String::new(),
            };
            drop(orch_info);

            // NOTE(review): the joined peer lines follow "({}):{}" with no newline
            // after the colon, so the first peer renders on the header line —
            // confirm this layout is intended.
            json!({"type": "text", "text": format!(
                "Mesh: {} | Role: {} | Team: {} | Discovery: {} | Identity: {}\nTracked peers ({}):{}\n{}",
                status, mesh_cfg.role, mesh_cfg.team,
                mesh_cfg.discovery, pub_key_hex,
                peer_count,
                lines.join("\n"),
                orch_text
            )})
        }
|
|
        // List trusted peers (keys from LIVE/CONFIG/groups) annotated with any
        // live peer info we hold; trusted-but-unseen keys are listed too.
        "spf_mesh_peers" => {
            session.record_action("mesh", "peers", None);
            let _ = storage.save_session(session);

            let cfg_dir = crate::paths::spf_root().join("LIVE/CONFIG");
            let trusted = crate::identity::load_trusted_keys(&cfg_dir.join("groups"));
            let mut lines = Vec::new();
            for key in &trusted {
                if let Some(info) = peers.get(key.as_str()) {
                    let addrs = if info.addr.is_empty() { "no addrs".to_string() } else { info.addr.join(", ") };
                    lines.push(format!(" {} ({}, {}, {})", key, info.name, info.role, addrs));
                } else {
                    // Trusted key with no live peer record yet.
                    lines.push(format!(" {} (trusted, no config)", key));
                }
            }
            let count = lines.len();
            let list = if lines.is_empty() {
                "No trusted peers. Add pubkeys to LIVE/CONFIG/groups/*.keys".to_string()
            } else {
                lines.join("\n")
            };
            json!({"type": "text", "text": format!("Mesh Peers ({}):\n{}", count, list)})
        }
|
|
| "spf_mesh_call" => { |
| session.record_action("mesh", "call", None); |
| let _ = storage.save_session(session); |
|
|
| let peer_key = args["peer_key"].as_str().unwrap_or(""); |
| let tool_name = args["tool"].as_str().unwrap_or(""); |
| let tool_args = args.get("arguments").cloned().unwrap_or(json!({})); |
|
|
| if peer_key.is_empty() || tool_name.is_empty() { |
| json!({"type": "text", "text": "ERROR: peer_key and tool are required"}) |
| } else { |
| let cfg_dir = crate::paths::spf_root().join("LIVE/CONFIG"); |
| let trusted = crate::identity::load_trusted_keys(&cfg_dir.join("groups")); |
| if !trusted.contains(peer_key) { |
| json!({"type": "text", "text": format!("BLOCKED: peer not trusted"), "_blocked": true}) |
| } else if let Some(mesh_tx) = mesh_tx { |
| |
| let live_peers = crate::identity::load_peers(&cfg_dir.join("groups")); |
| let addrs = live_peers.get(peer_key) |
| .map(|p| p.addr.clone()) |
| .unwrap_or_default(); |
| let (reply_tx, reply_rx) = std::sync::mpsc::channel(); |
| let request = crate::mesh::MeshRequest { |
| peer_key: peer_key.to_string(), |
| addrs, |
| tool: tool_name.to_string(), |
| args: tool_args, |
| reply: reply_tx, |
| }; |
| if mesh_tx.send(request).is_ok() { |
| match reply_rx.recv_timeout(std::time::Duration::from_secs(30)) { |
| Ok(Ok(result)) => { |
| let text = result.get("result") |
| .and_then(|r| r.get("content")) |
| .and_then(|c| c.get(0)) |
| .and_then(|t| t.get("text")) |
| .and_then(|t| t.as_str()) |
| .unwrap_or("(no text in response)"); |
| json!({"type": "text", "text": text}) |
| } |
| Ok(Err(e)) => json!({"type": "text", "text": format!("MESH ERROR: {}", e)}), |
| Err(_) => json!({"type": "text", "text": "MESH ERROR: call timed out (30s)"}), |
| } |
| } else { |
| json!({"type": "text", "text": "MESH ERROR: mesh channel closed"}) |
| } |
| } else { |
| json!({"type": "text", "text": "MESH ERROR: mesh not enabled"}) |
| } |
| } |
| } |
|
|
| |
|
|
        // --- Transformer / flint tools: thin audited delegations ----------------
        // Each arm records the call, then hands off to transformer_tools; the
        // infer/chat arms additionally pass the shared tokenizer path under
        // LIVE/MODELS.

        "spf_transformer_status" => {
            session.record_action("spf_transformer_status", "called", None);
            crate::transformer_tools::handle_status(transformer, transformer_config)
        }

        "spf_transformer_infer" => {
            session.record_action("spf_transformer_infer", "called", None);
            let tokenizer_path = crate::paths::spf_root().join("LIVE/MODELS/tokenizer.json");
            crate::transformer_tools::handle_infer(transformer, args, transformer_config, &tokenizer_path.to_string_lossy())
        }

        "spf_transformer_chat" => {
            session.record_action("spf_transformer_chat", "called", None);
            let tokenizer_path = crate::paths::spf_root().join("LIVE/MODELS/tokenizer.json");
            crate::transformer_tools::handle_chat(transformer, args, transformer_config, &tokenizer_path.to_string_lossy())
        }

        "spf_transformer_train" => {
            session.record_action("spf_transformer_train", "called", None);
            crate::transformer_tools::handle_train(transformer, args, transformer_config)
        }

        "spf_transformer_metrics" => {
            session.record_action("spf_transformer_metrics", "called", None);
            crate::transformer_tools::handle_metrics(transformer, transformer_config)
        }

        // Flint training arms take only raw args (no transformer handle).
        "spf_flint_train_evil" => {
            session.record_action("spf_flint_train_evil", "called", None);
            crate::transformer_tools::handle_train_evil(args)
        }

        "spf_flint_train_good" => {
            session.record_action("spf_flint_train_good", "called", None);
            crate::transformer_tools::handle_train_good(args)
        }
|
|
| |
|
|
        // Send a chat message: store it locally in the (lazily created) chat
        // engine first, then best-effort deliver it to the peer over the mesh.
        // Local storage succeeds even when delivery fails — the reply text says
        // which of the two happened.
        "spf_chat_send" => {
            session.record_action("spf_chat_send", "called", None);

            let peer = args["peer_key"].as_str().unwrap_or("");
            let text = args["text"].as_str().unwrap_or("");
            if peer.is_empty() || text.is_empty() {
                return json!({"type": "text", "text": "Error: peer_key and text required"});
            }

            // NOTE(review): byte-slicing the keys assumes ASCII (hex) content;
            // a multi-byte UTF-8 key would panic here — confirm keys are hex.
            let peer_short = &peer[..8.min(peer.len())];
            let local_short = &pub_key_hex[..8.min(pub_key_hex.len())];

            // Conversation id: caller-supplied, or a deterministic DM id built
            // from the sorted pair of short keys (same id from either side).
            let conv_id = args.get("conversation_id")
                .and_then(|v| v.as_str())
                .map(|s| s.to_string())
                .unwrap_or_else(|| {
                    let mut parts = vec![local_short, peer_short];
                    parts.sort();
                    format!("dm_{}_{}", parts[0], parts[1])
                });

            let ts = chrono::Utc::now().to_rfc3339();

            // Recover from a poisoned mutex rather than panicking.
            let mut chat_lock = CHAT_ENGINE.lock().unwrap_or_else(|e| e.into_inner());
            let engine = chat_lock.get_or_insert_with(|| {
                crate::chat::ChatEngine::new(local_short.to_string())
            });

            // Store our own outgoing message locally before attempting delivery.
            let mut msg = engine.create_message(peer_short, text, &conv_id, &ts);
            msg.msg_type = crate::chat::MessageType::UserText;
            engine.receive_message(msg.clone());
            // Drop the engine lock before the (blocking) mesh round-trip.
            drop(chat_lock);

            if let Some(ref tx) = mesh_tx {
                let addrs: Vec<String> = peers.get(peer)
                    .map(|p| p.addr.clone())
                    .unwrap_or_default();
                let (reply_tx, reply_rx) = std::sync::mpsc::channel();
                // Delivery rides the mesh as a remote spf_chat_receive call with
                // the serialized message as its args.
                let request = crate::mesh::MeshRequest {
                    peer_key: peer.to_string(),
                    addrs,
                    tool: "spf_chat_receive".to_string(),
                    args: serde_json::to_value(&msg).unwrap_or(json!({})),
                    reply: reply_tx,
                };
                if tx.send(request).is_ok() {
                    // Shorter timeout than mesh_call: chat delivery is best-effort.
                    match reply_rx.recv_timeout(std::time::Duration::from_secs(10)) {
                        Ok(Ok(resp)) => {
                            let ack = resp.get("result")
                                .and_then(|r| r.get("content"))
                                .and_then(|c| c.get(0))
                                .and_then(|t| t.get("text"))
                                .and_then(|t| t.as_str())
                                .unwrap_or("delivered");
                            json!({"type": "text", "text": format!(
                                "Sent to {}: {}\nDelivery: {}", peer_short, text, ack
                            )})
                        }
                        Ok(Err(e)) => json!({"type": "text", "text": format!(
                            "Stored locally. Mesh delivery failed: {}", e
                        )}),
                        Err(_) => json!({"type": "text", "text": format!(
                            "Stored locally. Peer {} may be offline (timeout)", peer_short
                        )}),
                    }
                } else {
                    json!({"type": "text", "text": "Stored locally. Mesh channel closed."})
                }
            } else {
                json!({"type": "text", "text": "Stored locally. Mesh not enabled."})
            }
        }
|
|
        // Inbound side of chat delivery: the entire args payload IS the
        // serialized ChatMessage (see spf_chat_send). Stores it in the local
        // engine and replies with a machine-parseable ack.
        "spf_chat_receive" => {
            session.record_action("spf_chat_receive", "called", None);

            let msg: crate::chat::ChatMessage = match serde_json::from_value(args.clone()) {
                Ok(m) => m,
                Err(e) => return json!({"type": "text", "text": format!("Invalid chat message: {}", e)}),
            };

            let local_short = &pub_key_hex[..8.min(pub_key_hex.len())];
            // Recover from a poisoned mutex; lazily create the engine on first use.
            let mut chat_lock = CHAT_ENGINE.lock().unwrap_or_else(|e| e.into_inner());
            let engine = chat_lock.get_or_insert_with(|| {
                crate::chat::ChatEngine::new(local_short.to_string())
            });

            // Capture display fields before the message is moved into the engine.
            let from = msg.from.clone();
            let text_preview = msg.text.chars().take(50).collect::<String>();
            let conv_id = engine.receive_message(msg);
            eprintln!("[SPF-CHAT] Received from {}: {} (conv: {})", from, text_preview, conv_id);

            // Ack format "received:<from>:<conv>" is parsed by the sender side.
            json!({"type": "text", "text": format!("received:{}:{}", from, conv_id)})
        }
|
|
        // Chat history browser: with no conversation_id, lists conversations;
        // with one, returns up to `limit` messages from that conversation.
        "spf_chat_history" => {
            session.record_action("spf_chat_history", "called", None);

            let conv_id = args.get("conversation_id").and_then(|v| v.as_str()).unwrap_or("");
            let limit = args.get("limit").and_then(|v| v.as_u64()).unwrap_or(20) as usize;

            // Read-only: the engine is NOT lazily created here; absence means
            // no chat activity has happened yet.
            let chat_lock = CHAT_ENGINE.lock().unwrap_or_else(|e| e.into_inner());
            match chat_lock.as_ref() {
                Some(engine) => {
                    if conv_id.is_empty() {
                        let convos = engine.list_conversations();
                        if convos.is_empty() {
                            json!({"type": "text", "text": "No conversations"})
                        } else {
                            let text = convos.iter().map(|c| format!(
                                " {} — {} ({} msgs, last: {})",
                                c.id, c.name, c.message_count, c.last_activity
                            )).collect::<Vec<_>>().join("\n");
                            json!({"type": "text", "text": format!("Conversations:\n{}", text)})
                        }
                    } else {
                        let history = engine.get_history(conv_id, limit);
                        if history.is_empty() {
                            json!({"type": "text", "text": format!("No messages in {}", conv_id)})
                        } else {
                            let text = history.iter().map(|m| format!(
                                "[{}] {}: {}", m.timestamp, m.from, m.text
                            )).collect::<Vec<_>>().join("\n");
                            json!({"type": "text", "text": text})
                        }
                    }
                }
                None => json!({"type": "text", "text": "No chat history. Send or receive a message first."}),
            }
        }
|
|
        // Machine-readable variant of the conversation list: pretty-printed JSON
        // of list_conversations() (spf_chat_history renders the same data as text).
        "spf_chat_rooms" => {
            session.record_action("spf_chat_rooms", "called", None);

            let chat_lock = CHAT_ENGINE.lock().unwrap_or_else(|e| e.into_inner());
            match chat_lock.as_ref() {
                Some(engine) => {
                    let convos = engine.list_conversations();
                    if convos.is_empty() {
                        json!({"type": "text", "text": "No active conversations"})
                    } else {
                        // Serialization failure degrades to an empty string.
                        let text = serde_json::to_string_pretty(&convos).unwrap_or_default();
                        json!({"type": "text", "text": text})
                    }
                }
                None => json!({"type": "text", "text": "No active conversations"}),
            }
        }
|
|
| |
| "spf_voice_mode" => { |
| session.record_action("spf_voice_mode", "called", None); |
|
|
| let action = args["action"].as_str().unwrap_or("status"); |
|
|
| |
| |
| match action { |
| "speak" | "listen" | "stream_on" | "call" | "start" => { |
| if let Err(e) = spf_voice::open() { |
| eprintln!("[SPF-VOICE] Pipeline auto-open failed: {}", e); |
| } |
| } |
| _ => {} |
| } |
|
|
| let mut voice_lock = crate::voice::VOICE_SESSION.lock().unwrap_or_else(|e| e.into_inner()); |
|
|
| match action { |
| "start" => { |
| if voice_lock.as_ref().map_or(false, |s| s.is_active()) { |
| let stats = voice_lock.as_ref().unwrap().stats(); |
| return json!({"type": "text", "text": format!( |
| "Voice session already active\n{}", |
| serde_json::to_string_pretty(&stats).unwrap_or_default() |
| )}); |
| } |
| let mut vconfig = crate::voice::VoiceConfig::default(); |
| if let Some(sr) = args.get("sample_rate").and_then(|v| v.as_u64()) { |
| vconfig.sample_rate = sr as u32; |
| } |
| let mut vs = crate::voice::VoiceSession::new(vconfig); |
| match vs.start() { |
| Ok(()) => { |
| let status_json = vs.status().to_json_value(); |
| *voice_lock = Some(vs); |
| json!({"type": "text", "text": format!( |
| "Voice session started\n{}", |
| serde_json::to_string_pretty(&status_json).unwrap_or_default() |
| )}) |
| } |
| Err(e) => { |
| json!({"type": "text", "text": format!("Voice start failed: {}", e)}) |
| } |
| } |
| } |
| "stop" => { |
| if let Some(ref mut vs) = *voice_lock { |
| vs.stop(); |
| } else { |
| |
| spf_voice::close(); |
| } |
| *voice_lock = None; |
| json!({"type": "text", "text": "Voice pipeline closed, session stopped"}) |
| } |
| "status" => { |
| match voice_lock.as_ref() { |
| Some(vs) => { |
| let stats = vs.stats(); |
| json!({"type": "text", "text": serde_json::to_string_pretty(&stats).unwrap_or_default()}) |
| } |
| None => { |
| let status = crate::voice::VoiceStatus::from_stubs(); |
| json!({"type": "text", "text": format!( |
| "Voice session inactive\n{}", |
| serde_json::to_string_pretty(&status.to_json_value()).unwrap_or_default() |
| )}) |
| } |
| } |
| } |
| "speak" => { |
| let text = args.get("text").and_then(|v| v.as_str()).unwrap_or(""); |
| if text.is_empty() { |
| return json!({"type": "text", "text": "Error: 'text' parameter required for speak action"}); |
| } |
| |
| let tts_mode = { |
| let settings = crate::voice::VOICE_SETTINGS.lock().unwrap_or_else(|e| e.into_inner()); |
| settings.agent_tts_mode.clone() |
| }; |
| match tts_mode { |
| crate::voice::VoiceMode::Light => { |
| |
| match spf_voice::speak(text) { |
| Ok(()) => json!({"type": "text", "text": format!( |
| "Spoke (light/spf-voice): \"{}\" ({} chars)", text, text.len() |
| )}), |
| Err(e) => json!({"type": "text", "text": format!( |
| "spf-voice speak failed: {}", e |
| )}) |
| } |
| } |
| crate::voice::VoiceMode::Rich => { |
| |
| let needs_start = voice_lock.as_ref().map_or(true, |s| !s.is_active()); |
| if needs_start { |
| let mut vs = crate::voice::VoiceSession::new(crate::voice::VoiceConfig::default()); |
| match vs.start() { |
| Ok(()) => { |
| eprintln!("[SPF-VOICE] Auto-started session for speak (rich mode)"); |
| *voice_lock = Some(vs); |
| } |
| Err(e) => { |
| return json!({"type": "text", "text": format!( |
| "Voice start failed (rich mode): {}", e)}); |
| } |
| } |
| } |
| let vs = voice_lock.as_mut().unwrap(); |
| match vs.speak(text) { |
| Ok(frames) => { |
| let frame_count = frames.len(); |
| let total_bytes: usize = frames.iter().map(|f| f.data.len()).sum(); |
| let codec = frames.first().map(|f| f.codec.as_str()).unwrap_or("none").to_string(); |
| let mut play_ok = 0usize; |
| let mut play_err = 0usize; |
| for frame in &frames { |
| match vs.play_frame(frame) { |
| Ok(()) => play_ok += 1, |
| Err(_) => play_err += 1, |
| } |
| } |
| json!({"type": "text", "text": format!( |
| "Spoke (rich/piper): {} frames ({} bytes, codec={}) | Played: {} ok, {} failed", |
| frame_count, total_bytes, codec, play_ok, play_err |
| )}) |
| } |
| Err(e) => json!({"type": "text", "text": format!("TTS error (rich mode): {}", e)}) |
| } |
| } |
| } |
| } |
| "call" => { |
| |
| let peer_key = args.get("peer_key").and_then(|v| v.as_str()).unwrap_or(""); |
| if peer_key.is_empty() { |
| return json!({"type": "text", "text": "Error: 'peer_key' parameter required for call action"}); |
| } |
| |
| { |
| let call_state = crate::voice::CALL_STATE.lock().unwrap_or_else(|e| e.into_inner()); |
| if let Some(ref cs) = *call_state { |
| if cs.is_in_progress() { |
| let phase_str = if cs.is_ringing() { "ringing" } else { "active" }; |
| return json!({"type": "text", "text": format!( |
| "Already in call with peer {} ({}). Use end_call first.", |
| &cs.peer_key[..8.min(cs.peer_key.len())], phase_str |
| )}); |
| } |
| } |
| } |
| |
| let needs_start = voice_lock.as_ref().map_or(true, |s| !s.is_active()); |
| if needs_start { |
| let mut vs = crate::voice::VoiceSession::new(crate::voice::VoiceConfig::default()); |
| match vs.start() { |
| Ok(()) => { |
| eprintln!("[SPF-VOICE] Started session for peer call to {}", &peer_key[..8.min(peer_key.len())]); |
| *voice_lock = Some(vs); |
| } |
| Err(e) => { |
| return json!({"type": "text", "text": format!( |
| "Voice session start failed for call: {}", e)}); |
| } |
| } |
| } |
| |
| let mut call_state = crate::voice::CALL_STATE.lock().unwrap_or_else(|e| e.into_inner()); |
| *call_state = Some(crate::voice::CallState { |
| peer_key: peer_key.to_string(), |
| peer_name: peer_key[..8.min(peer_key.len())].to_string(), |
| direction: crate::voice::CallDirection::Outgoing, |
| phase: crate::voice::CallPhase::Ringing, |
| started_at: std::time::Instant::now(), |
| }); |
| |
| if let Some(ref tx) = mesh_tx { |
| let addrs: Vec<String> = peers.get(peer_key) |
| .map(|p| p.addr.clone()) |
| .unwrap_or_default(); |
| let (reply_tx, _) = std::sync::mpsc::channel(); |
| let _ = tx.send(crate::mesh::MeshRequest { |
| peer_key: peer_key.to_string(), |
| addrs, |
| tool: "spf_voice_mode".to_string(), |
| args: json!({"action": "incoming_ring", "peer_key": pub_key_hex}), |
| reply: reply_tx, |
| }); |
| } |
| let peer_short = &peer_key[..8.min(peer_key.len())]; |
| json!({"type": "text", "text": format!( |
| "Calling {}... ring sent. Use end_call to hang up.", peer_short |
| )}) |
| } |
| "end_call" => { |
| |
| let mut call_state = crate::voice::CALL_STATE.lock().unwrap_or_else(|e| e.into_inner()); |
| match call_state.take() { |
| Some(cs) if cs.is_in_progress() => { |
| |
| if let Some(ref tx) = mesh_tx { |
| let addrs: Vec<String> = peers.get(&cs.peer_key) |
| .map(|p| p.addr.clone()) |
| .unwrap_or_default(); |
| let (reply_tx, _) = std::sync::mpsc::channel(); |
| let _ = tx.send(crate::mesh::MeshRequest { |
| peer_key: cs.peer_key.clone(), |
| addrs, |
| tool: "spf_voice_mode".to_string(), |
| args: json!({"action": "voice_end", "peer_key": pub_key_hex}), |
| reply: reply_tx, |
| }); |
| } |
| let secs = cs.elapsed_secs(); |
| let mins = secs / 60; |
| let remaining_secs = secs % 60; |
| |
| if let Some(ref mut vs) = *voice_lock { |
| vs.stop(); |
| } |
| *voice_lock = None; |
| let peer_short = &cs.peer_key[..8.min(cs.peer_key.len())]; |
| json!({"type": "text", "text": format!( |
| "Call ended with {}. Duration: {}m {}s", peer_short, mins, remaining_secs |
| )}) |
| } |
| _ => { |
| json!({"type": "text", "text": "No active call to end."}) |
| } |
| } |
| } |
| "settings" => { |
| |
| let has_updates = args.get("agent_tts_mode").is_some() |
| || args.get("agent_stt_mode").is_some() |
| || args.get("peer_quality").is_some() |
| || args.get("auto_accept_calls").is_some() |
| || args.get("idle_timeout_secs").is_some(); |
| if has_updates { |
| let mut settings = crate::voice::VOICE_SETTINGS.lock().unwrap_or_else(|e| e.into_inner()); |
| if let Some(mode) = args.get("agent_tts_mode").and_then(|v| v.as_str()) { |
| settings.agent_tts_mode = match mode { |
| "rich" | "Rich" => crate::voice::VoiceMode::Rich, |
| _ => crate::voice::VoiceMode::Light, |
| }; |
| } |
| if let Some(mode) = args.get("agent_stt_mode").and_then(|v| v.as_str()) { |
| settings.agent_stt_mode = match mode { |
| "rich" | "Rich" => crate::voice::VoiceMode::Rich, |
| _ => crate::voice::VoiceMode::Light, |
| }; |
| } |
| if let Some(mode) = args.get("peer_quality").and_then(|v| v.as_str()) { |
| settings.peer_quality = match mode { |
| "rich" | "Rich" => crate::voice::VoiceMode::Rich, |
| _ => crate::voice::VoiceMode::Light, |
| }; |
| } |
| if let Some(val) = args.get("auto_accept_calls").and_then(|v| v.as_bool()) { |
| settings.auto_accept_calls = val; |
| } |
| if let Some(val) = args.get("idle_timeout_secs").and_then(|v| v.as_u64()) { |
| settings.idle_timeout_secs = val as u32; |
| } |
| let settings_json = serde_json::to_string_pretty(&*settings).unwrap_or_default(); |
| json!({"type": "text", "text": format!("Voice settings updated:\n{}", settings_json)}) |
| } else { |
| |
| let settings = crate::voice::VOICE_SETTINGS.lock().unwrap_or_else(|e| e.into_inner()); |
| let settings_json = serde_json::to_string_pretty(&*settings).unwrap_or_default(); |
| let call_state = crate::voice::CALL_STATE.lock().unwrap_or_else(|e| e.into_inner()); |
| let call_info = match &*call_state { |
| Some(cs) if cs.is_in_progress() => { |
| let dir = if cs.direction == crate::voice::CallDirection::Incoming { "Incoming" } else { "Outgoing" }; |
| let phase = if cs.is_ringing() { "ringing" } else { "active" }; |
| format!("{} call: peer={} phase={} duration={}s", |
| dir, &cs.peer_key[..8.min(cs.peer_key.len())], phase, cs.elapsed_secs()) |
| }, |
| _ => "No active call".to_string(), |
| }; |
| json!({"type": "text", "text": format!( |
| "Voice settings:\n{}\n\nCall status: {}", settings_json, call_info |
| )}) |
| } |
| } |
| "incoming_ring" => { |
| |
| let caller_key = args.get("peer_key").and_then(|v| v.as_str()).unwrap_or("").to_string(); |
| if caller_key.is_empty() { |
| return json!({"type": "text", "text": "Error: incoming_ring missing peer_key"}); |
| } |
| { |
| let call_state = crate::voice::CALL_STATE.lock().unwrap_or_else(|e| e.into_inner()); |
| if let Some(ref cs) = *call_state { |
| if cs.is_in_progress() { |
| eprintln!("[SPF-VOICE] Busy: incoming ring from {} while in call", &caller_key[..8.min(caller_key.len())]); |
| return json!({"type": "text", "text": "busy"}); |
| } |
| } |
| } |
| let auto_accept = { |
| let settings = crate::voice::VOICE_SETTINGS.lock().unwrap_or_else(|e| e.into_inner()); |
| settings.auto_accept_calls |
| }; |
| { |
| let mut call_state = crate::voice::CALL_STATE.lock().unwrap_or_else(|e| e.into_inner()); |
| *call_state = Some(crate::voice::CallState { |
| peer_key: caller_key.clone(), |
| peer_name: caller_key[..8.min(caller_key.len())].to_string(), |
| direction: crate::voice::CallDirection::Incoming, |
| phase: if auto_accept { crate::voice::CallPhase::Active } else { crate::voice::CallPhase::Ringing }, |
| started_at: std::time::Instant::now(), |
| }); |
| } |
| if auto_accept { |
| |
| if voice_lock.as_ref().map_or(true, |s| !s.is_active()) { |
| let mut vs = crate::voice::VoiceSession::new(crate::voice::VoiceConfig::default()); |
| if vs.start().is_ok() { *voice_lock = Some(vs); } |
| } |
| |
| if let Some(ref tx) = mesh_tx { |
| let addrs: Vec<String> = peers.get(&caller_key) |
| .map(|p| p.addr.clone()) |
| .unwrap_or_default(); |
| let (reply_tx, _) = std::sync::mpsc::channel(); |
| let _ = tx.send(crate::mesh::MeshRequest { |
| peer_key: caller_key.clone(), |
| addrs, |
| tool: "spf_voice_mode".to_string(), |
| args: json!({"action": "voice_accepted", "peer_key": pub_key_hex}), |
| reply: reply_tx, |
| }); |
| } |
| eprintln!("[SPF-VOICE] Auto-accepted call from {}", &caller_key[..8.min(caller_key.len())]); |
| json!({"type": "text", "text": format!( |
| "Auto-accepted call from {}. Voice session active.", |
| &caller_key[..8.min(caller_key.len())] |
| )}) |
| } else { |
| eprintln!("[SPF-VOICE] Incoming ring from {} — awaiting decision", &caller_key[..8.min(caller_key.len())]); |
| json!({"type": "text", "text": format!( |
| "Incoming call from {}. Use spf_voice_call action=accept or reject.", |
| &caller_key[..8.min(caller_key.len())] |
| )}) |
| } |
| } |
| "voice_accepted" => { |
| |
| let mut call_state = crate::voice::CALL_STATE.lock().unwrap_or_else(|e| e.into_inner()); |
| match &mut *call_state { |
| Some(cs) if cs.is_ringing() && cs.direction == crate::voice::CallDirection::Outgoing => { |
| cs.phase = crate::voice::CallPhase::Active; |
| let peer_short = cs.peer_key[..8.min(cs.peer_key.len())].to_string(); |
| eprintln!("[SPF-VOICE] Call accepted by {}", peer_short); |
| json!({"type": "text", "text": format!("Call connected with {}", peer_short)}) |
| } |
| _ => json!({"type": "text", "text": "voice_accepted: no matching outgoing call"}), |
| } |
| } |
| "voice_rejected" => { |
| |
| let peer_short = { |
| let mut call_state = crate::voice::CALL_STATE.lock().unwrap_or_else(|e| e.into_inner()); |
| if call_state.as_ref().map_or(false, |cs| { |
| cs.is_ringing() && cs.direction == crate::voice::CallDirection::Outgoing |
| }) { |
| let s = call_state.as_ref() |
| .map(|cs| cs.peer_key[..8.min(cs.peer_key.len())].to_string()) |
| .unwrap_or_default(); |
| *call_state = None; |
| s |
| } else { |
| return json!({"type": "text", "text": "voice_rejected: no matching outgoing call"}); |
| } |
| }; |
| if let Some(ref mut vs) = *voice_lock { vs.stop(); } |
| *voice_lock = None; |
| eprintln!("[SPF-VOICE] Call rejected by {}", peer_short); |
| json!({"type": "text", "text": format!("Call rejected by {}", peer_short)}) |
| } |
| "voice_end" => { |
| |
| let call_info = { |
| let mut call_state = crate::voice::CALL_STATE.lock().unwrap_or_else(|e| e.into_inner()); |
| call_state.take().map(|cs| ( |
| cs.peer_key[..8.min(cs.peer_key.len())].to_string(), |
| cs.elapsed_secs() |
| )) |
| }; |
| match call_info { |
| Some((peer_short, secs)) => { |
| if let Some(ref mut vs) = *voice_lock { vs.stop(); } |
| *voice_lock = None; |
| eprintln!("[SPF-VOICE] Peer {} ended call ({}s)", peer_short, secs); |
| json!({"type": "text", "text": format!( |
| "Call ended by peer {}. Duration: {}s", peer_short, secs |
| )}) |
| } |
| None => json!({"type": "text", "text": "voice_end: no active call"}), |
| } |
| } |
| "team_join_notify" => { |
| |
| let team_id = args.get("team_id").and_then(|v| v.as_str()).unwrap_or("default").to_string(); |
| let peer_key = args.get("peer_key").and_then(|v| v.as_str()).unwrap_or("").to_string(); |
| let display_name = args.get("display_name").and_then(|v| v.as_str()) |
| .unwrap_or(&peer_key[..8.min(peer_key.len())]).to_string(); |
| if peer_key.is_empty() { |
| return json!({"type": "text", "text": "team_join_notify: missing peer_key"}); |
| } |
| let summary = { |
| let mut teams = crate::voice::VOICE_TEAMS.lock().unwrap_or_else(|e| e.into_inner()); |
| let channel = teams.entry(team_id.clone()) |
| .or_insert_with(|| crate::voice::TeamChannel::new(&team_id, &team_id)); |
| channel.join(&peer_key, &display_name); |
| channel.summary_line() |
| }; |
| eprintln!("[SPF-VOICE] Team {}: peer {} ({}) joined — {}", team_id, &peer_key[..8.min(peer_key.len())], display_name, summary); |
| json!({"type": "text", "text": format!( |
| "Peer {} ({}) joined team [{}]. {}", &peer_key[..8.min(peer_key.len())], display_name, team_id, summary |
| )}) |
| } |
| "team_leave_notify" => { |
| |
| let team_id = args.get("team_id").and_then(|v| v.as_str()).unwrap_or("default").to_string(); |
| let peer_key = args.get("peer_key").and_then(|v| v.as_str()).unwrap_or("").to_string(); |
| if peer_key.is_empty() { |
| return json!({"type": "text", "text": "team_leave_notify: missing peer_key"}); |
| } |
| let (was_present, summary) = { |
| let mut teams = crate::voice::VOICE_TEAMS.lock().unwrap_or_else(|e| e.into_inner()); |
| if let Some(channel) = teams.get_mut(&team_id) { |
| let was = channel.leave(&peer_key); |
| (was, channel.summary_line()) |
| } else { |
| (false, format!("team [{}] not found", team_id)) |
| } |
| }; |
| eprintln!("[SPF-VOICE] Team {}: peer {} left (was_present={})", team_id, &peer_key[..8.min(peer_key.len())], was_present); |
| json!({"type": "text", "text": format!( |
| "Peer {} left team [{}]. {}", &peer_key[..8.min(peer_key.len())], team_id, summary |
| )}) |
| } |
| "team_invite" => { |
| |
| let team_id = args.get("team_id").and_then(|v| v.as_str()).unwrap_or("default").to_string(); |
| let team_name = args.get("team_name").and_then(|v| v.as_str()).unwrap_or(&team_id).to_string(); |
| let from_key = args.get("from_key").and_then(|v| v.as_str()).unwrap_or("").to_string(); |
| { |
| let mut teams = crate::voice::VOICE_TEAMS.lock().unwrap_or_else(|e| e.into_inner()); |
| let channel = teams.entry(team_id.clone()) |
| .or_insert_with(|| crate::voice::TeamChannel::new(&team_id, &team_name)); |
| if !from_key.is_empty() { |
| channel.add_member(&from_key); |
| } |
| } |
| eprintln!("[SPF-VOICE] Invited to team [{}] ({}) by {}", team_id, team_name, &from_key[..8.min(from_key.len())]); |
| json!({"type": "text", "text": format!( |
| "Invited to team [{}] '{}' by peer {}. Use spf_voice_team action=join team_id={} to join.", |
| team_id, team_name, &from_key[..8.min(from_key.len())], team_id |
| )}) |
| } |
| "listen" => { |
| |
| let duration_secs = args.get("duration").and_then(|v| v.as_u64()).unwrap_or(5).min(60); |
|
|
| |
| if voice_lock.is_none() { |
| let config = crate::voice::VoiceConfig::default(); |
| *voice_lock = Some(crate::voice::VoiceSession::new(config)); |
| } |
| if let Some(ref mut session) = *voice_lock { |
| if !session.is_active() { |
| if let Err(e) = session.start() { |
| return json!({"type": "text", "text": format!("Failed to start audio input: {}", e)}); |
| } |
| } |
| } |
|
|
| |
| let deadline = std::time::Duration::from_secs(duration_secs); |
| let start = std::time::Instant::now(); |
| let mut frames: Vec<crate::voice::VoiceFrame> = Vec::new(); |
|
|
| while start.elapsed() < deadline { |
| if let Some(ref mut session) = *voice_lock { |
| match session.capture_frame("listen") { |
| Ok(frame) => frames.push(frame), |
| Err(crate::voice::VoiceError::NotAvailable(_)) => { |
| return json!({"type": "text", "text": "Audio input not available. Ensure pulseaudio is running (pulseaudio --start) and spf-voice crate is compiled."}); |
| } |
| Err(e) => { |
| eprintln!("[SPF-VOICE-LISTEN] capture error: {}", e); |
| std::thread::sleep(std::time::Duration::from_millis(5)); |
| } |
| } |
| } |
| } |
|
|
| if frames.is_empty() { |
| return json!({"type": "text", "text": "No audio captured. Ensure voice session is active and microphone is accessible."}); |
| } |
|
|
| |
| match voice_lock.as_mut().and_then(|s| Some(s.transcribe(&frames))) { |
| Some(Ok(text)) if text.trim().is_empty() => { |
| json!({"type": "text", "text": "(silence — no speech detected)"}) |
| } |
| Some(Ok(text)) => json!({"type": "text", "text": text}), |
| Some(Err(crate::voice::VoiceError::NotAvailable(_))) => { |
| json!({"type": "text", "text": "STT not available. Rebuild with --features voice-stt to enable Whisper transcription."}) |
| } |
| Some(Err(e)) => json!({"type": "text", "text": format!("Transcription error: {}", e)}), |
| None => json!({"type": "text", "text": "Voice session unavailable."}), |
| } |
| } |
| "stream_on" => { |
| #[cfg(feature = "voice-stt")] |
| { |
| let model_dir = crate::paths::spf_root().join("LIVE/MODELS/whisper-tiny"); |
| match crate::voice::stream_on(model_dir) { |
| Ok(()) => json!({"type": "text", "text": "Streaming STT started. Speak freely. Say 'end stream' or 'stop stream' to finish. Use stream_read to get transcripts."}), |
| Err(e) => json!({"type": "text", "text": format!("Failed to start streaming STT: {}", e)}), |
| } |
| } |
| #[cfg(not(feature = "voice-stt"))] |
| json!({"type": "text", "text": "stream_on requires voice-stt feature."}) |
| } |
| "stream_off" => { |
| crate::voice::stream_off(); |
| json!({"type": "text", "text": "Streaming STT stopped."}) |
| } |
| "stream_read" => { |
| let active = crate::voice::STREAM_ACTIVE.load(std::sync::atomic::Ordering::SeqCst); |
| let transcripts = crate::voice::stream_read(); |
| if transcripts.is_empty() { |
| json!({"type": "text", "text": format!("No transcripts yet. Streaming: {}", active)}) |
| } else { |
| let combined = transcripts.join("\n"); |
| json!({"type": "text", "text": format!("Streaming: {}\n---\n{}", active, combined)}) |
| } |
| } |
| other => { |
| json!({"type": "text", "text": format!( |
| "Unknown voice action: '{}'. Valid: start, stop, status, speak, listen, stream_on, stream_off, stream_read, call, end_call, settings, incoming_ring, voice_accepted, voice_rejected, voice_end, team_join_notify, team_leave_notify, team_invite", other |
| )}) |
| } |
| } |
| } |
|
|
| |
| "spf_voice_call" => { |
| session.record_action("spf_voice_call", "called", None); |
| let action = args["action"].as_str().unwrap_or("status"); |
| let mut voice_lock = crate::voice::VOICE_SESSION.lock().unwrap_or_else(|e| e.into_inner()); |
|
|
| match action { |
| "start" => { |
| let peer_key = args.get("peer_key").and_then(|v| v.as_str()).unwrap_or(""); |
| if peer_key.is_empty() { |
| return json!({"type": "text", "text": "Error: 'peer_key' required for start"}); |
| } |
| |
| { |
| let call_state = crate::voice::CALL_STATE.lock().unwrap_or_else(|e| e.into_inner()); |
| if let Some(ref cs) = *call_state { |
| if cs.is_in_progress() { |
| let phase = if cs.is_ringing() { "ringing" } else { "active" }; |
| return json!({"type": "text", "text": format!( |
| "Already in call ({} {}). Use end first.", |
| phase, &cs.peer_key[..8.min(cs.peer_key.len())] |
| )}); |
| } |
| } |
| } |
| let peer_name = args.get("peer_name") |
| .and_then(|v| v.as_str()) |
| .unwrap_or(&peer_key[..8.min(peer_key.len())]) |
| .to_string(); |
| |
| if voice_lock.as_ref().map_or(true, |s| !s.is_active()) { |
| let mut vs = crate::voice::VoiceSession::new(crate::voice::VoiceConfig::default()); |
| match vs.start() { |
| Ok(()) => { *voice_lock = Some(vs); } |
| Err(e) => return json!({"type": "text", "text": format!("Voice start failed: {}", e)}), |
| } |
| } |
| |
| { |
| let mut call_state = crate::voice::CALL_STATE.lock().unwrap_or_else(|e| e.into_inner()); |
| *call_state = Some(crate::voice::CallState { |
| peer_key: peer_key.to_string(), |
| peer_name: peer_name.clone(), |
| direction: crate::voice::CallDirection::Outgoing, |
| phase: crate::voice::CallPhase::Ringing, |
| started_at: std::time::Instant::now(), |
| }); |
| } |
| |
| let signal = if let Some(ref tx) = mesh_tx { |
| let addrs: Vec<String> = peers.get(peer_key) |
| .map(|p| p.addr.clone()) |
| .unwrap_or_default(); |
| let (reply_tx, reply_rx) = std::sync::mpsc::channel(); |
| let req = crate::mesh::MeshRequest { |
| peer_key: peer_key.to_string(), |
| addrs, |
| tool: "spf_voice_mode".to_string(), |
| args: json!({"action": "incoming_ring", "peer_key": pub_key_hex}), |
| reply: reply_tx, |
| }; |
| if tx.send(req).is_ok() { |
| match reply_rx.recv_timeout(std::time::Duration::from_secs(5)) { |
| Ok(Ok(_)) => "signal delivered", |
| _ => "signal pending (peer may be offline)", |
| } |
| } else { "mesh unavailable" } |
| } else { "mesh not configured" }; |
| json!({"type": "text", "text": format!( |
| "Calling {} ({})... {}. Use action=end to hang up.", |
| peer_name, &peer_key[..8.min(peer_key.len())], signal |
| )}) |
| } |
| "accept" => { |
| |
| let (peer_key, peer_name) = { |
| let mut call_state = crate::voice::CALL_STATE.lock().unwrap_or_else(|e| e.into_inner()); |
| match &mut *call_state { |
| Some(cs) if cs.is_ringing() && cs.direction == crate::voice::CallDirection::Incoming => { |
| cs.phase = crate::voice::CallPhase::Active; |
| (cs.peer_key.clone(), cs.peer_name.clone()) |
| } |
| _ => return json!({"type": "text", "text": "No incoming ringing call to accept"}), |
| } |
| }; |
| |
| if voice_lock.as_ref().map_or(true, |s| !s.is_active()) { |
| let mut vs = crate::voice::VoiceSession::new(crate::voice::VoiceConfig::default()); |
| if vs.start().is_ok() { *voice_lock = Some(vs); } |
| } |
| |
| if let Some(ref tx) = mesh_tx { |
| let addrs: Vec<String> = peers.get(&peer_key) |
| .map(|p| p.addr.clone()) |
| .unwrap_or_default(); |
| let (reply_tx, _) = std::sync::mpsc::channel(); |
| let _ = tx.send(crate::mesh::MeshRequest { |
| peer_key: peer_key.clone(), |
| addrs, |
| tool: "spf_voice_mode".to_string(), |
| args: json!({"action": "voice_accepted", "peer_key": pub_key_hex}), |
| reply: reply_tx, |
| }); |
| } |
| json!({"type": "text", "text": format!("Call accepted — connected to {}", peer_name)}) |
| } |
| "reject" => { |
| |
| let peer_key = { |
| let mut call_state = crate::voice::CALL_STATE.lock().unwrap_or_else(|e| e.into_inner()); |
| if call_state.as_ref().map_or(false, |cs| { |
| cs.is_ringing() && cs.direction == crate::voice::CallDirection::Incoming |
| }) { |
| let k = call_state.as_ref().map(|cs| cs.peer_key.clone()).unwrap_or_default(); |
| *call_state = None; |
| k |
| } else { |
| return json!({"type": "text", "text": "No incoming ringing call to reject"}); |
| } |
| }; |
| |
| if let Some(ref tx) = mesh_tx { |
| let addrs: Vec<String> = peers.get(&peer_key) |
| .map(|p| p.addr.clone()) |
| .unwrap_or_default(); |
| let (reply_tx, _) = std::sync::mpsc::channel(); |
| let _ = tx.send(crate::mesh::MeshRequest { |
| peer_key: peer_key.clone(), |
| addrs, |
| tool: "spf_voice_mode".to_string(), |
| args: json!({"action": "voice_rejected", "peer_key": pub_key_hex}), |
| reply: reply_tx, |
| }); |
| } |
| json!({"type": "text", "text": format!( |
| "Rejected call from {}", &peer_key[..8.min(peer_key.len())] |
| )}) |
| } |
| "end" => { |
| |
| let call_opt = { |
| let mut call_state = crate::voice::CALL_STATE.lock().unwrap_or_else(|e| e.into_inner()); |
| if call_state.as_ref().map_or(false, |cs| cs.is_in_progress()) { |
| call_state.take() |
| } else { |
| None |
| } |
| }; |
| match call_opt { |
| Some(cs) => { |
| let peer_key = cs.peer_key.clone(); |
| let secs = cs.elapsed_secs(); |
| |
| if let Some(ref mut vs) = *voice_lock { vs.stop(); } |
| *voice_lock = None; |
| |
| if let Some(ref tx) = mesh_tx { |
| let addrs: Vec<String> = peers.get(&peer_key) |
| .map(|p| p.addr.clone()) |
| .unwrap_or_default(); |
| let (reply_tx, _) = std::sync::mpsc::channel(); |
| let _ = tx.send(crate::mesh::MeshRequest { |
| peer_key: peer_key.clone(), |
| addrs, |
| tool: "spf_voice_mode".to_string(), |
| args: json!({"action": "voice_end", "peer_key": pub_key_hex}), |
| reply: reply_tx, |
| }); |
| } |
| let mins = secs / 60; |
| let rem = secs % 60; |
| json!({"type": "text", "text": format!( |
| "Call ended. Duration: {}m {}s", mins, rem |
| )}) |
| } |
| None => json!({"type": "text", "text": "No active call to end"}), |
| } |
| } |
| "status" => { |
| let call_state = crate::voice::CALL_STATE.lock().unwrap_or_else(|e| e.into_inner()); |
| match &*call_state { |
| Some(cs) => { |
| let dir = if cs.direction == crate::voice::CallDirection::Incoming { "Incoming" } else { "Outgoing" }; |
| let phase = if cs.is_ringing() { "ringing" } else { "active" }; |
| json!({"type": "text", "text": format!( |
| "{} call with {} ({}) — {} — {}s elapsed", |
| dir, cs.peer_name, |
| &cs.peer_key[..8.min(cs.peer_key.len())], |
| phase, cs.elapsed_secs() |
| )}) |
| } |
| None => json!({"type": "text", "text": "No active call"}), |
| } |
| } |
| other => json!({"type": "text", "text": format!( |
| "Unknown action: '{}'. Valid: start, accept, reject, end, status", other |
| )}), |
| } |
| } |
|
|
| |
| "spf_voice_team" => { |
| session.record_action("spf_voice_team", "called", None); |
| let action = args["action"].as_str().unwrap_or("list"); |
| let team_id = args.get("team_id").and_then(|v| v.as_str()).unwrap_or("default").to_string(); |
|
|
| match action { |
| "create" => { |
| let name = args.get("name").and_then(|v| v.as_str()).unwrap_or("").to_string(); |
| if name.is_empty() { |
| return json!({"type": "text", "text": "Error: 'name' required for create"}); |
| } |
| let summary = { |
| let mut teams = crate::voice::VOICE_TEAMS.lock().unwrap_or_else(|e| e.into_inner()); |
| if teams.contains_key(&team_id) { |
| return json!({"type": "text", "text": format!( |
| "Team [{}] already exists. Use status to inspect.", team_id |
| )}); |
| } |
| let ch = crate::voice::TeamChannel::new(&team_id, &name); |
| let s = ch.summary_line(); |
| teams.insert(team_id.clone(), ch); |
| s |
| }; |
| json!({"type": "text", "text": format!("Created team [{}]: {}", team_id, summary)}) |
| } |
| "join" => { |
| let display_name = args.get("display_name").and_then(|v| v.as_str()) |
| .unwrap_or(&pub_key_hex[..8.min(pub_key_hex.len())]).to_string(); |
| |
| let (active_keys, summary) = { |
| let mut teams = crate::voice::VOICE_TEAMS.lock().unwrap_or_else(|e| e.into_inner()); |
| let channel = teams.entry(team_id.clone()) |
| .or_insert_with(|| crate::voice::TeamChannel::new(&team_id, &team_id)); |
| channel.join(pub_key_hex, &display_name); |
| (channel.active_peer_keys(), channel.summary_line()) |
| }; |
| |
| let mut notified = 0usize; |
| for peer_key in &active_keys { |
| if peer_key == pub_key_hex { continue; } |
| if let Some(ref tx) = mesh_tx { |
| let addrs: Vec<String> = peers.get(peer_key) |
| .map(|p| p.addr.clone()) |
| .unwrap_or_default(); |
| let (reply_tx, _) = std::sync::mpsc::channel(); |
| let _ = tx.send(crate::mesh::MeshRequest { |
| peer_key: peer_key.clone(), |
| addrs, |
| tool: "spf_voice_mode".to_string(), |
| args: json!({ |
| "action": "team_join_notify", |
| "team_id": team_id, |
| "peer_key": pub_key_hex, |
| "display_name": display_name |
| }), |
| reply: reply_tx, |
| }); |
| notified += 1; |
| } |
| } |
| json!({"type": "text", "text": format!( |
| "Joined team [{}] as '{}'. Notified {} peers. {}", |
| team_id, display_name, notified, summary |
| )}) |
| } |
| "leave" => { |
| let (was_present, active_keys, summary) = { |
| let mut teams = crate::voice::VOICE_TEAMS.lock().unwrap_or_else(|e| e.into_inner()); |
| if let Some(channel) = teams.get_mut(&team_id) { |
| |
| let keys = channel.active_peer_keys(); |
| let was = channel.leave(pub_key_hex); |
| (was, keys, channel.summary_line()) |
| } else { |
| return json!({"type": "text", "text": format!("Team [{}] not found", team_id)}); |
| } |
| }; |
| if !was_present { |
| return json!({"type": "text", "text": format!("Not a member of team [{}]", team_id)}); |
| } |
| |
| let mut notified = 0usize; |
| for peer_key in &active_keys { |
| if peer_key == pub_key_hex { continue; } |
| if let Some(ref tx) = mesh_tx { |
| let addrs: Vec<String> = peers.get(peer_key) |
| .map(|p| p.addr.clone()) |
| .unwrap_or_default(); |
| let (reply_tx, _) = std::sync::mpsc::channel(); |
| let _ = tx.send(crate::mesh::MeshRequest { |
| peer_key: peer_key.clone(), |
| addrs, |
| tool: "spf_voice_mode".to_string(), |
| args: json!({ |
| "action": "team_leave_notify", |
| "team_id": team_id, |
| "peer_key": pub_key_hex |
| }), |
| reply: reply_tx, |
| }); |
| notified += 1; |
| } |
| } |
| json!({"type": "text", "text": format!( |
| "Left team [{}]. Notified {} peers. {}", team_id, notified, summary |
| )}) |
| } |
| "add" => { |
| |
| let peer_key = args.get("peer_key").and_then(|v| v.as_str()).unwrap_or(""); |
| if peer_key.is_empty() { |
| return json!({"type": "text", "text": "Error: 'peer_key' required for add"}); |
| } |
| let (team_name, summary) = { |
| let mut teams = crate::voice::VOICE_TEAMS.lock().unwrap_or_else(|e| e.into_inner()); |
| let channel = teams.entry(team_id.clone()) |
| .or_insert_with(|| crate::voice::TeamChannel::new(&team_id, &team_id)); |
| channel.add_member(peer_key); |
| (channel.name.clone(), channel.summary_line()) |
| }; |
| |
| if let Some(ref tx) = mesh_tx { |
| let addrs: Vec<String> = peers.get(peer_key) |
| .map(|p| p.addr.clone()) |
| .unwrap_or_default(); |
| let (reply_tx, _) = std::sync::mpsc::channel(); |
| let _ = tx.send(crate::mesh::MeshRequest { |
| peer_key: peer_key.to_string(), |
| addrs, |
| tool: "spf_voice_mode".to_string(), |
| args: json!({ |
| "action": "team_invite", |
| "team_id": team_id, |
| "team_name": team_name, |
| "from_key": pub_key_hex |
| }), |
| reply: reply_tx, |
| }); |
| } |
| json!({"type": "text", "text": format!( |
| "Added {} to team [{}]. Invite signal sent. {}", |
| &peer_key[..8.min(peer_key.len())], team_id, summary |
| )}) |
| } |
| "list" => { |
| let teams = crate::voice::VOICE_TEAMS.lock().unwrap_or_else(|e| e.into_inner()); |
| if teams.is_empty() { |
| json!({"type": "text", "text": "No team channels. Use action=create to add one."}) |
| } else { |
| let lines: Vec<String> = teams.values().map(|ch| ch.summary_line()).collect(); |
| json!({"type": "text", "text": format!("Team channels ({}):\n{}", lines.len(), lines.join("\n"))}) |
| } |
| } |
| "status" => { |
| let teams = crate::voice::VOICE_TEAMS.lock().unwrap_or_else(|e| e.into_inner()); |
| match teams.get(&team_id) { |
| Some(ch) => { |
| let members_str = if ch.members.is_empty() { |
| " none".to_string() |
| } else { |
| ch.members.iter().map(|k| format!(" {}", &k[..8.min(k.len())])).collect::<Vec<_>>().join("\n") |
| }; |
| let active_str = if ch.active_members.is_empty() { |
| " none".to_string() |
| } else { |
| ch.active_members.iter().map(|m| format!( |
| " {} ({})", &m.peer_key[..8.min(m.peer_key.len())], m.name |
| )).collect::<Vec<_>>().join("\n") |
| }; |
| json!({"type": "text", "text": format!( |
| "Team [{}] '{}'\nInvited ({}):\n{}\nActive ({}):\n{}", |
| ch.team_id, ch.name, |
| ch.member_count(), members_str, |
| ch.active_count(), active_str |
| )}) |
| } |
| None => json!({"type": "text", "text": format!("Team [{}] not found. Use list to see all channels.", team_id)}), |
| } |
| } |
| other => json!({"type": "text", "text": format!( |
| "Unknown action: '{}'. Valid: create, join, leave, add, list, status", other |
| )}), |
| } |
| } |
|
|
| |
| "spf_flint_execute" => { |
| session.record_action("spf_flint_execute", "called", None); |
|
|
| |
| let transformer_loaded = transformer.as_ref() |
| .and_then(|t| t.read().ok()) |
| .is_some(); |
|
|
| if !transformer_loaded { |
| return json!({"type": "text", "text": |
| "[FLINT] Error: transformer not loaded — cannot execute in worker mode"}); |
| } |
|
|
| let tool_name = args["tool_name"].as_str().unwrap_or(""); |
| let tool_args = if args["arguments"].is_object() { |
| args["arguments"].clone() |
| } else { |
| json!({}) |
| }; |
| let reason = args["reason"].as_str().unwrap_or("(no reason provided)"); |
|
|
| |
| if tool_name == "spf_flint_execute" { |
| return json!({"type": "text", "text": |
| "[FLINT] BLOCKED: recursive self-invocation not permitted"}); |
| } |
|
|
| if tool_name.is_empty() { |
| return json!({"type": "text", "text": "[FLINT] Error: tool_name is required"}); |
| } |
|
|
| |
| session.record_action("spf_flint_execute", "dispatching", |
| Some(&format!("tool={} reason={}", tool_name, reason))); |
|
|
| |
| let flint_gate_params = extract_gate_params(&tool_args); |
| let flint_source = crate::dispatch::Source::Transformer { |
| role: "worker".to_string(), |
| model_id: "flint".to_string(), |
| }; |
| let flint_decision = crate::gate::process(tool_name, &flint_gate_params, config, session, &flint_source); |
| if !flint_decision.allowed { |
| return json!({ |
| "type": "text", |
| "text": format!("[FLINT] BLOCKED: {}", flint_decision.message), |
| "_blocked": true |
| }); |
| } |
| let result = handle_tool_call( |
| tool_name, &tool_args, flint_source, &flint_decision, config, session, storage, |
| config_db, tmp_db, _fs_db, agent_db, pub_key_hex, |
| mesh_tx, peers, transformer, transformer_config, |
| network_config, pool_state, pipeline, |
| browser, ws_browser_channels, http_port, |
| tracked_peers, orchestrator_state, |
| endpoint, tokio_handle, |
| ); |
|
|
| |
| let result_text = result.get("text") |
| .and_then(|t| t.as_str()) |
| .unwrap_or("(no output)"); |
| let was_blocked = result.get("_blocked") |
| .and_then(|b| b.as_bool()) |
| .unwrap_or(false); |
|
|
| json!({ |
| "type": "text", |
| "text": format!("[FLINT] Tool: {} | Reason: {} | {}{}", |
| tool_name, reason, |
| if was_blocked { "BLOCKED: " } else { "" }, |
| result_text |
| ), |
| "_flint_worker": true, |
| "_tool_executed": tool_name, |
| "_was_blocked": was_blocked |
| }) |
| } |
|
|
| |
|
|
| "spf_pool_status" => { |
| match pool_state { |
| Some(pool) => { |
| let snapshot = pool.status_snapshot(); |
| let idle = pool.idle_count(); |
| let active = pool.active_count(); |
| let cap = pool.capacity(); |
| let mut lines = vec![ |
| format!("Network Pool — NetAdmin | {}/{} workers active | {} idle | capacity {}", active, cap, idle, cap), |
| ]; |
| for (name, status, task_id, elapsed_ms) in &snapshot { |
| let task_str = task_id.as_deref().unwrap_or("-"); |
| lines.push(format!(" {:<20} {:8} task={:<30} elapsed={}ms", name, status, task_str, elapsed_ms)); |
| } |
| json!({"type": "text", "text": lines.join("\n")}) |
| } |
| None => { |
| let role = if network_config.is_netadmin() { "NetAdmin (pool not initialised)" } else { "Worker" }; |
| json!({"type": "text", "text": format!("Network role: {}\nPool management delegated to NetAdmin node.", role)}) |
| } |
| } |
| } |
|
|
| "spf_pool_assign" => { |
| if !network_config.is_netadmin() { |
| return json!({"type": "text", "text": "BLOCKED: spf_pool_assign requires NetAdmin role", "_blocked": true}); |
| } |
| let pool = match pool_state { |
| Some(p) => p, |
| None => return json!({"type": "text", "text": "Pool not initialised"}), |
| }; |
| let tool_name = args["tool"].as_str().unwrap_or("unknown"); |
| let worker_name_arg = args["worker_name"].as_str(); |
|
|
| |
| let key_hex = match worker_name_arg { |
| Some(name) => pool.find_by_name(name), |
| None => pool.find_idle(), |
| }; |
| let key_hex = match key_hex { |
| Some(k) => k, |
| None => return json!({"type": "text", "text": "No idle workers available in pool"}), |
| }; |
|
|
| let task_id = crate::network::new_task_id(); |
| match pool.borrow(&key_hex, &task_id) { |
| Ok(()) => { |
| let peer = pool.get_peer(&key_hex); |
| let wname = peer.as_ref().map(|p| p.name.as_str()).unwrap_or("unknown"); |
| session.record_action("spf_pool_assign", &task_id, Some(wname)); |
| json!({"type": "text", "text": format!( |
| "Task assigned\ntask_id: {}\nworker: {}\nworker_key: {}\ntool: {}", |
| task_id, wname, key_hex, tool_name |
| )}) |
| } |
| Err(e) => json!({"type": "text", "text": format!("Assign failed: {}", e)}), |
| } |
| } |
|
|
| "spf_pool_release" => { |
| if !network_config.is_netadmin() { |
| return json!({"type": "text", "text": "BLOCKED: spf_pool_release requires NetAdmin role", "_blocked": true}); |
| } |
| let pool = match pool_state { |
| Some(p) => p, |
| None => return json!({"type": "text", "text": "Pool not initialised"}), |
| }; |
| let task_id = args["task_id"].as_str().unwrap_or(""); |
| let result_val = args.get("result").cloned().unwrap_or(serde_json::json!({})); |
|
|
| |
| let snapshot = pool.status_snapshot(); |
| let worker_name_for_lookup: Option<String> = snapshot.iter() |
| .find(|(_, _, tid, _)| tid.as_deref() == Some(task_id)) |
| .map(|(name, _, _, _)| name.clone()); |
|
|
| let key_hex = worker_name_for_lookup |
| .as_deref() |
| .and_then(|n| pool.find_by_name(n)); |
|
|
| let key_hex = match key_hex { |
| Some(k) => k, |
| None => return json!({"type": "text", "text": format!("No worker found holding task_id: {}", task_id)}), |
| }; |
|
|
| match pool.release(&key_hex) { |
| Ok(wname) => { |
| let result_hash = crate::network::hash_result(&result_val); |
| session.record_manifest(task_id, 0, "POOL_RELEASE", Some(&wname)); |
| json!({"type": "text", "text": format!( |
| "Worker released\ntask_id: {}\nworker: {}\nresult_hash: {}\nPool: {}/{} idle", |
| task_id, wname, result_hash, pool.idle_count(), pool.capacity() |
| )}) |
| } |
| Err(e) => json!({"type": "text", "text": format!("Release failed: {}", e)}), |
| } |
| } |
|
|
| |
| "spf_channel" => { |
| session.record_action("spf_channel", "called", None); |
| let action = args["action"].as_str().unwrap_or("status"); |
| let channel_id = args["channel_id"].as_str().unwrap_or(""); |
| let display_name = args["name"].as_str().unwrap_or("Agent"); |
| let text = args["text"].as_str().unwrap_or(""); |
| let limit = args["limit"].as_u64().unwrap_or(50) as usize; |
| let msg_type_str = args["msg_type"].as_str().unwrap_or("text"); |
| let hub_url = args["hub_url"].as_str().unwrap_or(""); |
|
|
| let msg_type = match msg_type_str { |
| "tool_result" => crate::channel::MessageType::ToolResult, |
| "system" => crate::channel::MessageType::System, |
| _ => crate::channel::MessageType::Text, |
| }; |
|
|
| match action { |
| |
| "create" => { |
| let ch_name = if display_name == "Agent" && !text.is_empty() { text } else { display_name }; |
| let name_arg = args["name"].as_str().unwrap_or(ch_name); |
| let result = crate::channel::api_create_channel(name_arg, pub_key_hex, display_name); |
| json!({"type": "text", "text": serde_json::to_string_pretty(&result).unwrap_or_default()}) |
| } |
|
|
| "join" => { |
| if channel_id.is_empty() { |
| return json!({"type": "text", "text": "Error: channel_id required for join"}); |
| } |
| let result = crate::channel::api_join_channel(channel_id, pub_key_hex, display_name); |
| json!({"type": "text", "text": serde_json::to_string_pretty(&result).unwrap_or_default()}) |
| } |
|
|
| "leave" => { |
| if channel_id.is_empty() { |
| return json!({"type": "text", "text": "Error: channel_id required for leave"}); |
| } |
| let result = crate::channel::api_leave_channel(channel_id, pub_key_hex); |
| json!({"type": "text", "text": serde_json::to_string_pretty(&result).unwrap_or_default()}) |
| } |
|
|
| "send" => { |
| if channel_id.is_empty() || text.is_empty() { |
| return json!({"type": "text", "text": "Error: channel_id and text required for send"}); |
| } |
| let result = crate::channel::api_send_message(channel_id, pub_key_hex, display_name, text, msg_type); |
| json!({"type": "text", "text": serde_json::to_string_pretty(&result).unwrap_or_default()}) |
| } |
|
|
| "history" => { |
| if channel_id.is_empty() { |
| return json!({"type": "text", "text": "Error: channel_id required for history"}); |
| } |
| let result = crate::channel::api_channel_history(channel_id, limit); |
| json!({"type": "text", "text": serde_json::to_string_pretty(&result).unwrap_or_default()}) |
| } |
|
|
| "list" => { |
| let result = crate::channel::api_list_channels(); |
| json!({"type": "text", "text": serde_json::to_string_pretty(&result).unwrap_or_default()}) |
| } |
|
|
| |
| "connect" => { |
| if hub_url.is_empty() || channel_id.is_empty() { |
| return json!({"type": "text", "text": "Error: hub_url and channel_id required for connect"}); |
| } |
| |
| let rt_guard = tokio_handle.lock().unwrap(); |
| let handle = match rt_guard.as_ref() { |
| Some(h) => h, |
| None => return json!({"type": "text", "text": "Error: tokio runtime not available (mesh not started?)"}), |
| }; |
| |
| let api_key = std::env::var("SPF_API_KEY").unwrap_or_default(); |
| match crate::channel::ws_client_connect(handle, hub_url, channel_id, &api_key, pub_key_hex, display_name) { |
| Ok(key) => json!({"type": "text", "text": format!("Connected to {} #{}\nClient key: {}", hub_url, channel_id, key)}), |
| Err(e) => json!({"type": "text", "text": format!("Connect failed: {}", e)}), |
| } |
| } |
|
|
| "disconnect" => { |
| if hub_url.is_empty() || channel_id.is_empty() { |
| return json!({"type": "text", "text": "Error: hub_url and channel_id required for disconnect"}); |
| } |
| let key = format!("{}#{}", hub_url, channel_id); |
| match crate::channel::ws_client_disconnect(&key) { |
| Ok(()) => json!({"type": "text", "text": format!("Disconnected from {}", key)}), |
| Err(e) => json!({"type": "text", "text": format!("Disconnect error: {}", e)}), |
| } |
| } |
|
|
| "listen" => { |
| |
| if !hub_url.is_empty() && !channel_id.is_empty() { |
| let key = format!("{}#{}", hub_url, channel_id); |
| match crate::channel::ws_client_drain(&key) { |
| Ok(msgs) => { |
| if msgs.is_empty() { |
| json!({"type": "text", "text": "No new messages"}) |
| } else { |
| json!({"type": "text", "text": serde_json::to_string_pretty(&msgs).unwrap_or_default()}) |
| } |
| } |
| Err(e) => json!({"type": "text", "text": format!("Listen error: {}", e)}), |
| } |
| } else if !channel_id.is_empty() { |
| |
| let result = crate::channel::api_channel_history(channel_id, limit); |
| json!({"type": "text", "text": serde_json::to_string_pretty(&result).unwrap_or_default()}) |
| } else { |
| json!({"type": "text", "text": "Error: channel_id required for listen (hub_url also needed for remote)"}) |
| } |
| } |
|
|
| "status" => { |
| let clients = crate::channel::ws_client_list(); |
| let channels = crate::channel::api_list_channels(); |
| json!({"type": "text", "text": format!( |
| "Channel Hub Status:\n{}\n\nWS Client Connections:\n{}", |
| serde_json::to_string_pretty(&channels).unwrap_or_default(), |
| serde_json::to_string_pretty(&clients).unwrap_or_default() |
| )}) |
| } |
|
|
| other => { |
| json!({"type": "text", "text": format!("Unknown channel action: {}. Use: create, join, leave, send, listen, history, list, connect, disconnect, status", other)}) |
| } |
| } |
| } |
|
|
| _ => { |
| json!({"type": "text", "text": format!("Unknown tool: {}", name)}) |
| } |
| } |
| } |
|
|
| |
| |
| |
| |
| fn build_listeners( |
| _config: &SpfConfig, |
| transformer_config: &crate::config::TransformerConfig, |
| ) -> (Vec<Box<dyn crate::dispatch::DispatchListener>>, Option<std::sync::Arc<crate::gate_training::GateTrainingCollector>>) { |
| let mut listeners: Vec<Box<dyn crate::dispatch::DispatchListener>> = Vec::new(); |
| let mut collector_arc = None; |
| if transformer_config.enabled { |
| let collector = std::sync::Arc::new( |
| crate::gate_training::GateTrainingCollector::new(10000) |
| ); |
| listeners.push(Box::new(collector.clone())); |
| collector_arc = Some(collector); |
| log("GateTrainingCollector registered as DispatchListener (Arc-shared)"); |
| } |
| (listeners, collector_arc) |
| } |
|
|
| |
| pub fn tool_count() -> usize { |
| tool_definitions().len() |
| } |
|
|
| |
| |
/// Entry point for worker mode (no stdio MCP transport).
///
/// Boot order matters here: voice teams and the local brain are initialized
/// first, then the LMDB-backed stores are opened best-effort (`.ok()` — a
/// missing store degrades to `None` rather than aborting), then identity is
/// ensured and the HTTP API key is derived from it. After the shared
/// `ServerState` is built, optional background services (FLINT memory
/// router, mesh, HTTP) are spawned on their own threads, and finally the
/// thread blocks inside `crate::worker::run_worker`.
pub fn run_worker(
    config: SpfConfig,
    config_db: SpfConfigDb,
    session: Session,
    storage: SpfStorage,
    http_config: crate::config::HttpConfig,
    transformer: std::sync::Arc<std::sync::RwLock<crate::transformer_tools::TransformerState>>,
    transformer_config: crate::config::TransformerConfig,
    worker_config: crate::worker::WorkerConfig,
) {
    use std::sync::{Arc, Mutex};

    log("[SPF-WORKER] Starting worker mode (no stdio)");

    // Voice + brain subsystems are global singletons; initialize before anything
    // that might dispatch tool calls.
    crate::voice::init_voice_teams();

    crate::brain_local::init_brain();
    crate::brain_local::index_knowledge_docs();
    crate::brain_local::index_spf_sources();

    let live_base = spf_root().join("LIVE");

    // Best-effort store opens: `.ok()` turns open failures into `None` so the
    // worker still starts with reduced capability.
    let tmp_db = crate::tmp_db::SpfTmpDb::open(&live_base.join("TMP/TMP.DB")).ok();
    let agent_db = crate::agent_state::AgentStateDb::open(&live_base.join("LMDB5/LMDB5.DB")).ok();
    let fs_db = crate::fs::SpfFs::open(&live_base.join("SPF_FS")).ok();

    // Identity: key pair is created on first run; trusted keys / peers are
    // loaded from the groups directory.
    let config_dir = crate::paths::spf_root().join("LIVE/CONFIG");
    let (signing_key, verifying_key) = crate::identity::ensure_identity(&config_dir);
    let pub_key_hex = hex::encode(verifying_key.to_bytes());
    let trusted_keys = crate::identity::load_trusted_keys(&config_dir.join("groups"));
    let peers = crate::identity::load_peers(&config_dir.join("groups"));
    log(&format!("[SPF-WORKER] Identity: {}", pub_key_hex));

    crate::identity::boot_integrity_check();

    // The HTTP API key is always derived from the signing key, overriding
    // whatever was in the loaded config.
    let mut http_config = http_config;
    let derived_key = crate::identity::derive_api_key(&signing_key);
    http_config.api_key = derived_key;
    log("[SPF-WORKER] API key synced from identity");

    let mesh_config = crate::config::MeshConfig::load(
        &crate::paths::spf_root().join("LIVE/CONFIG/mesh.json")
    ).unwrap_or_default();

    // Channel endpoints are only created when mesh is enabled; `mesh_tx` goes
    // into ServerState, `mesh_rx` is consumed by the mesh thread below.
    let (mesh_tx, mesh_rx) = if mesh_config.enabled {
        let (tx, rx) = crate::mesh::create_mesh_channel();
        (Some(tx), Some(rx))
    } else {
        (None, None)
    };

    let (listeners, collector_arc) = build_listeners(&config, &transformer_config);

    // Wire the shared training collector into the transformer state so both
    // sides observe the same buffer.
    if let Some(ref collector) = collector_arc {
        let mut t_state = transformer.write().unwrap();
        t_state.collector = Some(collector.clone());
    }

    let worker_network_config = crate::config::NetworkConfig::load(
        &crate::paths::spf_root().join("LIVE/CONFIG/network.json")
    ).unwrap_or_default();

    // Worker role never manages a pool (`pool_state: None`); pool management
    // is the NetAdmin's job (see `run`).
    let state = Arc::new(ServerState {
        config,
        config_db: Some(config_db),
        session: Mutex::new(session),
        storage,
        tmp_db,
        agent_db,
        fs_db,
        pub_key_hex: pub_key_hex.clone(),
        trusted_keys,
        auth_mode: http_config.auth_mode.clone(),
        nonce_cache: Mutex::new(std::collections::HashMap::new()),
        listeners,
        mesh_tx,
        peers,
        transformer: Some(transformer.clone()),
        transformer_config: transformer_config.clone(),
        pipeline: Arc::new(Mutex::new(crate::pipeline::PipelineState::new())),
        signing_key: signing_key.clone(),
        network_config: worker_network_config,
        pool_state: None,
        browser: std::sync::Mutex::new(crate::browser::BrowserSession::new()),
        ws_browser_channels: std::sync::Mutex::new(None),
        http_port: http_config.port,
        tracked_peers: std::sync::Mutex::new(std::collections::HashMap::new()),
        orchestrator_state: std::sync::Mutex::new(None),
        endpoint: std::sync::Mutex::new(None),
        tokio_handle: std::sync::Mutex::new(None),
    });

    // Memory router only runs when a training collector exists.
    if let Some(ref collector) = collector_arc {
        crate::flint_memory::start_memory_router(
            collector.clone(), Some(transformer.clone()), state.clone()
        );
        log("FLINT Memory Router started (MB-FR/MB-FT) with auto-train + metrics");
    }

    // Mesh gets its own thread with a dedicated 2-worker tokio runtime.
    if mesh_config.enabled {
        if let Some(rx) = mesh_rx {
            let mesh_state = state.clone();
            let mesh_key = signing_key.clone();
            std::thread::spawn(move || {
                let rt = tokio::runtime::Builder::new_multi_thread()
                    .worker_threads(2)
                    .enable_all()
                    .build()
                    .expect("[SPF-WORKER] Failed to build mesh runtime");
                rt.block_on(crate::mesh::run(mesh_state, mesh_key, mesh_config, rx));
            });
        }
    }

    // HTTP API runs only for non-stdio transports and only with a non-empty
    // API key (which, after the derive above, should always be set —
    // NOTE(review): confirm derive_api_key can never return an empty string).
    if http_config.transport != "stdio" && !http_config.api_key.is_empty() {
        let http_state = state.clone();
        let api_key = http_config.api_key.clone();
        let bind = http_config.bind.clone();
        let port = http_config.port;
        std::thread::spawn(move || {
            crate::http::start(http_state, &bind, port, api_key, None);
        });
    }

    // Blocks here: the worker loop is the main thread's job from now on.
    let mut worker_state = crate::worker::WorkerState::new(worker_config, transformer_config);
    crate::worker::run_worker(&mut worker_state, &transformer, &state);
}
|
|
/// Main serve-mode entry point: initializes stores, identity, transformer,
/// mesh and HTTP, then (unless transport is http-only) drives the MCP
/// JSON-RPC loop over stdin/stdout until EOF, saving a transformer
/// checkpoint + manifest on shutdown.
///
/// Initialization order is significant: stores → identity → API key derive →
/// transformer (+ checkpoint restore) → mesh channel → listeners → network
/// role/pool → ServerState → background services → stdio loop.
pub fn run(config: SpfConfig, config_db: SpfConfigDb, session: Session, storage: SpfStorage, http_config: crate::config::HttpConfig) {
    log(&format!("Starting {} v{}", SERVER_NAME, SERVER_VERSION));
    log(&format!("Mode: {:?}", config.enforce_mode));

    let live_base = spf_root().join("LIVE");

    // Config DB is owned by main and handed in; wrap for ServerState.
    let config_db = Some(config_db);
    log("SPF_CONFIG LMDB active (passed from main)");

    let tmp_db_path = live_base.join("TMP/TMP.DB");
    log(&format!("TMP_DB path: {:?}", tmp_db_path));

    // All three LMDB stores open best-effort: a failure logs a warning and
    // leaves the slot `None` instead of aborting startup.
    let tmp_db = match SpfTmpDb::open(&tmp_db_path) {
        Ok(db) => {
            log(&format!("TMP_DB LMDB initialized at {:?}", tmp_db_path));
            Some(db)
        }
        Err(e) => {
            log(&format!("Warning: Failed to open TMP_DB LMDB at {:?}: {}", tmp_db_path, e));
            None
        }
    };

    let agent_db_path = live_base.join("LMDB5/LMDB5.DB");
    log(&format!("AGENT_STATE path: {:?}", agent_db_path));

    let agent_db = match AgentStateDb::open(&agent_db_path) {
        Ok(db) => {
            // Defaults are seeded on open; failure here is non-fatal.
            if let Err(e) = db.init_defaults() {
                log(&format!("Warning: AGENT_STATE init_defaults failed: {}", e));
            }
            log(&format!("AGENT_STATE LMDB initialized at {:?}", agent_db_path));
            Some(db)
        }
        Err(e) => {
            log(&format!("Warning: Failed to open AGENT_STATE LMDB at {:?}: {}", agent_db_path, e));
            None
        }
    };

    let fs_db_storage = live_base.join("SPF_FS");
    log(&format!("SPF_FS path: {:?}", fs_db_storage));

    let fs_db = match SpfFs::open(&fs_db_storage) {
        Ok(db) => {
            log(&format!("SPF_FS LMDB initialized at {:?}/SPF_FS.DB/", fs_db_storage));
            Some(db)
        }
        Err(e) => {
            log(&format!("Warning: Failed to open SPF_FS LMDB: {}", e));
            None
        }
    };

    // Identity: key pair created on first run; trusted keys / peers loaded
    // from the groups directory.
    let config_dir = crate::paths::spf_root().join("LIVE/CONFIG");
    let (signing_key, verifying_key) = crate::identity::ensure_identity(&config_dir);
    let pub_key_hex = hex::encode(verifying_key.to_bytes());
    let trusted_keys = crate::identity::load_trusted_keys(&config_dir.join("groups"));
    let peers = crate::identity::load_peers(&config_dir.join("groups"));
    log(&format!("Identity: {}", pub_key_hex));

    crate::identity::boot_integrity_check();

    // HTTP API key is always overridden with one derived from the signing key.
    let mut http_config = http_config;
    let derived_key = crate::identity::derive_api_key(&signing_key);
    http_config.api_key = derived_key;
    log("API key synced from identity");

    let transformer_config = crate::config::TransformerConfig::load(
        &crate::paths::spf_root().join("LIVE/CONFIG/transformer.json")
    ).unwrap_or_default();

    // Transformer state, optionally restored from the writer checkpoint.
    // Any load failure (missing file, corrupt data, shape mismatch) falls
    // back to random weights and is only logged.
    let transformer = if transformer_config.enabled {
        log("Transformer: enabled — initializing runtime state");
        let mut state = crate::transformer_tools::TransformerState::from_config(
            &transformer_config, "writer"
        );

        let ckpt_path = crate::paths::spf_root()
            .join("LIVE/MODELS")
            .join(&transformer_config.writer_checkpoint);
        if ckpt_path.exists() {
            match std::fs::read(&ckpt_path) {
                Ok(data) => match crate::checkpoint::deserialize_weights(&data) {
                    Ok((weights, meta)) => {
                        let mut model_weights = state.model.weights_mut();
                        match crate::checkpoint::apply_weights(&mut model_weights, &weights) {
                            Ok(()) => {
                                log(&format!("Transformer: checkpoint loaded — model={}, step={}", meta.model_id, meta.step));
                                state.training_step = meta.step;
                            }
                            Err(e) => log(&format!("Transformer: checkpoint shape mismatch — {}, using random weights", e)),
                        }
                        // Recorded even on shape mismatch — the file itself
                        // was readable and parseable.
                        state.last_checkpoint = ckpt_path.to_string_lossy().to_string();
                    }
                    Err(e) => log(&format!("Transformer: checkpoint corrupt — {}, using random weights", e)),
                },
                Err(e) => log(&format!("Transformer: checkpoint read failed — {}, using random weights", e)),
            }
        }

        // Informational only: report the last-saved manifest if present.
        let manifest_path = spf_root().join("LIVE/MODELS/checkpoint_manifest.json");
        if manifest_path.exists() {
            if let Ok(data) = std::fs::read(&manifest_path) {
                if let Ok(manifest) = serde_json::from_slice::<serde_json::Value>(&data) {
                    if let Some(step) = manifest.get("training_step").and_then(|v| v.as_u64()) {
                        log(&format!("Recovery: manifest loaded — last step={}, checkpoint={}",
                            step,
                            manifest.get("checkpoint").and_then(|v| v.as_str()).unwrap_or("?")
                        ));
                    }
                }
            }
        }
        Some(std::sync::Arc::new(std::sync::RwLock::new(state)))
    } else {
        log("Transformer: disabled (set enabled: true in LIVE/CONFIG/transformer.json)");
        None
    };

    let mesh_config = crate::config::MeshConfig::load(
        &crate::paths::spf_root().join("LIVE/CONFIG/mesh.json")
    ).unwrap_or_default();

    // `mesh_rx` is `mut` because exactly one of the two spawn sites below
    // `take()`s it (shared-runtime branch vs. standalone-stdio branch).
    let (mesh_tx, mut mesh_rx) = if mesh_config.enabled {
        let (tx, rx) = crate::mesh::create_mesh_channel();
        (Some(tx), Some(rx))
    } else {
        (None, None)
    };

    let (listeners, collector_arc) = build_listeners(&config, &transformer_config);

    // Wire the shared training collector into the transformer state.
    if let Some(ref collector) = collector_arc {
        if let Some(ref t_lock) = transformer {
            let mut t_state = t_lock.write().unwrap();
            t_state.collector = Some(collector.clone());
            log("Collector wired to TransformerState (Serve mode)");
        }
    }

    let network_config = crate::config::NetworkConfig::load(
        &crate::paths::spf_root().join("LIVE/CONFIG/network.json")
    ).unwrap_or_default();

    // Only the NetAdmin role owns a worker pool; plain workers delegate.
    let pool_state = if network_config.is_netadmin() {
        let cap = network_config.effective_pool_size();
        log(&format!("Network: NetAdmin role — initialising pool ({} workers max)", cap));
        Some(crate::network::PoolState::new(&network_config.peers, cap))
    } else {
        log("Network: Worker role — pool management delegated to NetAdmin");
        None
    };
    let role_str = if network_config.is_netadmin() { "netadmin" } else { "worker" };
    crate::network::log_startup_status(&network_config.peers, role_str);

    let state = Arc::new(ServerState {
        config,
        config_db,
        session: Mutex::new(session),
        storage,
        tmp_db,
        agent_db,
        fs_db,
        pub_key_hex,
        trusted_keys,
        auth_mode: http_config.auth_mode.clone(),
        nonce_cache: Mutex::new(std::collections::HashMap::new()),
        listeners,
        mesh_tx,
        peers,
        transformer,
        transformer_config,
        pipeline: std::sync::Arc::new(std::sync::Mutex::new(crate::pipeline::PipelineState::new())),
        signing_key: signing_key.clone(),
        network_config,
        pool_state,
        browser: std::sync::Mutex::new(crate::browser::BrowserSession::new()),
        ws_browser_channels: std::sync::Mutex::new(None),
        http_port: http_config.port,
        tracked_peers: std::sync::Mutex::new(std::collections::HashMap::new()),
        orchestrator_state: std::sync::Mutex::new(None),
        endpoint: std::sync::Mutex::new(None),
        tokio_handle: std::sync::Mutex::new(None),
    });

    // Memory router only runs when a training collector exists.
    if let Some(ref collector) = collector_arc {
        crate::flint_memory::start_memory_router(
            collector.clone(), state.transformer.clone(), state.clone()
        );
        log("FLINT Memory Router started (MB-FR/MB-FT) with auto-train + metrics");
    }

    // HTTP(S) startup. With mesh enabled, HTTP and mesh share one tokio
    // runtime on one thread; otherwise HTTP runs on its own thread.
    if http_config.transport != "stdio" {
        if http_config.api_key.is_empty() && state.trusted_keys.is_empty() {
            // NOTE(review): in this fallback branch, mesh is never started
            // either — the standalone mesh block below requires
            // transport == "stdio". Confirm that is intended.
            log("HTTP: No API key and no trusted keys. Falling back to stdio only.");
        } else {
            // TLS material: load from CONFIG, self-signing a certificate on
            // first use if the files are missing. `None` means plain HTTP.
            let tls = if http_config.tls_enabled {
                let config_dir = crate::paths::spf_root().join("LIVE/CONFIG");
                let cert_path = config_dir.join(&http_config.tls_cert);
                let key_path = config_dir.join(&http_config.tls_key);
                if !cert_path.exists() || !key_path.exists() {
                    // CN derived from the node identity; SANs cover localhost
                    // plus the CN itself.
                    let cn = format!("spf-{}", &state.pub_key_hex[..16]);
                    let san_names = vec!["localhost".to_string(), cn.clone()];
                    let ck = rcgen::generate_simple_self_signed(san_names)
                        .expect("Failed to generate TLS certificate");
                    if let Some(parent) = cert_path.parent() {
                        std::fs::create_dir_all(parent).ok();
                    }
                    // Write failures are ignored here; the read below will
                    // catch them and fall back to plain HTTP.
                    std::fs::write(&cert_path, ck.cert.pem()).ok();
                    std::fs::write(&key_path, ck.key_pair.serialize_pem()).ok();
                    log("Generated self-signed TLS certificate");
                }
                match (std::fs::read(&cert_path), std::fs::read(&key_path)) {
                    (Ok(cert), Ok(key)) => Some((cert, key)),
                    _ => {
                        log("WARNING: Failed to read TLS cert/key files. Starting without TLS.");
                        None
                    }
                }
            } else {
                None
            };
            let scheme = if tls.is_some() { "HTTPS" } else { "HTTP" };
            let http_state = Arc::clone(&state);
            let http_port = http_config.port;
            let http_bind = http_config.bind.clone();
            let http_api_key = http_config.api_key.clone();

            if mesh_config.enabled {
                // Shared-runtime path: one thread, one multi-thread tokio
                // runtime, HTTP and mesh as two spawned tasks. `select!`
                // means the thread exits if either task ends.
                let mesh_state = Arc::clone(&state);
                let mesh_signing_key = signing_key.clone();
                let mesh_cfg = mesh_config.clone();
                let mesh_receiver = mesh_rx.take().expect("mesh_rx must exist when mesh is enabled");

                std::thread::spawn(move || {
                    let rt = tokio::runtime::Builder::new_multi_thread()
                        .enable_all()
                        .build()
                        .expect("Failed to create shared tokio runtime");

                    rt.block_on(async {
                        let http_task = tokio::spawn(crate::http::serve(
                            http_state, http_bind, http_port, http_api_key, tls,
                        ));

                        let mesh_task = tokio::spawn(crate::mesh::run(
                            mesh_state, mesh_signing_key, mesh_cfg, mesh_receiver,
                        ));

                        tokio::select! {
                            r = http_task => {
                                if let Err(e) = r {
                                    eprintln!("[SPF] HTTP task exited: {}", e);
                                }
                            }
                            r = mesh_task => {
                                if let Err(e) = r {
                                    eprintln!("[SPF] Mesh task exited: {}", e);
                                }
                            }
                        }
                    });
                });

                log(&format!("{} API started on {}:{}", scheme, http_config.bind, http_port));
                log(&format!("Mesh started | Role: {} | Team: {} | Discovery: {}",
                    mesh_config.role, mesh_config.team, mesh_config.discovery));
                log("Shared runtime: HTTP + Mesh in 1 thread, 1 tokio runtime");
            } else {
                // HTTP-only path: dedicated thread, blocking `start`.
                std::thread::spawn(move || {
                    crate::http::start(http_state, &http_bind, http_port, http_api_key, tls);
                });
                log(&format!("{} API started on {}:{}", scheme, http_config.bind, http_port));
            }
        }
    }

    // Standalone mesh: only for pure-stdio transport (otherwise mesh was
    // already launched on the shared runtime above).
    if mesh_config.enabled && http_config.transport == "stdio" {
        let mesh_state = Arc::clone(&state);
        let mesh_signing_key = signing_key.clone();
        let mesh_cfg = mesh_config.clone();
        let mesh_receiver = mesh_rx.take().expect("mesh_rx must exist when mesh is enabled");
        std::thread::spawn(move || {
            tokio::runtime::Builder::new_multi_thread()
                .enable_all()
                .build()
                .expect("Failed to create mesh tokio runtime")
                .block_on(crate::mesh::run(mesh_state, mesh_signing_key, mesh_cfg, mesh_receiver))
        });
        log(&format!("Mesh started (standalone) | Role: {} | Team: {} | Discovery: {}",
            mesh_config.role, mesh_config.team, mesh_config.discovery));
    } else if !mesh_config.enabled {
        log("Mesh disabled (set enabled: true in LIVE/CONFIG/mesh.json)");
    }

    crate::voice::init_voice_teams();

    crate::brain_local::init_brain();
    crate::brain_local::index_knowledge_docs();
    crate::brain_local::index_spf_sources();

    // MCP JSON-RPC loop over stdio — one JSON message per line. Runs unless
    // transport is http-only. Malformed lines are logged and skipped.
    if http_config.transport != "http" {
        let stdin = io::stdin();
        for line in stdin.lock().lines() {
            let line = match line {
                Ok(l) => l,
                Err(e) => {
                    log(&format!("stdin read error: {}", e));
                    continue;
                }
            };

            let line = line.trim().to_string();
            if line.is_empty() {
                continue;
            }

            let msg: Value = match serde_json::from_str(&line) {
                Ok(v) => v,
                Err(e) => {
                    log(&format!("JSON parse error: {}", e));
                    continue;
                }
            };

            let method = msg["method"].as_str().unwrap_or("");
            let id = &msg["id"];
            let params = &msg["params"];

            log(&format!("Received: {}", method));

            match method {
                "initialize" => {
                    send_response(id, json!({
                        "protocolVersion": PROTOCOL_VERSION,
                        "capabilities": { "tools": {} },
                        "serverInfo": {
                            "name": SERVER_NAME,
                            "version": SERVER_VERSION,
                        }
                    }));
                }

                "notifications/initialized" => {
                    // Notification — no response required.
                }

                "tools/list" => {
                    send_response(id, json!({ "tools": tool_definitions() }));
                }

                "tools/call" => {
                    let name = params["name"].as_str().unwrap_or("");
                    let args = params.get("arguments").cloned().unwrap_or(json!({}));

                    cmd_log(&format!("CALL {} | {}", name, param_summary(name, &args)));

                    // All tool execution goes through the central dispatcher
                    // so gate/listener logic applies uniformly.
                    let response = crate::dispatch::call(&state, crate::dispatch::Source::Stdio, name, &args);

                    // Gate-blocked calls are logged with a truncated snippet
                    // of the block reason.
                    let blocked = response.result.get("_blocked").and_then(|v| v.as_bool()).unwrap_or(false);
                    if blocked {
                        let text = response.result.get("text").and_then(|v| v.as_str()).unwrap_or("");
                        let snippet: String = text.chars().take(200).collect();
                        cmd_log(&format!("FAIL {} | {}", name, snippet));
                    }

                    send_response(id, json!({
                        "content": [response.result]
                    }));
                }

                "ping" => {
                    send_response(id, json!({}));
                }

                _ => {
                    // Only error on requests (notifications have null id).
                    if !id.is_null() {
                        send_error(id, -32601, &format!("Unknown method: {}", method));
                    }
                }
            }
        }
        // Stdin closed → graceful shutdown: persist transformer checkpoint +
        // manifest. `try_read` is deliberate — if training holds the write
        // lock, skip the save rather than block shutdown.
        if let Some(transformer_arc) = &state.transformer {
            if let Ok(state_guard) = transformer_arc.try_read() {
                let mut worker_cfg = crate::worker::WorkerConfig::default();
                worker_cfg.role = state_guard.role.clone();
                match crate::worker::save_checkpoint(&state_guard, &state.transformer_config, &worker_cfg) {
                    Ok(path) => {
                        log(&format!("Shutdown: checkpoint saved to {}", path));
                        let manifest = serde_json::json!({
                            "checkpoint": path,
                            "saved_at": chrono::Utc::now().to_rfc3339(),
                            "training_step": state_guard.training_step,
                            "session_log": "LIVE/SESSION/cmd.log",
                            "agent_state_db": "LIVE/LMDB5/LMDB5.DB",
                            "brain_dir": "LIVE/BRAIN/DOCS/",
                            "status_file": "PROJECTS/PROJECTS/STATUS.txt"
                        });
                        let manifest_path = spf_root().join("LIVE/MODELS/checkpoint_manifest.json");
                        if let Ok(json) = serde_json::to_string_pretty(&manifest) {
                            if let Err(e) = std::fs::write(&manifest_path, json) {
                                log(&format!("Shutdown: manifest write failed — {}", e));
                            } else {
                                log(&format!("Shutdown: manifest written to {:?}", manifest_path));
                            }
                        }
                    }
                    Err(e) => log(&format!("Shutdown: checkpoint save failed — {}", e)),
                }
            } else {
                log("Shutdown: skipping checkpoint save — transformer busy (training in progress)");
            }
        }
    } else {
        // Http-only: background threads do all the work; park forever.
        log("Transport mode: http-only. Stdio disabled. Main thread parked.");
        loop { std::thread::park(); }
    }
}
|
|