Upload 3 files
Browse files- src/bin/jsonl_to_tlog.rs +165 -0
- src/bin/prune_memories.rs +643 -0
src/bin/jsonl_to_tlog.rs
ADDED
|
@@ -0,0 +1,165 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
// jsonl_to_tlog — Convert pruned_memories.jsonl to tlog LMDB format
|
| 2 |
+
// Copyright 2026 Joseph Stone — All Rights Reserved
|
| 3 |
+
//
|
| 4 |
+
// Reads ConsolidatedExample JSONL, converts to TrainingSignal format,
|
| 5 |
+
// and writes to LMDB tlog:* keys for spf_transformer_train().
|
| 6 |
+
//
|
| 7 |
+
// Usage: cargo run --bin jsonl_to_tlog -- <input.jsonl>
|
| 8 |
+
//
|
| 9 |
+
// Output: LIVE/LMDB5/LMDB5.DB state DB (tlog:* keys)
|
| 10 |
+
|
| 11 |
+
use anyhow::Result;
|
| 12 |
+
use heed::types::*;
|
| 13 |
+
use heed::{Database, EnvOpenOptions};
|
| 14 |
+
use serde::{Deserialize, Serialize};
|
| 15 |
+
use std::fs::File;
|
| 16 |
+
use std::io::{BufRead, BufReader};
|
| 17 |
+
use std::path::Path;
|
| 18 |
+
use std::time::{SystemTime, UNIX_EPOCH};
|
| 19 |
+
|
| 20 |
+
// ============================================================================
// TrainingSignal — matches gate_training.rs EXACTLY
// ============================================================================

/// Gate-decision record in the exact JSON shape gate_training.rs consumes.
/// Field names and types must not drift from that definition, or
/// spf_transformer_train() will fail to deserialize the tlog:* values.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct TrainingSignal {
    /// Tool the decision applies to.
    pub tool: String,
    /// Origin of the signal; this converter writes "memory:<category>".
    pub source: String,
    /// Whether the gate allowed the call.
    pub allowed: bool,
    /// "allowed" or "blocked" — mirrors `allowed`.
    pub status: String,
    /// Pseudo-duration derived from signal_strength (×10) — not wall time.
    pub duration_ms: u64,
    /// UNIX timestamp in milliseconds, rendered as a decimal string.
    pub timestamp: String,
    /// Set for strongly-positive labels (>= 9) by to_training_signal().
    pub user_override: bool,
    /// Set for strongly-negative labels (<= -3) by to_training_signal().
    pub false_positive: bool,
    /// Consolidated occurrence count, saturated into u32 range.
    pub recent_call_count: u32,
    /// Here: the source memory category, as a single pseudo-preceding tool.
    pub preceding_tools: Vec<String>,
    /// 0.0 for non-negative labels; 0.07 × |label| for negative ones.
    /// Defaults to 0 for records that predate the field.
    #[serde(default)]
    pub evil_score: f32,
}
|
| 39 |
+
|
| 40 |
+
/// One consolidated training example as read from pruned_memories.jsonl.
/// Must stay deserialization-compatible with the prune_memories.rs writer.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ConsolidatedExample {
    /// Signed label on the 20-level scale: -10..=-1 or 1..=10 (no zero).
    pub label: i32,
    /// Training weight from the pruner (not read by this converter; carried
    /// for round-trip fidelity).
    pub weight: f32,
    /// Tool or action name.
    pub tool: String,
    /// Context description from the pruner.
    pub context: String,
    /// Outcome text ("allowed"/"blocked" for tlogs, memory content otherwise).
    pub outcome: String,
    /// "tlog" or "memory".
    pub source_type: String,
    /// Memory category; becomes the source suffix and preceding-tool hint.
    pub category: String,
    /// How many duplicates were consolidated into this entry.
    pub occurrence_count: u64,
    /// Consolidated strength; scaled ×10 into duration_ms.
    pub signal_strength: f64,
}
|
| 52 |
+
|
| 53 |
+
// ============================================================================
|
| 54 |
+
// Conversion: ConsolidatedExample → TrainingSignal
|
| 55 |
+
// ============================================================================
|
| 56 |
+
|
| 57 |
+
impl ConsolidatedExample {
|
| 58 |
+
/// Convert to TrainingSignal for FLINT training
|
| 59 |
+
fn to_training_signal(&self) -> TrainingSignal {
|
| 60 |
+
// Map 20-level label to TrainingSignal fields
|
| 61 |
+
let (allowed, user_override, false_positive) = match self.label {
|
| 62 |
+
// Negative labels = blocked
|
| 63 |
+
-10..=-1 => (false, false, self.label <= -3),
|
| 64 |
+
// Positive labels = allowed
|
| 65 |
+
1..=10 => (true, self.label >= 9, false),
|
| 66 |
+
// Should never happen (NO ZERO)
|
| 67 |
+
_ => (true, false, false),
|
| 68 |
+
};
|
| 69 |
+
|
| 70 |
+
// Map label to evil_score (higher negative = higher evil)
|
| 71 |
+
let evil_score = if self.label < 0 {
|
| 72 |
+
(-self.label as f32) * 0.07 // -10 → 0.7, -3 → 0.21
|
| 73 |
+
} else {
|
| 74 |
+
0.0
|
| 75 |
+
};
|
| 76 |
+
|
| 77 |
+
TrainingSignal {
|
| 78 |
+
tool: self.tool.clone(),
|
| 79 |
+
source: format!("memory:{}", self.category),
|
| 80 |
+
allowed,
|
| 81 |
+
status: if allowed { "allowed" } else { "blocked" }.to_string(),
|
| 82 |
+
duration_ms: (self.signal_strength * 10.0) as u64, // scale signal to duration
|
| 83 |
+
timestamp: format!("{}", SystemTime::now()
|
| 84 |
+
.duration_since(UNIX_EPOCH)
|
| 85 |
+
.unwrap()
|
| 86 |
+
.as_millis()),
|
| 87 |
+
user_override,
|
| 88 |
+
false_positive,
|
| 89 |
+
recent_call_count: self.occurrence_count.min(u32::MAX as u64) as u32,
|
| 90 |
+
preceding_tools: vec![self.category.clone()],
|
| 91 |
+
evil_score,
|
| 92 |
+
}
|
| 93 |
+
}
|
| 94 |
+
}
|
| 95 |
+
|
| 96 |
+
/// LMDB map-size cap used when opening the environment (1 GiB).
const MAX_DB_SIZE: usize = 1024 * 1024 * 1024; // 1GB
|
| 97 |
+
|
| 98 |
+
fn main() -> Result<()> {
|
| 99 |
+
// Find input file
|
| 100 |
+
let input_path = std::env::args().nth(1)
|
| 101 |
+
.unwrap_or_else(|| "LIVE/TMP/stoneshell-brain/training_data/raw/pruned_memories.jsonl".to_string());
|
| 102 |
+
|
| 103 |
+
let lmdb_path = "/data/data/com.termux/files/home/SPFsmartGATE/LIVE/LMDB5/LMDB5.DB";
|
| 104 |
+
|
| 105 |
+
println!("[*] jsonl_to_tlog — JSONL to tlog LMDB converter");
|
| 106 |
+
println!("[*] Input: {}", input_path);
|
| 107 |
+
println!("[*] LMDB: {}", lmdb_path);
|
| 108 |
+
|
| 109 |
+
// Open LMDB
|
| 110 |
+
let env = unsafe {
|
| 111 |
+
EnvOpenOptions::new()
|
| 112 |
+
.map_size(MAX_DB_SIZE)
|
| 113 |
+
.max_dbs(8)
|
| 114 |
+
.open(Path::new(lmdb_path))?
|
| 115 |
+
};
|
| 116 |
+
|
| 117 |
+
let state_db: Database<Str, SerdeBincode<String>> = env.open_database(&env.read_txn()?, Some("state"))?
|
| 118 |
+
.ok_or_else(|| anyhow::anyhow!("state sub-DB not found"))?;
|
| 119 |
+
|
| 120 |
+
// Read JSONL
|
| 121 |
+
let file = File::open(&input_path)?;
|
| 122 |
+
let reader = BufReader::new(file);
|
| 123 |
+
|
| 124 |
+
let mut count = 0;
|
| 125 |
+
let mut error_count = 0;
|
| 126 |
+
let now = SystemTime::now().duration_since(UNIX_EPOCH)?.as_millis() as u64;
|
| 127 |
+
|
| 128 |
+
for line in reader.lines() {
|
| 129 |
+
let line = line?;
|
| 130 |
+
if line.trim().is_empty() { continue; }
|
| 131 |
+
|
| 132 |
+
match serde_json::from_str::<ConsolidatedExample>(&line) {
|
| 133 |
+
Ok(example) => {
|
| 134 |
+
let signal = example.to_training_signal();
|
| 135 |
+
let json = serde_json::to_string(&signal)?;
|
| 136 |
+
|
| 137 |
+
// Write to LMDB state DB as tlog: timestamp
|
| 138 |
+
let tlog_key = format!("tlog:{}", now + count);
|
| 139 |
+
|
| 140 |
+
let mut wtxn = env.write_txn()?;
|
| 141 |
+
state_db.put(&mut wtxn, &tlog_key, &json)?;
|
| 142 |
+
wtxn.commit()?;
|
| 143 |
+
|
| 144 |
+
count += 1;
|
| 145 |
+
if count % 500 == 0 {
|
| 146 |
+
println!(" Converted: {} entries", count);
|
| 147 |
+
}
|
| 148 |
+
}
|
| 149 |
+
Err(e) => {
|
| 150 |
+
error_count += 1;
|
| 151 |
+
if error_count <= 5 {
|
| 152 |
+
println!(" Error parsing line {}: {}", count + error_count, e);
|
| 153 |
+
}
|
| 154 |
+
}
|
| 155 |
+
}
|
| 156 |
+
}
|
| 157 |
+
|
| 158 |
+
println!("\n[=] Conversion complete");
|
| 159 |
+
println!(" Entries converted: {}", count);
|
| 160 |
+
println!(" Errors: {}", error_count);
|
| 161 |
+
println!(" tlog keys written: tlog:* in {}", lmdb_path);
|
| 162 |
+
println!("\n Next: Run 'spf_transformer_train()' to train FLINT on these signals");
|
| 163 |
+
|
| 164 |
+
Ok(())
|
| 165 |
+
}
|
src/bin/prune_memories.rs
ADDED
|
@@ -0,0 +1,643 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
// BLOCK 0 — Memory Extraction & Pruning Utility (v4)
|
| 2 |
+
// Copyright 2026 Joseph Stone — All Rights Reserved
|
| 3 |
+
//
|
| 4 |
+
// Compiles and runs against LIVE LMDB5 to extract, consolidate, and output
|
| 5 |
+
// training data. Handles bincode-serialized MemoryEntry properly.
|
| 6 |
+
//
|
| 7 |
+
// KEY DESIGN: Duplicates multiply weight, not get discarded.
|
| 8 |
+
// Same action repeated = stronger signal.
|
| 9 |
+
//
|
| 10 |
+
// 20-LEVEL LABEL SCALE: -10 (worst) to +10 (best), NO ZERO
|
| 11 |
+
// Labels map to MSE regression targets for FLINT fine-tuning.
|
| 12 |
+
//
|
| 13 |
+
// WEIGHT RANGE: 1-8x (critical — clamped, never exceeds range)
|
| 14 |
+
//
|
| 15 |
+
// Usage: cargo run --bin prune_memories
|
| 16 |
+
//
|
| 17 |
+
// Outputs:
|
| 18 |
+
// raw/pruned_memories.jsonl — consolidated training survivors
|
| 19 |
+
// raw/archived_memories.jsonl — expired/TTL-cleaned entries (parked)
|
| 20 |
+
// raw/memory_catalog.jsonl — categorized + tagged for brain re-index
|
| 21 |
+
// raw/brain_index_pruned.jsonl — brain-ready format for spf_brain_index
|
| 22 |
+
//
|
// (The listing above is the single source of truth for output files; all
//  paths are written under the training_data/raw/ directory.)
|
| 28 |
+
use anyhow::Result;
|
| 29 |
+
use heed::types::*;
|
| 30 |
+
use heed::{Database, EnvOpenOptions};
|
| 31 |
+
use serde::{Deserialize, Serialize};
|
| 32 |
+
use std::collections::{BTreeMap, HashMap};
|
| 33 |
+
use std::fs;
|
| 34 |
+
use std::hash::Hasher;
|
| 35 |
+
use std::io::Write;
|
| 36 |
+
use std::path::Path;
|
| 37 |
+
use std::time::{SystemTime, UNIX_EPOCH};
|
| 38 |
+
|
| 39 |
+
// ============================================================================
// Structs copied EXACTLY from source (agent_state.rs, gate_training.rs)
// ============================================================================

/// Memory classification, mirrored from agent_state.rs. Entries are decoded
/// via bincode (see the `SerdeBincode` database type below), so the variant
/// order must match the writer exactly — do not reorder.
#[derive(Debug, Clone, Copy, Deserialize, Serialize, PartialEq, Eq, Hash)]
pub enum MemoryType {
    /// User preference (labeled +4 by label_for_memory).
    Preference,
    /// Known fact (labeled +5).
    Fact,
    /// User directive (labeled +9).
    Instruction,
    /// Contextual note (labeled +3).
    Context,
    /// Ephemeral working memory — gets no training label (sentinel 0) and is
    /// routed to the archive instead.
    Working,
    /// Permanent pinned knowledge (labeled +10, highest).
    Pinned,
}
|
| 52 |
+
|
| 53 |
+
/// A single agent memory record as stored in the LMDB "memory" sub-DB
/// (bincode-serialized — field order and types must match the writer exactly).
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct MemoryEntry {
    /// Unique memory id.
    pub id: String,
    /// Memory text; exact-duplicate grouping hashes this field.
    pub content: String,
    /// Classification; drives labels, scoring, and categorization.
    pub memory_type: MemoryType,
    /// Free-form tags; categorization looks for "gate"/"spf", "tool:", "code", etc.
    pub tags: Vec<String>,
    /// Originating subsystem — presumably a module or conversation id; not
    /// interpreted by this tool (TODO confirm against agent_state.rs).
    pub source: String,
    /// Creation time — assumed UNIX seconds, matching last_accessed/expires_at.
    pub created_at: u64,
    /// Last access time (UNIX seconds; used for the recency score).
    pub last_accessed: u64,
    /// Access counter (capped at 10 when scoring).
    pub access_count: u64,
    /// Relevance in the writer's own scale; weighted 0.3 in score_memory().
    pub relevance: f64,
    /// Expiry time in UNIX seconds; 0 means "never expires".
    pub expires_at: u64,
}
|
| 66 |
+
|
| 67 |
+
/// TrainingSignal — matches gate_training.rs EXACTLY (no `weight` field).
/// Parsed from JSON stored under state-DB `tlog:*` keys.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct TrainingSignal {
    /// Tool the gate decision was about.
    pub tool: String,
    /// Signal origin recorded by the gate.
    pub source: String,
    /// Whether the gate allowed the call.
    pub allowed: bool,
    /// Decision status string ("allowed"/"blocked").
    pub status: String,
    /// Decision duration in milliseconds.
    pub duration_ms: u64,
    /// Timestamp as recorded by the writer (string form).
    pub timestamp: String,
    /// True if the user overrode the gate decision.
    pub user_override: bool,
    /// True if the decision was later judged a false positive.
    pub false_positive: bool,
    /// Recent call count at decision time.
    pub recent_call_count: u32,
    /// Tools invoked shortly before this one.
    pub preceding_tools: Vec<String>,
    /// Adversarial score in [0, 1]; drives the label/weight ladders below.
    /// Defaults to 0 for legacy records that lack the field.
    #[serde(default)]
    pub evil_score: f32,
}
|
| 83 |
+
|
| 84 |
+
// ============================================================================
// Output: consolidated training example — portable format for any model
// ============================================================================

#[derive(Debug, Clone, Serialize)]
pub struct ConsolidatedExample {
    /// Signed label: -10 (strongest avoid) to +10 (strong use). NO ZERO.
    pub label: i32,
    /// Training weight, clamped to the 1–8x range (see clamp_weight()).
    pub weight: f32,
    /// Tool or action name
    pub tool: String,
    /// Context: what happened, relevant details
    pub context: String,
    /// Outcome: "allowed", "blocked", or free text for memories
    pub outcome: String,
    /// Source: "tlog" (gate decision) or "memory" (agent memory)
    pub source_type: String,
    /// Memory category: edge_case, gate_context, user_intent, code_structure, reference
    pub category: String,
    /// How many occurrences were consolidated into this entry
    pub occurrence_count: u64,
    /// Consolidated signal score: base memory score × log-scaled occurrence
    /// factor (see consolidate_duplicates()), or weight × (1 + evil) for tlogs.
    pub signal_strength: f64,
}
|
| 109 |
+
|
| 110 |
+
// ============================================================================
// Output: memory catalog for brain re-index
// ============================================================================

/// One catalog row describing a memory for downstream brain re-indexing.
/// NOTE(review): not constructed in the visible portion of this file —
/// presumably populated by the (truncated) main(); confirm before changing.
#[derive(Debug, Clone, Serialize)]
pub struct MemoryCatalog {
    /// Memory id (from MemoryEntry::id).
    pub id: String,
    /// Memory content text.
    pub content: String,
    /// Category — presumably from categorize_memory(); TODO confirm in main().
    pub category: String,
    /// Tags carried over from the memory entry.
    pub tags: Vec<String>,
    /// Source kind string — semantics set by the writer; TODO confirm.
    pub source_type: String,
    /// MemoryType rendered as a string.
    pub memory_type: String,
    /// Relevance score carried from the memory entry.
    pub relevance: f64,
}
|
| 124 |
+
|
| 125 |
+
// ============================================================================
// Constants
// ============================================================================

/// LMDB map-size cap used when opening the environment (1 GiB).
const MAX_DB_SIZE: usize = 1024 * 1024 * 1024; // 1GB
/// Absolute output directory for the generated JSONL files.
const OUTPUT_DIR: &str = "/data/data/com.termux/files/home/SPFsmartGATE/LIVE/TMP/stoneshell-brain/training_data";
|
| 131 |
+
|
| 132 |
+
// ============================================================================
|
| 133 |
+
// LABEL & WEIGHT FUNCTIONS
|
| 134 |
+
// ============================================================================
|
| 135 |
+
|
| 136 |
+
/// Signed label for training signals. NO ZERO — every signal pushes.
|
| 137 |
+
/// 20 levels: -10 (worst) to +10 (best) with improved granularity.
|
| 138 |
+
fn label_for_training_signal(sig: &TrainingSignal) -> i32 {
|
| 139 |
+
if sig.false_positive && sig.evil_score > 0.95 { return -10; }
|
| 140 |
+
if sig.evil_score > 0.95 { return -10; }
|
| 141 |
+
if sig.evil_score > 0.90 { return -9; }
|
| 142 |
+
if sig.evil_score > 0.85 { return -8; }
|
| 143 |
+
if sig.evil_score > 0.80 { return -7; }
|
| 144 |
+
if sig.evil_score > 0.75 { return -6; }
|
| 145 |
+
if sig.evil_score > 0.70 || (sig.false_positive && sig.evil_score > 0.70) { return -5; }
|
| 146 |
+
if sig.evil_score > 0.65 { return -4; }
|
| 147 |
+
if sig.evil_score > 0.60 || sig.false_positive { return -3; }
|
| 148 |
+
if sig.evil_score > 0.40 || sig.false_positive || (sig.user_override && !sig.allowed) { return -2; }
|
| 149 |
+
if !sig.allowed { return -1; }
|
| 150 |
+
if sig.user_override { return 2; } // bare user override
|
| 151 |
+
1 // regular allow — low positive
|
| 152 |
+
}
|
| 153 |
+
|
| 154 |
+
/// Base training weight for a single signal
|
| 155 |
+
fn weight_for_signal(sig: &TrainingSignal) -> f32 {
|
| 156 |
+
if sig.evil_score > 0.95 { return 8.0; }
|
| 157 |
+
if sig.evil_score > 0.90 { return 7.5; }
|
| 158 |
+
if sig.evil_score > 0.85 { return 7.0; }
|
| 159 |
+
if sig.evil_score > 0.80 { return 6.5; }
|
| 160 |
+
if sig.evil_score > 0.75 { return 6.0; }
|
| 161 |
+
if sig.evil_score > 0.70 { return 5.5; }
|
| 162 |
+
if sig.evil_score > 0.65 { return 5.0; }
|
| 163 |
+
if sig.evil_score > 0.60 { return 4.5; }
|
| 164 |
+
if sig.evil_score > 0.40 { return 4.0; }
|
| 165 |
+
if sig.false_positive { return 4.0; }
|
| 166 |
+
if sig.user_override { return 2.0; }
|
| 167 |
+
1.0
|
| 168 |
+
}
|
| 169 |
+
|
| 170 |
+
/// Inferred label for memory entries (no explicit signal fields)
/// Maps MemoryType to 20-level scale
///
/// Returns 0 ONLY for `Working` memories: despite the scale's NO-ZERO rule,
/// 0 is used here as a sentinel meaning "no training label" — callers skip
/// these entries (`if label == 0 { continue; }`) so they are archived,
/// never trained on.
fn label_for_memory(mem: &MemoryEntry) -> i32 {
    match mem.memory_type {
        MemoryType::Pinned => 10,      // Highest positive — permanent knowledge
        MemoryType::Instruction => 9,  // User directive — high trust
        MemoryType::Fact => 5,         // Known fact — moderate positive
        MemoryType::Preference => 4,   // User preference — positive
        MemoryType::Context => 3,      // Contextual — mild positive
        MemoryType::Working => 0,      // expired TTL — goes to archive (NO LABEL)
    }
}
|
| 182 |
+
|
| 183 |
+
/// BAD DATA LOOP SAFETY: If a +1 signal degrades (denied/fails), flip to -1.
/// Negative and failed-free labels pass through untouched; zero is skipped.
#[allow(dead_code)]
fn degrade_positive(old_label: i32, outcome_failed: bool) -> i32 {
    if outcome_failed && old_label > 0 {
        // Any positive label collapses straight to a block signal.
        -1
    } else {
        old_label
    }
}
|
| 191 |
+
|
| 192 |
+
/// Clamp weight to valid training range [1-8x]. Critical: values outside range
/// break FLINT training. Max range is -10 to +10 for labels, 1-8x for weights.
///
/// NOTE: deliberately `max().min()` rather than `f32::clamp` — f32::max/min
/// ignore a NaN operand, so a NaN input degrades safely to 1.0 here, whereas
/// `clamp` would propagate the NaN into the training data.
fn clamp_weight(w: f32) -> f32 {
    w.max(1.0).min(8.0)
}
|
| 197 |
+
|
| 198 |
+
// ================================================================================
|
| 199 |
+
// TODO: degrade_positive() usage — belongs in BLOCK 0 AUTO (live pruning)
|
| 200 |
+
// NOT wired in prune_memories.rs (batch tool) — wire into flint_memory.rs instead
|
| 201 |
+
// Trigger: When recalled memory leads to: gate block, tool failure, or user correction
|
| 202 |
+
// Rule: low-level negatives accumulating = bad data loop. Must flip to -1.
|
| 203 |
+
// ================================================================================
|
| 204 |
+
|
| 205 |
+
/// Value score for ranking memories (not training weight)
|
| 206 |
+
fn score_memory(mem: &MemoryEntry) -> f64 {
|
| 207 |
+
let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs() as f64 + 1.0;
|
| 208 |
+
let recency = (mem.last_accessed as f64 / now) * 0.3;
|
| 209 |
+
let relevance = mem.relevance * 0.3;
|
| 210 |
+
let access = (mem.access_count as f64).min(10.0) * 0.1;
|
| 211 |
+
let type_bonus = match mem.memory_type {
|
| 212 |
+
MemoryType::Pinned => 3.0,
|
| 213 |
+
MemoryType::Instruction => 2.0,
|
| 214 |
+
MemoryType::Fact | MemoryType::Preference => 1.0,
|
| 215 |
+
_ => 0.5,
|
| 216 |
+
};
|
| 217 |
+
recency + relevance + access + type_bonus
|
| 218 |
+
}
|
| 219 |
+
|
| 220 |
+
/// Content hash for grouping exact duplicates.
///
/// Hashes the raw UTF-8 bytes with DefaultHasher; identical strings always
/// collapse to the same bucket key.
fn content_hash(content: &str) -> u64 {
    let mut hasher = std::collections::hash_map::DefaultHasher::new();
    hasher.write(content.as_bytes());
    hasher.finish()
}
|
| 226 |
+
|
| 227 |
+
/// Categorize a memory entry for the brain catalog
|
| 228 |
+
/// Priority: Pinned (reference) → Instruction/Preference (user_intent) → Fact (reference) → Gate/SPF tags (gate_context) → Code tags (code_structure) → context
|
| 229 |
+
fn categorize_memory(mem: &MemoryEntry) -> &'static str {
|
| 230 |
+
// Pinned facts and references — highest priority (label 10)
|
| 231 |
+
if matches!(mem.memory_type, MemoryType::Pinned) {
|
| 232 |
+
return "reference";
|
| 233 |
+
}
|
| 234 |
+
// User preferences and instructions (label 9, 4)
|
| 235 |
+
if matches!(mem.memory_type, MemoryType::Preference | MemoryType::Instruction) {
|
| 236 |
+
return "user_intent";
|
| 237 |
+
}
|
| 238 |
+
// Fact memories (label 5)
|
| 239 |
+
if matches!(mem.memory_type, MemoryType::Fact) {
|
| 240 |
+
return "reference";
|
| 241 |
+
}
|
| 242 |
+
// Gate/SPF context — check tags AFTER memory_type
|
| 243 |
+
if mem.tags.iter().any(|t| t.contains("gate") || t.contains("gateway") || t.contains("spf")) {
|
| 244 |
+
return "gate_context";
|
| 245 |
+
}
|
| 246 |
+
// Code structure knowledge
|
| 247 |
+
if mem.tags.iter().any(|t| t.starts_with("tool:") || t.contains("code") || t.contains("function")) {
|
| 248 |
+
return "code_structure";
|
| 249 |
+
}
|
| 250 |
+
// Everything else is contextual
|
| 251 |
+
"context"
|
| 252 |
+
}
|
| 253 |
+
|
| 254 |
+
/// Consolidate duplicate memories: count occurrences → scale weight
|
| 255 |
+
/// Returns deduped entries plus consolidated data
|
| 256 |
+
fn consolidate_duplicates(mems: &[MemoryEntry]) -> Vec<(&MemoryEntry, u64, f64)> {
|
| 257 |
+
// Group by content hash
|
| 258 |
+
let mut groups: BTreeMap<u64, Vec<&MemoryEntry>> = BTreeMap::new();
|
| 259 |
+
for mem in mems {
|
| 260 |
+
let h = content_hash(&mem.content);
|
| 261 |
+
groups.entry(h).or_default().push(mem);
|
| 262 |
+
}
|
| 263 |
+
|
| 264 |
+
// For each group: pick the best representative (highest score), count occurrences
|
| 265 |
+
groups.into_iter().map(|(_hash, members)| {
|
| 266 |
+
let count = members.len() as u64;
|
| 267 |
+
// Best representative = highest score
|
| 268 |
+
let best = members.iter().max_by(|a, b| {
|
| 269 |
+
score_memory(a).partial_cmp(&score_memory(b)).unwrap_or(std::cmp::Ordering::Equal)
|
| 270 |
+
}).unwrap();
|
| 271 |
+
|
| 272 |
+
// Consolidated score: base_score × log(occurrences) for diminishing returns
|
| 273 |
+
// but still meaningful: 5 copies = ~1.6x, 50 copies = ~3.9x, 100 copies = ~4.6x
|
| 274 |
+
let base_score = score_memory(best);
|
| 275 |
+
let consolidated_score = base_score * (1.0 + (count as f64).ln());
|
| 276 |
+
|
| 277 |
+
(*best, count, consolidated_score)
|
| 278 |
+
}).collect()
|
| 279 |
+
}
|
| 280 |
+
|
| 281 |
+
fn find_lmdb_path() -> Result<String> {
|
| 282 |
+
let candidates = [
|
| 283 |
+
"LIVE/LMDB5/LMDB5.DB",
|
| 284 |
+
"/data/data/com.termux/files/home/SPFsmartGATE/LIVE/LMDB5/LMDB5.DB",
|
| 285 |
+
];
|
| 286 |
+
for p in candidates {
|
| 287 |
+
if Path::new(p).exists() {
|
| 288 |
+
return Ok(p.to_string());
|
| 289 |
+
}
|
| 290 |
+
}
|
| 291 |
+
Err(anyhow::anyhow!(
|
| 292 |
+
"LMDB5.DB not found. Run from SPFsmartGATE/ directory."
|
| 293 |
+
))
|
| 294 |
+
}
|
| 295 |
+
|
| 296 |
+
// ============================================================================
|
| 297 |
+
// MAIN
|
| 298 |
+
// ============================================================================
|
| 299 |
+
|
| 300 |
+
fn main() -> Result<()> {
|
| 301 |
+
let lmdb_path = find_lmdb_path()?;
|
| 302 |
+
println!("[*] Opening LMDB at {}", lmdb_path);
|
| 303 |
+
|
| 304 |
+
let env = unsafe {
|
| 305 |
+
EnvOpenOptions::new()
|
| 306 |
+
.map_size(MAX_DB_SIZE)
|
| 307 |
+
.max_dbs(8)
|
| 308 |
+
.open(Path::new(&lmdb_path))?
|
| 309 |
+
};
|
| 310 |
+
|
| 311 |
+
// Open sub-DBs
|
| 312 |
+
let rtxn = env.read_txn()?;
|
| 313 |
+
let memory_db: Database<Str, SerdeBincode<MemoryEntry>> =
|
| 314 |
+
env.open_database(&rtxn, Some("memory"))?
|
| 315 |
+
.ok_or_else(|| anyhow::anyhow!("memory sub-DB not found"))?;
|
| 316 |
+
let state_db: Database<Str, Str> =
|
| 317 |
+
env.open_database(&rtxn, Some("state"))?
|
| 318 |
+
.ok_or_else(|| anyhow::anyhow!("state sub-DB not found"))?;
|
| 319 |
+
|
| 320 |
+
// ========================================================================
|
| 321 |
+
// DUMP
|
| 322 |
+
// ========================================================================
|
| 323 |
+
println!("[*] Dumping memories...");
|
| 324 |
+
let mut all_memories: Vec<MemoryEntry> = Vec::new();
|
| 325 |
+
for result in memory_db.iter(&rtxn)? {
|
| 326 |
+
let (_, entry) = result?;
|
| 327 |
+
all_memories.push(entry);
|
| 328 |
+
}
|
| 329 |
+
println!(" Found {} memories", all_memories.len());
|
| 330 |
+
|
| 331 |
+
println!("[*] Dumping tlog:* state keys...");
|
| 332 |
+
let mut all_tlogs: Vec<TrainingSignal> = Vec::new();
|
| 333 |
+
for result in state_db.iter(&rtxn)? {
|
| 334 |
+
let (key, value) = result?;
|
| 335 |
+
if key.starts_with("tlog:") {
|
| 336 |
+
if let Ok(signal) = serde_json::from_str::<TrainingSignal>(value) {
|
| 337 |
+
all_tlogs.push(signal);
|
| 338 |
+
}
|
| 339 |
+
}
|
| 340 |
+
}
|
| 341 |
+
println!(" Found {} tlog entries", all_tlogs.len());
|
| 342 |
+
|
| 343 |
+
// ========================================================================
|
| 344 |
+
// 01: Expire TTL cleanup (separate expired from active)
|
| 345 |
+
// ========================================================================
|
| 346 |
+
println!("\n[01] Expire TTL cleanup...");
|
| 347 |
+
let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs();
|
| 348 |
+
let _before = all_memories.len();
|
| 349 |
+
let mut expired_memories: Vec<MemoryEntry> = Vec::new();
|
| 350 |
+
all_memories.retain(|m| {
|
| 351 |
+
if m.expires_at != 0 && m.expires_at < now {
|
| 352 |
+
expired_memories.push(m.clone());
|
| 353 |
+
false
|
| 354 |
+
} else {
|
| 355 |
+
true
|
| 356 |
+
}
|
| 357 |
+
});
|
| 358 |
+
println!(" Expired: {}. Active: {}.", expired_memories.len(), all_memories.len());
|
| 359 |
+
|
| 360 |
+
// ========================================================================
|
| 361 |
+
// 02: Consolidate duplicates (multiples → weight, not discard)
|
| 362 |
+
// ========================================================================
|
| 363 |
+
println!("\n[02] Consolidate duplicates (multiples → signal weight)...");
|
| 364 |
+
let consolidated = consolidate_duplicates(&all_memories);
|
| 365 |
+
|
| 366 |
+
// Separate by category
|
| 367 |
+
let mut edge: Vec<(&MemoryEntry, u64, f64)> = Vec::new();
|
| 368 |
+
let mut gate: Vec<(&MemoryEntry, u64, f64)> = Vec::new();
|
| 369 |
+
let mut regular: Vec<(&MemoryEntry, u64, f64)> = Vec::new();
|
| 370 |
+
|
| 371 |
+
for (mem, count, score) in consolidated {
|
| 372 |
+
let cat = categorize_memory(mem);
|
| 373 |
+
match cat {
|
| 374 |
+
"gate_context" => gate.push((mem, count, score)),
|
| 375 |
+
_ if matches!(mem.memory_type, MemoryType::Pinned | MemoryType::Instruction)
|
| 376 |
+
|| score > 5.0
|
| 377 |
+
|| mem.access_count > 5 =>
|
| 378 |
+
{
|
| 379 |
+
edge.push((mem, count, score));
|
| 380 |
+
}
|
| 381 |
+
_ => regular.push((mem, count, score)),
|
| 382 |
+
}
|
| 383 |
+
}
|
| 384 |
+
|
| 385 |
+
println!(" Before: {} memories", all_memories.len());
|
| 386 |
+
println!(" Unique patterns: {}", edge.len() + gate.len() + regular.len());
|
| 387 |
+
println!(" Duplicates collapsed: {}", all_memories.len() - (edge.len() + gate.len() + regular.len()));
|
| 388 |
+
println!(" Edge (high-value): {}", edge.len());
|
| 389 |
+
println!(" Gate context: {}", gate.len());
|
| 390 |
+
println!(" Regular: {}", regular.len());
|
| 391 |
+
|
| 392 |
+
// ========================================================================
|
| 393 |
+
// 03: Build consolidated training examples
|
| 394 |
+
// ========================================================================
|
| 395 |
+
println!("\n[03] Building consolidated examples...");
|
| 396 |
+
let mut pruned: Vec<ConsolidatedExample> = Vec::new();
|
| 397 |
+
|
| 398 |
+
// Tlogs: explicit labels from TrainingSignal
|
| 399 |
+
for sig in &all_tlogs {
|
| 400 |
+
let label = label_for_training_signal(sig);
|
| 401 |
+
let weight = weight_for_signal(sig);
|
| 402 |
+
let outcome = if sig.allowed { "allowed" } else { "blocked" }.to_string();
|
| 403 |
+
let context = format!(
|
| 404 |
+
"Tool: {}. Source: {}. Duration: {}ms. Overrides: {}. Preceding: {}.",
|
| 405 |
+
sig.tool, sig.source, sig.duration_ms,
|
| 406 |
+
if sig.user_override { "yes" } else { "no" },
|
| 407 |
+
sig.preceding_tools.join(", ")
|
| 408 |
+
);
|
| 409 |
+
pruned.push(ConsolidatedExample {
|
| 410 |
+
label, weight,
|
| 411 |
+
tool: sig.tool.clone(),
|
| 412 |
+
context, outcome,
|
| 413 |
+
source_type: "tlog".to_string(),
|
| 414 |
+
category: "gate_decision".to_string(),
|
| 415 |
+
occurrence_count: 1,
|
| 416 |
+
signal_strength: weight as f64 * (1.0 + sig.evil_score as f64),
|
| 417 |
+
});
|
| 418 |
+
}
|
| 419 |
+
|
| 420 |
+
// Edge memories: highest priority, full inclusion
|
| 421 |
+
for (mem, count, consolidated_score) in &edge {
|
| 422 |
+
let label = label_for_memory(mem);
|
| 423 |
+
if label == 0 { continue; }
|
| 424 |
+
let cat = categorize_memory(mem);
|
| 425 |
+
// Weight: occurrence count scaled, clamped to 1-8x range
|
| 426 |
+
let raw_weight = (*count as f32).log2().min(3.0); // log2(8)=3, cap at 8x
|
| 427 |
+
pruned.push(ConsolidatedExample {
|
| 428 |
+
label,
|
| 429 |
+
weight: clamp_weight(raw_weight + 1.0), // +1 base, up to 8x
|
| 430 |
+
tool: match mem.memory_type {
|
| 431 |
+
MemoryType::Instruction => "instruction".to_string(),
|
| 432 |
+
MemoryType::Pinned => "pinned_fact".to_string(),
|
| 433 |
+
_ => "memory".to_string(),
|
| 434 |
+
},
|
| 435 |
+
context: mem.tags.join(", "),
|
| 436 |
+
outcome: mem.content.clone(),
|
| 437 |
+
source_type: "memory".to_string(),
|
| 438 |
+
category: cat.to_string(),
|
| 439 |
+
occurrence_count: *count,
|
| 440 |
+
signal_strength: *consolidated_score,
|
| 441 |
+
});
|
| 442 |
+
}
|
| 443 |
+
|
| 444 |
+
// Gate memories: full inclusion (first baseline needs all data)
|
| 445 |
+
for (mem, count, consolidated_score) in &gate {
|
| 446 |
+
let label = label_for_memory(mem);
|
| 447 |
+
if label == 0 { continue; }
|
| 448 |
+
let cat = categorize_memory(mem);
|
| 449 |
+
let raw_weight = (*count as f32).log2().min(3.0);
|
| 450 |
+
pruned.push(ConsolidatedExample {
|
| 451 |
+
label,
|
| 452 |
+
weight: clamp_weight(raw_weight + 1.0),
|
| 453 |
+
tool: "gate_context".to_string(),
|
| 454 |
+
context: mem.tags.join(", "),
|
| 455 |
+
outcome: mem.content.clone(),
|
| 456 |
+
source_type: "memory".to_string(),
|
| 457 |
+
category: cat.to_string(),
|
| 458 |
+
occurrence_count: *count,
|
| 459 |
+
signal_strength: *consolidated_score,
|
| 460 |
+
});
|
| 461 |
+
}
|
| 462 |
+
|
| 463 |
+
// Regular memories: full inclusion
|
| 464 |
+
for (mem, count, consolidated_score) in ®ular {
|
| 465 |
+
let label = label_for_memory(mem);
|
| 466 |
+
if label == 0 { continue; }
|
| 467 |
+
let cat = categorize_memory(mem);
|
| 468 |
+
let raw_weight = (*count as f32).log2().min(3.0);
|
| 469 |
+
pruned.push(ConsolidatedExample {
|
| 470 |
+
label,
|
| 471 |
+
weight: clamp_weight(raw_weight + 1.0),
|
| 472 |
+
tool: "memory".to_string(),
|
| 473 |
+
context: mem.tags.join(", "),
|
| 474 |
+
outcome: mem.content.clone(),
|
| 475 |
+
source_type: "memory".to_string(),
|
| 476 |
+
category: cat.to_string(),
|
| 477 |
+
occurrence_count: *count,
|
| 478 |
+
signal_strength: *consolidated_score,
|
| 479 |
+
});
|
| 480 |
+
}
|
| 481 |
+
|
| 482 |
+
// Sort by signal_strength (highest = most important first)
|
| 483 |
+
pruned.sort_by(|a, b| b.signal_strength.partial_cmp(&a.signal_strength)
|
| 484 |
+
.unwrap_or(std::cmp::Ordering::Equal));
|
| 485 |
+
|
| 486 |
+
// ========================================================================
|
| 487 |
+
// 04: Build memory catalog for brain re-index
|
| 488 |
+
// ========================================================================
|
| 489 |
+
println!("\n[04] Building memory catalog for brain re-index...");
|
| 490 |
+
let catalog: Vec<MemoryCatalog> = all_memories.iter()
|
| 491 |
+
.map(|m| MemoryCatalog {
|
| 492 |
+
id: m.id.clone(),
|
| 493 |
+
content: m.content.clone(),
|
| 494 |
+
category: categorize_memory(m).to_string(),
|
| 495 |
+
tags: m.tags.clone(),
|
| 496 |
+
source_type: m.source.clone(),
|
| 497 |
+
memory_type: format!("{:?}", m.memory_type),
|
| 498 |
+
relevance: m.relevance,
|
| 499 |
+
})
|
| 500 |
+
.collect();
|
| 501 |
+
|
| 502 |
+
// ========================================================================
|
| 503 |
+
// 05: Write output files
|
| 504 |
+
// ========================================================================
|
| 505 |
+
println!("\n[05] Writing output files...");
|
| 506 |
+
let raw_dir = format!("{}/raw", OUTPUT_DIR);
|
| 507 |
+
fs::create_dir_all(&raw_dir)?;
|
| 508 |
+
|
| 509 |
+
// Consolidated training survivors
|
| 510 |
+
let out_path = format!("{}/raw/pruned_memories.jsonl", OUTPUT_DIR);
|
| 511 |
+
let mut w = std::io::BufWriter::new(fs::File::create(&out_path)?);
|
| 512 |
+
for entry in &pruned {
|
| 513 |
+
serde_json::to_writer(&mut w, entry)?;
|
| 514 |
+
w.write_all(b"\n")?;
|
| 515 |
+
}
|
| 516 |
+
drop(w);
|
| 517 |
+
println!(" Training data: {} entries → {}", pruned.len(), out_path);
|
| 518 |
+
|
| 519 |
+
// Archived (expired/TTL-cleaned, parked not deleted)
|
| 520 |
+
let archive_path = format!("{}/raw/archived_memories.jsonl", OUTPUT_DIR);
|
| 521 |
+
let mut aw = std::io::BufWriter::new(fs::File::create(&archive_path)?);
|
| 522 |
+
for mem in &expired_memories {
|
| 523 |
+
serde_json::to_writer(&mut aw, &serde_json::json!({
|
| 524 |
+
"id": mem.id,
|
| 525 |
+
"type": format!("{:?}", mem.memory_type),
|
| 526 |
+
"content": mem.content,
|
| 527 |
+
"tags": mem.tags,
|
| 528 |
+
"relevance": mem.relevance,
|
| 529 |
+
"archived": "ttl_expired"
|
| 530 |
+
}))?;
|
| 531 |
+
aw.write_all(b"\n")?;
|
| 532 |
+
}
|
| 533 |
+
drop(aw);
|
| 534 |
+
println!(" Archive: {} entries → {}", expired_memories.len(), archive_path);
|
| 535 |
+
|
| 536 |
+
// Memory catalog for brain re-index
|
| 537 |
+
let catalog_path = format!("{}/raw/memory_catalog.jsonl", OUTPUT_DIR);
|
| 538 |
+
let mut cw = std::io::BufWriter::new(fs::File::create(&catalog_path)?);
|
| 539 |
+
for entry in &catalog {
|
| 540 |
+
serde_json::to_writer(&mut cw, entry)?;
|
| 541 |
+
cw.write_all(b"\n")?;
|
| 542 |
+
}
|
| 543 |
+
drop(cw);
|
| 544 |
+
println!(" Catalog: {} entries → {}", catalog.len(), catalog_path);
|
| 545 |
+
|
| 546 |
+
// ========================================================================
|
| 547 |
+
// 06: Index pruned memories to brain
|
| 548 |
+
// ========================================================================
|
| 549 |
+
println!("\n[06] Indexing training data to brain...");
|
| 550 |
+
let brain_index_path = format!("{}/raw/brain_index_pruned.jsonl", OUTPUT_DIR);
|
| 551 |
+
let mut bw = std::io::BufWriter::new(fs::File::create(&brain_index_path)?);
|
| 552 |
+
for entry in &pruned {
|
| 553 |
+
// Format for brain: text = context + outcome + tool + category
|
| 554 |
+
let text = format!(
|
| 555 |
+
"[{}] {} | {} | {} | {}",
|
| 556 |
+
entry.source_type,
|
| 557 |
+
entry.category,
|
| 558 |
+
entry.tool,
|
| 559 |
+
entry.context,
|
| 560 |
+
entry.outcome
|
| 561 |
+
);
|
| 562 |
+
serde_json::to_writer(&mut bw, &serde_json::json!({
|
| 563 |
+
"text": text,
|
| 564 |
+
"label": entry.label,
|
| 565 |
+
"weight": entry.weight,
|
| 566 |
+
"tool": entry.tool,
|
| 567 |
+
"source": entry.source_type
|
| 568 |
+
}))?;
|
| 569 |
+
bw.write_all(b"\n")?;
|
| 570 |
+
}
|
| 571 |
+
drop(bw);
|
| 572 |
+
println!(" Brain index: {} entries → {}", pruned.len(), brain_index_path);
|
| 573 |
+
println!(" NOTE: Run 'spf_brain_index' tool on this file to add to brain collection");
|
| 574 |
+
|
| 575 |
+
// ========================================================================
|
| 576 |
+
// 07: Archive raw memories for future improved pruning
|
| 577 |
+
// ========================================================================
|
| 578 |
+
println!("\n[07] Archiving raw memories for future pruning...");
|
| 579 |
+
let catalog_path = format!("{}/raw/memory_catalog.jsonl", OUTPUT_DIR);
|
| 580 |
+
println!(" Raw archive: {} entries at {}", catalog.len(), catalog_path);
|
| 581 |
+
println!(" Safe for future improved pruning methods");
|
| 582 |
+
|
| 583 |
+
// ========================================================================
|
| 584 |
+
// 08: Prepare for FLINT training (tlog conversion)
|
| 585 |
+
// ========================================================================
|
| 586 |
+
println!("\n[08] FLINT training preparation...");
|
| 587 |
+
println!(" Training entries: {}", pruned.len());
|
| 588 |
+
println!(" Format: ConsolidatedExample (JSONL) → tlog:* LMDB keys");
|
| 589 |
+
println!(" Next step: Run jsonl_to_tlog tool to convert + inject into LMDB");
|
| 590 |
+
println!(" Then: spf_transformer_train() will read native tlog entries");
|
| 591 |
+
|
| 592 |
+
// ========================================================================
|
| 593 |
+
// Summary
|
| 594 |
+
// ========================================================================
|
| 595 |
+
let mut by_label: HashMap<i32, usize> = HashMap::new();
|
| 596 |
+
for e in &pruned {
|
| 597 |
+
*by_label.entry(e.label).or_default() += 1;
|
| 598 |
+
}
|
| 599 |
+
let by_source: HashMap<&str, usize> = pruned.iter()
|
| 600 |
+
.fold(HashMap::new(), |mut m, e| {
|
| 601 |
+
*m.entry(e.source_type.as_str()).or_default() += 1;
|
| 602 |
+
m
|
| 603 |
+
});
|
| 604 |
+
let by_category: HashMap<&str, usize> = pruned.iter()
|
| 605 |
+
.fold(HashMap::new(), |mut m, e| {
|
| 606 |
+
*m.entry(e.category.as_str()).or_default() += 1;
|
| 607 |
+
m
|
| 608 |
+
});
|
| 609 |
+
|
| 610 |
+
println!("\n[=] BLOCK 0 — MEMORY CONSOLIDATION COMPLETE");
|
| 611 |
+
println!(" Consolidated examples: {}", pruned.len());
|
| 612 |
+
println!(" Archived (TTL expired): {}", expired_memories.len());
|
| 613 |
+
println!(" Memory catalog entries: {}", catalog.len());
|
| 614 |
+
println!("\n By source:");
|
| 615 |
+
for (k, v) in &by_source {
|
| 616 |
+
println!(" {}: {}", k, v);
|
| 617 |
+
}
|
| 618 |
+
println!("\n By category:");
|
| 619 |
+
for (k, v) in &by_category {
|
| 620 |
+
println!(" {}: {}", k, v);
|
| 621 |
+
}
|
| 622 |
+
println!("\n By label (20-level scale):");
|
| 623 |
+
for &lbl in &[-10, -9, -8, -7, -6, -5, -4, -3, -2, -1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] {
|
| 624 |
+
if let Some(c) = by_label.get(&lbl) {
|
| 625 |
+
println!(" {:>3}: {}", lbl, c);
|
| 626 |
+
}
|
| 627 |
+
}
|
| 628 |
+
|
| 629 |
+
// Weight amplification summary
|
| 630 |
+
let total_occurrences: u64 = pruned.iter().map(|e| e.occurrence_count).sum();
|
| 631 |
+
let avg_weight: f64 = pruned.iter().map(|e| e.weight as f64).sum::<f64>() / pruned.len() as f64;
|
| 632 |
+
let max_weight = pruned.iter().max_by(|a, b| a.weight.partial_cmp(&b.weight).unwrap()).map(|e| e.weight).unwrap();
|
| 633 |
+
let max_dups = pruned.iter().max_by(|a, b| a.occurrence_count.cmp(&b.occurrence_count)).map(|e| e.occurrence_count).unwrap();
|
| 634 |
+
|
| 635 |
+
println!("\n Weight amplification from duplicates:");
|
| 636 |
+
println!(" Total occurrences consolidated: {}", total_occurrences);
|
| 637 |
+
println!(" Avg weight per example: {:.2}", avg_weight);
|
| 638 |
+
println!(" Max weight (single entry): {:.2}", max_weight);
|
| 639 |
+
println!(" Max occurrence count: {}", max_dups);
|
| 640 |
+
println!(" RULE: NO ZERO. Weight zero floors to +1. If +1 degrades, flips to -1 (skip zero).");
|
| 641 |
+
|
| 642 |
+
Ok(())
|
| 643 |
+
}
|