# node_search/memory_logic.py
import os
import json
import logging
from datetime import datetime, timezone
import re # For insight format validation
logger = logging.getLogger(__name__)
DATA_DIR = "app_data"
MEMORIES_FILE = os.path.join(DATA_DIR, "conversation_memories.jsonl") # JSON Lines format
RULES_FILE = os.path.join(DATA_DIR, "learned_rules.jsonl") # Rules/Insights, also JSON Lines
# Ensure data directory exists
os.makedirs(DATA_DIR, exist_ok=True)
# --- Rules/Insights Management ---
def load_rules_from_file() -> list[str]:
"""Loads rules (insights) from the JSON Lines file."""
rules = []
if not os.path.exists(RULES_FILE):
return rules
try:
with open(RULES_FILE, 'r', encoding='utf-8') as f:
for line in f:
if line.strip():
try:
                    # Each line should be a JSON object like {"text": "...", "added_at": "..."};
                    # older files may store a bare JSON string per line, handled below.
data = json.loads(line)
if "text" in data and isinstance(data["text"], str) and data["text"].strip():
rules.append(data["text"].strip())
elif isinstance(data, str): # If old format was just text per line
rules.append(data.strip())
except json.JSONDecodeError:
logger.warning(f"Skipping malformed JSON line in rules file: {line.strip()}")
logger.info(f"Loaded {len(rules)} rules from {RULES_FILE}")
except Exception as e:
logger.error(f"Error loading rules from {RULES_FILE}: {e}", exc_info=True)
    return sorted(set(rules))  # Ensure unique and sorted
def save_rule_to_file(rule_text: str) -> bool:
"""Saves a single rule (insight) to the JSON Lines file if it's new and valid."""
rule_text = rule_text.strip()
if not rule_text:
logger.warning("Attempted to save an empty rule.")
return False
# Validate format: [TYPE|SCORE] Text
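    # A conforming insight looks like this (illustrative example, not taken from stored data):
    #   [CORE_RULE|0.9] Always confirm destructive actions with the user.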
    if not re.match(r"\[(?:CORE_RULE|RESPONSE_PRINCIPLE|BEHAVIORAL_ADJUSTMENT|GENERAL_LEARNING)\|[\d.]+\]", rule_text, re.IGNORECASE):
logger.warning(f"Rule '{rule_text[:50]}...' has invalid format. Not saving.")
return False
current_rules = load_rules_from_file()
if rule_text in current_rules:
logger.info(f"Rule '{rule_text[:50]}...' already exists. Not saving duplicate.")
return False # Or True if "already exists" is considered success
try:
with open(RULES_FILE, 'a', encoding='utf-8') as f:
# Store as JSON object for potential future metadata
json.dump({"text": rule_text, "added_at": datetime.utcnow().isoformat()}, f)
f.write('\n')
logger.info(f"Saved new rule: {rule_text[:70]}...")
return True
except Exception as e:
logger.error(f"Error saving rule '{rule_text[:50]}...' to {RULES_FILE}: {e}", exc_info=True)
return False
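# Usage sketch (hypothetical rule text; the duplicate second call returns False):
#   save_rule_to_file("[RESPONSE_PRINCIPLE|0.75] Prefer concise answers.")  # -> True
#   save_rule_to_file("[RESPONSE_PRINCIPLE|0.75] Prefer concise answers.")  # -> False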
def delete_rule_from_file(rule_text_to_delete: str) -> bool:
    """Deletes a rule from the file, preserving metadata on the remaining entries."""
    rule_text_to_delete = rule_text_to_delete.strip()
    if not rule_text_to_delete:
        return False
    current_rules = load_rules_from_file()
    if rule_text_to_delete not in current_rules:
        logger.info(f"Rule '{rule_text_to_delete[:50]}...' not found for deletion.")
        return False
    try:
        # Re-read the raw JSON objects so "added_at" timestamps survive the rewrite.
        entries = []
        with open(RULES_FILE, 'r', encoding='utf-8') as f:
            for line in f:
                if not line.strip():
                    continue
                try:
                    entries.append(json.loads(line))
                except json.JSONDecodeError:
                    logger.warning(f"Dropping malformed JSON line during rewrite: {line.strip()}")
        with open(RULES_FILE, 'w', encoding='utf-8') as f:  # Overwrite with the filtered list
            for entry in entries:
                # Tolerate the old bare-string format alongside {"text": ..., "added_at": ...}.
                text = entry.get("text", "") if isinstance(entry, dict) else str(entry)
                if text.strip() == rule_text_to_delete:
                    continue
                json.dump(entry if isinstance(entry, dict) else {"text": text.strip()}, f)
                f.write('\n')
        logger.info(f"Deleted rule: {rule_text_to_delete[:70]}...")
        return True
    except Exception as e:
        logger.error(f"Error deleting rule '{rule_text_to_delete[:50]}...' from {RULES_FILE}: {e}", exc_info=True)
        return False
# --- Conversation Memories Management ---
def load_memories_from_file() -> list[dict]:
"""Loads conversation memories from the JSON Lines file."""
memories = []
if not os.path.exists(MEMORIES_FILE):
return memories
try:
with open(MEMORIES_FILE, 'r', encoding='utf-8') as f:
for line in f:
if line.strip():
try:
mem_obj = json.loads(line)
# Basic validation for expected keys
if all(k in mem_obj for k in ["user_input", "bot_response", "metrics", "timestamp"]):
memories.append(mem_obj)
else:
logger.warning(f"Skipping memory object with missing keys: {line.strip()}")
except json.JSONDecodeError:
logger.warning(f"Skipping malformed JSON line in memories file: {line.strip()}")
logger.info(f"Loaded {len(memories)} memories from {MEMORIES_FILE}")
except Exception as e:
logger.error(f"Error loading memories from {MEMORIES_FILE}: {e}", exc_info=True)
# Sort by timestamp if needed, though append-only usually keeps order
return sorted(memories, key=lambda x: x.get("timestamp", ""))
def save_memory_to_file(user_input: str, bot_response: str, metrics: dict) -> bool:
"""Saves a conversation memory to the JSON Lines file."""
if not user_input or not bot_response: # Metrics can be empty
logger.warning("Attempted to save memory with empty user input or bot response.")
return False
memory_entry = {
"user_input": user_input,
"bot_response": bot_response,
"metrics": metrics,
"timestamp": datetime.utcnow().isoformat()
}
try:
with open(MEMORIES_FILE, 'a', encoding='utf-8') as f:
json.dump(memory_entry, f)
f.write('\n')
logger.info(f"Saved new memory. User: {user_input[:50]}...")
return True
except Exception as e:
logger.error(f"Error saving memory to {MEMORIES_FILE}: {e}", exc_info=True)
return False
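# Usage sketch; the metrics dict is app-defined, so these keys are illustrative only:
#   save_memory_to_file("What is JSONL?", "One JSON object per line.",
#                       {"takeaway": "format question", "response_success_score": 0.9})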
def clear_all_rules() -> bool:
    """Deletes the rules file entirely so no learned insights remain."""
    try:
        if os.path.exists(RULES_FILE):
            os.remove(RULES_FILE)
        logger.info("All rules cleared.")
        return True
    except Exception as e:
        logger.error(f"Error clearing rules file {RULES_FILE}: {e}", exc_info=True)
        return False
def clear_all_memories() -> bool:
    """Deletes the memories file entirely so no conversation history remains."""
    try:
        if os.path.exists(MEMORIES_FILE):
            os.remove(MEMORIES_FILE)
        logger.info("All memories cleared.")
        return True
    except Exception as e:
        logger.error(f"Error clearing memories file {MEMORIES_FILE}: {e}", exc_info=True)
        return False
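if __name__ == "__main__":
    # Minimal smoke-test sketch, assuming a throwaway environment: it writes to the
    # real files under app_data/, so do not run it against data you want to keep.
    logging.basicConfig(level=logging.INFO)
    demo_rule = "[GENERAL_LEARNING|0.5] Demo rule used only by this smoke test."
    assert save_rule_to_file(demo_rule)
    assert demo_rule in load_rules_from_file()
    assert delete_rule_from_file(demo_rule)
    assert save_memory_to_file("ping", "pong", {"note": "demo metric"})
    assert load_memories_from_file()[-1]["bot_response"] == "pong"
    print("memory_logic smoke test passed.")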