"""
AI Historical Simulation Platform - Database Manager

This module provides comprehensive database management including vector database
integration, historical data management, user interaction logging, backup and
recovery systems.
"""

import asyncio
import json
import logging
import sqlite3
import time
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, List, Optional, Any, Tuple, Union
from dataclasses import dataclass, asdict
import numpy as np
import pickle
import hashlib
from contextlib import asynccontextmanager, contextmanager

try:
    import faiss
    FAISS_AVAILABLE = True
except ImportError:
    FAISS_AVAILABLE = False

try:
    import chromadb
    CHROMA_AVAILABLE = True
except ImportError:
    CHROMA_AVAILABLE = False

logger = logging.getLogger(__name__)


@dataclass
class DatabaseConfig:
    """Database configuration settings.

    Consumed by DatabaseManager; the defaults give a working single-node
    setup rooted at the current directory.
    """
    # Main SQLite file; its parent directory also holds vectors/ and backups/.
    db_path: str = "platform.db"
    vector_db_type: str = "faiss"  # faiss, chroma, or sqlite
    # Dimensionality of stored embedding vectors.
    vector_dimension: int = 10000
    # Cadence of the periodic backup task.
    backup_interval_hours: int = 24
    # Retention limit for backup archives (presumably enforced by
    # _cleanup_old_backups — implementation not visible here; confirm).
    max_backup_files: int = 7
    # NOTE(review): the three flags below are not referenced anywhere in the
    # visible code — confirm whether they are used elsewhere or are dead.
    enable_compression: bool = True
    enable_encryption: bool = False
    connection_pool_size: int = 10


@dataclass
class VectorMetadata:
    """Metadata stored alongside each vector in the vector database."""
    # Unique vector identifier (the same string used as the storage key).
    id: str
    # ID of the owning personality / historical figure.
    personality_id: str
    vector_type: str  # personality, memory, etc.
    # Creation time; serialized via isoformat() and restored with fromisoformat().
    timestamp: datetime
    # Free-form payload; the FAISS backend also stores its 'deleted'
    # tombstone flag inside this dict.
    metadata: Dict[str, Any]
    # Record version, defaults to 1.
    version: int = 1


class VectorDatabase:
    """Abstract base class for vector database implementations.

    Concrete backends (FAISS, SQLite, ...) override the async CRUD and
    search methods; this base only carries the shared configuration.
    """

    def __init__(self, dimension: int, db_path: str):
        # Expected dimensionality of every stored vector.
        self.dimension = dimension
        # Directory where the backend persists its data.
        self.db_path = db_path
        # Flipped to True by a concrete initialize() implementation.
        self.is_initialized = False

    async def initialize(self) -> None:
        """Prepare backend resources (indexes, tables, files)."""
        raise NotImplementedError

    async def store_vector(self, vector_id: str, vector: np.ndarray,
                          metadata: VectorMetadata) -> None:
        """Persist `vector` under `vector_id` together with its metadata."""
        raise NotImplementedError

    async def retrieve_vector(self, vector_id: str) -> Optional[Tuple[np.ndarray, VectorMetadata]]:
        """Return (vector, metadata) for `vector_id`, or None when absent."""
        raise NotImplementedError

    async def search_similar(self, query_vector: np.ndarray, k: int = 10,
                           filter_metadata: Optional[Dict[str, Any]] = None) -> List[Tuple[str, np.ndarray, float, VectorMetadata]]:
        """Return up to k (id, vector, similarity, metadata) matches."""
        raise NotImplementedError

    async def delete_vector(self, vector_id: str) -> bool:
        """Delete `vector_id`; return True when something was removed."""
        raise NotImplementedError

    async def cleanup(self) -> None:
        """Release resources / flush state. No-op by default."""
        pass


class FAISSVectorDatabase(VectorDatabase):
    """FAISS-based vector database implementation (exact L2 search).

    FAISS's flat index supports neither in-place update nor deletion, so:

    - updating an existing ID appends a new slot and retires the old one;
    - ``delete_vector`` only tombstones the entry (``'deleted'`` flag in
      its metadata dict);
    - retrieval and search skip retired/tombstoned slots.
    """

    def __init__(self, dimension: int, db_path: str):
        if not FAISS_AVAILABLE:
            raise ImportError("FAISS not available. Install with: pip install faiss-cpu")

        super().__init__(dimension, db_path)
        self.index = None
        # FAISS addresses vectors by dense integer slot; these maps tie the
        # slots to external string IDs and their metadata.
        self.metadata_store: Dict[int, VectorMetadata] = {}
        self.id_to_index: Dict[str, int] = {}
        self.index_to_id: Dict[int, str] = {}
        self.next_index = 0

    async def initialize(self) -> None:
        """Create a fresh IndexFlatL2 or reload a previously saved index."""
        # IndexFlatL2 = exact, exhaustive L2 search (no training required).
        self.index = faiss.IndexFlatL2(self.dimension)

        index_path = Path(self.db_path) / "faiss_index"
        metadata_path = Path(self.db_path) / "faiss_metadata.json"

        # Ensure the storage directory exists, including missing parents.
        index_path.parent.mkdir(parents=True, exist_ok=True)

        if index_path.exists() and metadata_path.exists():
            try:
                # Load FAISS index from disk.
                self.index = faiss.read_index(str(index_path))

                # Load the sidecar metadata file.
                with open(metadata_path, 'r') as f:
                    metadata_data = json.load(f)

                # Restore mapping structures. JSON object keys are always
                # strings, so slot keys must be converted back to int.
                self.next_index = metadata_data.get('next_index', 0)
                self.id_to_index = metadata_data.get('id_to_index', {})
                self.index_to_id = {int(k): v for k, v in metadata_data.get('index_to_id', {}).items()}

                for index_str, meta_dict in metadata_data.get('metadata_store', {}).items():
                    index = int(index_str)
                    meta_dict['timestamp'] = datetime.fromisoformat(meta_dict['timestamp'])
                    self.metadata_store[index] = VectorMetadata(**meta_dict)

                logger.info(f"Loaded FAISS index with {self.index.ntotal} vectors")

            except Exception as e:
                # Corrupt or incompatible persisted state: start empty.
                logger.error(f"Failed to load existing FAISS index: {e}")
                self.index = faiss.IndexFlatL2(self.dimension)

        self.is_initialized = True

    async def store_vector(self, vector_id: str, vector: np.ndarray,
                          metadata: VectorMetadata) -> None:
        """Store (or update) a vector in the FAISS index.

        FAISS cannot update a row in place: when ``vector_id`` already
        exists, the old slot is tombstoned and unmapped, and the new
        vector is appended under a fresh slot. Previously the old slot
        stayed mapped, so searches could return the stale vector.
        """
        if not self.is_initialized:
            await self.initialize()

        # Retire the previous slot for this ID so searches no longer
        # surface the stale vector under the same external ID.
        if vector_id in self.id_to_index:
            old_index = self.id_to_index[vector_id]
            if old_index in self.metadata_store:
                self.metadata_store[old_index].metadata['deleted'] = True
            self.index_to_id.pop(old_index, None)

        # Append the vector (FAISS expects float32 with shape (1, dim)).
        vector_normalized = vector.reshape(1, -1).astype(np.float32)
        self.index.add(vector_normalized)

        # Record the new slot's mappings and metadata.
        index = self.next_index
        self.id_to_index[vector_id] = index
        self.index_to_id[index] = vector_id
        self.metadata_store[index] = metadata
        self.next_index += 1

        # Periodically persist so a crash loses at most ~100 inserts.
        if self.next_index % 100 == 0:
            await self._save_index()

    async def retrieve_vector(self, vector_id: str) -> Optional[Tuple[np.ndarray, VectorMetadata]]:
        """Retrieve a vector by ID; tombstoned vectors read as absent."""
        if vector_id not in self.id_to_index:
            return None

        index = self.id_to_index[vector_id]
        metadata = self.metadata_store[index]

        # A soft-deleted vector must behave as if it were gone.
        if metadata.metadata.get('deleted'):
            return None

        # reconstruct() wants a plain Python int, not numpy's integer type.
        vector = self.index.reconstruct(int(index))

        return vector, metadata

    async def search_similar(self, query_vector: np.ndarray, k: int = 10,
                           filter_metadata: Optional[Dict[str, Any]] = None) -> List[Tuple[str, np.ndarray, float, VectorMetadata]]:
        """Search for similar vectors, skipping retired/tombstoned slots.

        Note: because filtering happens after the FAISS top-k search,
        fewer than k results may be returned even when more matches exist.
        """
        if not self.is_initialized or self.index.ntotal == 0:
            return []

        query_normalized = query_vector.reshape(1, -1).astype(np.float32)

        # Exact L2 search over the whole index.
        distances, indices = self.index.search(query_normalized, min(k, self.index.ntotal))

        results = []
        for distance, index in zip(distances[0], indices[0]):
            if index < 0:  # FAISS pads missing results with -1
                continue

            index = int(index)

            # Skip slots retired by updates (unmapped) or soft deletes.
            if index not in self.index_to_id:
                continue
            metadata = self.metadata_store[index]
            if metadata.metadata.get('deleted'):
                continue

            # Apply caller-supplied metadata filtering.
            if filter_metadata and not self._matches_filter(metadata, filter_metadata):
                continue

            vector = self.index.reconstruct(index)
            similarity = 1.0 / (1.0 + distance)  # Convert L2 distance to (0, 1] similarity
            results.append((self.index_to_id[index], vector, similarity, metadata))

        return results

    async def delete_vector(self, vector_id: str) -> bool:
        """Soft-delete a vector (flat FAISS indexes cannot remove rows).

        The slot stays physically in the index but is tombstoned so
        retrieval and search no longer return it.
        """
        if vector_id not in self.id_to_index:
            return False

        index = self.id_to_index[vector_id]

        # Mark as deleted in metadata.
        if index in self.metadata_store:
            self.metadata_store[index].metadata['deleted'] = True

        return True

    async def cleanup(self) -> None:
        """Flush the index and its metadata to disk."""
        if self.is_initialized:
            await self._save_index()

    def _matches_filter(self, metadata: VectorMetadata, filter_dict: Dict[str, Any]) -> bool:
        """Return True when metadata satisfies every filter key.

        Keys are looked up first as VectorMetadata attributes, then inside
        the free-form metadata dict; an unknown key fails the filter.
        """
        for key, value in filter_dict.items():
            if hasattr(metadata, key):
                if getattr(metadata, key) != value:
                    return False
            elif key in metadata.metadata:
                if metadata.metadata[key] != value:
                    return False
            else:
                return False
        return True

    async def _save_index(self) -> None:
        """Persist the FAISS index and a JSON sidecar with the mappings.

        Failures are logged but not raised: persistence is best-effort and
        must not break the in-memory store.
        """
        try:
            index_path = Path(self.db_path) / "faiss_index"
            metadata_path = Path(self.db_path) / "faiss_metadata.json"

            # Save the binary FAISS index.
            faiss.write_index(self.index, str(index_path))

            # Serialize mappings; slot keys become strings for JSON.
            metadata_data = {
                'next_index': self.next_index,
                'id_to_index': self.id_to_index,
                'index_to_id': {str(k): v for k, v in self.index_to_id.items()},
                'metadata_store': {}
            }

            for index, metadata in self.metadata_store.items():
                metadata_dict = asdict(metadata)
                metadata_dict['timestamp'] = metadata.timestamp.isoformat()
                metadata_data['metadata_store'][str(index)] = metadata_dict

            with open(metadata_path, 'w') as f:
                json.dump(metadata_data, f, indent=2)

        except Exception as e:
            logger.error(f"Failed to save FAISS index: {e}")

class SQLiteVectorDatabase(VectorDatabase):
    """SQLite-backed vector store with brute-force similarity search.

    Vectors are pickled into a BLOB column; ``search_similar`` loads the
    candidate rows and ranks them by cosine similarity in Python, so it
    is exact but O(n) per query — a dependency-free fallback backend.
    """

    def __init__(self, dimension: int, db_path: str):
        super().__init__(dimension, db_path)
        self.db_file = Path(db_path) / "vectors.db"

    async def initialize(self) -> None:
        """Create the vectors table and its lookup indexes if missing."""
        self.db_file.parent.mkdir(parents=True, exist_ok=True)

        with self._get_connection() as conn:
            conn.execute("""
                CREATE TABLE IF NOT EXISTS vectors (
                    id TEXT PRIMARY KEY,
                    personality_id TEXT,
                    vector_type TEXT,
                    vector_data BLOB,
                    metadata TEXT,
                    timestamp TEXT,
                    version INTEGER DEFAULT 1
                )
            """)

            conn.execute("""
                CREATE INDEX IF NOT EXISTS idx_personality_type 
                ON vectors(personality_id, vector_type)
            """)

            conn.execute("""
                CREATE INDEX IF NOT EXISTS idx_timestamp 
                ON vectors(timestamp)
            """)

        self.is_initialized = True

    @contextmanager
    def _get_connection(self):
        """Yield a connection that commits on success, rolls back on error."""
        conn = sqlite3.connect(str(self.db_file))
        try:
            yield conn
            conn.commit()
        except Exception:
            conn.rollback()
            raise
        finally:
            conn.close()

    async def store_vector(self, vector_id: str, vector: np.ndarray,
                          metadata: VectorMetadata) -> None:
        """Insert or replace a vector row keyed by vector_id."""
        if not self.is_initialized:
            await self.initialize()

        # NOTE: pickle is acceptable here because the BLOBs are locally
        # produced; never point this store at an untrusted database file.
        vector_blob = pickle.dumps(vector)

        # Serialize the timestamp explicitly as ISO-8601 so retrieval can
        # round-trip it with datetime.fromisoformat() (default=str alone
        # produced a space-separated repr).
        meta_dict = asdict(metadata)
        meta_dict['timestamp'] = metadata.timestamp.isoformat()
        metadata_json = json.dumps(meta_dict, default=str)

        with self._get_connection() as conn:
            conn.execute("""
                INSERT OR REPLACE INTO vectors 
                (id, personality_id, vector_type, vector_data, metadata, timestamp, version)
                VALUES (?, ?, ?, ?, ?, ?, ?)
            """, (
                vector_id,
                metadata.personality_id,
                metadata.vector_type,
                vector_blob,
                metadata_json,
                metadata.timestamp.isoformat(),
                metadata.version
            ))

    async def retrieve_vector(self, vector_id: str) -> Optional[Tuple[np.ndarray, VectorMetadata]]:
        """Return (vector, metadata) for vector_id, or None when absent."""
        # Lazily initialize so a read before any write does not hit a
        # database with no schema (previously raised OperationalError).
        if not self.is_initialized:
            await self.initialize()

        with self._get_connection() as conn:
            cursor = conn.execute(
                "SELECT vector_data, metadata FROM vectors WHERE id = ?",
                (vector_id,)
            )
            row = cursor.fetchone()

        if not row:
            return None

        return pickle.loads(row[0]), self._parse_metadata(row[1])

    async def search_similar(self, query_vector: np.ndarray, k: int = 10,
                           filter_metadata: Optional[Dict[str, Any]] = None) -> List[Tuple[str, np.ndarray, float, VectorMetadata]]:
        """Brute-force top-k cosine-similarity search.

        Only the personality_id / vector_type filter keys are pushed down
        to SQL; other filter keys are ignored by this backend.
        """
        if not self.is_initialized:
            await self.initialize()

        # Push the supported filters into the SQL WHERE clause.
        query = "SELECT id, vector_data, metadata FROM vectors"
        params: List[Any] = []

        if filter_metadata:
            conditions = []
            if 'personality_id' in filter_metadata:
                conditions.append("personality_id = ?")
                params.append(filter_metadata['personality_id'])
            if 'vector_type' in filter_metadata:
                conditions.append("vector_type = ?")
                params.append(filter_metadata['vector_type'])

            if conditions:
                query += " WHERE " + " AND ".join(conditions)

        # Fetch all candidate rows.
        with self._get_connection() as conn:
            cursor = conn.execute(query, params)
            rows = cursor.fetchall()

        # Score every candidate in Python (exact, O(n)).
        similarities = []
        for vector_id, vector_blob, metadata_json in rows:
            vector = pickle.loads(vector_blob)
            similarity = self._cosine_similarity(query_vector, vector)
            similarities.append((vector_id, vector, similarity, self._parse_metadata(metadata_json)))

        # Sort by similarity and return the top k.
        similarities.sort(key=lambda item: item[2], reverse=True)
        return similarities[:k]

    async def delete_vector(self, vector_id: str) -> bool:
        """Delete a vector row; return True when a row was removed."""
        if not self.is_initialized:
            await self.initialize()

        with self._get_connection() as conn:
            cursor = conn.execute("DELETE FROM vectors WHERE id = ?", (vector_id,))
            return cursor.rowcount > 0

    def _parse_metadata(self, metadata_json: str) -> VectorMetadata:
        """Rebuild a VectorMetadata from its JSON column representation."""
        metadata_dict = json.loads(metadata_json)
        metadata_dict['timestamp'] = datetime.fromisoformat(metadata_dict['timestamp'])
        return VectorMetadata(**metadata_dict)

    def _cosine_similarity(self, a: np.ndarray, b: np.ndarray) -> float:
        """Cosine similarity of two vectors; 0.0 when either has zero norm."""
        norm_a = np.linalg.norm(a)
        norm_b = np.linalg.norm(b)

        if norm_a == 0 or norm_b == 0:
            return 0.0

        # Cast to a plain Python float (np.dot yields a numpy scalar).
        return float(np.dot(a, b) / (norm_a * norm_b))


class DatabaseManager:
    """
    Comprehensive database manager for the AI Historical Simulation Platform.
    
    Features:
    - Vector database integration (FAISS, Chroma, SQLite)
    - Historical data management and versioning
    - User interaction logging and analytics
    - Backup and recovery systems
    - Performance monitoring and optimization
    """
    
    def __init__(self, config: Optional[DatabaseConfig] = None):
        """
        Initialize database manager.

        Args:
            config: Database configuration (defaults to DatabaseConfig()).
        """
        self.config = config or DatabaseConfig()

        # Database paths. Honor the configured filename: the previous code
        # hard-coded "platform.db" and silently ignored the basename of
        # config.db_path. parents=True lets nested data dirs be created.
        self.main_db = Path(self.config.db_path)
        self.db_dir = self.main_db.parent
        self.db_dir.mkdir(parents=True, exist_ok=True)

        # Vector database backend (needs self.db_dir set above).
        self.vector_db = self._create_vector_database()

        # Backup system
        self.backup_dir = self.db_dir / "backups"
        self.backup_dir.mkdir(exist_ok=True)

        # Performance metrics counters, updated by the storage/query methods.
        self.metrics = {
            'queries_executed': 0,
            'vectors_stored': 0,
            'vectors_retrieved': 0,
            'avg_query_time': 0.0,
            'last_backup': None
        }

        # Background tasks, started in initialize() and stopped in shutdown().
        self.backup_task: Optional[asyncio.Task] = None
        self.cleanup_task: Optional[asyncio.Task] = None
        self.shutdown_event = asyncio.Event()

        logger.info(f"DatabaseManager initialized with {self.config.vector_db_type} vector DB")
    
    def _create_vector_database(self) -> VectorDatabase:
        """Create the configured vector database backend.

        Falls back to the SQLite implementation when the requested backend
        is unavailable or unimplemented; the fallback is now logged so a
        missing FAISS install no longer degrades performance silently.
        """
        vector_db_path = self.db_dir / "vectors"
        vector_db_path.mkdir(exist_ok=True)

        requested = self.config.vector_db_type
        if requested == "faiss":
            if FAISS_AVAILABLE:
                return FAISSVectorDatabase(self.config.vector_dimension, str(vector_db_path))
            logger.warning("FAISS requested but not installed, falling back to SQLite")
        elif requested == "chroma" and CHROMA_AVAILABLE:
            # Would implement ChromaDB here
            logger.warning("ChromaDB not implemented, falling back to SQLite")

        return SQLiteVectorDatabase(self.config.vector_dimension, str(vector_db_path))
    
    async def initialize(self) -> None:
        """Initialize all database systems.

        Order matters: the vector backend and main SQLite schema are
        brought up before the background backup/cleanup tasks that use
        them are started.
        """
        logger.info("Initializing DatabaseManager")
        
        # Initialize vector database
        await self.vector_db.initialize()
        
        # Initialize main SQLite database (helper defined elsewhere in this class)
        await self._initialize_main_database()
        
        # Start background tasks (backup/cleanup loops; helper defined elsewhere)
        await self._start_background_tasks()
        
        logger.info("DatabaseManager initialized successfully")
    
    async def shutdown(self) -> None:
        """Shutdown database manager, flushing state and taking a final backup.

        Shutdown is defensive: a failure in the final backup is logged
        rather than raised, so the manager always finishes shutting down.
        """
        logger.info("Shutting down DatabaseManager")

        # Signal shutdown to background loops.
        self.shutdown_event.set()

        # Cancel background tasks and wait for them to unwind.
        for task in (self.backup_task, self.cleanup_task):
            if task:
                task.cancel()
                try:
                    await task
                except asyncio.CancelledError:
                    pass

        # Flush vector database state to disk.
        await self.vector_db.cleanup()

        # Final backup is best-effort: previously a backup failure raised
        # out of shutdown() and skipped the completion path.
        try:
            await self.create_backup()
        except Exception as e:
            logger.error(f"Final backup during shutdown failed: {e}")

        logger.info("DatabaseManager shutdown completed")
    
    # Historical Figure Management
    
    async def store_figure(self, figure) -> None:
        """Store a historical figure: personality vector plus structured row.

        Args:
            figure: figure object — must expose personality_id, name,
                historical_period, biography, cultural_context, traits
                (a dataclass, since asdict() is applied), personality_vector,
                created_at, last_active and total_conversations.
                (Presumably a project dataclass — confirm against callers.)

        Raises:
            Re-raises any storage failure after logging it.
        """
        try:
            # Store personality vector
            metadata = VectorMetadata(
                id=f"personality_{figure.personality_id}",
                personality_id=figure.personality_id,
                vector_type="personality",
                timestamp=datetime.now(),
                metadata={
                    'figure_name': figure.name,
                    'historical_period': figure.historical_period,
                    'created_at': figure.created_at.isoformat()
                }
            )
            
            await self.vector_db.store_vector(
                f"personality_{figure.personality_id}",
                figure.personality_vector,
                metadata
            )
            
            # Store structured data; nested fields are JSON-encoded into TEXT
            # columns, and INSERT OR REPLACE makes the call idempotent per ID.
            with self._get_main_db_connection() as conn:
                conn.execute("""
                    INSERT OR REPLACE INTO historical_figures
                    (personality_id, name, historical_period, biography, cultural_context,
                     traits, created_at, last_active, total_conversations)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
                """, (
                    figure.personality_id,
                    figure.name,
                    figure.historical_period,
                    json.dumps(figure.biography),
                    json.dumps(figure.cultural_context),
                    json.dumps(asdict(figure.traits)),
                    figure.created_at.isoformat(),
                    figure.last_active.isoformat() if figure.last_active else None,
                    figure.total_conversations
                ))
            
            self.metrics['vectors_stored'] += 1
            
        except Exception as e:
            logger.error(f"Failed to store figure {figure.name}: {e}")
            raise
    
    async def load_figure_data(self, figure_name: str) -> Optional[Dict[str, Any]]:
        """Load historical figure data by display name.

        Built-in default figures take precedence over database rows.

        Returns:
            Figure data dict with JSON fields parsed, or None when the
            figure is unknown or loading fails (failure is logged).
        """
        try:
            # First check if we have default data for this figure
            default_data = await self._get_default_figure_data(figure_name)
            if default_data:
                return default_data

            # Name the columns explicitly in the SELECT so the row→dict
            # mapping cannot drift from the table's physical column order
            # (the old SELECT * silently mis-mapped on any schema change).
            columns = ['personality_id', 'name', 'historical_period', 'biography',
                       'cultural_context', 'traits', 'created_at', 'last_active',
                       'total_conversations']
            query = f"SELECT {', '.join(columns)} FROM historical_figures WHERE name = ?"

            with self._get_main_db_connection() as conn:
                cursor = conn.execute(query, (figure_name,))
                row = cursor.fetchone()

            if not row:
                return None

            data = dict(zip(columns, row))

            # Parse JSON-encoded fields back into Python structures.
            for field in ('biography', 'cultural_context', 'traits'):
                data[field] = json.loads(data[field])

            return data

        except Exception as e:
            logger.error(f"Failed to load figure data for {figure_name}: {e}")
            return None
    
    async def _get_default_figure_data(self, figure_name: str) -> Optional[Dict[str, Any]]:
        """Get default figure data for common historical figures.

        Built-in seed data: load_figure_data() consults this before
        querying the database. Trait values are Big Five scores in [0, 1].

        NOTE(review): default entries include a 'key_events' field that the
        database path does not provide — confirm consumers tolerate both
        shapes.

        Returns:
            The figure's data dict, or None when the name has no default.
        """
        # Hard-coded catalog keyed by exact display name.
        default_figures = {
            "Napoleon Bonaparte": {
                "traits": {
                    "extraversion": 0.8,
                    "agreeableness": 0.3,
                    "conscientiousness": 0.9,
                    "neuroticism": 0.4,
                    "openness": 0.7
                },
                "historical_period": "Early 19th century",
                "cultural_context": {
                    "language_style": "formal",
                    "conceptual_framework": {"military": 0.9, "politics": 0.8, "leadership": 0.95},
                    "cultural_values": ["honor", "glory", "empire", "meritocracy"]
                },
                "biography": {
                    "birth_year": 1769,
                    "death_year": 1821,
                    "nationality": "French",
                    "occupation": "Military leader, Emperor"
                },
                "key_events": [
                    "Rose to power during French Revolution",
                    "Crowned Emperor of France in 1804",
                    "Conducted numerous military campaigns across Europe",
                    "Exiled to Elba and later Saint Helena"
                ]
            },
            "William Shakespeare": {
                "traits": {
                    "extraversion": 0.6,
                    "agreeableness": 0.7,
                    "conscientiousness": 0.8,
                    "neuroticism": 0.5,
                    "openness": 0.95
                },
                "historical_period": "Late 16th to early 17th century",
                "cultural_context": {
                    "language_style": "elizabethan",
                    "conceptual_framework": {"literature": 0.95, "drama": 0.9, "poetry": 0.9},
                    "cultural_values": ["art", "human nature", "love", "tragedy"]
                },
                "biography": {
                    "birth_year": 1564,
                    "death_year": 1616,
                    "nationality": "English",
                    "occupation": "Playwright, Poet, Actor"
                },
                "key_events": [
                    "Born in Stratford-upon-Avon",
                    "Wrote 37 plays and 154 sonnets",
                    "Member of the Lord Chamberlain's Men theater company",
                    "Created timeless characters like Hamlet, Romeo, and Juliet"
                ]
            },
            "Albert Einstein": {
                "traits": {
                    "extraversion": 0.4,
                    "agreeableness": 0.6,
                    "conscientiousness": 0.9,
                    "neuroticism": 0.3,
                    "openness": 0.95
                },
                "historical_period": "Early to mid-20th century",
                "cultural_context": {
                    "language_style": "scientific",
                    "conceptual_framework": {"physics": 0.95, "mathematics": 0.9, "philosophy": 0.8},
                    "cultural_values": ["knowledge", "peace", "curiosity", "relativity"]
                },
                "biography": {
                    "birth_year": 1879,
                    "death_year": 1955,
                    "nationality": "German-born, American citizen",
                    "occupation": "Theoretical Physicist"
                },
                "key_events": [
                    "Developed the theory of relativity",
                    "Won Nobel Prize in Physics in 1921",
                    "Fled Nazi Germany and moved to Princeton",
                    "Advocated for civil rights and nuclear disarmament"
                ]
            }
        }
        
        return default_figures.get(figure_name)
    
    # Memory and Interaction Management
    
    async def store_memory_vector(self, personality_id: str, memory_id: str, 
                                memory_vector: np.ndarray, memory_data: Dict[str, Any]) -> None:
        """Persist a memory vector for a personality in the vector store."""
        meta = VectorMetadata(
            id=memory_id,
            personality_id=personality_id,
            vector_type="memory",
            timestamp=datetime.now(),
            metadata=memory_data,
        )

        await self.vector_db.store_vector(memory_id, memory_vector, meta)
        self.metrics['vectors_stored'] += 1
    
    async def retrieve_similar_memories(self, personality_id: str, query_vector: np.ndarray,
                                      k: int = 10) -> List[Tuple[str, np.ndarray, float, Dict[str, Any]]]:
        """Return up to k of this personality's memories most similar to the query."""
        matches = await self.vector_db.search_similar(
            query_vector,
            k=k,
            filter_metadata={'personality_id': personality_id, 'vector_type': 'memory'},
        )

        self.metrics['vectors_retrieved'] += len(matches)
        # Unwrap VectorMetadata down to its raw metadata dict for callers.
        return [(vec_id, vec, score, meta.metadata) for vec_id, vec, score, meta in matches]
    
    # Analytics and Logging
    
    async def log_interaction(self, session_id: str, user_id: Optional[str],
                            figure_name: str, user_input: str, figure_response: str,
                            response_time: float, metadata: Dict[str, Any]) -> None:
        """Record one user/figure exchange in the interactions table."""
        try:
            row = (
                session_id,
                user_id,
                figure_name,
                user_input,
                figure_response,
                response_time,
                json.dumps(metadata),
                datetime.now().isoformat(),
            )
            with self._get_main_db_connection() as conn:
                conn.execute("""
                    INSERT INTO interactions
                    (session_id, user_id, figure_name, user_input, figure_response,
                     response_time, metadata, timestamp)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                """, row)

            self.metrics['queries_executed'] += 1

        except Exception as e:
            # Logging failures must never break the conversation flow.
            logger.error(f"Failed to log interaction: {e}")
    
    async def get_interaction_analytics(self, days: int = 7) -> Dict[str, Any]:
        """Summarize interaction volume and latency over the trailing window."""
        cutoff = (datetime.now() - timedelta(days=days)).isoformat()

        with self._get_main_db_connection() as conn:
            # Total interaction count in the window.
            total = conn.execute(
                "SELECT COUNT(*) FROM interactions WHERE timestamp >= ?",
                (cutoff,)
            ).fetchone()[0]

            # Per-figure counts, busiest first.
            per_figure = dict(conn.execute("""
                SELECT figure_name, COUNT(*) 
                FROM interactions 
                WHERE timestamp >= ? 
                GROUP BY figure_name
                ORDER BY COUNT(*) DESC
            """, (cutoff,)).fetchall())

            # Mean latency; AVG over zero rows yields NULL, hence the `or 0.0`.
            mean_latency = conn.execute(
                "SELECT AVG(response_time) FROM interactions WHERE timestamp >= ?",
                (cutoff,)
            ).fetchone()[0] or 0.0

        return {
            'total_interactions': total,
            'interactions_by_figure': per_figure,
            'avg_response_time': mean_latency,
            'period_days': days
        }
    
    # System Metrics
    
    async def store_metrics(self, metrics: Dict[str, Any]) -> None:
        """Append a timestamped snapshot of system metrics (best-effort)."""
        try:
            snapshot = (datetime.now().isoformat(), json.dumps(metrics))
            with self._get_main_db_connection() as conn:
                conn.execute("""
                    INSERT INTO system_metrics (timestamp, metrics_data)
                    VALUES (?, ?)
                """, snapshot)

        except Exception as e:
            # Metrics persistence must not disturb the caller.
            logger.error(f"Failed to store metrics: {e}")
    
    async def get_metrics_history(self, hours: int = 24) -> List[Dict[str, Any]]:
        """Return stored metric snapshots for the last `hours` hours, oldest first."""
        cutoff = (datetime.now() - timedelta(hours=hours)).isoformat()

        history: List[Dict[str, Any]] = []
        with self._get_main_db_connection() as conn:
            cursor = conn.execute(
                "SELECT timestamp, metrics_data FROM system_metrics WHERE timestamp >= ? ORDER BY timestamp",
                (cutoff,)
            )
            for ts, payload in cursor.fetchall():
                history.append({'timestamp': ts, 'metrics': json.loads(payload)})

        return history
    
    # Backup and Recovery
    
    async def create_backup(self) -> str:
        """Write a zip archive of the main DB plus vector files; return its path.

        Raises:
            Re-raises any failure after logging it.
        """
        try:
            # Create backup (simplified - would use proper backup tools in production)
            import zipfile

            stamp = datetime.now().strftime('%Y%m%d_%H%M%S')
            backup_filename = f"platform_backup_{stamp}.zip"
            backup_path = self.backup_dir / backup_filename

            with zipfile.ZipFile(backup_path, 'w', zipfile.ZIP_DEFLATED) as archive:
                # Main SQLite database.
                if self.main_db.exists():
                    archive.write(self.main_db, "platform.db")

                # Every file under the vector store directory.
                vector_dir = self.db_dir / "vectors"
                if vector_dir.exists():
                    for entry in vector_dir.rglob("*"):
                        if entry.is_file():
                            archive.write(entry, str("vectors" / entry.relative_to(vector_dir)))

            # Prune archives beyond the retention limit.
            await self._cleanup_old_backups()

            self.metrics['last_backup'] = datetime.now().isoformat()
            logger.info(f"Created backup: {backup_filename}")

            return str(backup_path)

        except Exception as e:
            logger.error(f"Backup creation failed: {e}")
            raise
    
    async def restore_backup(self, backup_path: str) -> None:
        """Extract a backup archive into the database directory.

        NOTE(review): ``extractall`` trusts the archive's member paths;
        acceptable for self-produced backups, but confirm archives can
        never arrive from an untrusted source.

        Args:
            backup_path: Path to a zip archive produced by create_backup().

        Raises:
            Exception: re-raised after logging on any failure.
        """
        try:
            import zipfile

            with zipfile.ZipFile(backup_path, 'r') as archive:
                archive.extractall(self.db_dir)

            # The vector store caches on-disk state, so it must be
            # reloaded after its files were overwritten.
            await self.vector_db.initialize()

            logger.info(f"Restored from backup: {backup_path}")

        except Exception as e:
            logger.error(f"Backup restore failed: {e}")
            raise
    
    # Private methods
    
    @contextmanager
    def _get_main_db_connection(self):
        """Get main database connection."""
        conn = sqlite3.connect(str(self.main_db))
        try:
            yield conn
            conn.commit()
        except Exception:
            conn.rollback()
            raise
        finally:
            conn.close()
    
    async def _initialize_main_database(self) -> None:
        """Initialize main SQLite database schema."""
        with self._get_main_db_connection() as conn:
            # Historical figures table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS historical_figures (
                    personality_id TEXT PRIMARY KEY,
                    name TEXT UNIQUE,
                    historical_period TEXT,
                    biography TEXT,
                    cultural_context TEXT,
                    traits TEXT,
                    created_at TEXT,
                    last_active TEXT,
                    total_conversations INTEGER DEFAULT 0
                )
            """)
            
            # Interactions table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS interactions (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT,
                    user_id TEXT,
                    figure_name TEXT,
                    user_input TEXT,
                    figure_response TEXT,
                    response_time REAL,
                    metadata TEXT,
                    timestamp TEXT
                )
            """)
            
            # System metrics table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS system_metrics (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    timestamp TEXT,
                    metrics_data TEXT
                )
            """)
            
            # Create indexes
            conn.execute("CREATE INDEX IF NOT EXISTS idx_interactions_timestamp ON interactions(timestamp)")
            conn.execute("CREATE INDEX IF NOT EXISTS idx_interactions_figure ON interactions(figure_name)")
            conn.execute("CREATE INDEX IF NOT EXISTS idx_metrics_timestamp ON system_metrics(timestamp)")
    
    async def _start_background_tasks(self) -> None:
        """Start background tasks."""
        self.backup_task = asyncio.create_task(self._background_backup())
        self.cleanup_task = asyncio.create_task(self._background_cleanup())
        logger.info("Started database background tasks")
    
    async def _background_backup(self) -> None:
        """Background backup task."""
        while not self.shutdown_event.is_set():
            try:
                await asyncio.sleep(self.config.backup_interval_hours * 3600)
                if not self.shutdown_event.is_set():
                    await self.create_backup()
            except Exception as e:
                logger.error(f"Background backup error: {e}")
                await asyncio.sleep(3600)  # Wait 1 hour on error
    
    async def _background_cleanup(self) -> None:
        """Background cleanup task."""
        while not self.shutdown_event.is_set():
            try:
                await asyncio.sleep(24 * 3600)  # Daily cleanup
                if not self.shutdown_event.is_set():
                    await self._cleanup_old_data()
            except Exception as e:
                logger.error(f"Background cleanup error: {e}")
                await asyncio.sleep(12 * 3600)  # Wait 12 hours on error
    
    async def _cleanup_old_backups(self) -> None:
        """Clean up old backup files."""
        try:
            backup_files = sorted(self.backup_dir.glob("platform_backup_*.zip"))
            
            if len(backup_files) > self.config.max_backup_files:
                old_backups = backup_files[:-self.config.max_backup_files]
                for backup_file in old_backups:
                    backup_file.unlink()
                    logger.info(f"Deleted old backup: {backup_file.name}")
                    
        except Exception as e:
            logger.error(f"Backup cleanup error: {e}")
    
    async def _cleanup_old_data(self) -> None:
        """Clean up old data to prevent database bloat."""
        try:
            # Clean up old interactions (keep last 90 days)
            cutoff_date = (datetime.now() - timedelta(days=90)).isoformat()
            
            with self._get_main_db_connection() as conn:
                cursor = conn.execute(
                    "DELETE FROM interactions WHERE timestamp < ?",
                    (cutoff_date,)
                )
                deleted_interactions = cursor.rowcount
                
                # Clean up old metrics (keep last 30 days)
                metrics_cutoff = (datetime.now() - timedelta(days=30)).isoformat()
                cursor = conn.execute(
                    "DELETE FROM system_metrics WHERE timestamp < ?",
                    (metrics_cutoff,)
                )
                deleted_metrics = cursor.rowcount
            
            if deleted_interactions > 0 or deleted_metrics > 0:
                logger.info(f"Cleaned up {deleted_interactions} old interactions and {deleted_metrics} old metrics")
                
        except Exception as e:
            logger.error(f"Data cleanup error: {e}")