from __future__ import annotations

import gzip
import os
import shutil
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Optional, Tuple
import structlog

from agent_society.memory.event_log import parse_blocks, format_block, Event

logger = structlog.get_logger(__name__)

class EventLogRotator:
    """Handles rotation and archival of event logs to prevent unbounded growth.

    Active logs live at ``base_dir/<owner>/<actor>/activity.log``.  When a
    log exceeds the configured size or age limit it is copied (optionally
    gzip-compressed) into ``archive_dir`` and the active log is rewritten
    with only the events worth retaining.
    """

    def __init__(
        self,
        base_dir: Path,
        max_size_mb: float = 10.0,
        max_age_days: int = 30,
        archive_dir: Optional[Path] = None,
        compression: bool = True
    ):
        """
        Initialize the event log rotator.

        Args:
            base_dir: Base directory containing event logs
            max_size_mb: Maximum size in MB before rotation
            max_age_days: Maximum age in days before archival
            archive_dir: Directory for archived logs (default: base_dir/archives)
            compression: Whether to compress archived logs
        """
        self.base_dir = Path(base_dir)
        self.max_size_bytes = int(max_size_mb * 1024 * 1024)
        self.max_age = timedelta(days=max_age_days)
        self.archive_dir = archive_dir or self.base_dir / "archives"
        self.compression = compression
        # Ensure the archive destination exists up front so archival never
        # fails at rotation time.
        self.archive_dir.mkdir(parents=True, exist_ok=True)

    def _get_file_age(self, file_path: Path) -> timedelta:
        """Get the age of a file based on its modification time (UTC-aware)."""
        mtime = datetime.fromtimestamp(file_path.stat().st_mtime, tz=timezone.utc)
        return datetime.now(timezone.utc) - mtime

    def _get_file_size(self, file_path: Path) -> int:
        """Get file size in bytes, or 0 if the file does not exist."""
        return file_path.stat().st_size if file_path.exists() else 0

    def _should_rotate(self, file_path: Path) -> Tuple[bool, str]:
        """
        Check if a log file should be rotated.

        Size is checked before age, so the reason string reflects whichever
        limit tripped first.

        Returns:
            Tuple of (should_rotate, reason); reason is "" when no rotation
            is needed.
        """
        if not file_path.exists():
            return False, ""

        size = self._get_file_size(file_path)
        if size > self.max_size_bytes:
            return True, f"size ({size / 1024 / 1024:.2f}MB) exceeds limit"

        age = self._get_file_age(file_path)
        if age > self.max_age:
            return True, f"age ({age.days} days) exceeds limit"

        return False, ""

    def _archive_log(self, log_path: Path, archive_name: str) -> Path:
        """
        Archive a log file.

        Args:
            log_path: Path to the log file to archive
            archive_name: Name for the archived file (expected to end in
                ".log", so the compressed variant becomes "*.log.gz")

        Returns:
            Path to the archived file
        """
        archive_path = self.archive_dir / archive_name

        if self.compression:
            archive_path = archive_path.with_suffix(".log.gz")
            # Stream-copy so large logs are never loaded fully into memory.
            with open(log_path, 'rb') as f_in:
                with gzip.open(archive_path, 'wb') as f_out:
                    shutil.copyfileobj(f_in, f_out)
            logger.info(
                "log.archived.compressed",
                source=str(log_path),
                destination=str(archive_path),
                size_mb=self._get_file_size(archive_path) / 1024 / 1024
            )
        else:
            # NOTE(review): copy2 preserves the source mtime, so uncompressed
            # archives look as old as the log they came from, while gzip
            # archives (above) get a fresh mtime; clean_old_archives measures
            # age from mtime, so the two modes age differently — confirm this
            # is intended.
            shutil.copy2(log_path, archive_path)
            logger.info(
                "log.archived",
                source=str(log_path),
                destination=str(archive_path)
            )

        return archive_path

    def _split_events_by_age(self, events: list[Event], cutoff_date: datetime) -> Tuple[list[Event], list[Event]]:
        """
        Split events into recent and old based on a cutoff date.

        Args:
            events: List of events to split
            cutoff_date: Timezone-aware datetime cutoff for splitting

        Returns:
            Tuple of (recent_events, old_events); events whose timestamps
            cannot be parsed are kept in recent_events.
        """
        recent: list[Event] = []
        old: list[Event] = []

        for event in events:
            try:
                # Parse ISO format timestamp ('Z' suffix normalized for
                # Python versions where fromisoformat cannot handle it).
                event_time = datetime.fromisoformat(event.created_at.replace('Z', '+00:00'))
                # Treat naive timestamps as UTC; without this, comparing a
                # naive event time against the aware cutoff raises TypeError
                # and rotation would abort mid-way.
                if event_time.tzinfo is None:
                    event_time = event_time.replace(tzinfo=timezone.utc)
                if event_time >= cutoff_date:
                    recent.append(event)
                else:
                    old.append(event)
            except (ValueError, TypeError, AttributeError):
                # If we can't parse the timestamp, err on the side of
                # keeping the event.
                recent.append(event)

        return recent, old

    def rotate_log(self, owner: str, actor: str) -> Optional[Path]:
        """
        Rotate a specific event log if needed.

        The full log is archived first, then the active log is rewritten
        with only recent events (further trimmed by count when the rotation
        was size-triggered, to avoid immediate re-rotation).

        Args:
            owner: Owner of the event log
            actor: Actor of the event log

        Returns:
            Path to archived file if rotation occurred, None otherwise
        """
        log_path = self.base_dir / owner / actor / "activity.log"

        should_rotate, reason = self._should_rotate(log_path)
        if not should_rotate:
            return None

        logger.info(
            "log.rotation.started",
            owner=owner,
            actor=actor,
            reason=reason,
            path=str(log_path)
        )

        # Read current events; an unreadable log is left untouched.
        try:
            content = log_path.read_text(encoding="utf-8")
            events = list(parse_blocks(content))
        except Exception as e:
            logger.error("log.rotation.read_error", error=str(e), path=str(log_path))
            return None

        # Generate archive name with timestamp
        timestamp = datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S")
        archive_name = f"{owner}_{actor}_{timestamp}.log"

        # Archive the entire log before rewriting, so no event is lost even
        # if the rewrite below drops it from the active log.
        archive_path = self._archive_log(log_path, archive_name)

        # Determine what triggered rotation
        size_triggered = "size" in reason

        # Keep only recent events in the active log
        cutoff_date = datetime.now(timezone.utc) - self.max_age
        recent_events, old_events = self._split_events_by_age(events, cutoff_date)

        # If rotation was triggered by size and all events are recent,
        # we need to trim by count to avoid immediate re-rotation
        events_to_keep = recent_events
        if size_triggered and recent_events:
            # Calculate approximate size per event (the original file is
            # still on disk at this point).
            current_size = self._get_file_size(log_path)
            avg_event_size = current_size / len(events) if events else 0

            if avg_event_size > 0:
                # Keep only enough events to stay under 80% of max size
                # to provide buffer before next rotation
                target_size = self.max_size_bytes * 0.8
                max_events_to_keep = int(target_size / avg_event_size)

                if max_events_to_keep < len(recent_events):
                    # Sort by timestamp to keep newest events
                    recent_events_sorted = sorted(
                        recent_events,
                        key=lambda e: e.created_at,
                        reverse=True
                    )
                    events_to_keep = recent_events_sorted[:max_events_to_keep]
                    logger.info(
                        "log.rotation.size_trimmed",
                        owner=owner,
                        actor=actor,
                        original_count=len(recent_events),
                        trimmed_to=len(events_to_keep)
                    )

        # Rewrite the log with retained events
        if events_to_keep:
            # Sort by timestamp to maintain chronological order
            events_to_keep = sorted(events_to_keep, key=lambda e: e.created_at)
            new_content = "".join(format_block(event) for event in events_to_keep)
            log_path.write_text(new_content, encoding="utf-8")
            logger.info(
                "log.rotation.retained",
                owner=owner,
                actor=actor,
                retained_events=len(events_to_keep),
                original_events=len(events),
                archived_old=len(old_events) if not size_triggered else 0
            )
        else:
            # If no events to keep, create empty log
            log_path.write_text("", encoding="utf-8")
            logger.info("log.rotation.emptied", owner=owner, actor=actor)

        return archive_path

    def rotate_all_logs(self) -> list[Path]:
        """
        Check and rotate all event logs in the base directory.

        Returns:
            List of paths to archived files
        """
        archived: list[Path] = []

        if not self.base_dir.exists():
            return archived

        # Iterate through owner directories, skipping the archive directory
        # itself — whether it is the default base_dir/"archives" or a custom
        # archive_dir that happens to live inside base_dir.
        for owner_dir in self.base_dir.iterdir():
            if (
                not owner_dir.is_dir()
                or owner_dir.name == "archives"
                or owner_dir == self.archive_dir
            ):
                continue

            owner = owner_dir.name

            # Iterate through actor directories
            for actor_dir in owner_dir.iterdir():
                if not actor_dir.is_dir():
                    continue

                actor = actor_dir.name
                archive_path = self.rotate_log(owner, actor)
                if archive_path:
                    archived.append(archive_path)

        logger.info("log.rotation.completed", total_archived=len(archived))
        return archived

    def clean_old_archives(self, archive_age_days: int = 90) -> int:
        """
        Clean up old archived logs.

        Args:
            archive_age_days: Delete archives older than this many days

        Returns:
            Number of archives deleted
        """
        if not self.archive_dir.exists():
            return 0

        cutoff = timedelta(days=archive_age_days)
        deleted = 0

        for archive_file in self.archive_dir.iterdir():
            if archive_file.is_file():
                age = self._get_file_age(archive_file)
                if age > cutoff:
                    # Best-effort deletion: a failure on one archive must
                    # not stop the sweep.
                    try:
                        archive_file.unlink()
                        deleted += 1
                        logger.info(
                            "archive.deleted",
                            file=archive_file.name,
                            age_days=age.days
                        )
                    except Exception as e:
                        logger.error(
                            "archive.delete_error",
                            file=archive_file.name,
                            error=str(e)
                        )

        logger.info("archive.cleanup.completed", deleted=deleted)
        return deleted

    def get_statistics(self) -> dict:
        """
        Get statistics about event logs and archives.

        Returns:
            Dictionary with counts and sizes of active/archived logs, plus
            a list of logs currently due for rotation (path relative to
            base_dir, reason, and size in MB).
        """
        stats = {
            "active_logs": 0,
            "total_active_size_mb": 0.0,
            "archived_logs": 0,
            "total_archive_size_mb": 0.0,
            "logs_needing_rotation": []
        }

        # Active logs statistics (skip the archive directory, as in
        # rotate_all_logs).
        if self.base_dir.exists():
            for owner_dir in self.base_dir.iterdir():
                if (
                    not owner_dir.is_dir()
                    or owner_dir.name == "archives"
                    or owner_dir == self.archive_dir
                ):
                    continue

                for actor_dir in owner_dir.iterdir():
                    if not actor_dir.is_dir():
                        continue

                    log_path = actor_dir / "activity.log"
                    if log_path.exists():
                        stats["active_logs"] += 1
                        size_mb = self._get_file_size(log_path) / 1024 / 1024
                        stats["total_active_size_mb"] += size_mb

                        should_rotate, reason = self._should_rotate(log_path)
                        if should_rotate:
                            stats["logs_needing_rotation"].append({
                                "path": str(log_path.relative_to(self.base_dir)),
                                "reason": reason,
                                "size_mb": size_mb
                            })

        # Archive statistics
        if self.archive_dir.exists():
            for archive_file in self.archive_dir.iterdir():
                if archive_file.is_file():
                    stats["archived_logs"] += 1
                    stats["total_archive_size_mb"] += (
                        self._get_file_size(archive_file) / 1024 / 1024
                    )

        return stats