"""
SQLAlchemy models for GCR Solver Manager database.
"""

from datetime import datetime, timezone
from typing import Optional, Dict, Any, List
from sqlalchemy import (
    Column, Integer, String, Float, Boolean, DateTime, Text, JSON,
    ForeignKey, Index, UniqueConstraint
)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, validates
from sqlalchemy.sql import func

# Shared declarative base for every model in this module.
# NOTE(review): ``declarative_base`` is imported from ``sqlalchemy.ext.declarative``,
# which is deprecated since SQLAlchemy 1.4 in favor of
# ``sqlalchemy.orm.declarative_base`` — confirm the project's pinned
# SQLAlchemy version before switching the import.
Base = declarative_base()


class TimestampMixin:
    """Mixin for adding created_at and updated_at timestamps.

    Both columns are timezone-aware and default to the SQL ``now()``
    expression; ``updated_at`` is additionally refreshed on every UPDATE
    via the ``onupdate`` hook.  Note these are client-issued column
    defaults (applied at INSERT/UPDATE flush time), not server defaults,
    so unflushed instances carry ``None`` until committed.
    """

    # Set once when the row is inserted.
    created_at = Column(DateTime(timezone=True), default=func.now(), nullable=False)
    # Set at insert and refreshed on every update.
    updated_at = Column(DateTime(timezone=True), default=func.now(), onupdate=func.now(), nullable=False)


class BuildConfiguration(Base, TimestampMixin):
    """Build configuration model.

    One named recipe for building a solver: GPU backend, solver
    algorithm, debug flag, plus the CMake/environment inputs that went
    into the build.  ``TestRun`` and ``BuildCache`` rows reference a
    configuration via ``build_config_id`` and are deleted with it
    (``delete-orphan`` cascade).
    """

    __tablename__ = 'build_configurations'

    # Single source of truth for the validated enumerations below
    # (previously duplicated between inline comments and the validators).
    # Kept as lists so the validator error messages render identically.
    VALID_GPU_TYPES = ['cuda', 'hip']
    VALID_SOLVER_TYPES = ['gcr', 'ca-gcr', 'gmres', 'bca-gmres']

    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(255), nullable=False, unique=True)
    gpu_type = Column(String(50), nullable=False)  # see VALID_GPU_TYPES
    solver_type = Column(String(50), nullable=False)  # see VALID_SOLVER_TYPES
    debug_mode = Column(Boolean, default=False, nullable=False)

    # Build parameters
    cmake_args = Column(JSON, default=dict)  # CMake arguments as JSON
    environment_vars = Column(JSON, default=dict)  # Environment variables
    source_hash = Column(String(64), nullable=True)  # Hash of source files

    # Configuration metadata
    description = Column(Text, nullable=True)
    is_active = Column(Boolean, default=True, nullable=False)  # soft-disable flag

    # Relationships
    test_runs = relationship("TestRun", back_populates="build_config", cascade="all, delete-orphan")
    build_caches = relationship("BuildCache", back_populates="build_config", cascade="all, delete-orphan")

    # Indexes
    __table_args__ = (
        Index('idx_build_config_gpu_solver', 'gpu_type', 'solver_type'),
        Index('idx_build_config_active', 'is_active'),
        Index('idx_build_config_created', 'created_at'),
    )

    @validates('gpu_type')
    def validate_gpu_type(self, key, gpu_type):
        """Reject any GPU backend outside VALID_GPU_TYPES.

        Raises:
            ValueError: if *gpu_type* is not a recognized backend.
        """
        if gpu_type not in self.VALID_GPU_TYPES:
            raise ValueError(f"GPU type must be one of: {self.VALID_GPU_TYPES}")
        return gpu_type

    @validates('solver_type')
    def validate_solver_type(self, key, solver_type):
        """Reject any solver algorithm outside VALID_SOLVER_TYPES.

        Raises:
            ValueError: if *solver_type* is not a recognized solver.
        """
        if solver_type not in self.VALID_SOLVER_TYPES:
            raise ValueError(f"Solver type must be one of: {self.VALID_SOLVER_TYPES}")
        return solver_type

    def __repr__(self):
        return f"<BuildConfiguration(id={self.id}, name='{self.name}', gpu='{self.gpu_type}', solver='{self.solver_type}')>"


class TestRun(Base, TimestampMixin):
    """Test run model.

    One execution of a solver built from a ``BuildConfiguration``:
    launch parameters, host/scheduler details, lifecycle status and
    summary metrics.  Child rows (log entries, norm data, performance
    metrics, SLURM jobs) hang off this table and are deleted with it.
    """

    __tablename__ = 'test_runs'

    # Allowed lifecycle states; single source of truth for the validator
    # (previously duplicated between the column comment and the validator).
    VALID_STATUSES = ['pending', 'running', 'completed', 'failed', 'cancelled']

    id = Column(Integer, primary_key=True, autoincrement=True)
    build_config_id = Column(Integer, ForeignKey('build_configurations.id'), nullable=False)

    # Test parameters
    nproc = Column(Integer, nullable=False)  # total process count
    nprocx = Column(Integer, nullable=True)  # process-grid x dimension, if used
    resolution = Column(Float, nullable=False)
    maxit = Column(Integer, nullable=True)  # iteration cap, if set

    # Execution details
    start_time = Column(DateTime(timezone=True), nullable=True)
    end_time = Column(DateTime(timezone=True), nullable=True)
    status = Column(String(50), default='pending', nullable=False)  # see VALID_STATUSES
    exit_code = Column(Integer, nullable=True)

    # Job information
    slurm_job_id = Column(String(50), nullable=True)
    node_info = Column(JSON, default=dict)  # Node information as JSON

    # Environment details
    hostname = Column(String(255), nullable=True)
    user = Column(String(100), nullable=True)
    working_directory = Column(String(500), nullable=True)

    # Performance metrics
    duration_seconds = Column(Float, nullable=True)
    memory_usage_mb = Column(Float, nullable=True)
    cpu_usage_percent = Column(Float, nullable=True)

    # Additional metadata
    command_line = Column(Text, nullable=True)
    error_message = Column(Text, nullable=True)
    notes = Column(Text, nullable=True)

    # Relationships
    build_config = relationship("BuildConfiguration", back_populates="test_runs")
    log_entries = relationship("LogEntry", back_populates="test_run", cascade="all, delete-orphan")
    norm_data = relationship("NormData", back_populates="test_run", cascade="all, delete-orphan")
    performance_metrics = relationship("PerformanceMetric", back_populates="test_run", cascade="all, delete-orphan")
    slurm_jobs = relationship("SlurmJob", back_populates="test_run", cascade="all, delete-orphan")

    # Indexes
    __table_args__ = (
        Index('idx_test_run_build_config', 'build_config_id'),
        Index('idx_test_run_status', 'status'),
        Index('idx_test_run_start_time', 'start_time'),
        Index('idx_test_run_slurm_job', 'slurm_job_id'),
    )

    @validates('status')
    def validate_status(self, key, status):
        """Reject any lifecycle state outside VALID_STATUSES.

        Raises:
            ValueError: if *status* is not a recognized state.
        """
        if status not in self.VALID_STATUSES:
            raise ValueError(f"Status must be one of: {self.VALID_STATUSES}")
        return status

    @property
    def duration(self) -> Optional[float]:
        """Wall-clock duration in seconds, or None until both timestamps are set."""
        if self.start_time and self.end_time:
            return (self.end_time - self.start_time).total_seconds()
        return None

    def __repr__(self):
        return f"<TestRun(id={self.id}, config_id={self.build_config_id}, status='{self.status}')>"


class LogEntry(Base, TimestampMixin):
    """Log entry model.

    One log artifact produced by a test run — either stored inline in
    ``content`` (small logs/excerpts) or referenced on disk through
    ``file_path``.  Parsed ``NormData`` rows may point back here for
    provenance.
    """

    __tablename__ = 'log_entries'

    # Accepted values for the validated columns below; single source of
    # truth (the old inline comment for log_type omitted 'analysis',
    # which the validator has always accepted).
    VALID_LOG_TYPES = ['build', 'test', 'debug', 'slurm', 'error', 'analysis']
    VALID_LOG_LEVELS = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']

    id = Column(Integer, primary_key=True, autoincrement=True)
    test_run_id = Column(Integer, ForeignKey('test_runs.id'), nullable=False)

    # Log metadata
    log_type = Column(String(50), nullable=False)  # see VALID_LOG_TYPES
    log_level = Column(String(20), nullable=False)  # see VALID_LOG_LEVELS
    source = Column(String(100), nullable=False)  # Source of the log (e.g., 'cuda_solver', 'slurm')

    # File information
    file_path = Column(String(1000), nullable=True)
    file_size = Column(Integer, nullable=True)
    content_hash = Column(String(64), nullable=True)
    is_compressed = Column(Boolean, default=False, nullable=False)

    # Log content (for small logs or excerpts)
    content = Column(Text, nullable=True)

    # Categorization and tagging
    category = Column(String(100), nullable=True)
    tags = Column(JSON, default=list)  # Tags as JSON array

    # Processing status
    is_processed = Column(Boolean, default=False, nullable=False)
    processing_error = Column(Text, nullable=True)

    # Relationships
    test_run = relationship("TestRun", back_populates="log_entries")
    norm_data = relationship("NormData", back_populates="log_entry", cascade="all, delete-orphan")

    # Indexes
    __table_args__ = (
        Index('idx_log_entry_test_run', 'test_run_id'),
        Index('idx_log_entry_type', 'log_type'),
        Index('idx_log_entry_level', 'log_level'),
        Index('idx_log_entry_hash', 'content_hash'),
        Index('idx_log_entry_created', 'created_at'),
        Index('idx_log_entry_processed', 'is_processed'),
    )

    @validates('log_type')
    def validate_log_type(self, key, log_type):
        """Reject any log category outside VALID_LOG_TYPES.

        Raises:
            ValueError: if *log_type* is not a recognized category.
        """
        if log_type not in self.VALID_LOG_TYPES:
            raise ValueError(f"Log type must be one of: {self.VALID_LOG_TYPES}")
        return log_type

    @validates('log_level')
    def validate_log_level(self, key, log_level):
        """Reject any severity outside VALID_LOG_LEVELS.

        Raises:
            ValueError: if *log_level* is not a recognized severity.
        """
        if log_level not in self.VALID_LOG_LEVELS:
            raise ValueError(f"Log level must be one of: {self.VALID_LOG_LEVELS}")
        return log_level

    def __repr__(self):
        return f"<LogEntry(id={self.id}, test_run_id={self.test_run_id}, type='{self.log_type}', level='{self.log_level}')>"


class NormData(Base, TimestampMixin):
    """Norm data model for storing solver norm values.

    One scalar norm value per row, keyed by (test run, solver tag,
    iteration, step, norm type) — uniqueness enforced by
    ``uq_norm_data_unique``.  Rows may optionally link back to the log
    entry they were parsed from.
    """

    __tablename__ = 'norm_data'

    id = Column(Integer, primary_key=True, autoincrement=True)
    test_run_id = Column(Integer, ForeignKey('test_runs.id'), nullable=False)
    # Optional provenance link to the log the value was parsed from.
    log_entry_id = Column(Integer, ForeignKey('log_entries.id'), nullable=True)

    # Norm identification
    solver_tag = Column(String(100), nullable=False)  # e.g., 'CA_Cdebug', 'CPU-BCA-GMRES-debug'
    iteration = Column(Integer, nullable=True)  # Iteration number (-1 for global values)
    step = Column(Integer, nullable=True)  # Step number within iteration

    # Norm information
    norm_type = Column(String(100), nullable=False)  # e.g., 'pi', 'h_r_norm_after_update'
    value = Column(Float, nullable=False)

    # Additional context
    timestamp = Column(DateTime(timezone=True), nullable=True)  # when the value was emitted, if known
    extra_data = Column(JSON, default=dict)  # Additional metadata as JSON

    # Relationships
    test_run = relationship("TestRun", back_populates="norm_data")
    log_entry = relationship("LogEntry", back_populates="norm_data")

    # Indexes
    __table_args__ = (
        Index('idx_norm_data_test_run', 'test_run_id'),
        Index('idx_norm_data_solver_tag', 'solver_tag'),
        Index('idx_norm_data_norm_type', 'norm_type'),
        Index('idx_norm_data_iteration', 'iteration'),
        Index('idx_norm_data_timestamp', 'timestamp'),
        # NOTE(review): 'iteration' and 'step' are nullable; most database
        # backends treat NULLs as distinct in unique constraints, so rows
        # that omit them may escape deduplication — confirm this is intended.
        UniqueConstraint('test_run_id', 'solver_tag', 'iteration', 'step', 'norm_type', 
                        name='uq_norm_data_unique'),
    )

    def __repr__(self):
        return f"<NormData(id={self.id}, tag='{self.solver_tag}', type='{self.norm_type}', value={self.value})>"


class PerformanceMetric(Base, TimestampMixin):
    """Performance metric model.

    One named scalar measurement captured during a test run, optionally
    scoped to an execution phase and code component.
    """

    __tablename__ = 'performance_metrics'

    id = Column(Integer, primary_key=True, autoincrement=True)
    test_run_id = Column(Integer, ForeignKey('test_runs.id'), nullable=False)

    # Metric identification
    metric_name = Column(String(100), nullable=False)
    # NOTE(review): unlike gpu_type/solver_type/status/log_type on the
    # sibling models, metric_type has no @validates guard — any string is
    # accepted. Confirm whether that is intentional.
    metric_type = Column(String(50), nullable=False)  # timing, memory, convergence, throughput

    # Metric value
    value = Column(Float, nullable=False)
    unit = Column(String(50), nullable=True)  # seconds, MB, iterations, etc.

    # Context
    timestamp = Column(DateTime(timezone=True), nullable=True)  # when the measurement was taken, if known
    phase = Column(String(50), nullable=True)  # build, initialization, solve, cleanup
    component = Column(String(100), nullable=True)  # matrixpro, halo_exchange, etc.

    # Additional metadata
    extra_data = Column(JSON, default=dict)

    # Relationships
    test_run = relationship("TestRun", back_populates="performance_metrics")

    # Indexes
    __table_args__ = (
        Index('idx_perf_metric_test_run', 'test_run_id'),
        Index('idx_perf_metric_name', 'metric_name'),
        Index('idx_perf_metric_type', 'metric_type'),
        Index('idx_perf_metric_timestamp', 'timestamp'),
        Index('idx_perf_metric_phase', 'phase'),
    )

    def __repr__(self):
        # Note: renders "value None" style output if unit is NULL.
        return f"<PerformanceMetric(id={self.id}, name='{self.metric_name}', value={self.value} {self.unit})>"


class SlurmJob(Base, TimestampMixin):
    """SLURM job model.

    Scheduler-side record attached to a test run: SLURM identifiers,
    state and timing, allocated resources, exit information and output
    file paths.  A test run may own multiple job rows (the relationship
    on TestRun is one-to-many), but each SLURM ``job_id`` appears at
    most once table-wide.
    """

    __tablename__ = 'slurm_jobs'

    id = Column(Integer, primary_key=True, autoincrement=True)
    test_run_id = Column(Integer, ForeignKey('test_runs.id'), nullable=False)

    # SLURM job information
    job_id = Column(String(50), nullable=False, unique=True)  # scheduler-assigned job id
    job_name = Column(String(255), nullable=True)
    partition = Column(String(100), nullable=True)
    account = Column(String(100), nullable=True)

    # Job status and timing
    state = Column(String(50), nullable=True)  # PENDING, RUNNING, COMPLETED, FAILED, etc.
    submit_time = Column(DateTime(timezone=True), nullable=True)
    start_time = Column(DateTime(timezone=True), nullable=True)
    end_time = Column(DateTime(timezone=True), nullable=True)

    # Resource allocation
    nodes = Column(String(1000), nullable=True)  # Node list
    cpus = Column(Integer, nullable=True)
    memory_mb = Column(Integer, nullable=True)
    time_limit_minutes = Column(Integer, nullable=True)

    # Exit information
    exit_code = Column(Integer, nullable=True)
    exit_signal = Column(Integer, nullable=True)

    # File paths
    stdout_path = Column(String(1000), nullable=True)
    stderr_path = Column(String(1000), nullable=True)
    script_path = Column(String(1000), nullable=True)

    # Additional job details
    working_directory = Column(String(1000), nullable=True)
    environment = Column(JSON, default=dict)  # submission environment as JSON

    # Relationships
    test_run = relationship("TestRun", back_populates="slurm_jobs")

    # Indexes
    __table_args__ = (
        Index('idx_slurm_job_test_run', 'test_run_id'),
        Index('idx_slurm_job_id', 'job_id'),
        Index('idx_slurm_job_state', 'state'),
        Index('idx_slurm_job_submit_time', 'submit_time'),
    )

    def __repr__(self):
        return f"<SlurmJob(id={self.id}, job_id='{self.job_id}', state='{self.state}')>"


class BuildCache(Base, TimestampMixin):
    """Build cache model for tracking build artifacts.

    Maps a hash of the build inputs (``cache_key``) to a reusable build
    artifact on disk, with validity and usage-tracking bookkeeping.
    """

    __tablename__ = 'build_cache'

    id = Column(Integer, primary_key=True, autoincrement=True)
    build_config_id = Column(Integer, ForeignKey('build_configurations.id'), nullable=False)

    # Cache identification
    cache_key = Column(String(64), nullable=False, unique=True)  # Hash of build inputs
    artifact_path = Column(String(1000), nullable=False)  # Path to cached artifact

    # Cache metadata
    build_time_seconds = Column(Float, nullable=True)
    artifact_size_bytes = Column(Integer, nullable=True)
    is_valid = Column(Boolean, default=True, nullable=False)  # invalidation flag

    # Usage tracking
    hit_count = Column(Integer, default=0, nullable=False)
    last_accessed = Column(DateTime(timezone=True), nullable=True)

    # Relationships
    build_config = relationship("BuildConfiguration", back_populates="build_caches")

    # Indexes
    __table_args__ = (
        Index('idx_build_cache_config', 'build_config_id'),
        Index('idx_build_cache_key', 'cache_key'),
        Index('idx_build_cache_valid', 'is_valid'),
        Index('idx_build_cache_accessed', 'last_accessed'),
    )

    def mark_accessed(self):
        """Record a cache hit: bump hit_count and stamp last_accessed (UTC).

        Bug fix: on a freshly constructed, not-yet-flushed instance
        ``hit_count`` is still ``None`` (the Python-side ``default=0`` is
        only applied at INSERT time), so the previous ``self.hit_count += 1``
        raised TypeError; coalesce to 0 before incrementing.
        """
        self.hit_count = (self.hit_count or 0) + 1
        self.last_accessed = datetime.now(timezone.utc)

    def __repr__(self):
        return f"<BuildCache(id={self.id}, key='{self.cache_key[:8]}...', hits={self.hit_count})>"