import datetime
from sqlalchemy.orm import Session
from . import models, schemas
import uuid
from typing import Dict, Any, List

# --- Server DAO ---
def get_server(db: Session, server_id: str):
    """Look up a server row by primary key; returns None when absent."""
    query = db.query(models.Server).filter(models.Server.id == server_id)
    return query.first()

def get_app_by_type(db: Session, server_id: str, app_type: str):
    """Return the first app of `app_type` registered on `server_id`, or None."""
    query = db.query(models.App).filter(
        models.App.server_id == server_id,
        models.App.type == app_type,
    )
    return query.first()

def get_server_by_name(db: Session, servername: str):
    """Fetch a server by its unique `servername`; returns None when not found."""
    query = db.query(models.Server).filter(models.Server.servername == servername)
    return query.first()

def get_servers(db: Session, skip: int = 0, limit: int = 100):
    """List servers with simple offset/limit pagination."""
    query = db.query(models.Server)
    return query.offset(skip).limit(limit).all()

def create_server(db: Session, server: schemas.ServerCreate, info: Dict[str, str]):
    """Persist a new server record enriched with probed host facts.

    `info` carries best-effort host details (os/hostname/uptime/architecture);
    any missing key defaults to "unknown". Returns the refreshed ORM instance.
    """
    new_id = f"server_{uuid.uuid4().hex[:12]}"
    # TODO: Encrypt password before storing
    host_facts = {
        key: info.get(key, "unknown")
        for key in ("os", "hostname", "uptime", "architecture")
    }
    record = models.Server(id=new_id, **server.dict(), **host_facts)
    db.add(record)
    db.commit()
    db.refresh(record)
    return record

def delete_server(db: Session, server_id: str):
    """Delete the server identified by `server_id`.

    Returns True when a row was removed, False when no such server exists.
    """
    target = db.query(models.Server).filter(models.Server.id == server_id).first()
    if target is None:
        return False
    db.delete(target)
    db.commit()
    return True

def set_active_server(db: Session, server_id: str):
    """Make `server_id` the single active server.

    Returns the refreshed server row, or None when the id is unknown.

    Fix: the target is looked up BEFORE any deactivation happens. The old
    version issued a bulk "deactivate all" UPDATE first and, when the id
    was unknown, returned None without committing or rolling back — leaving
    the session dirty with every server flagged inactive in-transaction.
    """
    db_server = db.query(models.Server).filter(models.Server.id == server_id).first()
    if db_server is None:
        return None
    # Deactivate every OTHER active server so at most one remains active.
    db.query(models.Server)\
        .filter(models.Server.is_active == True, models.Server.id != server_id)\
        .update({"is_active": False})
    db_server.is_active = True
    db.commit()
    db.refresh(db_server)
    return db_server

def get_active_server(db: Session):
    """Return the currently active server, or None if none is flagged active."""
    query = db.query(models.Server).filter(models.Server.is_active == True)
    return query.first()


# --- Metrics DAO ---

def save_all_metrics(db: Session, server_id: str, metrics: Dict[str, Any]):
    """Saves all collected metrics into their respective tables.

    Fix: one timestamp is captured once and shared by all five rows, so the
    records of a single collection cycle align exactly when queried by time
    (previously each row called utcnow() separately, producing slightly
    different timestamps within one batch).
    """
    now = datetime.datetime.utcnow()

    cpu_data = metrics.get('cpu', {})
    cpu_metric = models.CpuMetric(
        server_id=server_id,
        nproc=cpu_data.get('nproc', 0),
        loadavg=cpu_data.get('loadavg', {}),
        syscalls_per_sec=cpu_data.get('syscalls_per_sec', 0),
        context_switches_interrupts=cpu_data.get('context_switches_interrupts', {}),
        process_count=cpu_data.get('process_count', 0),
        cpu_usage=cpu_data.get('cpu_usage', {}),
        ps_state=cpu_data.get('ps_state', []),
        timestamp=now
    )

    mem_data = metrics.get('memory', {})
    mem_metric = models.MemoryMetric(
        server_id=server_id,
        info=mem_data.get('info', {}),
        oom_kill=mem_data.get('oom_kill', 0),
        swappiness=mem_data.get('swappiness', 0),
        page_scan=mem_data.get('page_scan', {}),
        vfs_cache_pressure=mem_data.get('vfs_cache_pressure', 0),
        dirty_background_ratio=mem_data.get('dirty_background_ratio', 0),
        dirty_ratio=mem_data.get('dirty_ratio', 0),
        transparent_hugepage=mem_data.get('transparent_hugepage', 'unknown'),
        timestamp=now
    )

    io_data = metrics.get('io', {})
    io_metric = models.IoMetric(
        server_id=server_id,
        device_stats=io_data.get('device_stats', {}),
        device_size=io_data.get('device_size', {}),
        device_sched_queDepth_rdAhead=io_data.get('device_sched_queDepth_rdAhead', {}),
        inode_usage=io_data.get('inode_usage', {}),
        fs_space=io_data.get('fs_space', {}),
        timestamp=now
    )

    net_data = metrics.get('network', {})
    net_metric = models.NetworkMetric(
        server_id=server_id,
        net_stats=net_data.get('net_stats', {}),
        net_err_stats=net_data.get('net_err_stats', {}),
        net_speed_duplex_mtu=net_data.get('net_speed_duplex_mtu', {}),
        tcp_drops=net_data.get('tcp_drops', {}),
        tcp_retrans=net_data.get('tcp_retrans', {}),
        tcp_congestion_control=net_data.get('tcp_congestion_control', 'cubic'),
        tcp_rmem=net_data.get('tcp_rmem', '4096 87380 6291456'),
        tcp_wmem=net_data.get('tcp_wmem', '4096 65536 6291456'),
        tcp_max_syn_backlog=net_data.get('tcp_max_syn_backlog', 1024),
        somaxconn=net_data.get('somaxconn', 128),
        tcp_window_scaling=net_data.get('tcp_window_scaling', '1'),
        timestamp=now
    )

    mysql_data = metrics.get('mysql', {})
    mysql_metric = models.MysqlMetric(
        server_id=server_id,
        qps=mysql_data.get('qps', 0),
        tps=mysql_data.get('tps', 0),
        slow_queries=mysql_data.get('slow_queries', 0),
        threads_connected=mysql_data.get('threads_connected', 0),
        threads_running=mysql_data.get('threads_running', 0),
        aborted_connects=mysql_data.get('aborted_connects', 0),
        innodb_buffer_pool_reads=mysql_data.get('innodb_buffer_pool_reads', 0),
        innodb_buffer_pool_read_requests=mysql_data.get('innodb_buffer_pool_read_requests', 0),
        innodb_row_locks_waits=mysql_data.get('innodb_row_locks_waits', 0),
        innodb_row_lock_time=mysql_data.get('innodb_row_lock_time', 0),
        innodb_log_waits=mysql_data.get('innodb_log_waits', 0),
        timestamp=now
    )

    db.add_all([cpu_metric, mem_metric, io_metric, net_metric, mysql_metric])
    db.commit()

def get_latest_metric(db: Session, server_id: str, model: Any):
    """Gets the latest metric for a given server and model."""
    query = (
        db.query(model)
        .filter(model.server_id == server_id)
        .order_by(model.timestamp.desc())
    )
    return query.first()

def get_historical_metrics(db: Session, server_id: str, model: Any, timerange: str):
    """
    Get historical metrics for a given server and model within a specified time range.
    Returns 15 evenly distributed data points across the time range.
    timerange: '15m', '30m', '1h', '3h'
    """
    time_ranges = {
        '15m': 15 * 60,
        '30m': 30 * 60,
        '1h': 60 * 60,
        '3h': 3 * 60 * 60,
    }
    
    if timerange not in time_ranges:
        raise ValueError("Invalid timerange specified. Use one of: " + ", ".join(time_ranges.keys()))

    time_limit = datetime.datetime.utcnow() - datetime.timedelta(seconds=time_ranges[timerange])
    
    # Get total count of records in the time range
    total_count = db.query(model)\
        .filter(model.server_id == server_id, model.timestamp >= time_limit)\
        .count()
    
    if total_count == 0:
        return []
    
    # Use a simpler approach: get records at regular intervals
    all_records = db.query(model)\
        .filter(model.server_id == server_id, model.timestamp >= time_limit)\
        .order_by(model.timestamp.desc())\
        .all()
    
    # If we have 15 or fewer records, return all of them
    if total_count <= 15:
        return all_records[::-1]
    
    # Calculate step size to get evenly distributed points
    step = max(1, total_count // 15)
    
    # Select evenly distributed records
    selected_records = []
    for i in range(0, len(all_records), step):
        selected_records.append(all_records[i])
        if len(selected_records) >= 15:
            break

    return selected_records[::-1]

# --- Application DAO ---
def get_app(db: Session, app_id: str):
    """Fetch an app row by primary key; returns None when absent."""
    query = db.query(models.App).filter(models.App.id == app_id)
    return query.first()

def get_apps(db: Session, server_id: str, skip: int = 0, limit: int = 100):
    """List the apps registered on `server_id`, paginated by skip/limit."""
    query = db.query(models.App).filter(models.App.server_id == server_id)
    return query.offset(skip).limit(limit).all()

def create_app(db: Session, app: schemas.AppCreate):
    """Create an app record from an AppCreate schema and return the fresh row."""
    record = models.App(
        id=f"app_{uuid.uuid4().hex[:12]}",
        server_id=app.serverId,
        name=app.name,
        type=app.appType,
        description=app.description,
        config=app.config,
    )
    db.add(record)
    db.commit()
    db.refresh(record)
    return record

def delete_app(db: Session, app_id: str):
    """Delete the app identified by `app_id`.

    Returns True when a row was removed, False when no such app exists.
    """
    target = db.query(models.App).filter(models.App.id == app_id).first()
    if target is None:
        return False
    db.delete(target)
    db.commit()
    return True

# --- Optimization History DAO ---
def get_optimization_history(db: Session, server_id: str, isApp: bool = False, skip: int = 0, limit: int = 100):
    """Return optimization history for a server, newest first.

    When `isApp` is True, only entries tied to an app (app_id set) are
    included; otherwise all entries for the server are returned.
    """
    conditions = [models.OptimizationHistory.server_id == server_id]
    if isApp:
        conditions.append(models.OptimizationHistory.app_id.isnot(None))

    return db.query(models.OptimizationHistory)\
        .filter(*conditions)\
        .order_by(models.OptimizationHistory.timestamp.desc())\
        .offset(skip)\
        .limit(limit)\
        .all()

def create_optimization_history(db: Session, suggestion_id: str, result: Dict[str, Any], app_id: str = None):
    """Record an applied optimization and mark its suggestion as consumed.

    Raises ValueError when the suggestion does not exist or was already
    applied. Returns the refreshed history row.
    """
    suggestion = db.query(models.SuggestionHistory)\
        .filter(models.SuggestionHistory.id == suggestion_id).first()

    if suggestion is None:
        raise ValueError("Suggestion not found")

    if suggestion.applied:
        raise ValueError("Suggestion has already been applied")

    entry = models.OptimizationHistory(
        id=f"opt_{uuid.uuid4().hex[:12]}",
        server_id=suggestion.server_id,
        suggestion_id=suggestion.id,
        timestamp=datetime.datetime.utcnow(),
        app_id=app_id,
        improvement=result.get('improvement', 0),
        health=result.get('health', 80),
        before=result.get('baseline', 0),
        after=result.get('bestresult', 0),
    )
    db.add(entry)
    # Consume the suggestion so it cannot be applied twice.
    suggestion.applied = True
    db.commit()
    db.refresh(entry)
    return entry

def get_latest_suggestion(db: Session, server_id: str, app_id: str = None, timerange: str = '2m'):
    """
    Get the latest suggestion for a server or app within a specified time range.
    timerange: '15m', '30m', '1h', '3h'
    """
    time_ranges = {
        '1m': 60,
        '2m': 2 * 60,
        '3m': 3 * 60,
        '4m': 4 * 60,
        '5m': 5 * 60,
    }
    
    if timerange not in time_ranges:
        raise ValueError("Invalid timerange specified. Use one of: " + ", ".join(time_ranges.keys()))

    time_limit = datetime.datetime.utcnow() - datetime.timedelta(seconds=time_ranges[timerange])
    
    query = db.query(models.SuggestionHistory)\
        .filter(models.SuggestionHistory.server_id == server_id, models.SuggestionHistory.timestamp >= time_limit)
    
    if app_id:
        query = query.filter(models.SuggestionHistory.app_id == app_id)
    else:
        query = query.filter(models.SuggestionHistory.app_id.is_(None))
    
    return query.order_by(models.SuggestionHistory.timestamp.desc()).first()

def create_suggestion(db: Session, result: dict, stage_result: dict, server_id: str, app_id: str = None):
    """Persist a new (unapplied) suggestion and return the refreshed row."""
    record = models.SuggestionHistory(
        id=f"sugg_{uuid.uuid4().hex[:12]}",
        server_id=server_id,
        suggestion=result,
        applied=False,
        app_id=app_id,
        stage_result=stage_result,
    )
    db.add(record)
    db.commit()
    db.refresh(record)
    return record

def get_suggestions(db: Session, suggestion_id: str, server_id: str = None, app_id: str = None, isApplied: bool = False):
    """Return the first SuggestionHistory row matching the given filters, or None.

    Always filters on the `applied` flag; id/server/app filters are applied
    only when their arguments are truthy.
    """
    conditions = [models.SuggestionHistory.applied == isApplied]

    if suggestion_id:
        conditions.append(models.SuggestionHistory.id == suggestion_id)
    if server_id:
        conditions.append(models.SuggestionHistory.server_id == server_id)
    if app_id:
        conditions.append(models.SuggestionHistory.app_id == app_id)

    return db.query(models.SuggestionHistory).filter(*conditions).first()