from sqlalchemy import create_engine, Column, Integer, String, Float, BigInteger, JSON, DateTime, Text, Boolean
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from .config import DATABASE_URL
import datetime

Base = declarative_base()

class Server(Base):
    """A managed remote server: SSH connection details plus cached host facts."""
    __tablename__ = "servers"
    id = Column(String, primary_key=True, index=True)
    servername = Column(String, unique=True, index=True)  # human-readable unique name
    ip = Column(String, unique=True)  # one record per IP address
    port = Column(Integer)  # SSH port
    user = Column(String)  # SSH login user
    authMethod = Column(Integer) # 0 for password, 1 for key
    password = Column(String, nullable=True) # Encrypted
    authKey = Column(Text, nullable=True)  # private key material, used when authMethod == 1
    description = Column(String, nullable=True)
    is_active = Column(Boolean, default=False)
    # Fields below are discovered from the host after connecting; nullable
    # because they are unknown until the first successful probe.
    os = Column(String, nullable=True)
    hostname = Column(String, nullable=True)
    uptime = Column(String, nullable=True)
    architecture = Column(String, nullable=True)

class App(Base):
    """An application deployed on a server (looked up via server_id)."""
    __tablename__ = "apps"
    id = Column(String, primary_key=True, index=True)
    server_id = Column(String, index=True)  # NOTE(review): no ForeignKey to servers.id — integrity enforced in app code, confirm intentional
    name = Column(String, index=True)
    type = Column(String, index=True)  # e.g., "web", "db", etc.
    description = Column(String, nullable=True)
    config = Column(JSON, nullable=True)  # Store app-specific configuration

class SuggestionHistory(Base):
    """Optimization suggestions generated for a server (or a specific app on it)."""
    __tablename__ = "suggestion_history"
    id = Column(String, primary_key=True, index=True)
    server_id = Column(String, index=True)
    # NOTE(review): datetime.utcnow is deprecated since Python 3.12; a
    # timezone-aware default would be preferable if the runtime allows.
    timestamp = Column(DateTime, default=datetime.datetime.utcnow, index=True)
    suggestion = Column(JSON)  # the suggestion payload itself
    applied = Column(Boolean, default=False)  # True once the suggestion has been acted on
    # for app: set only when the suggestion targets a specific app
    app_id = Column(String, index=True, nullable=True)
    app_type = Column(String, nullable=True)
    # stage result: intermediate/staged outcome data, if any
    stage_result = Column(JSON, nullable=True)

class OptimizationHistory(Base):
    """Outcome of applying a suggestion: before/after measurements per server."""
    __tablename__ = "optimization_history"
    id = Column(String, primary_key=True, index=True)
    server_id = Column(String, index=True)
    suggestion_id = Column(String, index=True)  # links back to SuggestionHistory.id
    timestamp = Column(DateTime, default=datetime.datetime.utcnow, index=True)
    app_id = Column(String, index=True, nullable=True)
    improvement = Column(Float, nullable=True)  # presumably a delta/percentage — confirm units with producer
    health = Column(Integer, nullable=True)  # presumably a health score — confirm scale with producer
    before = Column(Float, nullable=True)  # metric value before the change
    after = Column(Float, nullable=True)   # metric value after the change

# Metrics tables - Redesigned for structured data
class CpuMetric(Base):
    """Point-in-time CPU telemetry sample for one server."""
    __tablename__ = 'cpu_metrics'
    id = Column(Integer, primary_key=True)
    server_id = Column(String, index=True)
    timestamp = Column(DateTime, default=datetime.datetime.utcnow, index=True)
    nproc = Column(Integer)  # logical CPU count
    loadavg = Column(JSON)
    syscalls_per_sec = Column(Integer)
    context_switches_interrupts = Column(JSON)
    process_count = Column(Integer)
    cpu_usage = Column(JSON)  # Store per-core usage as JSON
    ps_state = Column(JSON)  # presumably per-state process counts — confirm with collector

    def __repr__(self):
        # Fix: ps_state was missing from the repr even though every other
        # column is listed (sibling metric classes include all columns).
        return (f"(CpuMetric server_id={self.server_id}, timestamp={self.timestamp}, "
                f"nproc={self.nproc}, loadavg={self.loadavg}, "
                f"syscalls_per_sec={self.syscalls_per_sec}, context_switches_interrupts={self.context_switches_interrupts}, "
                f"process_count={self.process_count}, cpu_usage={self.cpu_usage}, "
                f"ps_state={self.ps_state})\n")

class MemoryMetric(Base):
    """Point-in-time memory/VM telemetry sample for one server."""
    __tablename__ = 'memory_metrics'
    id = Column(Integer, primary_key=True)
    server_id = Column(String, index=True)
    timestamp = Column(DateTime, default=datetime.datetime.utcnow, index=True)
    info = Column(JSON)  # general memory info blob
    oom_kill = Column(Integer)  # OOM-killer event count
    swappiness = Column(Integer)  # kernel vm.swappiness value
    page_scan = Column(JSON)
    vfs_cache_pressure = Column(Integer)  # kernel vm.vfs_cache_pressure value
    dirty_background_ratio = Column(Integer)
    dirty_ratio = Column(Integer)
    transparent_hugepage = Column(String)  # THP setting (stored as reported string)

    def __repr__(self):
        # Debug-friendly dump of every column.
        return (f"(MemoryMetric server_id={self.server_id}, timestamp={self.timestamp}, "
                f"info={self.info}, oom_kill={self.oom_kill}, swappiness={self.swappiness}, "
                f"page_scan={self.page_scan}, vfs_cache_pressure={self.vfs_cache_pressure}, "
                f"dirty_background_ratio={self.dirty_background_ratio}, dirty_ratio={self.dirty_ratio}, "
                f"transparent_hugepage={self.transparent_hugepage})\n")

class IoMetric(Base):
    """Point-in-time disk/filesystem I/O telemetry sample for one server."""
    __tablename__ = 'io_metrics'
    id = Column(Integer, primary_key=True)
    server_id = Column(String, index=True)
    timestamp = Column(DateTime, default=datetime.datetime.utcnow, index=True)
    device_stats = Column(JSON)  # per-device I/O counters
    device_size = Column(JSON)  # Store per-device size as JSON
    device_sched_queDepth_rdAhead = Column(JSON)  # Store per-device scheduler, queue-depth and read-ahead settings
    inode_usage = Column(JSON) # Store per-filesystem stats as JSON
    fs_space = Column(JSON)  # per-filesystem free/used space

    def __repr__(self):
        # Debug-friendly dump of every column.
        return (f"(IoMetric server_id={self.server_id}, timestamp={self.timestamp}, "
                f"device_stats={self.device_stats}, device_size={self.device_size}, "
                f"device_sched_queDepth_rdAhead={self.device_sched_queDepth_rdAhead}, "
                f"inode_usage={self.inode_usage}, fs_space={self.fs_space})\n")

class NetworkMetric(Base):
    """Point-in-time network/TCP-stack telemetry sample for one server."""
    __tablename__ = 'network_metrics'
    id = Column(Integer, primary_key=True)
    server_id = Column(String, index=True)
    timestamp = Column(DateTime, default=datetime.datetime.utcnow, index=True)
    net_stats = Column(JSON) # Store per-interface stats as JSON
    net_err_stats = Column(JSON) # Store per-interface error stats as JSON
    net_speed_duplex_mtu = Column(JSON) # Store speed, duplex, and MTU as JSON
    tcp_drops = Column(JSON)
    tcp_retrans= Column(JSON)
    # Kernel TCP tunables, stored as reported (rmem/wmem are the raw
    # "min default max" strings, hence String not JSON).
    tcp_congestion_control= Column(String)
    tcp_rmem = Column(String)
    tcp_wmem = Column(String)
    tcp_max_syn_backlog = Column(Integer)
    somaxconn = Column(Integer)
    tcp_window_scaling = Column(String)

    def __repr__(self):
        # Debug-friendly dump of every column.
        return (f"(NetworkMetric server_id={self.server_id}, timestamp={self.timestamp}, "
                f"net_stats={self.net_stats}, net_err_stats={self.net_err_stats}, "
                f"net_speed_duplex_mtu={self.net_speed_duplex_mtu}, tcp_drops={self.tcp_drops}, "
                f"tcp_retrans={self.tcp_retrans}, tcp_congestion_control={self.tcp_congestion_control}, "
                f"tcp_rmem={self.tcp_rmem}, tcp_wmem={self.tcp_wmem}, "
                f"tcp_max_syn_backlog={self.tcp_max_syn_backlog}, somaxconn={self.somaxconn}, "
                f"tcp_window_scaling={self.tcp_window_scaling})\n")

class MysqlMetric(Base):
    """Point-in-time MySQL server telemetry sample (one row per poll)."""
    __tablename__ = 'mysql_metrics'
    id = Column(Integer, primary_key=True)
    server_id = Column(String, index=True)
    timestamp = Column(DateTime, default=datetime.datetime.utcnow, index=True)
    qps = Column(Float)  # queries per second
    tps = Column(Float)  # transactions per second
    slow_queries = Column(Integer)
    threads_connected = Column(Integer)
    threads_running = Column(Integer)
    aborted_connects = Column(Integer)
    # InnoDB counters are cumulative and can be large, hence BigInteger.
    innodb_buffer_pool_reads = Column(BigInteger)
    innodb_buffer_pool_read_requests = Column(BigInteger)
    innodb_row_locks_waits = Column(BigInteger)
    innodb_row_lock_time = Column(BigInteger)
    innodb_log_waits = Column(BigInteger)

    def __repr__(self):
        # Debug-friendly dump of every column.
        return (f"(MysqlMetric server_id={self.server_id}, timestamp={self.timestamp}, "
                f"qps={self.qps}, tps={self.tps}, slow_queries={self.slow_queries}, "
                f"threads_connected={self.threads_connected}, threads_running={self.threads_running}, "
                f"aborted_connects={self.aborted_connects}, innodb_buffer_pool_reads={self.innodb_buffer_pool_reads}, "
                f"innodb_buffer_pool_read_requests={self.innodb_buffer_pool_read_requests}, "
                f"innodb_row_locks_waits={self.innodb_row_locks_waits}, innodb_row_lock_time={self.innodb_row_lock_time}, "
                f"innodb_log_waits={self.innodb_log_waits})\n")

# check_same_thread is a SQLite-only DBAPI argument (needed because web
# frameworks may touch one session from several threads); passing it to any
# other backend raises a TypeError at connect time, so gate it on the URL.
_connect_args = (
    {"check_same_thread": False} if DATABASE_URL.startswith("sqlite") else {}
)
engine = create_engine(DATABASE_URL, connect_args=_connect_args)
# Session factory: explicit commit/flush, bound to the single module engine.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

def init_db():
    """Create every table declared on Base in the target database (no-op for tables that already exist)."""
    Base.metadata.create_all(engine)

def get_db():
    """Yield a database session and guarantee it is closed afterwards.

    Generator-style dependency: the caller receives one session; cleanup
    runs in ``finally`` even if the caller raises.
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
