import asyncio
import logging
from typing import Any, Dict, List, Optional

import asyncssh

from . import dao
from .config import METRIC_COLLECTION
from .models import get_db

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

class SSHManager:
    """
    Manages the SSH connection and remote command execution for one server.

    ``server_info`` is expected to provide: 'ip', 'port', 'user',
    'servername', 'authMethod' (0 = password, 1 = private key) and,
    depending on the auth method, 'password' or 'authKey'.
    """
    def __init__(self, server_info: Dict[str, Any]):
        self.server_info = server_info
        # Open connection, or None when disconnected.
        self.connection: Optional["asyncssh.SSHClientConnection"] = None
        # MySQL credentials ('user'/'password'/'port'); assigned externally
        # (e.g. by CollectionScheduler) before MySQL metrics can be collected.
        self.mysql_config: Optional[Dict[str, Any]] = None

    async def connect(self):
        """Establishes an SSH connection (no-op if one is already open)."""
        if self.connection and not self.connection.is_closing():
            return

        client_key = None
        if self.server_info['authMethod'] == 1:
            # Key-based authentication
            if not self.server_info.get('authKey'):
                raise ValueError("SSH key authentication requires 'authKey' in server info.")
            client_key = asyncssh.import_private_key(self.server_info['authKey'])

        try:
            self.connection = await asyncssh.connect(
                self.server_info['ip'],
                port=self.server_info['port'],
                username=self.server_info['user'],
                password=self.server_info.get('password') if self.server_info['authMethod'] == 0 else None,
                client_keys=[client_key] if client_key is not None else None,
                known_hosts=None  # NOTE(review): host-key verification is disabled; acceptable for trusted LANs only
            )
            logger.info(f"Successfully connected to {self.server_info['servername']}.")
        except Exception as e:
            logger.error(f"Failed to connect to {self.server_info['servername']}: {e}")
            self.connection = None
            raise

    async def disconnect(self):
        """Closes the SSH connection if one is open."""
        if self.connection:
            self.connection.close()
            await self.connection.wait_closed()
            logger.info(f"Disconnected from {self.server_info['servername']}.")
            self.connection = None

    async def run_command(self, command: str) -> "asyncssh.SSHCompletedProcess":
        """
        Runs a command on the remote server, connecting first if needed.

        Raises:
            asyncssh.ProcessError: the command exited non-zero (``check=True``).
            ConnectionError: no connection could be established.
        """
        if not self.connection:
            await self.connect()
        if not self.connection:
            raise ConnectionError(f"Not connected to {self.server_info['servername']}.")

        try:
            result = await self.connection.run(command, check=True)
            return result
        except asyncssh.ProcessError as e:
            logger.error(f"Command '{command}' failed on {self.server_info['servername']}: {e.stderr}")
            raise
        except Exception as e:
            logger.error(f"Error running command '{command}' on {self.server_info['servername']}: {e}")
            raise

    async def get_server_info(self):
        """Returns OS name, hostname, uptime and architecture, or None on failure."""
        result = await self.run_command("cat /etc/os-release | grep PRETTY_NAME | awk -F'\"' '{print $2}' && " \
                                        "hostname && uptime -p && uname -m")
        if result.exit_status != 0:
            logger.error(f"Failed to get server info for {self.server_info['servername']}: {result.stderr}")
            return None
        output = result.stdout.strip().split('\n')
        if len(output) < 4:
            # One of the chained commands produced no output line.
            logger.error(f"Unexpected server info output for {self.server_info['servername']}: {result.stdout!r}")
            return None
        return {
            "os": output[0],
            "hostname": output[1],
            "uptime": output[2],
            "architecture": output[3]
        }

    async def ping_mysql(self, config: Dict[str, Any]) -> bool:
        """Checks if MySQL is reachable on the server."""
        port = config.get('port', 3306)
        user = config.get('user')
        password = config.get('password')
        if not user or not password:
            logger.error("MySQL config must include 'user' and 'password'.")
            return False
        # NOTE(review): password appears on the remote command line (visible in
        # remote `ps`); consider a defaults file if that is a concern.
        command = f"mysql -u'{user}' -p'{password}' -P{port} -e 'SELECT 1;'"
        try:
            result = await self.run_command(command)
            if result.exit_status == 0:
                logger.info(f"MySQL is reachable on {self.server_info['servername']}.")
                return True
            else:
                logger.error(f"MySQL ping failed on {self.server_info['servername']}: {result.stderr}")
                return False
        except Exception as e:
            logger.error(f"Error pinging MySQL on {self.server_info['servername']}: {e}")
            return False

    async def get_running_processes(self, interested: List[str]) -> List[str]:
        """Returns the subset of `interested` process names currently running."""
        result = []
        for proc in interested:
            # `|| true` keeps the exit status 0 when grep finds nothing, so an
            # absent process is not logged as an error (run_command uses check=True).
            command = f"ps -eo comm | grep '{proc}' || true"
            try:
                output = await self.run_command(command)
                if output.stdout.strip():
                    result.append(proc)
            except Exception as e:
                logger.error(f"Failed to get process '{proc}' on {self.server_info['servername']}: {e}")
        return result

    async def set_system_param(self, param: str, value: Any) -> bool:
        """Sets a kernel parameter via sysctl; returns True on success."""
        command = f"sysctl -w {param}={value}"
        try:
            result = await self.run_command(command)
            if result.exit_status == 0:
                logger.info(f"Successfully set {param} to {value} on {self.server_info['servername']}.")
                return True
            else:
                logger.error(f"Failed to set {param} on {self.server_info['servername']}: {result.stderr}")
                return False
        except Exception as e:
            logger.error(f"Error setting {param} on {self.server_info['servername']}: {e}")
            return False

class DataCollector:
    """
    Collects metrics from a remote server via SSH and parses command output.

    Commands come from METRIC_COLLECTION; each metric config may name one of
    the parser methods below (via its 'parser' key) to turn raw stdout into
    structured data.
    """
    def __init__(self, ssh_manager: "SSHManager"):
        self.ssh_manager = ssh_manager
        # Cap concurrent SSH commands at 8 to avoid overloading the server.
        self.semaphore = asyncio.Semaphore(8)

    async def _run_command_with_semaphore(self, command: str):
        """Runs a command while holding the concurrency semaphore."""
        async with self.semaphore:
            return await self.ssh_manager.run_command(command)

    async def collect_and_save(self, server_id: str):
        """Collects all metrics, parses them, and saves them to the database."""
        all_metrics: Dict[str, Any] = {}

        # Build an ordered list of (command, category, metric_name, config)
        # jobs. A list (rather than a command-keyed dict) keeps results
        # correctly attributed even when two metrics share the same command
        # string, which would silently collide in a dict.
        jobs: List[tuple] = []

        for category, metrics in METRIC_COLLECTION.items():
            if category == "mysql":
                # Skip MySQL metrics if MySQL is not reachable
                mysql_config = self.ssh_manager.mysql_config
                if not mysql_config or not await self.ssh_manager.ping_mysql(mysql_config):
                    logger.warning(f"Skipping MySQL metrics for {self.ssh_manager.server_info['servername']} as MySQL is not reachable.")
                    continue

            all_metrics[category] = {}
            for metric_name, config in metrics.items():
                command = config['command']
                if category == "mysql":
                    command = command.replace("{user}", mysql_config.get('user', 'root'))
                    command = command.replace("{password}", mysql_config.get('password', '123456'))
                    command = command.replace("{port}", str(mysql_config.get('port', 3306)))
                jobs.append((command, category, metric_name, config))

        # Execute all commands concurrently (bounded by the semaphore).
        try:
            results = await asyncio.gather(
                *[self._run_command_with_semaphore(cmd) for cmd, _, _, _ in jobs],
                return_exceptions=True
            )

            # results[i] corresponds to jobs[i] (gather preserves order).
            for (command, category, metric_name, config), result in zip(jobs, results):
                if isinstance(result, Exception):
                    logger.error(f"Failed to collect metric '{category}.{metric_name}': {result}")
                    continue

                try:
                    stdout = result.stdout.strip()
                    parser_func_name = config.get('parser')
                    if parser_func_name and hasattr(self, parser_func_name):
                        parser_func = getattr(self, parser_func_name)
                        all_metrics[category][metric_name] = parser_func(stdout)
                    else:  # No parser needed
                        all_metrics[category][metric_name] = stdout
                except Exception as e:
                    logger.error(f"Failed to parse metric '{category}.{metric_name}': {e}")

        except Exception as e:
            logger.error(f"Failed to execute commands concurrently: {e}")
            return

        # Now, save the collected metrics into their respective tables
        db = next(get_db())
        try:
            dao.save_all_metrics(db, server_id, all_metrics)
            logger.info(f"Successfully collected and saved all metrics for server {server_id}")
        except Exception as e:
            logger.error(f"Failed to save metrics for server {server_id}: {e}")
        finally:
            db.close()


    # --- Parser Functions ---

    def none(self, output: str) -> str:
        """Identity parser: returns the raw stdout unchanged."""
        return output

    def parse_rate(self, output: str) -> float:
        """
        Computes second-sample minus first-sample (a per-interval rate).

        input:
        12345
        12346

        output:
        1
        """
        lines = output.strip().split('\n')
        if len(lines) < 2:
            return 0.0

        return (int(lines[1]) - int(lines[0])) / 1.0

    def parse_loadavg(self, output: str) -> Dict[str, Any]:
        """Parses the first three fields of /proc/loadavg-style output."""
        parts = output.split()
        return {
            "1m": float(parts[0]),
            "5m": float(parts[1]),
            "15m": float(parts[2]),
            # "running_processes": int(parts[3].split('/')[0]),
            # "total_processes": int(parts[3].split('/')[1]),
        }

    def parse_mpstat(self, output: str) -> Dict[str, Any]:
        """
        input:
        %usr %sys %iowait %idle
        0.00 0.00 0.00 100.00
        0.00 0.00 0.00 100.00
        0.00 0.00 0.00 100.00

        output (keys are ints; -1 is the "all CPUs" row, then CPU 0, 1, ...):
        {
            -1: {
                "user": 0.00,
                "system": 0.00,
                "iowait": 0.00,
                "idle": 100.00
            },
            ...
        }
        """
        lines = output.strip().split('\n')
        cpu_usage = {}
        for i, line in enumerate(lines[1:]):
            parts = line.split()
            if len(parts) < 4:
                continue
            cpu_usage[i - 1] = {
                "user": float(parts[0]),
                "system": float(parts[1]),
                "iowait": float(parts[2]),
                "idle": float(parts[3])
            }
        return cpu_usage

    def parse_vmstat_cs_in(self, output: str) -> Dict[str, Any]:
        """
        input:
        in cs
        4 4

        output:
        {
            "contextSwitchesPerSec": 4,
            "interruptsPerSec": 4,
        }
        """
        lines = output.strip().split('\n')
        if len(lines) < 2:
            return {}

        parts = lines[1].split()
        if len(parts) < 2:
            return {}

        return {
            "contextSwitchesPerSec": int(parts[0]),
            "interruptsPerSec": int(parts[1])
        }

    def parse_ps_state(self, output: str) -> List[Dict[str, Any]]:
        """
        Parses `ps` output with columns:
        PID PPID USER %CPU %MEM VSZ RSS STAT COMMAND TIME STARTED

        NOTE(review): `split(maxsplit=10)` assumes COMMAND contains no spaces;
        a command with spaces shifts the TIME/STARTED fields — confirm the ps
        invocation uses `comm` (not `args`).

        output:
        [
            {
                "pid": 1,
                "ppid": 0,
                "user": "root",
                "cpu": 0.0,
                "mem": 0.1,
                "vsz": 169084,
                "rss": 5564,
                "stat": "Ss",
                "start": "Dec 04",
                "time": "00:04:48",
                "command": "systemd"
            },
            ...
        ]
        """
        lines = output.strip().split('\n')
        if len(lines) < 2:
            return []

        processes = []
        for line in lines[1:]:
            parts = line.strip().split(maxsplit=10)
            if len(parts) < 11:
                continue
            processes.append({
                "pid": int(parts[0]),
                "ppid": int(parts[1]),
                "user": parts[2],
                "cpu": float(parts[3]),
                "mem": float(parts[4]),
                "vsz": int(parts[5]),
                "rss": int(parts[6]),
                "stat": parts[7],
                "command": parts[8],
                "time": parts[9],
                "start": parts[10].strip()
            })
        return processes

    def parse_free(self, output: str) -> Dict[str, Any]:
        """
        input:
                    total        used        free      shared  buff/cache   available
        Mem:        16040132      657212    15108456        4180      274464    15171376
        Swap:        4194304           0     4194304

        output:
        {
            "total": 16040132,
            "used": 657212,
            "free": 15108456,
            "buffers": 274464,
            "shared": 4180,
            "available": 15171376,
            "swap": {
                "total": 4194304,
                "used": 0,
                "free": 4194304
            }
        }
        """
        lines = output.strip().split('\n')
        if len(lines) < 3:
            return {}

        mem = lines[1].split()
        swap = lines[2].split()
        if len(mem) < 7 or len(swap) < 4:
            return {}

        return {
            "total": int(mem[1]),
            "used": int(mem[2]),
            "free": int(mem[3]),
            "shared": int(mem[4]),
            "buffers": int(mem[5]),
            "available": int(mem[6]),
            "swap": {
                "total": int(swap[1]),
                "used": int(swap[2]),
                "free": int(swap[3])
            }
        }

    def parse_sysctl(self, output: str) -> str:
        """Extracts the value from `key = value` sysctl output."""
        return output.split('=')[-1].strip()

    def parse_vmstat_page_scan(self, output: str) -> Dict[str, Any]:
        """
        Parses two samples (3 lines each) of page-scan counters; the rate is
        the delta of (kswapd + direct) between the two samples.

        input:
        pgscan_kswapd 240640053
        pgscan_direct 26111416
        pgscan_direct_throttle 0
        pgscan_kswapd 240640053
        pgscan_direct 26111416
        pgscan_direct_throttle 0

        output:
        {
            "kswapd": 240640053,
            "direct": 26111416,
            "pageScanRate": 0
        }
        """
        lines = output.strip().split('\n')
        if len(lines) < 6:
            return {
                "kswapd": 0,
                "direct": 0,
                "pageScanRate": 0
            }

        return {
            "kswapd": int(lines[0].split()[1]),
            "direct": int(lines[1].split()[1]),
            "pageScanRate": int(lines[3].split()[1]) + int(lines[4].split()[1]) - int(lines[0].split()[1]) - int(lines[1].split()[1])
        }

    def parse_iostat(self, output: str) -> Dict[str, Any]:
        """
        input:
        Device            r/s     rkB/s   rrqm/s  %rrqm r_await rareq-sz     w/s     wkB/s   wrqm/s  %wrqm w_await wareq-sz     d/s     dkB/s   drqm/s  %drqm d_await dareq-sz     f/s f_await  aqu-sz  %util
        sda              0.10      6.64     0.03  24.31    0.29    64.18    0.00      0.00     0.00   0.00    0.00     0.00    0.00      0.00     0.00   0.00    0.00     0.00    0.00    0.00    0.00   0.00

        output:
        {
            "sda": {
                "r/s": 0.10,
                "rKB/s": 6.64,
                "r_await": 0.29,
                "w/s": 0.00,
                "wKB/s": 0.00,
                "w_await": 0.00,
                "await": 0.00,
                "utilization": 0.00
            },
            ...
        }
        """
        lines = output.strip().split('\n')[1:]  # Skip header line
        devices = {}
        for line in lines:
            parts = line.split()
            if len(parts) < 23:
                continue
            devices[parts[0]] = {
                "r/s": float(parts[1]),
                "rKB/s": float(parts[2]),
                "r_await": float(parts[5]),
                "w/s": float(parts[7]),
                "wKB/s": float(parts[8]),
                "w_await": float(parts[11]),
                "await": float(parts[-3]),
                "utilization": float(parts[-1])
            }
        return devices

    def parse_df_i(self, output: str) -> Dict[str, Any]:
        """
        Parses `df -i` style inode usage. A '-' usage column yields -1.

        input:
        /dev/vda3      2608144 135723 2472421    6% /
        /dev/vda2            0      0       0     - /boot/efi

        output:
        {
            "/dev/vda3": {
                "total": 2608144,
                "used": 135723,
                "available": 2472421,
                "usage": 6
            },
            ...
        }
        """
        lines = output.strip().split('\n')
        inode_usage = {}
        for line in lines:
            parts = line.split()
            if len(parts) < 6:
                continue
            inode_usage[parts[0]] = {
                "total": int(parts[1]),
                "used": int(parts[2]),
                "available": int(parts[3]),
                "usage": float(parts[4].rstrip('%')) if parts[4].endswith('%') else -1
            }
        return inode_usage

    def parse_sar_dev(self, output: str) -> Dict[str, Any]:
        """
        Parses `sar -n DEV` "Average:" rows into per-interface throughput.

        output:
        {
            "lo": {
                "rxpck/s": 0.00,
                "txpck/s": 0.00,
                "rxkB/s": 0.00,
                "txkB/s": 0.00,
                "rxcmp/s": 0.00,
                "txcmp/s": 0.00,
                "rxmcst/s": 0.00,
                "ifutil": 0.00
            },
            ...
        }
        """
        lines = output.strip().split('\n')[1:]
        interfaces = {}
        for line in lines:
            parts = line.split()
            if len(parts) < 10:
                continue
            interfaces[parts[1]] = {
                "rxpck/s": float(parts[2]),
                "txpck/s": float(parts[3]),
                "rxkB/s": float(parts[4]),
                "txkB/s": float(parts[5]),
                "rxcmp/s": float(parts[6]),
                "txcmp/s": float(parts[7]),
                "rxmcst/s": float(parts[8]),
                "ifutil": float(parts[9])
            }
        return interfaces

    def parse_netstat_drops(self, output: str) -> Dict[str, Any]:
        """
        input:
        SyncookiesSent ListenOverflows ListenDrops
        136 0 21
        """
        lines = output.strip().split('\n')
        if len(lines) < 2:
            # Header-only or empty output: nothing to report.
            return {}
        values = lines[1].split()
        if len(values) < 3:
            return {}
        return {
            "tcp_syncookies_sent": int(values[0]),
            "tcp_listen_overflows": int(values[1]),
            "tcp_listen_drops": int(values[2])
        }

    def parse_netstat_retrans(self, output: str) -> Dict[str, Any]:
        """
        Two samples of the retransmitted-segments counter; the result is the
        delta between them.

        input:
        6620 segments retransmitted
        6620 segments retransmitted

        output:
        {
            "tcp_retrans_segs/s": 0
        }
        """
        lines = output.strip().split('\n')
        if len(lines) < 2:
            return {"tcp_retrans_segs/s": 0}

        a = lines[0].split()[0]
        b = lines[1].split()[0]
        return {
            "tcp_retrans_segs/s": int(b) - int(a)
        }

    def parse_df_TP(self, output: str) -> Dict[str, Any]:
        """
        Parses `df -TP` output; sizes are reported by df in KiB and converted
        to bytes here.

        input:
        /dev/vda3      ext4       40901312 6803084  32207860      18% /
        /dev/vda2      vfat         201615    6192    195423       4% /boot/efi

        output:
        {
            "/dev/vda3": {
                "type": "ext4",
                "size": 41882943488,
                "used": 6966358016,
                "available": 32980848640,
                "usage": 18,
                "mountPoint": "/"
            },
            ...
        }
        """
        lines = output.strip().split('\n')
        fs_space = {}
        for line in lines:
            parts = line.split()
            if len(parts) < 7:
                continue
            fs_space[parts[0]] = {
                "type": parts[1],
                "size": int(parts[2]) * 1024,  # Convert to bytes
                "used": int(parts[3]) * 1024,   # Convert to bytes
                "available": int(parts[4]) * 1024,  # Convert to bytes
                "usage": float(parts[5].rstrip('%')) if parts[5].endswith('%') else -1,
                "mountPoint": parts[6]
            }
        return fs_space

    def parse_lsblk(self, output: str) -> Dict[str, Any]:
        """
        Parses lsblk tree output. Child partitions (lines containing the tree
        glyph '─') have their used bytes and usage percentages accumulated
        onto the most recent parent device.

        input:
        loop0     77492224 loop   77594624   100%
        vda    42949672960 disk
        ├─vda2   209715200 part    6340608     3%
        └─vda3 42737843712 part 6966800384    17%

        output:
        {
            "loop0": {"size": 77492224, "type": "loop", "used": 77594624, "usage": 100},
            "vda": {"size": 42949672960, "type": "disk", "used": 6973141 -ish sum, "usage": 20},
            ...
        }
        """
        devices: Dict[str, Any] = {}
        last_parent = None

        for line in output.strip().split('\n'):
            parts = line.split()
            if not parts:
                continue

            # Check if it's a child partition by looking for tree characters
            is_child = '─' in parts[0]

            if is_child:
                if last_parent and len(parts) >= 5:
                    # It's a partition with usage info, add it to the parent
                    try:
                        used_space = int(parts[3])
                        # Remove '%' and convert usage to int
                        usage_percent = int(parts[4].replace('%', ''))

                        devices[last_parent]['used'] += used_space
                        devices[last_parent]['usage'] += usage_percent
                    except (ValueError, IndexError):
                        # Ignore malformed partition lines
                        continue
            else:
                # It's a top-level device
                try:
                    name = parts[0]
                    last_parent = name

                    devices[name] = {
                        "size": int(parts[1]),
                        "type": parts[2],
                        "used": 0,
                        "usage": 0
                    }

                    # Handle top-level devices that have their own usage info (like loop devices)
                    if len(parts) >= 5:
                        devices[name]['used'] = int(parts[3])
                        devices[name]['usage'] = int(parts[4].replace('%', ''))
                except (ValueError, IndexError):
                    # Ignore malformed parent device lines
                    continue

        return devices

    def parse_sched_queDepth_rdAhead(self, output: str) -> Dict[str, Any]:
        """
        Parses per-device I/O scheduler, queue depth and read-ahead.

        input:
        vda [mq-deadline] 256 4096

        output:
        {
            "vda": {
                "scheduler": "mq-deadline",
                "queue_depth": 256,
                "read_ahead_kB": 4096
            },
            ...
        }
        """
        lines = output.strip().split('\n')
        devices = {}
        for line in lines:
            parts = line.split()
            if len(parts) < 4:
                continue
            device_name = parts[0]
            devices[device_name] = {
                "scheduler": parts[1].replace('[', '').replace(']', ''),
                "queue_depth": int(parts[2]),
                "read_ahead_kB": int(parts[3])
            }
        return devices

    def parse_speed_duplex_mtu(self, output: str) -> Dict[str, Any]:
        """
        Parses per-interface link speed/duplex/MTU. Interfaces that report
        only an MTU (like `lo`) get "N/A" for speed and duplex.

        input:
        eth0 10000Mb/s Full 1500
        lo 65536

        output:
        {
            "eth0": {"speed": "10000Mb/s", "duplex": "Full", "mtu": 1500},
            "lo": {"speed": "N/A", "duplex": "N/A", "mtu": 65536},
            ...
        }
        """
        lines = output.strip().split('\n')
        interfaces = {}
        for line in lines:
            parts = line.split()
            if not parts:
                continue

            if len(parts) == 4:
                interfaces[parts[0]] = {
                    "speed": parts[1],
                    "duplex": parts[2],
                    "mtu": int(parts[3])
                }
            elif len(parts) == 2:
                # Handle interfaces with only speed and mtu (like lo)
                interfaces[parts[0]] = {
                    "speed": "N/A",
                    "duplex": "N/A",
                    "mtu": int(parts[1])
                }
        return interfaces

    def parse_sar_edev(self, output: str) -> Dict[str, Any]:
        """
        Parses `sar -n EDEV` "Average:" rows into per-interface error/drop rates.

        output:
        {
            "lo": {
                "rxerr/s": 0.00,
                "txerr/s": 0.00,
                "rxdrop/s": 0.00,
                "txdrop/s": 0.00,
            },
            ...
        }
        """
        lines = output.strip().split('\n')[1:]
        interfaces = {}
        for line in lines:
            parts = line.split()
            if len(parts) < 10:
                continue
            interfaces[parts[1]] = {
                "rxerr/s": float(parts[2]),
                "txerr/s": float(parts[3]),
                "rxdrop/s": float(parts[5]),
                "txdrop/s": float(parts[6]),
            }
        return interfaces

class CollectionScheduler:
    """
    Schedules a single periodic data-collection task for the active server.
    """
    def __init__(self, interval: int = 10):
        self.task: Optional[asyncio.Task] = None
        self.ssh_manager: Optional["SSHManager"] = None
        self.server_id: Optional[str] = None
        self.interval = interval  # Collection interval in seconds

    async def start(self, server_id: str):
        """Starts the collection task for a given server."""
        await self.stop()  # Stop any existing tasks
        self.server_id = server_id
        db = next(get_db())
        try:
            server = dao.get_server(db, server_id)
            mysql = dao.get_app_by_type(db, server_id, 'mysql')
        finally:
            # Always release the session, even if a dao call raises.
            db.close()

        if not server:
            logger.error(f"Server with id {server_id} not found. Cannot start scheduler.")
            return

        self.ssh_manager = SSHManager(server.__dict__)

        if mysql:
            self.ssh_manager.mysql_config = mysql.config

        await self.ssh_manager.connect()

        self.task = asyncio.create_task(self._run_periodically())
        logger.info(f"Started collection scheduler for server {server.servername} ({server_id}).")

    async def stop(self):
        """Stops the running collection task and closes the SSH connection."""
        if self.task and not self.task.done():
            self.task.cancel()
            try:
                # Wait for the task to actually finish cancelling so it cannot
                # still be using the SSH connection we close below.
                await self.task
            except asyncio.CancelledError:
                pass
        self.task = None
        if self.ssh_manager:
            await self.ssh_manager.disconnect()
            self.ssh_manager = None
        logger.info("Stopped collection scheduler.")

    async def _run_periodically(self):
        """Runs the collection task every ``self.interval`` seconds."""
        data_collector = DataCollector(self.ssh_manager)
        while True:
            try:
                logger.info(f"Collecting all metrics for server {self.server_id}")
                await data_collector.collect_and_save(self.server_id)
            except Exception as e:
                logger.error(f"Error in periodic collection task: {e}")

            await asyncio.sleep(self.interval)

# Module-level singleton scheduler shared by the application.
scheduler = CollectionScheduler(interval=60) # Set a global collection interval (seconds)
