"""
节点服务模块
负责云端和边侧节点的管理、状态更新和通信
"""

import json
import random
from typing import Dict, List, Optional, Tuple
from datetime import datetime
from sqlalchemy.orm import Session
from sqlalchemy import func

from database import Node, get_db
from config import config
from utils import LogManager, HTTPClient
from schemas import NodeCreate, NodeUpdate, NodeMonitoringInfo, NodeCPUInfo, NodeMemoryInfo, NodeGPUInfo, NodeNetworkInfo, NodeStorageInfo
from exceptions import NotFoundException, ValidationException
from edge.edge_client import EdgeClient
from edge.edge_manager import EdgeManager
from process_manager import (
    register_edge_task, register_batch_edge_task, 
    register_sample_collection_task
)

# Initialize the logging system once at import time so module-level code can log
LogManager.initialize_logging(config.log.level)
logger = LogManager.get_logger(__name__)


class NodeService:
    """云端节点管理服务"""
    
    def __init__(self):
        """Initialize the service with shared config, a class-scoped logger,
        an edge HTTP client and the global edge_manager singleton."""
        # Local import: bind the globally shared instance, not a new manager
        from edge.edge_manager import edge_manager
        self.config = config
        self.logger = LogManager.get_logger(self.__class__.__name__)
        self.edge_client = EdgeClient()
        self.edge_manager = edge_manager
    
    def initialize_nodes_from_config(self, db: Session):
        """Synchronize node rows with the ``nodes`` section of the config file.

        Performs an upsert: existing nodes get their static attributes
        refreshed, new ids are inserted, and rows whose ids disappeared from
        the config are deleted. Commits on success, rolls back and re-raises
        on failure.
        """
        try:
            nodes_config = self.config.raw_config.get('nodes', [])
            if not nodes_config:
                self.logger.warning("配置文件中没有找到节点配置")
                return

            self.logger.info(f"开始从配置文件同步 {len(nodes_config)} 个节点...")

            # Index current rows by node_id for O(1) lookups during the upsert
            existing_nodes = {row.node_id: row for row in db.query(Node).all()}
            config_node_ids = {entry['id'] for entry in nodes_config}

            # Static attributes refreshed on every sync for known nodes
            static_fields = ('name', 'node_type', 'location', 'service_ip',
                             'cpu', 'memory', 'gpu', 'bandwidth')

            for entry in nodes_config:
                entry_id = entry['id']
                row = existing_nodes.get(entry_id)
                if row is not None:
                    # Known node: refresh only its static configuration
                    self.logger.debug(f"更新节点: {entry_id}")
                    for field in static_fields:
                        setattr(row, field, entry.get(field, getattr(row, field)))
                else:
                    # Unknown id: insert a fresh row
                    self.logger.debug(f"创建新节点: {entry_id}")
                    db.add(Node(
                        node_id=entry['id'],
                        name=entry['name'],
                        node_type=entry.get('node_type', 'cloud'),
                        location=entry['location'],
                        # Default to 'unknown' until the first heartbeat arrives
                        status=entry.get('status', 'unknown'),
                        service_ip=entry.get('service_ip', ''),
                        cpu=entry.get('cpu', 'N/A'),
                        memory=entry.get('memory', 'N/A'),
                        gpu=entry.get('gpu', 'N/A'),
                        bandwidth=entry.get('bandwidth', 'N/A'),
                    ))

            # Drop rows whose ids are no longer present in the config file
            for row in (n for nid, n in existing_nodes.items() if nid not in config_node_ids):
                self.logger.info(f"删除已不存在于配置文件的节点: {row.node_id}")
                db.delete(row)

            db.commit()
            self.logger.info("节点配置同步完成")

        except Exception as e:
            self.logger.error(f"从配置初始化节点失败: {str(e)}")
            db.rollback()
            raise
    
    def create_node(self, db: Session, node_create: NodeCreate) -> Node:
        """Insert a brand-new node row.

        Raises:
            ValidationException: if a node with the same node_id exists.
        """
        duplicate = db.query(Node).filter(Node.node_id == node_create.node_id).first()
        if duplicate:
            raise ValidationException(f"Node with id {node_create.node_id} already exists.")

        node = Node(**node_create.model_dump())
        db.add(node)
        db.commit()
        db.refresh(node)
        return node

    def create_or_update_node(self, db: Session, node_create: NodeCreate) -> Tuple[Node, bool]:
        """Upsert a node and report whether a new row was created.

        Returns:
            Tuple of ``(node, created)`` — ``created`` is True only when the
            node did not previously exist.
        """
        node = db.query(Node).filter(Node.node_id == node_create.node_id).first()

        if node is None:
            # No existing row: serialize collection fields and insert
            self.logger.info(f"创建新节点: {node_create.node_id}")
            payload = node_create.model_dump()
            for json_field in ('running_services', 'deployed_models'):
                if json_field in payload and isinstance(payload[json_field], (list, dict)):
                    payload[json_field] = json.dumps(payload[json_field])
            node = Node(**payload)
            db.add(node)
            db.commit()
            db.refresh(node)
            return node, True

        # Row exists: copy over every non-None attribute the model knows about
        self.logger.info(f"节点 {node_create.node_id} 已存在，执行更新操作")
        for field, value in node_create.model_dump().items():
            if value is None or not hasattr(node, field):
                continue
            # Collection-valued columns are persisted as JSON text
            if field in ('running_services', 'deployed_models') and isinstance(value, (list, dict)):
                setattr(node, field, json.dumps(value))
            else:
                setattr(node, field, value)
        node.updated_at = datetime.utcnow()
        db.commit()
        db.refresh(node)
        return node, False
    
    def list_nodes(
        self, db: Session, page: int, page_size: int,
        node_type: Optional[str] = None,
        status: Optional[str] = None,
        location: Optional[str] = None
    ) -> Tuple[List[Node], int]:
        """Return one page of nodes plus the total matching row count.

        Filters narrow by exact type/status and by a case-insensitive
        substring match on location.
        """
        query = db.query(Node)
        if node_type:
            query = query.filter(Node.node_type == node_type)
        if status:
            query = query.filter(Node.status == status)
        if location:
            query = query.filter(func.lower(Node.location).contains(location.lower()))

        # Count before pagination so callers can render page controls
        total = query.count()
        offset = (page - 1) * page_size
        rows = query.offset(offset).limit(page_size).all()
        return rows, total

    def get_node_by_id(self, db: Session, node_id: str) -> Node:
        """Fetch a node by its node_id.

        Returns:
            The matching Node — never None: a missing node always raises.
            (The previous ``Optional[Node]`` annotation was misleading.)

        Raises:
            NotFoundException: if no node with ``node_id`` exists.
        """
        node = db.query(Node).filter(Node.node_id == node_id).first()
        if not node:
            raise NotFoundException(f"Node with id {node_id} not found.")
        return node

    def get_node_by_name(self, db: Session, name: str) -> Optional[Node]:
        """Look up a node by its display name; returns None when absent."""
        query = db.query(Node).filter(Node.name == name)
        return query.first()

    def update_node(self, db: Session, node_id: str, node_update: NodeUpdate) -> Optional[Node]:
        """Apply the explicitly-set fields of ``node_update`` to a node.

        Raises:
            NotFoundException: if the node does not exist (propagated from
                get_node_by_id).
        """
        node = self.get_node_by_id(db, node_id)

        for field, value in node_update.model_dump(exclude_unset=True).items():
            # Collection-valued columns are stored as serialized JSON text
            if field in ('running_services', 'deployed_models') and isinstance(value, (list, dict)):
                setattr(node, field, json.dumps(value))
            elif value is not None:
                setattr(node, field, value)

        node.updated_at = datetime.utcnow()
        db.commit()
        db.refresh(node)
        return node

    def update_node_status(self, db: Session, node_id: str, update_data: Dict) -> Node:
        """Update node status/configuration fields (heartbeat or internal callers).

        Only keys that exist as Node attributes and carry non-None values are
        applied; list/dict values for the JSON-backed columns are serialized.

        Raises:
            NotFoundException: if the node does not exist — propagated from
                get_node_by_id. (The old ``if not node: return None`` branch
                was unreachable and has been removed; the annotation is now
                the non-optional ``Node``.)
        """
        node = self.get_node_by_id(db, node_id)

        for field, value in update_data.items():
            if value is not None and hasattr(node, field):
                if field in ['running_services', 'deployed_models'] and isinstance(value, (list, dict)):
                    setattr(node, field, json.dumps(value))
                else:
                    setattr(node, field, value)

        node.updated_at = datetime.utcnow()
        db.commit()
        db.refresh(node)
        return node
    
    def reload_nodes(self, db: Session) -> Dict:
        """Re-run the config-file synchronization and report the node total."""
        self.initialize_nodes_from_config(db)
        total = db.query(Node).count()
        return {"message": "节点配置重新加载成功", "total_nodes": total}

    async def get_edge_samples_info(self, db: Session, node_id: str) -> Optional[Dict]:
        """Ask an edge node for its sample statistics.

        Raises:
            NotFoundException: if the node does not exist.
            ValidationException: if the node is not an edge node or the
                remote request fails.
        """
        node = self.get_node_by_id(db, node_id)
        if node.node_type != 'edge':
            raise ValidationException(f"Node {node_id} is not an edge node.")

        # Default to port 8001 when the configured service_ip has no port
        edge_url = f"http://{node.service_ip}"
        if ':' not in (node.service_ip or ""):
            edge_url = f"http://{node.service_ip}:8001"

        try:
            # Synchronous HTTP client used inside an async method (blocking call)
            client = HTTPClient(edge_url, timeout=30)
            success, result = client.get("samples/info")

            if success:
                self.logger.info(f"成功获取边侧节点样本信息: {node_id}")
                return result
            self.logger.error(f"获取边侧节点样本信息失败: {node_id}, {result}")
            raise ValidationException(f"Failed to get sample info from edge node {node_id}: {result}")
        except ValidationException:
            # Bug fix: previously our own ValidationException fell into the
            # generic handler below and was wrapped a second time, garbling
            # the error message. Let it propagate untouched.
            raise
        except Exception as e:
            self.logger.error(f"请求边侧样本信息异常: {str(e)}")
            raise ValidationException(f"Exception while requesting sample info from {node_id}: {e}")

    async def aggregate_edge_samples(
        self, db: Session, node_id: str, aggregation_type: str, limit: int,
        aggregation_params: Optional[Dict] = None
    ) -> Optional[Dict]:
        """Trigger sample packaging/aggregation on an edge node.

        Returns the remote response on success, or a ``{"success": False}``
        dict describing the failure — transport errors are reported in the
        return value rather than raised.

        Raises:
            NotFoundException: if the node does not exist.
            ValidationException: if the node is not an edge node.
        """
        node = self.get_node_by_id(db, node_id)
        if node.node_type != 'edge':
            raise ValidationException(f"Node {node_id} is not an edge node.")

        # Default to port 8001 when the configured address omits one
        edge_url = f"http://{node.service_ip}"
        if ':' not in (node.service_ip or ""):
            edge_url = f"http://{node.service_ip}:8001"

        try:
            # Long timeout: packaging on the edge can be slow
            client = HTTPClient(edge_url, timeout=120)

            # Base payload for the edge-side aggregation endpoint
            request_data = {"max_samples": limit}

            # Copy over the optional aggregation knobs when supplied
            params = aggregation_params or {}
            if params.get("difficulty_threshold") is not None:
                request_data["difficulty_threshold"] = params["difficulty_threshold"]
            if params.get("time_start"):
                request_data["time_start"] = params["time_start"]
            if params.get("time_end"):
                request_data["time_end"] = params["time_end"]

            success, result = client.post("samples/aggregate", json_data=request_data)

            if success:
                self.logger.info(f"边侧节点数据聚合请求成功: {node_id}")
                return result
            self.logger.error(f"边侧节点数据聚合请求失败: {node_id}, {result}")
            return {"success": False, "message": f"边侧聚合失败: {result}"}
        except Exception as e:
            self.logger.error(f"请求边侧数据聚合异常: {str(e)}")
            return {"success": False, "message": f"请求异常: {str(e)}"}

    async def deploy_model_to_edge(
        self, db: Session, node_id: str, model_id: str, deployment_config: dict
    ) -> Optional[Dict]:
        """Kick off a model transfer to an edge node.

        (Largely mirrors model_service.transfer_model; candidate for a
        future merge.)

        Raises:
            ValueError: if the model does not exist.
            NotFoundException: if the node does not exist.
        """
        # Imported lazily to avoid a circular dependency between services
        from services.model_service import ModelService
        model_service = ModelService()

        # Resolve and validate the model before touching the node
        model = model_service.get_model_by_id(db, model_id)
        if not model:
            raise ValueError(f"Model with id {model_id} not found.")

        # Resolve the target node and start the transfer (keyed by node name)
        node = self.get_node_by_id(db, node_id)
        transfer_result = model_service.transfer_model(db, model_id, node.name)

        # Additional deployment_config handling could be added here later
        self.logger.info(f"模型 {model_id} 部署任务已启动，目标节点: {node_id}")
        return transfer_result

    def get_nodes_summary(self, db: Session, node_type: Optional[str] = None, 
                         status: Optional[str] = None, location: Optional[str] = None) -> dict:
        """Return aggregate node counts: a filtered total plus by-type and
        by-status breakdowns.

        NOTE(review): the breakdowns are NOT filtered consistently —
        ``by_type`` is computed over ALL nodes and ignores every filter,
        ``by_status`` honors only ``node_type``, while ``total`` honors all
        three filters. Confirm whether this asymmetry is intended.
        """
        query = db.query(Node)
        
        # Apply optional filters: exact match on type/status,
        # case-insensitive substring match on location
        if node_type:
            query = query.filter(Node.node_type == node_type)
        if status:
            query = query.filter(Node.status == status)
        if location:
            query = query.filter(func.lower(Node.location).contains(location.lower()))
        
        # Total under all active filters
        total_count = query.count()
        
        # Count per node_type (unfiltered — see NOTE above)
        type_counts = db.query(Node.node_type, func.count()).group_by(Node.node_type).all()
        type_summary = {node_type: count for node_type, count in type_counts}
        
        # Count per status, optionally restricted to one node_type
        status_query = db.query(Node.status, func.count()).group_by(Node.status)
        if node_type:
            status_query = status_query.filter(Node.node_type == node_type)
        status_counts = status_query.all()
        status_summary = {status: count for status, count in status_counts}
        
        return {
            "total": total_count,
            "by_type": type_summary,
            "by_status": status_summary
        }

    @register_batch_edge_task("health_check")
    async def batch_update_edge_nodes_status(self, db: Session, node_ids: Optional[List[str]] = None) -> Dict:
        """Health-check edge nodes and persist their online/offline status.

        Args:
            db: active database session (committed on success, rolled back
                on failure).
            node_ids: optional subset of edge node ids; when None, every
                edge node is checked.

        Returns:
            Dict with per-node health results and online/offline counts.

        Raises:
            ValidationException: wrapping any underlying failure.
        """
        try:
            # No explicit list given -> check every edge node
            if node_ids is None:
                edge_nodes = db.query(Node).filter(Node.node_type == 'edge').all()
                node_ids = [node.node_id for node in edge_nodes]
            else:
                edge_nodes = db.query(Node).filter(
                    Node.node_id.in_(node_ids),
                    Node.node_type == 'edge'
                ).all()
                
            if not edge_nodes:
                return {"message": "没有找到边侧节点", "updated_count": 0}
            
            # Probe all nodes in one batch; maps node_id -> bool
            health_results = await self.edge_manager.batch_health_check(edge_nodes)
            
            updated_count = 0
            online_count = 0
            offline_count = 0
            
            for node_id, is_healthy in health_results.items():
                node = db.query(Node).filter(Node.node_id == node_id).first()
                if node:
                    # Flip the persisted status according to the probe result
                    if is_healthy:
                        node.status = "online"
                        online_count += 1
                    else:
                        node.status = "offline"
                        offline_count += 1
                    
                    node.updated_at = datetime.utcnow()
                    updated_count += 1
            
            db.commit()
            
            # NOTE(review): total_nodes counts the *requested* ids, which can
            # exceed updated_count when some ids are not edge nodes.
            return {
                "message": "边侧节点状态更新完成",
                "total_nodes": len(node_ids),
                "updated_count": updated_count,
                "online_count": online_count,
                "offline_count": offline_count,
                "results": health_results
            }
            
        except Exception as e:
            self.logger.error(f"批量更新边侧节点状态失败: {str(e)}")
            db.rollback()
            raise ValidationException(f"批量更新边侧节点状态失败: {str(e)}")

    @register_batch_edge_task("batch_operation")
    async def get_all_edge_nodes_with_samples_info(self, db: Session) -> Dict:
        """Return every edge node enriched with its sample info plus a summary.

        Refreshes node statuses first, batch-queries sample info via the edge
        manager, updates per-node sample counters in the DB, and commits.

        Raises:
            ValidationException: wrapping any underlying failure.
                NOTE(review): no rollback is performed here, so pending
                changes stay in the session on error — confirm intent.
        """
        try:
            # Refresh online/offline status before reading the nodes
            await self.batch_update_edge_nodes_status(db)
            
            # All edge nodes, regardless of status
            edge_nodes = db.query(Node).filter(Node.node_type == 'edge').all()
            
            if not edge_nodes:
                return {
                    "message": "没有找到边侧节点",
                    "nodes": [],
                    "total_count": 0
                }
            
            # NOTE(review): the online-only filter is commented out, so sample
            # info is requested from ALL nodes, including offline ones.
            # online_nodes = [node for node in edge_nodes if node.status == "online"]
            online_nodes = edge_nodes
            
            samples_results = None
            if online_nodes:
                # Batch-fetch sample info; results map node_id -> result dict
                node_ids = [node.node_id for node in online_nodes]
                batch_result = await self.edge_manager.batch_samples_info(db, node_ids)
                samples_results = batch_result.results if hasattr(batch_result, 'results') else {}
            
            # Assemble the per-node response payloads
            nodes_info = []
            for node in edge_nodes:
                node_data = {
                    "node_id": node.node_id,
                    "name": node.name,
                    "location": node.location,
                    "status": node.status,
                    "service_ip": node.service_ip,
                    "cpu": node.cpu,
                    "memory": node.memory,
                    "gpu": node.gpu,
                    "bandwidth": node.bandwidth,
                    "cpu_load": node.cpu_load,
                    "memory_load": node.memory_load,
                    "network_load": node.network_load,
                    "collected_sample_count": node.collected_sample_count,
                    "uncollected_sample_count": node.uncollected_sample_count,
                    "created_at": node.created_at.isoformat() if node.created_at else None,
                    "updated_at": node.updated_at.isoformat() if node.updated_at else None,
                }
                
                # running_services is persisted as JSON text; fall back to []
                if node.running_services:
                    try:
                        node_data["running_services"] = json.loads(node.running_services)
                    except (json.JSONDecodeError, TypeError):
                        node_data["running_services"] = []
                else:
                    node_data["running_services"] = []
                
                # deployed_models is persisted as JSON text; fall back to []
                if node.deployed_models:
                    try:
                        node_data["deployed_models"] = json.loads(node.deployed_models)
                    except (json.JSONDecodeError, TypeError):
                        node_data["deployed_models"] = []
                else:
                    node_data["deployed_models"] = []
                
                # Attach sample info (or a descriptive error)
                if node.status == "offline":
                    node_data["samples_info"] = {"error": "节点离线"}
                elif samples_results and node.node_id in samples_results:
                    sample_result = samples_results[node.node_id]
                    if sample_result and sample_result.get("success"):
                        # Extract the sample info from the batch result payload
                        sample_info_data = sample_result.get("data", {})
                        node_data.update({
                            "samples_info": sample_info_data,
                            "total_sample_count": sample_info_data.get("total_sample_count", 0),
                            "uninferred_sample_count": sample_info_data.get("uninferred_sample_count", 0),
                        })
                        
                        # Persist the refreshed counters on the node row
                        node.collected_sample_count = sample_info_data.get("collected_sample_count", 0)
                        node.uncollected_sample_count = sample_info_data.get("uncollected_sample_count", 0)
                    else:
                        error_msg = sample_result.get("error", "获取样本信息失败") if sample_result else "获取样本信息失败"
                        node_data["samples_info"] = {"error": error_msg}
                else:
                    node_data["samples_info"] = {"error": "无法连接到边侧节点或节点离线"}
                
                nodes_info.append(node_data)
            
            # Commit the counter updates made above
            db.commit()
            
            return {
                "message": "获取边侧节点信息成功",
                "nodes": nodes_info,
                "total_count": len(nodes_info),
                "summary": {
                    "online_nodes": len([n for n in nodes_info if n["status"] == "online"]),
                    "offline_nodes": len([n for n in nodes_info if n["status"] == "offline"]),
                    "total_samples": sum([n.get("total_sample_count", 0) for n in nodes_info]),
                    "total_uncollected_samples": sum([n.get("uncollected_sample_count", 0) for n in nodes_info])
                }
            }
            
        except Exception as e:
            self.logger.error(f"获取边侧节点样本信息失败: {str(e)}")
            raise ValidationException(f"获取边侧节点样本信息失败: {str(e)}")

    @register_batch_edge_task("batch_operation")
    async def get_all_edge_nodes_with_samples_info_paginated(
        self, 
        db: Session, 
        page: int = 1, 
        page_size: int = 20,
        status: Optional[str] = "online",
        location: Optional[str] = None
    ) -> Dict:
        """Paginated variant of the edge-nodes-with-samples listing.

        Refreshes edge node statuses, then returns one page of edge nodes
        (filtered by exact ``status`` and case-insensitive ``location``
        substring), each enriched with its sample information, plus a
        summary computed over ALL matching nodes — not just the current page.

        Bug fix: the ``status`` default used to be the mutable *list*
        ``["online"]``, which both violates the mutable-default rule and
        produced ``Node.status == ["online"]`` — an invalid comparison of a
        string column against a list. The default is now the string
        ``"online"``, matching the evident intent.

        Raises:
            ValidationException: wrapping any underlying failure.
        """
        try:
            # Refresh online/offline status before querying
            await self.batch_update_edge_nodes_status(db)
            
            # Base query restricted to edge nodes
            query = db.query(Node).filter(Node.node_type == 'edge')
            
            if status:
                query = query.filter(Node.status == status)
            if location:
                query = query.filter(func.lower(Node.location).contains(location.lower()))
            
            # Count before pagination so callers can render page controls
            total_count = query.count()
            
            if total_count == 0:
                return {
                    "message": "没有找到符合条件的边侧节点",
                    "nodes": [],
                    "total_count": 0,
                    "summary": {
                        "online_nodes": 0,
                        "offline_nodes": 0,
                        "total_samples": 0,
                        "total_uncollected_samples": 0
                    }
                }
            
            # Page through nodes, most recently updated first
            edge_nodes = query.order_by(Node.updated_at.desc()) \
                             .offset((page - 1) * page_size) \
                             .limit(page_size) \
                             .all()
            
            # Only the current page's online nodes are asked for sample info
            online_nodes = [node for node in edge_nodes if node.status == "online"]
            
            samples_results = None
            if online_nodes:
                node_ids = [node.node_id for node in online_nodes]
                batch_result = await self.edge_manager.batch_samples_info(db, node_ids)
                samples_results = batch_result.results if hasattr(batch_result, 'results') else {}
            
            # Assemble the per-node response payloads
            nodes_info = []
            for node in edge_nodes:
                node_data = {
                    "node_id": node.node_id,
                    "name": node.name,
                    "location": node.location,
                    "status": node.status,
                    "service_ip": node.service_ip,
                    "cpu": node.cpu,
                    "memory": node.memory,
                    "gpu": node.gpu,
                    "bandwidth": node.bandwidth,
                    "cpu_load": node.cpu_load,
                    "memory_load": node.memory_load,
                    "network_load": node.network_load,
                    "collected_sample_count": node.collected_sample_count,
                    "uncollected_sample_count": node.uncollected_sample_count,
                    "created_at": node.created_at.isoformat() if node.created_at else None,
                    "updated_at": node.updated_at.isoformat() if node.updated_at else None,
                }
                
                # running_services / deployed_models are stored as JSON text;
                # fall back to [] on missing or malformed values
                for json_field in ("running_services", "deployed_models"):
                    raw_value = getattr(node, json_field)
                    if raw_value:
                        try:
                            node_data[json_field] = json.loads(raw_value)
                        except (json.JSONDecodeError, TypeError):
                            node_data[json_field] = []
                    else:
                        node_data[json_field] = []
                
                # Attach sample info (or a descriptive error)
                if node.status == "offline":
                    node_data["samples_info"] = {"error": "节点离线"}
                elif samples_results and node.node_id in samples_results:
                    sample_result = samples_results[node.node_id]
                    if sample_result and sample_result.get("success"):
                        # Extract the sample info from the batch result payload
                        sample_info_data = sample_result.get("data", {})
                        node_data.update({
                            "samples_info": sample_info_data,
                            "total_sample_count": sample_info_data.get("total_sample_count", 0),
                            "uninferred_sample_count": sample_info_data.get("uninferred_sample_count", 0),
                        })
                        
                        # Persist the refreshed counters on the node row
                        node.collected_sample_count = sample_info_data.get("collected_sample_count", 0)
                        node.uncollected_sample_count = sample_info_data.get("uncollected_sample_count", 0)
                    else:
                        error_msg = sample_result.get("error", "获取样本信息失败") if sample_result else "获取样本信息失败"
                        node_data["samples_info"] = {"error": error_msg}
                else:
                    node_data["samples_info"] = {"error": "无法连接到边侧节点或节点离线"}
                
                nodes_info.append(node_data)
            
            # Fleet-wide summary over ALL matching nodes, not just this page
            all_matching_nodes = db.query(Node).filter(Node.node_type == 'edge')
            if status:
                all_matching_nodes = all_matching_nodes.filter(Node.status == status)
            if location:
                all_matching_nodes = all_matching_nodes.filter(func.lower(Node.location).contains(location.lower()))
            
            all_nodes = all_matching_nodes.all()
            
            summary = {
                "online_nodes": len([n for n in all_nodes if n.status == "online"]),
                "offline_nodes": len([n for n in all_nodes if n.status == "offline"]),
                "total_samples": sum(n.collected_sample_count or 0 for n in all_nodes),
                "total_uncollected_samples": sum(n.uncollected_sample_count or 0 for n in all_nodes)
            }
            
            # Commit the counter updates made above
            db.commit()
            
            return {
                "message": "获取边侧节点信息成功",
                "nodes": nodes_info,
                "total_count": total_count,
                "summary": summary
            }
            
        except Exception as e:
            self.logger.error(f"获取边侧节点样本信息失败: {str(e)}")
            raise ValidationException(f"获取边侧节点样本信息失败: {str(e)}")

    @register_sample_collection_task()
    async def collect_samples_from_edge(
        self, 
        db: Session, 
        node_id: str, 
        aggregation_params: Dict
    ) -> Dict:
        """Collect samples from a single edge node via the edge manager.

        Marks the node offline when the collection result indicates a
        connection-style failure (timeout/connection/offline keywords).

        Raises:
            ValidationException: if the node is not an edge node, is offline,
                or the collection fails.
        """
        try:
            # The target must exist and be an edge node
            node = self.get_node_by_id(db, node_id)
            if node.node_type != 'edge':
                raise ValidationException(f"节点 {node_id} 不是边侧节点")
            
            # Refuse to collect from a node already known to be offline
            if node.status == "offline":
                raise ValidationException(f"节点 {node_id} 处于离线状态，无法进行样本收集")
            
            # Delegate the actual aggregation to the edge manager
            result = await self.edge_manager.coordinated_sample_aggregation(
                db, [node_id], aggregation_params
            )
            
            # On connection-style failures, persist the node as offline
            node_result = result.get("results", {}).get(node_id, {})
            if not node_result.get("success", False):
                error_msg = node_result.get("error", "")
                if any(err in error_msg.lower() for err in ["timed out", "connection", "timeout", "offline"]):
                    node.status = "offline"
                    node.updated_at = datetime.utcnow()
                    db.commit()
                    self.logger.warning(f"节点 {node_id} 在样本收集过程中连接失败，已设置为离线状态")
            
            self.logger.info(f"边侧节点 {node_id} 样本收集完成")
            return result
            
        except ValidationException:
            # Bug fix: our own ValidationExceptions used to fall into the
            # blanket handler below and be wrapped a second time, doubling
            # the error message. Let them propagate untouched.
            raise
        except Exception as e:
            self.logger.error(f"从边侧节点 {node_id} 收集样本失败: {str(e)}")
            raise ValidationException(f"从边侧节点收集样本失败: {str(e)}")
    
    async def download_and_process_edge_samples(
        self, 
        db: Session, 
        node_id: str, 
        scenario: str,
        aggregation_params: Dict,
        base_dataset_id: Optional[str] = None
    ) -> Dict:
        """下载边侧压缩包并处理为数据集"""
        import requests
        import zipfile
        import tempfile
        import shutil
        from pathlib import Path
        
        try:
            # 验证节点存在
            node = self.get_node_by_id(db, node_id)
            if node.node_type != 'edge':
                raise ValidationException(f"节点 {node_id} 不是边侧节点")
            
            # 1. 获取边侧压缩包列表
            edge_url = f"http://{node.service_ip}"
            if ':' not in (node.service_ip or ""):
                edge_url = f"http://{node.service_ip}:8001"
            packages_url = f"{edge_url}/samples/packages"
            
            try:
                response = requests.get(packages_url, timeout=30)
                response.raise_for_status()
                packages_info = response.json()
            except requests.RequestException as e:
                self.logger.error(f"获取边侧压缩包列表失败: {str(e)}")
                return {"success": False, "error": f"获取压缩包列表失败: {str(e)}"}
            
            packages = packages_info.get("packages", [])
            if not packages:
                return {"success": False, "error": "边侧没有可用的压缩包"}
            
            # 2. 选择最新的压缩包
            latest_package = packages[0]  # 已按时间降序排列
            package_filename = latest_package["filename"]
            
            self.logger.info(f"选择压缩包: {package_filename}")
            
            # 3. 下载压缩包
            download_url = f"{edge_url}/samples/packages/{package_filename}"
            
            with tempfile.TemporaryDirectory() as temp_dir:
                temp_path = Path(temp_dir)
                zip_path = temp_path / package_filename
                
                try:
                    # 下载文件
                    with requests.get(download_url, timeout=60, stream=True) as r:
                        r.raise_for_status()
                        with open(zip_path, 'wb') as f:
                            for chunk in r.iter_content(chunk_size=8192):
                                f.write(chunk)
                    
                    self.logger.info(f"压缩包下载完成: {zip_path}")
                    
                except requests.RequestException as e:
                    self.logger.error(f"下载压缩包失败: {str(e)}")
                    return {"success": False, "error": f"下载压缩包失败: {str(e)}"}
                
                # 4. 解压并处理样本
                extract_dir = temp_path / "extracted"
                extract_dir.mkdir()
                
                try:
                    with zipfile.ZipFile(zip_path, 'r') as zip_ref:
                        zip_ref.extractall(extract_dir)
                    
                    self.logger.info(f"压缩包解压完成: {extract_dir}")
                    
                    # 检查解压后的目录结构
                    images_dir = extract_dir / "images"
                    labels_dir = extract_dir / "labels"
                    confidence_dir = extract_dir / "confidence"
                    
                    if not images_dir.exists():
                        return {"success": False, "error": "压缩包中缺少images目录"}
                    
                    # 统计样本数量
                    image_files = list(images_dir.glob("*.[jJ][pP][gG]")) + \
                                 list(images_dir.glob("*.[pP][nN][gG]")) + \
                                 list(images_dir.glob("*.[jJ][pP][eE][gG]"))
                    
                    sample_count = len(image_files)
                    
                    if sample_count == 0:
                        return {"success": False, "error": "压缩包中没有有效的图像文件"}
                    
                except zipfile.BadZipFile as e:
                    self.logger.error(f"解压压缩包失败: {str(e)}")
                    return {"success": False, "error": f"解压压缩包失败: {str(e)}"}
                
                # 5. 创建数据集
                try:
                    from services.dataset_service import DatasetService
                    dataset_service = DatasetService()
                    
                    # 构建样本信息
                    samples_info = {
                        "node_id": node_id,
                        "sample_count": sample_count,
                        "total_sample_count": sample_count,
                        "package_filename": package_filename,
                        "request_params": aggregation_params,
                        "selected_samples": [f.stem for f in image_files]
                    }
                    
                    # 创建数据集
                    dataset = dataset_service.create_dataset_from_edge_samples(
                        db, node_id, scenario, samples_info, base_dataset_id
                    )
                    
                    # 6. 复制样本文件到数据集目录
                    dataset_folder = Path(dataset.folder_path)
                    dataset_images_dir = dataset_folder / "images"
                    dataset_labels_dir = dataset_folder / "labels"
                    
                    # 复制图像文件
                    for image_file in image_files:
                        shutil.copy2(image_file, dataset_images_dir / image_file.name)
                    
                    # 复制标签文件
                    if labels_dir.exists():
                        for label_file in labels_dir.glob("*.txt"):
                            target_file = dataset_labels_dir / label_file.name
                            shutil.copy2(label_file, target_file)
                    
                    # 复制置信度文件（可选）
                    if confidence_dir.exists():
                        confidence_target_dir = dataset_folder / "confidence"
                        confidence_target_dir.mkdir(exist_ok=True)
                        for conf_file in confidence_dir.glob("*.txt"):
                            shutil.copy2(conf_file, confidence_target_dir / conf_file.name)
                    
                    # 7. 更新数据集状态
                    dataset_service.update_dataset_processing_status(
                        db, dataset.dataset_id, "active", 
                        sample_count=sample_count,
                        file_count=sample_count * 2  # 图像 + 标签
                    )
                    
                    # 8. 删除边侧的压缩包
                    try:
                        delete_url = f"{edge_url}/samples/packages/{package_filename}"
                        delete_response = requests.delete(delete_url, timeout=30)
                        if delete_response.status_code == 200:
                            self.logger.info(f"已删除边侧压缩包: {package_filename}")
                        else:
                            self.logger.warning(f"删除边侧压缩包失败: {delete_response.status_code}")
                    except Exception as e:
                        self.logger.warning(f"删除边侧压缩包时出错: {str(e)}")
                    
                    return {
                        "success": True,
                        "message": f"成功处理 {sample_count} 个样本",
                        "dataset": {
                            "dataset_id": dataset.dataset_id,
                            "name": dataset.name,
                            "folder_path": dataset.folder_path,
                            "sample_count": sample_count,
                            "status": "active"
                        },
                        "package_info": {
                            "filename": package_filename,
                            "size_mb": latest_package.get("size_mb", 0),
                            "processed_samples": sample_count
                        }
                    }
                    
                except Exception as e:
                    self.logger.error(f"创建数据集失败: {str(e)}")
                    return {"success": False, "error": f"创建数据集失败: {str(e)}"}
            
        except Exception as e:
            self.logger.error(f"下载和处理边侧样本失败: {str(e)}")
            return {"success": False, "error": f"处理失败: {str(e)}"}

    def generate_mock_monitoring_data(self, node) -> NodeMonitoringInfo:
        """为节点生成模拟监控数据"""
        
        # 根据节点类型生成不同的模拟数据
        if node.node_type == "edge":
            # 边缘节点的模拟数据
            cpu_data = NodeCPUInfo(
                usage=random.uniform(20, 80),
                cores=random.choice([4, 8, 16]),
                temperature=random.uniform(35, 75)
            )
            memory_data = NodeMemoryInfo(
                used=random.uniform(8, 48),
                total=random.choice([16, 32, 64]),
                usage=0  # 会在下面计算
            )
            gpu_data = NodeGPUInfo(
                usage=random.uniform(30, 90),
                memory_used=random.uniform(4, 20),
                memory_total=random.choice([8, 16, 24]),
                temperature=random.uniform(45, 85),
                count=random.choice([1, 2])
            )
            network_data = NodeNetworkInfo(
                bandwidth_in=random.uniform(50, 200),
                bandwidth_out=random.uniform(20, 100),
                latency=random.uniform(5, 20)
            )
            storage_data = NodeStorageInfo(
                used=random.uniform(100, 400),
                total=random.choice([256, 512, 1024]),
                usage=0  # 会在下面计算
            )
        else:
            # 云端节点的模拟数据
            cpu_data = NodeCPUInfo(
                usage=random.uniform(10, 60),
                cores=random.choice([16, 32, 64]),
                temperature=random.uniform(30, 65)
            )
            memory_data = NodeMemoryInfo(
                used=random.uniform(32, 128),
                total=random.choice([64, 128, 256]),
                usage=0  # 会在下面计算
            )
            gpu_data = NodeGPUInfo(
                usage=random.uniform(20, 70),
                memory_used=random.uniform(8, 40),
                memory_total=random.choice([16, 32, 48]),
                temperature=random.uniform(40, 80),
                count=random.choice([2, 4, 8])
            )
            network_data = NodeNetworkInfo(
                bandwidth_in=random.uniform(200, 1000),
                bandwidth_out=random.uniform(100, 500),
                latency=random.uniform(1, 10)
            )
            storage_data = NodeStorageInfo(
                used=random.uniform(500, 2000),
                total=random.choice([1024, 2048, 4096]),
                usage=0  # 会在下面计算
            )
        
        # 计算使用率
        memory_data.usage = round((memory_data.used / memory_data.total) * 100, 1)
        storage_data.usage = round((storage_data.used / storage_data.total) * 100, 1)
        
        # 创建扩展的节点信息
        monitoring_info = NodeMonitoringInfo(
            # 从原始节点复制所有字段
            node_id=node.node_id,
            name=node.name,
            node_type=node.node_type,
            location=node.location,
            status=node.status,
            service_ip=node.service_ip,
            cpu=cpu_data,
            memory=memory_data,
            gpu=gpu_data,
            bandwidth=node.bandwidth,
            running_services=node.running_services,
            deployed_models=node.deployed_models,
            collected_sample_count=node.collected_sample_count,
            uncollected_sample_count=node.uncollected_sample_count,
            cpu_load=node.cpu_load,
            memory_load=node.memory_load,
            network_load=node.network_load,
            created_at=node.created_at,
            updated_at=node.updated_at,
            network=network_data,
            storage=storage_data,
            timestamp=datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        )
        
        return monitoring_info

    def get_nodes_monitoring(
        self, db: Session, page: int, page_size: int, 
        node_type: Optional[str] = None, 
        status: Optional[str] = None, 
        location: Optional[str] = None
    ) -> Tuple[List[NodeMonitoringInfo], int, dict]:
        """Return a page of nodes enriched with simulated monitoring data.

        Fetches the plain node page via ``list_nodes``, attaches mock
        metrics to each node, and augments the summary statistics with
        suggested IDs for the next edge/cloud node (current type count + 1).

        Args:
            db: active SQLAlchemy session.
            page: 1-based page number.
            page_size: number of nodes per page.
            node_type: optional filter on node type ("edge"/"cloud").
            status: optional filter on node status.
            location: optional filter on node location.

        Returns:
            Tuple of (monitoring node list, total matching count, summary dict).

        Raises:
            Re-raises any underlying exception after logging it.
        """
        try:
            paged_nodes, total = self.list_nodes(
                db, page, page_size, node_type, status, location
            )

            # Attach mock monitoring metrics to every node in the page.
            enriched = [
                self.generate_mock_monitoring_data(n) for n in paged_nodes
            ]

            # Aggregate statistics, then add suggested IDs for the next
            # node of each type based on the current per-type counts.
            summary = self.get_nodes_summary(db, node_type, status, location)
            by_type = summary.get("by_type", {})
            summary["next_edge_id"] = f"edge-{by_type.get('edge', 0) + 1}"
            summary["next_cloud_id"] = f"cloud-{by_type.get('cloud', 0) + 1}"

            return enriched, total, summary

        except Exception as e:
            self.logger.error(f"获取节点监控信息失败: {str(e)}")
            raise