"""
离线功能支持服务

提供移动端离线功能支持，包括：
- 离线数据存储
- 数据同步机制
- 离线操作队列
- 网络状态检测
- 缓存管理
- 离线UI适配
"""
import json
import time
import hashlib
from typing import Dict, List, Any, Optional, Callable
from datetime import datetime, timedelta
from dataclasses import dataclass, field
from enum import Enum
import logging

# Configure module-level logging for the offline service.
logging.basicConfig(level=logging.INFO)
offline_logger = logging.getLogger('offline_service')


class NetworkStatus(Enum):
    """Network connectivity states the service distinguishes."""
    ONLINE = "online"
    OFFLINE = "offline"
    SLOW = "slow"
    UNKNOWN = "unknown"


class SyncStatus(Enum):
    """Lifecycle states for data items and queued operations during sync."""
    PENDING = "pending"
    SYNCING = "syncing"
    SYNCED = "synced"
    FAILED = "failed"
    CONFLICT = "conflict"


class OperationType(Enum):
    """CRUD operation kinds that can be queued while offline."""
    CREATE = "create"
    UPDATE = "update"
    DELETE = "delete"
    READ = "read"


@dataclass
class OfflineData:
    """A locally stored data item awaiting (or having completed) sync."""
    data_id: str                      # unique identifier of the item
    data_type: str                    # logical type, e.g. 'projects'
    content: Dict[str, Any]           # the payload itself
    version: int = 1                  # bumped on every content change
    created_at: datetime = field(default_factory=datetime.now)
    updated_at: datetime = field(default_factory=datetime.now)
    sync_status: SyncStatus = SyncStatus.PENDING
    last_sync_at: Optional[datetime] = None   # set when a sync succeeds
    checksum: str = ""                # MD5 of the canonical JSON of content
    metadata: Dict[str, Any] = field(default_factory=dict)


@dataclass
class OfflineOperation:
    """A CRUD operation recorded while offline, replayed when syncing."""
    operation_id: str
    operation_type: OperationType
    data_id: str
    data_type: str
    payload: Dict[str, Any]
    timestamp: datetime = field(default_factory=datetime.now)
    retry_count: int = 0              # sync attempts made so far
    max_retries: int = 3              # dropped from the queue after this many failures
    status: SyncStatus = SyncStatus.PENDING
    error_message: str = ""           # last failure reason, if any


@dataclass
class CacheItem:
    """A single cached value plus the bookkeeping used for LRU eviction."""
    cache_key: str
    data: Any
    expires_at: datetime              # hard expiry; item is dropped after this
    access_count: int = 0             # read hits; lowest counts are evicted first
    last_accessed: datetime = field(default_factory=datetime.now)
    size_bytes: int = 0               # serialized size, counted against the cap


class OfflineService:
    """离线功能支持服务"""
    
    def __init__(self, max_cache_size: int = 50 * 1024 * 1024):  # 50MB
        self.max_cache_size = max_cache_size
        
        # 离线存储
        self.offline_data: Dict[str, OfflineData] = {}
        self.operation_queue: List[OfflineOperation] = []
        self.cache: Dict[str, CacheItem] = {}
        
        # 网络状态
        self.network_status = NetworkStatus.UNKNOWN
        self.last_network_check = datetime.now()
        
        # 同步配置
        self.sync_config = {
            'auto_sync_enabled': True,
            'sync_interval_seconds': 30,
            'max_sync_retries': 3,
            'sync_timeout_seconds': 10,
            'conflict_resolution': 'server_wins',  # server_wins, client_wins, merge
            'batch_sync_size': 10
        }
        
        # 缓存配置
        self.cache_config = {
            'default_ttl_seconds': 3600,  # 1小时
            'max_item_size': 1024 * 1024,  # 1MB
            'cleanup_interval_seconds': 300,  # 5分钟
            'cache_hit_threshold': 0.8
        }
        
        # 离线策略
        self.offline_strategies = {
            'projects': {
                'cache_duration': 7200,  # 2小时
                'sync_priority': 'high',
                'offline_editable': True
            },
            'analyses': {
                'cache_duration': 3600,  # 1小时
                'sync_priority': 'medium',
                'offline_editable': False
            },
            'user_data': {
                'cache_duration': 86400,  # 24小时
                'sync_priority': 'low',
                'offline_editable': True
            }
        }
        
        # 同步回调
        self.sync_callbacks: Dict[str, Callable] = {}
        
        # 启动后台任务
        self._last_cleanup = datetime.now()
        self._last_sync = datetime.now()
    
    def set_network_status(self, status: NetworkStatus):
        """设置网络状态"""
        if self.network_status != status:
            offline_logger.info(f"网络状态变更: {self.network_status.value} -> {status.value}")
            self.network_status = status
            self.last_network_check = datetime.now()
            
            # 网络恢复时触发同步
            if status == NetworkStatus.ONLINE:
                self._trigger_sync()
    
    def store_offline_data(self, data_id: str, data_type: str, 
                          content: Dict[str, Any], 
                          metadata: Optional[Dict[str, Any]] = None) -> bool:
        """存储离线数据"""
        try:
            # 计算校验和
            content_str = json.dumps(content, sort_keys=True, ensure_ascii=False)
            checksum = hashlib.md5(content_str.encode()).hexdigest()
            
            # 检查是否已存在
            if data_id in self.offline_data:
                existing_data = self.offline_data[data_id]
                if existing_data.checksum == checksum:
                    # 数据未变化，只更新访问时间
                    existing_data.updated_at = datetime.now()
                    return True
                
                # 数据已变化，更新版本
                existing_data.content = content
                existing_data.version += 1
                existing_data.updated_at = datetime.now()
                existing_data.checksum = checksum
                existing_data.sync_status = SyncStatus.PENDING
                if metadata:
                    existing_data.metadata.update(metadata)
            else:
                # 创建新数据项
                offline_data = OfflineData(
                    data_id=data_id,
                    data_type=data_type,
                    content=content,
                    checksum=checksum,
                    metadata=metadata or {}
                )
                self.offline_data[data_id] = offline_data
            
            offline_logger.info(f"离线数据存储成功: {data_id}")
            return True
            
        except Exception as e:
            offline_logger.error(f"离线数据存储失败: {str(e)}")
            return False
    
    def get_offline_data(self, data_id: str) -> Optional[OfflineData]:
        """获取离线数据"""
        try:
            if data_id in self.offline_data:
                data = self.offline_data[data_id]
                data.updated_at = datetime.now()
                offline_logger.info(f"获取离线数据: {data_id}")
                return data
            
            return None
            
        except Exception as e:
            offline_logger.error(f"获取离线数据失败: {str(e)}")
            return None
    
    def queue_operation(self, operation_type: OperationType, data_id: str,
                       data_type: str, payload: Dict[str, Any]) -> str:
        """添加操作到队列"""
        try:
            operation_id = f"op_{int(time.time() * 1000)}_{len(self.operation_queue)}"
            
            operation = OfflineOperation(
                operation_id=operation_id,
                operation_type=operation_type,
                data_id=data_id,
                data_type=data_type,
                payload=payload
            )
            
            self.operation_queue.append(operation)
            
            offline_logger.info(f"操作已加入队列: {operation_id} - {operation_type.value}")
            
            # 如果在线，尝试立即同步
            if self.network_status == NetworkStatus.ONLINE:
                self._process_operation_queue()
            
            return operation_id
            
        except Exception as e:
            offline_logger.error(f"操作入队失败: {str(e)}")
            return ""
    
    def _process_operation_queue(self):
        """处理操作队列"""
        if not self.operation_queue or self.network_status != NetworkStatus.ONLINE:
            return
        
        try:
            # 按优先级和时间排序
            pending_operations = [
                op for op in self.operation_queue 
                if op.status == SyncStatus.PENDING and op.retry_count < op.max_retries
            ]
            
            # 批量处理操作
            batch_size = self.sync_config['batch_sync_size']
            for i in range(0, len(pending_operations), batch_size):
                batch = pending_operations[i:i + batch_size]
                self._sync_operation_batch(batch)
            
        except Exception as e:
            offline_logger.error(f"处理操作队列失败: {str(e)}")
    
    def _sync_operation_batch(self, operations: List[OfflineOperation]):
        """同步操作批次"""
        for operation in operations:
            try:
                operation.status = SyncStatus.SYNCING
                
                # 模拟网络请求
                success = self._simulate_network_request(operation)
                
                if success:
                    operation.status = SyncStatus.SYNCED
                    
                    # 更新对应的离线数据同步状态
                    if operation.data_id in self.offline_data:
                        self.offline_data[operation.data_id].sync_status = SyncStatus.SYNCED
                        self.offline_data[operation.data_id].last_sync_at = datetime.now()
                    
                    offline_logger.info(f"操作同步成功: {operation.operation_id}")
                else:
                    operation.status = SyncStatus.FAILED
                    operation.retry_count += 1
                    operation.error_message = "网络请求失败"
                    
                    offline_logger.warning(f"操作同步失败: {operation.operation_id}")
                
            except Exception as e:
                operation.status = SyncStatus.FAILED
                operation.retry_count += 1
                operation.error_message = str(e)
                offline_logger.error(f"操作同步异常: {operation.operation_id} - {str(e)}")
        
        # 清理已完成的操作
        self.operation_queue = [
            op for op in self.operation_queue 
            if op.status not in [SyncStatus.SYNCED] and op.retry_count < op.max_retries
        ]
    
    def _simulate_network_request(self, operation: OfflineOperation) -> bool:
        """模拟网络请求"""
        # 这里应该是实际的网络请求逻辑
        # 为了演示，我们模拟一个成功率
        import random
        return random.random() > 0.1  # 90% 成功率
    
    def cache_data(self, cache_key: str, data: Any, 
                  ttl_seconds: Optional[int] = None) -> bool:
        """缓存数据"""
        try:
            ttl = ttl_seconds or self.cache_config['default_ttl_seconds']
            expires_at = datetime.now() + timedelta(seconds=ttl)
            
            # 计算数据大小
            data_str = json.dumps(data, ensure_ascii=False) if not isinstance(data, str) else data
            size_bytes = len(data_str.encode('utf-8'))
            
            # 检查单项大小限制
            if size_bytes > self.cache_config['max_item_size']:
                offline_logger.warning(f"缓存项过大: {cache_key} - {size_bytes} bytes")
                return False
            
            # 检查总缓存大小
            current_size = sum(item.size_bytes for item in self.cache.values())
            if current_size + size_bytes > self.max_cache_size:
                self._cleanup_cache()
            
            # 添加到缓存
            cache_item = CacheItem(
                cache_key=cache_key,
                data=data,
                expires_at=expires_at,
                size_bytes=size_bytes
            )
            
            self.cache[cache_key] = cache_item
            
            offline_logger.info(f"数据已缓存: {cache_key}")
            return True
            
        except Exception as e:
            offline_logger.error(f"缓存数据失败: {str(e)}")
            return False
    
    def get_cached_data(self, cache_key: str) -> Optional[Any]:
        """获取缓存数据"""
        try:
            if cache_key not in self.cache:
                return None
            
            cache_item = self.cache[cache_key]
            
            # 检查是否过期
            if datetime.now() > cache_item.expires_at:
                del self.cache[cache_key]
                return None
            
            # 更新访问统计
            cache_item.access_count += 1
            cache_item.last_accessed = datetime.now()
            
            offline_logger.info(f"缓存命中: {cache_key}")
            return cache_item.data
            
        except Exception as e:
            offline_logger.error(f"获取缓存失败: {str(e)}")
            return None
    
    def _cleanup_cache(self):
        """清理缓存"""
        try:
            current_time = datetime.now()
            
            # 移除过期项
            expired_keys = [
                key for key, item in self.cache.items()
                if current_time > item.expires_at
            ]
            
            for key in expired_keys:
                del self.cache[key]
            
            # 如果仍然超过大小限制，移除最少使用的项
            current_size = sum(item.size_bytes for item in self.cache.values())
            if current_size > self.max_cache_size:
                # 按访问次数和最后访问时间排序
                sorted_items = sorted(
                    self.cache.items(),
                    key=lambda x: (x[1].access_count, x[1].last_accessed)
                )
                
                # 移除最少使用的项直到大小合适
                for key, item in sorted_items:
                    if current_size <= self.max_cache_size * 0.8:  # 保留20%空间
                        break
                    
                    del self.cache[key]
                    current_size -= item.size_bytes
            
            offline_logger.info(f"缓存清理完成，当前大小: {current_size} bytes")
            
        except Exception as e:
            offline_logger.error(f"缓存清理失败: {str(e)}")
    
    def _trigger_sync(self):
        """触发同步"""
        if self.network_status != NetworkStatus.ONLINE:
            return
        
        try:
            # 处理操作队列
            self._process_operation_queue()
            
            # 同步离线数据
            pending_data = [
                data for data in self.offline_data.values()
                if data.sync_status == SyncStatus.PENDING
            ]
            
            for data in pending_data:
                self._sync_offline_data(data)
            
            self._last_sync = datetime.now()
            offline_logger.info("同步触发完成")
            
        except Exception as e:
            offline_logger.error(f"同步触发失败: {str(e)}")
    
    def _sync_offline_data(self, data: OfflineData):
        """同步离线数据"""
        try:
            data.sync_status = SyncStatus.SYNCING
            
            # 模拟同步请求
            success = self._simulate_data_sync(data)
            
            if success:
                data.sync_status = SyncStatus.SYNCED
                data.last_sync_at = datetime.now()
                offline_logger.info(f"数据同步成功: {data.data_id}")
            else:
                data.sync_status = SyncStatus.FAILED
                offline_logger.warning(f"数据同步失败: {data.data_id}")
            
        except Exception as e:
            data.sync_status = SyncStatus.FAILED
            offline_logger.error(f"数据同步异常: {data.data_id} - {str(e)}")
    
    def _simulate_data_sync(self, data: OfflineData) -> bool:
        """模拟数据同步"""
        # 这里应该是实际的同步逻辑
        import random
        return random.random() > 0.05  # 95% 成功率
    
    def get_offline_status(self) -> Dict[str, Any]:
        """获取离线状态"""
        try:
            # 统计数据
            total_data = len(self.offline_data)
            synced_data = len([d for d in self.offline_data.values() if d.sync_status == SyncStatus.SYNCED])
            pending_data = len([d for d in self.offline_data.values() if d.sync_status == SyncStatus.PENDING])
            
            total_operations = len(self.operation_queue)
            pending_operations = len([op for op in self.operation_queue if op.status == SyncStatus.PENDING])
            
            cache_size = sum(item.size_bytes for item in self.cache.values())
            cache_items = len(self.cache)
            
            return {
                'network_status': self.network_status.value,
                'last_network_check': self.last_network_check.isoformat(),
                'data_storage': {
                    'total_items': total_data,
                    'synced_items': synced_data,
                    'pending_items': pending_data,
                    'sync_rate': synced_data / total_data if total_data > 0 else 0
                },
                'operation_queue': {
                    'total_operations': total_operations,
                    'pending_operations': pending_operations,
                    'queue_length': total_operations
                },
                'cache': {
                    'total_items': cache_items,
                    'total_size_bytes': cache_size,
                    'max_size_bytes': self.max_cache_size,
                    'usage_rate': cache_size / self.max_cache_size
                },
                'sync_config': self.sync_config,
                'last_sync': self._last_sync.isoformat()
            }
            
        except Exception as e:
            offline_logger.error(f"获取离线状态失败: {str(e)}")
            return {'error': str(e)}
    
    def register_sync_callback(self, data_type: str, callback: Callable):
        """Register a callback for sync events of the given data type.

        NOTE(review): callbacks are stored but never invoked anywhere in this
        file's sync paths — confirm the dispatch site exists elsewhere.
        """
        self.sync_callbacks[data_type] = callback
        offline_logger.info(f"同步回调已注册: {data_type}")
    
    def clear_offline_data(self, data_type: Optional[str] = None) -> int:
        """清理离线数据"""
        try:
            if data_type:
                # 清理特定类型的数据
                keys_to_remove = [
                    key for key, data in self.offline_data.items()
                    if data.data_type == data_type
                ]
            else:
                # 清理所有数据
                keys_to_remove = list(self.offline_data.keys())
            
            for key in keys_to_remove:
                del self.offline_data[key]
            
            offline_logger.info(f"清理离线数据: {len(keys_to_remove)} 项")
            return len(keys_to_remove)
            
        except Exception as e:
            offline_logger.error(f"清理离线数据失败: {str(e)}")
            return 0
    
    def export_offline_data(self) -> Dict[str, Any]:
        """导出离线数据"""
        try:
            export_data = {
                'timestamp': datetime.now().isoformat(),
                'network_status': self.network_status.value,
                'data_items': {},
                'operation_queue': [],
                'cache_keys': list(self.cache.keys())
            }
            
            # 导出离线数据
            for data_id, data in self.offline_data.items():
                export_data['data_items'][data_id] = {
                    'data_type': data.data_type,
                    'content': data.content,
                    'version': data.version,
                    'sync_status': data.sync_status.value,
                    'created_at': data.created_at.isoformat(),
                    'updated_at': data.updated_at.isoformat(),
                    'checksum': data.checksum
                }
            
            # 导出操作队列
            for operation in self.operation_queue:
                export_data['operation_queue'].append({
                    'operation_id': operation.operation_id,
                    'operation_type': operation.operation_type.value,
                    'data_id': operation.data_id,
                    'data_type': operation.data_type,
                    'payload': operation.payload,
                    'timestamp': operation.timestamp.isoformat(),
                    'status': operation.status.value,
                    'retry_count': operation.retry_count
                })
            
            offline_logger.info("离线数据导出完成")
            return export_data
            
        except Exception as e:
            offline_logger.error(f"离线数据导出失败: {str(e)}")
            return {'error': str(e)}


# Example usage / smoke test.
if __name__ == "__main__":
    service = OfflineService()

    print("=== 离线功能支持服务测试 ===")

    # 1) Simulate going offline.
    print("1. 网络状态测试")
    service.set_network_status(NetworkStatus.OFFLINE)
    print(f"当前网络状态: {service.network_status.value}")

    # 2) Store data while offline.
    print("\n2. 离线数据存储测试")
    sample_project = {
        "title": "AI研究项目",
        "description": "人工智能在医疗诊断中的应用",
        "progress": 65,
        "collaborators": ["researcher1", "student1"]
    }

    stored = service.store_offline_data("proj_001", "project", sample_project)
    print(f"数据存储结果: {stored}")

    # 3) Queue an operation for later sync.
    print("\n3. 操作队列测试")
    queued_id = service.queue_operation(
        OperationType.UPDATE,
        "proj_001",
        "project",
        {"progress": 70}
    )
    print(f"操作ID: {queued_id}")

    # 4) Cache round-trip.
    print("\n4. 缓存测试")
    cached_ok = service.cache_data("user_profile", {"name": "研究员", "role": "researcher"})
    print(f"缓存结果: {cached_ok}")

    profile = service.get_cached_data("user_profile")
    print(f"缓存数据: {profile}")

    # 5) Going back online triggers synchronization.
    print("\n5. 同步测试")
    service.set_network_status(NetworkStatus.ONLINE)

    # 6) Inspect the overall offline status.
    print("\n6. 离线状态")
    snapshot = service.get_offline_status()
    print(json.dumps(snapshot, indent=2, ensure_ascii=False))

    # 7) Export everything.
    print("\n7. 数据导出")
    dump = service.export_offline_data()
    print(f"导出数据项数: {len(dump.get('data_items', {}))}")
    print(f"导出操作数: {len(dump.get('operation_queue', []))}")