# -*- coding: utf-8 -*-
"""
性能优化模块 - 文件操作和进程检测性能优化
提供缓存、批量操作、异步处理等性能优化功能
"""

import os
import sys
import time
import threading
import hashlib
import pickle
from pathlib import Path
from typing import Dict, List, Any, Optional, Tuple, Set
from collections import defaultdict, OrderedDict
from concurrent.futures import ThreadPoolExecutor, as_completed
import subprocess

# Try to import the unified logging system; fall back to stdlib logging below.
try:
    from config.unified_logger import get_logger
    logger = get_logger(__name__)
except ImportError:
    import logging
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

class FileOperationCache:
    """Thread-safe LRU cache with per-entry TTL expiry.

    Entries older than ``ttl`` seconds are treated as missing; when the
    cache is full, the least-recently-used entry is evicted on insert.
    Hit/miss counters feed the ``hit_ratio`` reported by :meth:`get_stats`.
    """

    def __init__(self, max_size: int = 1000, ttl: int = 300):
        self.max_size = max_size
        self.ttl = ttl  # entry lifetime in seconds
        self.cache = OrderedDict()
        self.timestamps = {}
        self.lock = threading.RLock()
        # Counters backing get_stats()['hit_ratio'].  Previously these
        # attributes were never created or updated, so the reported hit
        # ratio was always 0 regardless of actual cache behavior.
        self._hit_count = 0
        self._total_count = 0

    def _is_expired(self, key: str) -> bool:
        """Return True when *key* has no timestamp or has outlived the TTL."""
        if key not in self.timestamps:
            return True
        return time.time() - self.timestamps[key] > self.ttl

    def _cleanup_expired(self):
        """Drop every entry whose timestamp is older than the TTL."""
        current_time = time.time()
        expired_keys = [
            key for key, timestamp in self.timestamps.items()
            if current_time - timestamp > self.ttl
        ]
        for key in expired_keys:
            self.cache.pop(key, None)
            self.timestamps.pop(key, None)

    def get(self, key: str) -> Any:
        """Return the cached value, or ``None`` on a miss or expired entry."""
        with self.lock:
            self._total_count += 1
            if key in self.cache and not self._is_expired(key):
                self._hit_count += 1
                # Re-insert at the end so this key becomes most-recently-used.
                value = self.cache.pop(key)
                self.cache[key] = value
                return value
            return None

    def set(self, key: str, value: Any):
        """Store *value* under *key*, evicting expired and LRU entries as needed."""
        with self.lock:
            # Purge expired entries first.
            self._cleanup_expired()

            # When full, drop the least-recently-used entry (front of the dict).
            if len(self.cache) >= self.max_size:
                oldest_key = next(iter(self.cache))
                self.cache.pop(oldest_key)
                self.timestamps.pop(oldest_key, None)

            self.cache[key] = value
            self.timestamps[key] = time.time()

    def clear(self):
        """Remove every entry and reset the hit/miss counters."""
        with self.lock:
            self.cache.clear()
            self.timestamps.clear()
            self._hit_count = 0
            self._total_count = 0

    def get_stats(self) -> Dict[str, Any]:
        """Return size/configuration info plus the observed hit ratio."""
        with self.lock:
            return {
                'size': len(self.cache),
                'max_size': self.max_size,
                'ttl': self.ttl,
                'hit_ratio': self._hit_count / max(self._total_count, 1)
            }

class OptimizedFileOperations:
    """Cached and batched filesystem operations.

    Wraps common filesystem queries (stat, exists, size, mtime, read,
    glob) with TTL-bounded LRU caches, offers thread-pooled batch
    variants, and records per-operation timing metrics in
    ``performance_metrics``.
    """

    def __init__(self, cache_size: int = 1000, cache_ttl: int = 300):
        self.file_cache = FileOperationCache(cache_size, cache_ttl)
        self.stat_cache = FileOperationCache(cache_size, cache_ttl)
        # The content cache is deliberately small with a short TTL:
        # file contents are comparatively large and change frequently.
        self.content_cache = FileOperationCache(100, 60)
        self.batch_operations = []
        self.performance_metrics = defaultdict(list)

    def get_file_stat(self, file_path: str, use_cache: bool = True) -> Optional[os.stat_result]:
        """Return ``os.stat(file_path)``, or ``None`` on failure (cached)."""
        if not use_cache:
            try:
                return os.stat(file_path)
            except OSError:  # IOError is an alias of OSError in Python 3
                return None

        cache_key = f"stat:{file_path}"
        cached_stat = self.stat_cache.get(cache_key)

        if cached_stat is not None:
            return cached_stat

        try:
            stat_result = os.stat(file_path)
            self.stat_cache.set(cache_key, stat_result)
            return stat_result
        except OSError:
            return None

    def file_exists(self, file_path: str, use_cache: bool = True) -> bool:
        """Return whether *file_path* exists (result cached)."""
        if not use_cache:
            return os.path.exists(file_path)

        cache_key = f"exists:{file_path}"
        cached_result = self.file_cache.get(cache_key)

        if cached_result is not None:
            return cached_result

        result = os.path.exists(file_path)
        self.file_cache.set(cache_key, result)
        return result

    def get_file_size(self, file_path: str, use_cache: bool = True) -> int:
        """Return the file size in bytes, or 0 when the file is inaccessible."""
        stat_result = self.get_file_stat(file_path, use_cache)
        return stat_result.st_size if stat_result else 0

    def get_file_mtime(self, file_path: str, use_cache: bool = True) -> float:
        """Return the modification time, or 0.0 when the file is inaccessible."""
        stat_result = self.get_file_stat(file_path, use_cache)
        # Return a float in the failure case too, matching the annotation
        # (previously the fallback was the int 0).
        return stat_result.st_mtime if stat_result else 0.0

    def read_file_content(self, file_path: str, encoding: str = 'utf-8', use_cache: bool = True) -> Optional[str]:
        """Read and return the file's text, or ``None`` on any read error.

        The cache key embeds the file's mtime so a modified file is
        re-read instead of being served stale.
        """
        if not use_cache:
            try:
                with open(file_path, 'r', encoding=encoding) as f:
                    return f.read()
            except (OSError, UnicodeDecodeError):
                return None

        # Build a cache key that includes the modification time.
        mtime = self.get_file_mtime(file_path, use_cache)
        cache_key = f"content:{file_path}:{mtime}"

        cached_content = self.content_cache.get(cache_key)
        if cached_content is not None:
            return cached_content

        try:
            with open(file_path, 'r', encoding=encoding) as f:
                content = f.read()
            self.content_cache.set(cache_key, content)
            return content
        except (OSError, UnicodeDecodeError):
            return None

    def batch_file_exists(self, file_paths: List[str]) -> Dict[str, bool]:
        """Check many paths for existence, using the cache plus a thread pool."""
        start_time = time.time()
        results = {}

        # Serve what we can from the cache.
        uncached_paths = []
        for path in file_paths:
            cache_key = f"exists:{path}"
            cached_result = self.file_cache.get(cache_key)
            if cached_result is not None:
                results[path] = cached_result
            else:
                uncached_paths.append(path)

        # Probe the remaining paths concurrently.
        if uncached_paths:
            with ThreadPoolExecutor(max_workers=min(10, len(uncached_paths))) as executor:
                future_to_path = {
                    executor.submit(os.path.exists, path): path
                    for path in uncached_paths
                }

                for future in as_completed(future_to_path):
                    path = future_to_path[future]
                    try:
                        exists = future.result()
                        results[path] = exists
                        # Cache the fresh result for subsequent lookups.
                        cache_key = f"exists:{path}"
                        self.file_cache.set(cache_key, exists)
                    except Exception as e:
                        logger.warning(f"检查文件存在性失败: {path}, 错误: {e}")
                        results[path] = False

        # Record timing metrics for this batch.
        elapsed_time = time.time() - start_time
        self.performance_metrics['batch_file_exists'].append({
            'files_count': len(file_paths),
            'elapsed_time': elapsed_time,
            'cache_hits': len(file_paths) - len(uncached_paths)
        })

        return results

    def batch_file_stats(self, file_paths: List[str]) -> Dict[str, Optional[os.stat_result]]:
        """Stat many paths, using the cache plus a thread pool.

        Inaccessible paths map to ``None`` and are not cached.
        """
        start_time = time.time()
        results = {}

        # Serve what we can from the cache.
        uncached_paths = []
        for path in file_paths:
            cache_key = f"stat:{path}"
            cached_stat = self.stat_cache.get(cache_key)
            if cached_stat is not None:
                results[path] = cached_stat
            else:
                uncached_paths.append(path)

        # Stat the remaining paths concurrently.
        if uncached_paths:
            def get_stat(path):
                # Return (path, stat) so results can be matched after as_completed.
                try:
                    return path, os.stat(path)
                except OSError:
                    return path, None

            with ThreadPoolExecutor(max_workers=min(10, len(uncached_paths))) as executor:
                futures = [executor.submit(get_stat, path) for path in uncached_paths]

                for future in as_completed(futures):
                    try:
                        path, stat_result = future.result()
                        results[path] = stat_result
                        # Only successful stats are cached.
                        if stat_result:
                            cache_key = f"stat:{path}"
                            self.stat_cache.set(cache_key, stat_result)
                    except Exception as e:
                        logger.warning(f"获取文件状态失败: 错误: {e}")

        # Record timing metrics for this batch.
        elapsed_time = time.time() - start_time
        self.performance_metrics['batch_file_stats'].append({
            'files_count': len(file_paths),
            'elapsed_time': elapsed_time,
            'cache_hits': len(file_paths) - len(uncached_paths)
        })

        return results

    def find_files_by_pattern(self, directory: str, pattern: str = "*",
                            recursive: bool = True, use_cache: bool = True) -> List[str]:
        """Return the files under *directory* matching *pattern* (cached).

        Directories are excluded from the result; errors are logged and
        yield an empty list.
        """
        cache_key = f"find:{directory}:{pattern}:{recursive}"

        if use_cache:
            cached_result = self.file_cache.get(cache_key)
            if cached_result is not None:
                return cached_result

        start_time = time.time()
        try:
            if recursive:
                files = list(Path(directory).rglob(pattern))
            else:
                files = list(Path(directory).glob(pattern))

            file_paths = [str(f) for f in files if f.is_file()]

            if use_cache:
                self.file_cache.set(cache_key, file_paths)

            # Record timing metrics for this scan.
            elapsed_time = time.time() - start_time
            self.performance_metrics['find_files'].append({
                'directory': directory,
                'pattern': pattern,
                'files_found': len(file_paths),
                'elapsed_time': elapsed_time
            })

            return file_paths

        except Exception as e:
            logger.error(f"查找文件失败: {directory}, 模式: {pattern}, 错误: {e}")
            return []

    def clear_caches(self):
        """Empty the exists/stat/content caches."""
        self.file_cache.clear()
        self.stat_cache.clear()
        self.content_cache.clear()
        logger.info("文件操作缓存已清空")

    def get_performance_report(self) -> Dict[str, Any]:
        """Summarize recorded metrics per operation plus cache statistics."""
        report = {}

        for operation, metrics in self.performance_metrics.items():
            if metrics:
                total_time = sum(m['elapsed_time'] for m in metrics)
                avg_time = total_time / len(metrics)

                report[operation] = {
                    'total_calls': len(metrics),
                    'total_time': total_time,
                    'average_time': avg_time,
                    'last_call': metrics[-1] if metrics else None
                }

        # Attach per-cache hit/size statistics.
        report['cache_stats'] = {
            'file_cache': self.file_cache.get_stats(),
            'stat_cache': self.stat_cache.get_stats(),
            'content_cache': self.content_cache.get_stats()
        }

        return report

class OptimizedProcessDetection:
    """Cached process detection built on ``tasklist``/``ps`` output.

    Process listings and per-name lookups are cached with a short TTL so
    repeated checks do not re-spawn subprocesses; per-operation timing
    metrics are kept in ``performance_metrics``.
    """

    def __init__(self, cache_ttl: int = 30):
        self.process_cache = FileOperationCache(500, cache_ttl)
        self.command_cache = FileOperationCache(200, cache_ttl)
        self.performance_metrics = defaultdict(list)

    def get_running_processes(self, use_cache: bool = True) -> List[Dict[str, Any]]:
        """Return the current process list as dicts (cached).

        On Windows the fields come from ``tasklist /fo csv``; elsewhere
        from ``ps aux``.  Any failure is logged and yields an empty list.
        """
        cache_key = "running_processes"

        if use_cache:
            cached_processes = self.process_cache.get(cache_key)
            if cached_processes is not None:
                return cached_processes

        start_time = time.time()
        processes = []

        try:
            # tasklist on Windows, ps on POSIX systems.
            if sys.platform.startswith('win'):
                result = subprocess.run(
                    ['tasklist', '/fo', 'csv'],
                    capture_output=True, text=True, timeout=10
                )
                if result.returncode == 0:
                    lines = result.stdout.strip().split('\n')
                    if len(lines) > 1:  # skip the CSV header row
                        for line in lines[1:]:
                            parts = [p.strip('"') for p in line.split('","')]
                            if len(parts) >= 5:
                                processes.append({
                                    'name': parts[0],
                                    'pid': parts[1],
                                    'session_name': parts[2],
                                    'session_number': parts[3],
                                    'memory_usage': parts[4]
                                })
            else:
                result = subprocess.run(
                    ['ps', 'aux'],
                    capture_output=True, text=True, timeout=10
                )
                if result.returncode == 0:
                    lines = result.stdout.strip().split('\n')
                    if len(lines) > 1:  # skip the header row
                        for line in lines[1:]:
                            # Split into at most 11 fields so the full
                            # command line stays intact in the last one.
                            parts = line.split(None, 10)
                            if len(parts) >= 11:
                                processes.append({
                                    'user': parts[0],
                                    'pid': parts[1],
                                    'cpu': parts[2],
                                    'memory': parts[3],
                                    'command': parts[10]
                                })

        # Deliberate best-effort: any failure (timeout, missing tool,
        # parse error) degrades to an empty process list.  The original
        # tuple also named TimeoutExpired/CalledProcessError, which
        # Exception already covers.
        except Exception as e:
            logger.warning(f"获取进程列表失败: {e}")

        if use_cache:
            self.process_cache.set(cache_key, processes)

        # Record timing metrics for this listing.
        elapsed_time = time.time() - start_time
        self.performance_metrics['get_processes'].append({
            'process_count': len(processes),
            'elapsed_time': elapsed_time
        })

        return processes

    def find_processes_by_name(self, process_name: str, use_cache: bool = True) -> List[Dict[str, Any]]:
        """Return processes whose name/command contains *process_name* (cached).

        Matching is case-insensitive substring matching on the ``name``
        field (Windows) or the ``command`` field (POSIX).
        """
        cache_key = f"find_process:{process_name}"

        if use_cache:
            cached_result = self.command_cache.get(cache_key)
            if cached_result is not None:
                return cached_result

        start_time = time.time()
        matching_processes = []

        try:
            all_processes = self.get_running_processes(use_cache)

            # Hoist the platform check out of the loop.
            field = 'name' if sys.platform.startswith('win') else 'command'
            needle = process_name.lower()
            for process in all_processes:
                if needle in process.get(field, '').lower():
                    matching_processes.append(process)

        except Exception as e:
            logger.warning(f"查找进程失败: {process_name}, 错误: {e}")

        if use_cache:
            self.command_cache.set(cache_key, matching_processes)

        # Record timing metrics for this lookup.
        elapsed_time = time.time() - start_time
        self.performance_metrics['find_processes'].append({
            'process_name': process_name,
            'matches_found': len(matching_processes),
            'elapsed_time': elapsed_time
        })

        return matching_processes

    def is_process_running(self, process_name: str, use_cache: bool = True) -> bool:
        """Return whether at least one process matches *process_name*."""
        processes = self.find_processes_by_name(process_name, use_cache)
        return len(processes) > 0

    def batch_process_check(self, process_names: List[str]) -> Dict[str, bool]:
        """Check many process names against a single process listing."""
        start_time = time.time()
        results = {}

        # Fetch the process list once for the whole batch.
        all_processes = self.get_running_processes(use_cache=True)

        # Build a lowered lookup set from the platform-appropriate field.
        field = 'name' if sys.platform.startswith('win') else 'command'
        running_values = {p.get(field, '').lower() for p in all_processes}

        # Substring-match every requested name against the set.
        for process_name in process_names:
            needle = process_name.lower()
            results[process_name] = any(needle in value for value in running_values)

        # Record timing metrics for this batch.
        elapsed_time = time.time() - start_time
        self.performance_metrics['batch_process_check'].append({
            'process_count': len(process_names),
            'elapsed_time': elapsed_time
        })

        return results

    def clear_cache(self):
        """Empty the process and command caches."""
        self.process_cache.clear()
        self.command_cache.clear()
        logger.info("进程检测缓存已清空")

    def get_performance_report(self) -> Dict[str, Any]:
        """Summarize recorded metrics per operation plus cache statistics."""
        report = {}

        for operation, metrics in self.performance_metrics.items():
            if metrics:
                total_time = sum(m['elapsed_time'] for m in metrics)
                avg_time = total_time / len(metrics)

                report[operation] = {
                    'total_calls': len(metrics),
                    'total_time': total_time,
                    'average_time': avg_time,
                    'last_call': metrics[-1] if metrics else None
                }

        # Attach per-cache hit/size statistics.
        report['cache_stats'] = {
            'process_cache': self.process_cache.get_stats(),
            'command_cache': self.command_cache.get_stats()
        }

        return report

class PerformanceOptimizer:
    """Facade bundling the optimized file-operation and process-detection helpers."""

    def __init__(self):
        self.file_ops = OptimizedFileOperations()
        self.process_detection = OptimizedProcessDetection()

    def optimize_file_operations(self, enable_cache: bool = True, cache_size: int = 1000):
        """Rebuild the file-operations helper with caching, or clear its caches.

        Note: disabling only clears the existing caches; the helper keeps
        repopulating them on subsequent calls.
        """
        if enable_cache:
            self.file_ops = OptimizedFileOperations(cache_size)
            logger.info(f"文件操作缓存已启用，缓存大小: {cache_size}")
        else:
            self.file_ops.clear_caches()
            logger.info("文件操作缓存已禁用")

    def optimize_process_detection(self, enable_cache: bool = True, cache_ttl: int = 30):
        """Rebuild the process-detection helper with the given TTL, or clear its caches."""
        if enable_cache:
            self.process_detection = OptimizedProcessDetection(cache_ttl)
            logger.info(f"进程检测缓存已启用，TTL: {cache_ttl}秒")
        else:
            self.process_detection.clear_cache()
            logger.info("进程检测缓存已禁用")

    def clear_all_caches(self):
        """Empty every cache held by both helpers."""
        self.file_ops.clear_caches()
        self.process_detection.clear_cache()
        logger.info("所有性能优化缓存已清空")

    def get_comprehensive_report(self) -> Dict[str, Any]:
        """Return both helpers' performance reports plus a timestamp."""
        return {
            'file_operations': self.file_ops.get_performance_report(),
            'process_detection': self.process_detection.get_performance_report(),
            'timestamp': time.strftime('%Y-%m-%d %H:%M:%S')
        }

    def save_performance_report(self, output_path: Optional[str] = None):
        """Write the comprehensive report as JSON.

        Returns the output path, or ``None`` on failure.  The annotation
        is ``Optional[str]`` (it previously claimed ``str`` despite the
        ``None`` default).
        """
        if not output_path:
            output_path = f"performance_report_{int(time.time())}.json"

        try:
            import json
            report = self.get_comprehensive_report()

            # default=str stringifies non-JSON types such as os.stat_result.
            with open(output_path, 'w', encoding='utf-8') as f:
                json.dump(report, f, indent=2, ensure_ascii=False, default=str)

            logger.info(f"性能报告已保存到: {output_path}")
            return output_path

        except Exception as e:
            logger.error(f"保存性能报告失败: {e}")
            return None

# Module-level singleton shared by the convenience accessor functions.
performance_optimizer = PerformanceOptimizer()

# Convenience accessors
def get_optimized_file_ops():
    """Return the shared optimized file-operations helper."""
    return performance_optimizer.file_ops

def get_optimized_process_detection():
    """Return the shared optimized process-detection helper."""
    return performance_optimizer.process_detection

if __name__ == "__main__":
    # 性能测试示例
    import argparse
    
    parser = argparse.ArgumentParser(description="性能优化模块测试")
    parser.add_argument('--test-files', action='store_true', help="测试文件操作性能")
    parser.add_argument('--test-processes', action='store_true', help="测试进程检测性能")
    parser.add_argument('--report', action='store_true', help="生成性能报告")
    
    args = parser.parse_args()
    
    if args.test_files:
        print("测试文件操作性能...")
        file_ops = get_optimized_file_ops()
        
        # 测试批量文件检查
        test_files = [f"test_file_{i}.txt" for i in range(100)]
        start_time = time.time()
        results = file_ops.batch_file_exists(test_files)
        elapsed = time.time() - start_time
        print(f"批量检查100个文件耗时: {elapsed:.3f}秒")
    
    if args.test_processes:
        print("测试进程检测性能...")
        proc_detection = get_optimized_process_detection()
        
        # 测试进程检测
        start_time = time.time()
        processes = proc_detection.get_running_processes()
        elapsed = time.time() - start_time
        print(f"获取进程列表耗时: {elapsed:.3f}秒，找到 {len(processes)} 个进程")
    
    if args.report:
        print("生成性能报告...")
        report_path = performance_optimizer.save_performance_report()
        if report_path:
            print(f"性能报告已保存到: {report_path}")