package com.imut.lagain.service;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

/**
 * 性能监控服务
 */
@Service
public class PerformanceMonitorService {
    private static final Logger log = LoggerFactory.getLogger(PerformanceMonitorService.class);

    // Database query statistics, keyed by operation name:
    // invocation count, cumulative duration (ms), and error count.
    private final Map<String, AtomicLong> databaseQueryCounts = new ConcurrentHashMap<>();
    private final Map<String, AtomicLong> databaseQueryTimes = new ConcurrentHashMap<>();
    private final Map<String, AtomicLong> databaseQueryErrors = new ConcurrentHashMap<>();

    // Cache operation statistics, same triple layout as above.
    private final Map<String, AtomicLong> cacheOperationCounts = new ConcurrentHashMap<>();
    private final Map<String, AtomicLong> cacheOperationTimes = new ConcurrentHashMap<>();
    private final Map<String, AtomicLong> cacheOperationErrors = new ConcurrentHashMap<>();

    // API call statistics, keyed by endpoint.
    private final Map<String, AtomicLong> apiCallCounts = new ConcurrentHashMap<>();
    private final Map<String, AtomicLong> apiCallTimes = new ConcurrentHashMap<>();
    private final Map<String, AtomicLong> apiCallErrors = new ConcurrentHashMap<>();

    /**
     * Records one timed event into a count/time/error map triple.
     * Thread-safe: the maps are {@link ConcurrentHashMap} and the counters are
     * {@link AtomicLong}, so concurrent recorders never lose updates.
     *
     * @param counts   per-key invocation counter map
     * @param times    per-key cumulative duration map (ms)
     * @param errors   per-key error counter map; only touched when {@code success} is false
     * @param key      operation or endpoint name
     * @param duration elapsed time in milliseconds
     * @param success  whether the operation succeeded
     */
    private static void record(Map<String, AtomicLong> counts,
                               Map<String, AtomicLong> times,
                               Map<String, AtomicLong> errors,
                               String key, long duration, boolean success) {
        counts.computeIfAbsent(key, k -> new AtomicLong()).incrementAndGet();
        times.computeIfAbsent(key, k -> new AtomicLong()).addAndGet(duration);
        if (!success) {
            errors.computeIfAbsent(key, k -> new AtomicLong()).incrementAndGet();
        }
    }

    /**
     * Aggregates a count/time/error map triple into per-key statistics.
     * Each entry maps the key to a map with: {@code count}, {@code totalTime},
     * {@code averageTime} (ms, 0 when count is 0), {@code errors}, and
     * {@code errorRate} (percentage, 0 when count is 0).
     *
     * @return a new mutable map of per-key statistics (snapshot; may be slightly
     *         stale under concurrent recording)
     */
    private static Map<String, Object> aggregate(Map<String, AtomicLong> counts,
                                                 Map<String, AtomicLong> times,
                                                 Map<String, AtomicLong> errors) {
        Map<String, Object> result = new HashMap<>();
        for (Map.Entry<String, AtomicLong> entry : counts.entrySet()) {
            String key = entry.getKey();
            long count = entry.getValue().get();
            // Plain get + null check: avoids allocating a throwaway AtomicLong
            // per lookup, which getOrDefault(key, new AtomicLong(0)) would do.
            AtomicLong time = times.get(key);
            long totalTime = time != null ? time.get() : 0L;
            AtomicLong err = errors.get(key);
            long errorCount = err != null ? err.get() : 0L;

            Map<String, Object> stats = new HashMap<>();
            stats.put("count", count);
            stats.put("totalTime", totalTime);
            stats.put("averageTime", count > 0 ? (double) totalTime / count : 0);
            stats.put("errors", errorCount);
            stats.put("errorRate", count > 0 ? (double) errorCount / count * 100 : 0);

            result.put(key, stats);
        }
        return result;
    }

    /**
     * Records one database query execution.
     *
     * @param operation query/operation name used as the statistics key
     * @param duration  elapsed time in milliseconds
     * @param success   whether the query succeeded
     */
    public void recordDatabaseQuery(String operation, long duration, boolean success) {
        record(databaseQueryCounts, databaseQueryTimes, databaseQueryErrors,
                operation, duration, success);
        log.debug("Database query recorded: {} - {}ms - {}", operation, duration, success ? "success" : "error");
    }

    /**
     * Records one cache operation.
     *
     * @param operation cache operation name used as the statistics key
     * @param duration  elapsed time in milliseconds
     * @param success   whether the operation succeeded
     */
    public void recordCacheOperation(String operation, long duration, boolean success) {
        record(cacheOperationCounts, cacheOperationTimes, cacheOperationErrors,
                operation, duration, success);
        log.debug("Cache operation recorded: {} - {}ms - {}", operation, duration, success ? "success" : "error");
    }

    /**
     * Records one API call.
     *
     * @param endpoint endpoint identifier used as the statistics key
     * @param duration elapsed time in milliseconds
     * @param success  whether the call succeeded
     */
    public void recordApiCall(String endpoint, long duration, boolean success) {
        record(apiCallCounts, apiCallTimes, apiCallErrors, endpoint, duration, success);
        log.debug("API call recorded: {} - {}ms - {}", endpoint, duration, success ? "success" : "error");
    }

    /**
     * Returns current JVM-level metrics: a {@code memory} sub-map (total/used/free/max
     * bytes plus {@code usagePercent} of max) and {@code processors}.
     *
     * @return a new mutable metrics map
     */
    public Map<String, Object> getSystemMetrics() {
        Map<String, Object> metrics = new HashMap<>();

        Runtime runtime = Runtime.getRuntime();
        long totalMemory = runtime.totalMemory();
        long freeMemory = runtime.freeMemory();
        long usedMemory = totalMemory - freeMemory;
        long maxMemory = runtime.maxMemory();

        Map<String, Object> memoryInfo = new HashMap<>();
        memoryInfo.put("total", totalMemory);
        memoryInfo.put("used", usedMemory);
        memoryInfo.put("free", freeMemory);
        memoryInfo.put("max", maxMemory);
        memoryInfo.put("usagePercent", (double) usedMemory / maxMemory * 100);

        metrics.put("memory", memoryInfo);
        metrics.put("processors", runtime.availableProcessors());

        return metrics;
    }

    /**
     * Returns aggregated database query statistics under the {@code queries} key.
     *
     * @return a new mutable metrics map
     */
    public Map<String, Object> getDatabaseMetrics() {
        Map<String, Object> metrics = new HashMap<>();
        metrics.put("queries", aggregate(databaseQueryCounts, databaseQueryTimes, databaseQueryErrors));
        return metrics;
    }

    /**
     * Returns aggregated cache operation statistics under the {@code operations} key.
     *
     * @return a new mutable metrics map
     */
    public Map<String, Object> getCacheMetrics() {
        Map<String, Object> metrics = new HashMap<>();
        metrics.put("operations", aggregate(cacheOperationCounts, cacheOperationTimes, cacheOperationErrors));
        return metrics;
    }

    /**
     * Returns aggregated API call statistics under the {@code endpoints} key.
     *
     * @return a new mutable metrics map
     */
    public Map<String, Object> getApiMetrics() {
        Map<String, Object> metrics = new HashMap<>();
        metrics.put("endpoints", aggregate(apiCallCounts, apiCallTimes, apiCallErrors));
        return metrics;
    }

    /**
     * Returns a full performance report combining system, database, cache, and API
     * metrics, plus a {@code timestamp} (epoch millis) of when it was assembled.
     *
     * @return a new mutable report map
     */
    public Map<String, Object> getPerformanceReport() {
        Map<String, Object> report = new HashMap<>();

        report.put("system", getSystemMetrics());
        report.put("database", getDatabaseMetrics());
        report.put("cache", getCacheMetrics());
        report.put("api", getApiMetrics());
        report.put("timestamp", System.currentTimeMillis());

        return report;
    }

    /**
     * Resets all accumulated statistics. Note: this is a full reset, not an
     * age-based eviction — any reader racing with this call may observe
     * partially cleared data.
     */
    public void cleanupExpiredData() {
        log.info("Cleaning up expired performance data...");

        databaseQueryCounts.clear();
        databaseQueryTimes.clear();
        databaseQueryErrors.clear();

        cacheOperationCounts.clear();
        cacheOperationTimes.clear();
        cacheOperationErrors.clear();

        apiCallCounts.clear();
        apiCallTimes.clear();
        apiCallErrors.clear();

        log.info("Performance data cleanup completed");
    }
}