package logger

import (
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"sort"
	"strings"
	"time"
)

// DelayAnalyzer aggregates per-component latency samples parsed from the
// "*.log" files found in a single directory and produces summary statistics.
type DelayAnalyzer struct {
	logDir string // directory scanned for "*.log" files (also receives the generated report)
}

// NewDelayAnalyzer returns a DelayAnalyzer that reads "*.log" files from logDir.
func NewDelayAnalyzer(logDir string) *DelayAnalyzer {
	da := &DelayAnalyzer{logDir: logDir}
	return da
}

// DelayStats summarizes the observed delay distribution for one component.
//
// NOTE(review): encoding/json marshals time.Duration as its underlying int64
// nanosecond count, so despite the "_ms" suffixes in the JSON tags these
// fields are serialized in NANOSECONDS, not milliseconds — confirm what the
// consumers of delay_report.json actually expect before relying on the names.
type DelayStats struct {
	Component   string        `json:"component"`
	Count       int           `json:"count"`           // number of samples observed
	MinDelay    time.Duration `json:"min_delay_ms"`
	MaxDelay    time.Duration `json:"max_delay_ms"`
	AvgDelay    time.Duration `json:"avg_delay_ms"`    // arithmetic mean
	MedianDelay time.Duration `json:"median_delay_ms"` // middle sample (mean of two middles for even counts)
	P95Delay    time.Duration `json:"p95_delay_ms"`    // nearest-rank 95th percentile
	P99Delay    time.Duration `json:"p99_delay_ms"`    // nearest-rank 99th percentile
}

// AnalyzeDelays scans every "*.log" file in the analyzer's log directory,
// merges the per-component delay samples, and returns summary statistics
// (min/max/avg/median/P95/P99) for each component.
//
// Files that fail to parse are skipped with a warning on stderr; an error is
// returned only when the directory listing itself fails. The returned slice
// is sorted by component name so output is deterministic across runs.
func (da *DelayAnalyzer) AnalyzeDelays() ([]DelayStats, error) {
	logFiles, err := filepath.Glob(filepath.Join(da.logDir, "*.log"))
	if err != nil {
		return nil, fmt.Errorf("finding log files in %q: %w", da.logDir, err)
	}

	// Merge delay samples from all files, keyed by component.
	allDelays := make(map[string][]time.Duration)
	for _, logFile := range logFiles {
		delays, err := da.parseLogFile(logFile)
		if err != nil {
			// Best-effort: one unreadable file must not abort the whole
			// analysis. Warn on stderr so stdout stays clean for reports.
			fmt.Fprintf(os.Stderr, "Warning: failed to parse %s: %v\n", logFile, err)
			continue
		}
		for component, componentDelays := range delays {
			allDelays[component] = append(allDelays[component], componentDelays...)
		}
	}

	stats := make([]DelayStats, 0, len(allDelays))
	for component, delays := range allDelays {
		if len(delays) == 0 {
			continue
		}

		// Sort ascending so order statistics are direct index lookups.
		sort.Slice(delays, func(i, j int) bool { return delays[i] < delays[j] })

		var total time.Duration
		for _, d := range delays {
			total += d
		}

		n := len(delays)
		stat := DelayStats{
			Component: component,
			Count:     n,
			MinDelay:  delays[0],
			MaxDelay:  delays[n-1],
			AvgDelay:  total / time.Duration(n),
		}

		// Median: mean of the two middle samples when n is even.
		if n%2 == 0 {
			stat.MedianDelay = (delays[n/2-1] + delays[n/2]) / 2
		} else {
			stat.MedianDelay = delays[n/2]
		}

		stat.P95Delay = delays[percentileIndex(n, 0.95)]
		stat.P99Delay = delays[percentileIndex(n, 0.99)]

		stats = append(stats, stat)
	}

	// Map iteration order is random in Go; sort for reproducible reports.
	sort.Slice(stats, func(i, j int) bool { return stats[i].Component < stats[j].Component })

	return stats, nil
}

// percentileIndex returns the nearest-rank index of the p-th percentile in a
// sorted slice of length n, clamped to [0, n-1] so float rounding can never
// produce an out-of-range index.
func percentileIndex(n int, p float64) int {
	idx := int(float64(n) * p)
	if idx >= n {
		idx = n - 1
	}
	return idx
}

// parseLogFile reads one log file as a stream of JSON objects and returns the
// delay samples it contains, grouped by component name.
//
// Each entry is expected to carry a "component" string and a "delay_ms"
// number (milliseconds); entries missing either field are skipped silently.
// A clean io.EOF ends the stream normally; any other decode error is
// returned so the caller can flag the corrupt file instead of silently
// truncating its data (the previous behavior).
func (da *DelayAnalyzer) parseLogFile(filename string) (map[string][]time.Duration, error) {
	file, err := os.Open(filename)
	if err != nil {
		return nil, err
	}
	defer file.Close()

	delays := make(map[string][]time.Duration)

	decoder := json.NewDecoder(file)
	for {
		var entry map[string]interface{}
		if err := decoder.Decode(&entry); err != nil {
			if errors.Is(err, io.EOF) {
				break // normal end of stream
			}
			return nil, fmt.Errorf("decoding %s: %w", filename, err)
		}

		// JSON numbers decode to float64 in a map[string]interface{}.
		if component, ok := entry["component"].(string); ok {
			if delayMs, ok := entry["delay_ms"].(float64); ok {
				delay := time.Duration(delayMs) * time.Millisecond
				delays[component] = append(delays[component], delay)
			}
		}
	}

	return delays, nil
}

// GenerateReport runs the delay analysis and writes the results as indented
// JSON to "delay_report.json" inside the log directory, alongside an RFC 3339
// timestamp. On success it prints the report path to stdout.
func (da *DelayAnalyzer) GenerateReport() error {
	stats, err := da.AnalyzeDelays()
	if err != nil {
		return err
	}

	reportFile := filepath.Join(da.logDir, "delay_report.json")
	file, err := os.Create(reportFile)
	if err != nil {
		return fmt.Errorf("creating report file: %w", err)
	}

	encoder := json.NewEncoder(file)
	encoder.SetIndent("", "  ")

	report := map[string]interface{}{
		"timestamp": time.Now().Format(time.RFC3339),
		"stats":     stats,
	}

	if err := encoder.Encode(report); err != nil {
		file.Close() // best-effort cleanup; the encode error is the one to report
		return fmt.Errorf("writing report: %w", err)
	}
	// A deferred Close would swallow flush errors, silently accepting a
	// partially written report; check it explicitly instead.
	if err := file.Close(); err != nil {
		return fmt.Errorf("closing report file: %w", err)
	}

	fmt.Printf("Delay analysis report generated: %s\n", reportFile)
	return nil
}

// PrintStats runs the delay analysis and writes a formatted table of the
// per-component statistics (in milliseconds) to stdout. It returns any error
// produced by the underlying analysis.
func (da *DelayAnalyzer) PrintStats() error {
	stats, err := da.AnalyzeDelays()
	if err != nil {
		return err
	}

	// Convert a duration to fractional milliseconds for display.
	toMillis := func(d time.Duration) float64 {
		return float64(d.Nanoseconds()) / 1e6
	}

	fmt.Println("\n=== 延迟分析报告 ===")
	fmt.Printf("%-15s %-8s %-10s %-10s %-10s %-10s %-10s %-10s\n",
		"组件", "次数", "最小(ms)", "最大(ms)", "平均(ms)", "中位数(ms)", "P95(ms)", "P99(ms)")
	fmt.Println(strings.Repeat("-", 100))

	for _, s := range stats {
		fmt.Printf("%-15s %-8d %-10.2f %-10.2f %-10.2f %-10.2f %-10.2f %-10.2f\n",
			s.Component,
			s.Count,
			toMillis(s.MinDelay),
			toMillis(s.MaxDelay),
			toMillis(s.AvgDelay),
			toMillis(s.MedianDelay),
			toMillis(s.P95Delay),
			toMillis(s.P99Delay))
	}

	return nil
}
