#!/usr/bin/env python3
# SPDX-License-Identifier: MulanPSL-2.0+
# Copyright (c) 2025 Huawei Technologies Co., Ltd. All rights reserved.

import os
import sys
import csv
import json
import logging
import mysql.connector
from datetime import datetime, timedelta
from collections import defaultdict
import argparse

# Logging setup: timestamped INFO-and-above messages go to stdout.
logging.basicConfig(
    format='%(asctime)s [%(levelname)s] %(message)s',
    handlers=[logging.StreamHandler(sys.stdout)],
    level=logging.INFO,
)
logger = logging.getLogger('bisect_analysis')

class BisectAnalysis:
    """Batch analysis tool for bisect tasks.

    Reads task rows from the ``bisect`` table, aggregates
    success/duplicate/latency statistics, estimates the miss rate by
    cross-checking the ``jobs`` table, and can export raw rows to CSV.
    """

    # Timestamp format shared by query conversion and duration analysis.
    TIME_FMT = '%Y-%m-%d %H:%M:%S'

    def __init__(self, db_config=None):
        """
        :param db_config: optional mysql.connector connection dict;
                          defaults to the MANTICORE_* environment variables.
        """
        self.db_config = db_config or {
            'host': os.getenv('MANTICORE_HOST', 'localhost'),
            'port': int(os.getenv('MANTICORE_PORT', '9306')),
            'user': os.getenv('MANTICORE_USER', ''),
            'password': os.getenv('MANTICORE_PASSWORD', ''),
            'database': os.getenv('MANTICORE_DB', 'jobs')
        }
        self.connection = None

    def connect(self):
        """Open the database connection; return True on success."""
        try:
            self.connection = mysql.connector.connect(**self.db_config)
            logger.info(f"数据库连接成功: {self.db_config['host']}:{self.db_config['port']}")
            return True
        except mysql.connector.Error as err:
            logger.error(f"数据库连接失败: {err}")
            return False

    def disconnect(self):
        """Close the database connection if it is open."""
        if self.connection and self.connection.is_connected():
            self.connection.close()
            logger.info("数据库连接已关闭")

    def _ensure_connection(self):
        """Lazily (re)connect; return True when a live connection exists."""
        if self.connection and self.connection.is_connected():
            return True
        return self.connect()

    def query_bisect_tasks(self, hours=24, status=None, limit=1000):
        """
        Query recent bisect tasks.

        :param hours: look-back window in hours
        :param status: optional exact bisect_status filter
        :param limit: maximum number of rows returned
        :return: list of task dicts (timestamps formatted as strings);
                 empty list on connection or query failure
        """
        if not self._ensure_connection():
            return []

        cursor = None  # bound up-front so the finally clause cannot NameError
        try:
            cursor = self.connection.cursor(dictionary=True)

            # Compute the look-back cutoff as a unix timestamp.
            cutoff_time = datetime.now() - timedelta(hours=hours)
            unix_cutoff = int(cutoff_time.timestamp())

            # Fully parameterized WHERE clause — no values are
            # interpolated into the SQL text.
            conditions = ["start_time >= %s"]
            params = [unix_cutoff]

            if status:
                conditions.append("bisect_status = %s")
                params.append(status)

            where_clause = " AND ".join(conditions)

            query = f"""
                SELECT 
                    id, bad_job_id, bisect_status, start_time, end_time, 
                    bad_commit
                FROM bisect
                WHERE {where_clause}
                ORDER BY start_time DESC
                LIMIT %s
            """
            params.append(limit)

            logger.debug(f"执行查询: {query}")
            cursor.execute(query, params)
            results = cursor.fetchall()

            # Convert unix timestamps to human-readable strings
            # (end_time may be NULL for still-running tasks).
            for task in results:
                task['start_time'] = datetime.fromtimestamp(task['start_time']).strftime(self.TIME_FMT)
                if task['end_time']:
                    task['end_time'] = datetime.fromtimestamp(task['end_time']).strftime(self.TIME_FMT)

            return results

        except mysql.connector.Error as err:
            logger.error(f"查询失败: {err}")
            return []
        finally:
            if cursor:
                cursor.close()

    def analyze_tasks(self, tasks):
        """Aggregate statistics over bisect task rows.

        :param tasks: rows as returned by :meth:`query_bisect_tasks`
        :return: dict of counters, rates and duration stats, or None
                 when *tasks* is empty
        """
        if not tasks:
            return None

        analysis = {
            'total': len(tasks),
            'success_count': 0,
            'failure_count': 0,
            'pending_count': 0,
            'duplicate_tasks': defaultdict(list),
            'duration_stats': [],
            'first_bad_commits': defaultdict(int),
            'status_distribution': defaultdict(int),
            'timeliness': {
                'timeout': 0,    # > 1 hour
                'normal': 0,     # 10 minutes - 1 hour
                'fast': 0        # < 10 minutes
            }
        }

        for task in tasks:
            # Status distribution.
            analysis['status_distribution'][task['bisect_status']] += 1

            # Success / failure counters; any other status is pending.
            if task['bisect_status'] == 'success':
                analysis['success_count'] += 1
            elif task['bisect_status'] == 'failure':
                analysis['failure_count'] += 1
            else:
                analysis['pending_count'] += 1

            # Group task ids by bad_job_id to detect duplicate bisects.
            analysis['duplicate_tasks'][task['bad_job_id']].append(task['id'])

            # First-bad-commit frequency (bisect result column).
            if task['bad_commit']:
                analysis['first_bad_commits'][task['bad_commit']] += 1

            # Latency analysis over completed tasks only.
            if task['start_time'] and task['end_time']:
                start = datetime.strptime(task['start_time'], self.TIME_FMT)
                end = datetime.strptime(task['end_time'], self.TIME_FMT)
                duration = (end - start).total_seconds()
                analysis['duration_stats'].append(duration)

                # Bucket the duration.
                if duration > 3600:    # 1 hour
                    analysis['timeliness']['timeout'] += 1
                elif duration > 600:   # 10 minutes
                    analysis['timeliness']['normal'] += 1
                else:
                    analysis['timeliness']['fast'] += 1

        # Share of bad jobs that triggered more than one bisect task.
        duplicate_count = sum(1 for jobs in analysis['duplicate_tasks'].values() if len(jobs) > 1)
        analysis['duplicate_rate'] = duplicate_count / analysis['total'] if analysis['total'] > 0 else 0

        # Success rate over completed (non-pending) tasks.
        completed_tasks = analysis['success_count'] + analysis['failure_count']
        analysis['success_rate'] = analysis['success_count'] / completed_tasks if completed_tasks > 0 else 0

        # Duration summary (only when at least one task completed).
        if analysis['duration_stats']:
            analysis['avg_duration'] = sum(analysis['duration_stats']) / len(analysis['duration_stats'])
            analysis['min_duration'] = min(analysis['duration_stats'])
            analysis['max_duration'] = max(analysis['duration_stats'])

        # Plain dicts so the result is JSON-serializable.
        analysis['status_distribution'] = dict(analysis['status_distribution'])
        analysis['first_bad_commits'] = dict(analysis['first_bad_commits'])

        return analysis

    def calculate_miss_rate(self, bisect_tasks):
        """Estimate the miss rate by cross-checking the ``jobs`` table.

        :param bisect_tasks: rows as returned by :meth:`query_bisect_tasks`
        :return: fraction of actual bad commits not reported by bisect,
                 0 when there is nothing to check, -1 on a database error
        """
        if not bisect_tasks:
            return 0
        # Original code assumed a live connection and crashed otherwise;
        # reuse the error sentinel instead.
        if not self._ensure_connection():
            return -1

        cursor = None  # bound up-front so the finally clause cannot NameError
        try:
            cursor = self.connection.cursor(dictionary=True)

            # Bad commits reported by the bisect tasks themselves.
            reported_bad_commits = {
                task['bad_commit'] for task in bisect_tasks if task['bad_commit']
            }

            bad_job_ids = [task['bad_job_id'] for task in bisect_tasks]
            if not bad_job_ids:
                return 0

            placeholders = ','.join(['%s'] * len(bad_job_ids))

            # Actual bad commits recorded on the failing jobs; the commit
            # may live under ss.linux.commit or program.makepkg.commit.
            query = f"""
                SELECT 
                    CASE 
                        WHEN ss IS NOT NULL AND ss->'$.linux.commit' IS NOT NULL 
                            THEN ss->'$.linux.commit'
                        WHEN program IS NOT NULL AND program->'$.makepkg.commit' IS NOT NULL 
                            THEN program->'$.makepkg.commit'
                        ELSE NULL
                    END AS commit
                FROM jobs
                WHERE job_health = 'fail' AND id IN ({placeholders})
            """
            cursor.execute(query, bad_job_ids)
            results = cursor.fetchall()

            actual_bad_commits = {row['commit'] for row in results if row['commit']}

            # Miss rate = bad commits bisect failed to report.
            detected_count = len(reported_bad_commits & actual_bad_commits)
            actual_count = len(actual_bad_commits)

            if actual_count == 0:
                return 0

            return (actual_count - detected_count) / actual_count

        except mysql.connector.Error as err:
            logger.error(f"漏检率计算失败: {err}")
            return -1
        finally:
            if cursor:
                cursor.close()

    def format_analysis(self, analysis, hours=24):
        """Render the analysis dict as a human-readable report string."""
        if not analysis:
            return "无分析结果"

        # Rates as percentages.
        success_rate_percent = analysis.get('success_rate', 0) * 100
        duplicate_rate_percent = analysis.get('duplicate_rate', 0) * 100

        # Latency buckets; each defaults to 0 so a partial analysis
        # dict cannot raise KeyError here.
        timeliness = analysis.get('timeliness', {})
        timeout_count = timeliness.get('timeout', 0)
        normal_count = timeliness.get('normal', 0)
        fast_count = timeliness.get('fast', 0)
        total_completed = timeout_count + normal_count + fast_count

        output = [
            "=" * 80,
            "Bisect 任务分析报告",
            "=" * 80,
            f"任务总数: {analysis['total']}",
            f"时间范围: 最近{hours}小时",
            "",
            "关键指标:",
            f"  - 成功率: {success_rate_percent:.2f}%",
            f"  - 漏检率: {analysis.get('miss_rate', 0)*100:.2f}%",
            f"  - 重复率: {duplicate_rate_percent:.2f}%",
            "",
            "状态分布:"
        ]

        # Per-status counts with percentage of total.
        for status, count in analysis['status_distribution'].items():
            output.append(f"  {status}: {count} ({count/analysis['total']*100:.1f}%)")

        # Latency section (only meaningful with completed tasks).
        output.append("\n时效分析:")
        if total_completed > 0:
            output.append(f"  超时(>1小时): {timeout_count} ({timeout_count/total_completed*100:.1f}%)")
            output.append(f"  正常(10m-1h): {normal_count} ({normal_count/total_completed*100:.1f}%)")
            output.append(f"  快速(<10m): {fast_count} ({fast_count/total_completed*100:.1f}%)")
            output.append(f"  平均耗时: {analysis.get('avg_duration', 0)/60:.1f} 分钟")
            output.append(f"  最短耗时: {analysis.get('min_duration', 0)/60:.1f} 分钟")
            output.append(f"  最长耗时: {analysis.get('max_duration', 0)/60:.1f} 分钟")
        else:
            output.append("  无已完成任务")

        # Top-10 first-bad-commits by frequency.
        output.append("\n首次坏提交分布 (前10位):")
        sorted_commits = sorted(
            analysis['first_bad_commits'].items(), 
            key=lambda x: x[1], 
            reverse=True
        )[:10]

        for commit, count in sorted_commits:
            output.append(f"  {commit[:8]}: {count} 次")

        output.append("=" * 80)
        return "\n".join(output)

    def export_to_csv(self, tasks, filename):
        """Export task rows to a CSV file.

        :param tasks: rows as returned by :meth:`query_bisect_tasks`
        :param filename: target CSV path
        :return: True on success, False when there is nothing to export
                 or the write fails
        """
        if not tasks:
            logger.warning("无任务可导出")
            return False

        try:
            with open(filename, 'w', newline='', encoding='utf-8') as f:
                fieldnames = [
                    'id', 'bad_job_id', 'status', 'start_time', 'end_time',
                    'bad_commit'
                ]

                writer = csv.DictWriter(f, fieldnames=fieldnames)
                writer.writeheader()

                for task in tasks:
                    # Keep only the columns the bisect table provides.
                    row = {
                        'id': task['id'],
                        'bad_job_id': task['bad_job_id'],
                        'status': task['bisect_status'],
                        'start_time': task['start_time'],
                        'end_time': task.get('end_time', ''),
                        'bad_commit': task['bad_commit']
                    }
                    writer.writerow(row)

            # Bug fix: report the real target path, not a placeholder.
            logger.info(f"结果已导出至: {filename}")
            return True
        except Exception as e:
            logger.error(f"导出失败: {str(e)}")
            return False

def main():
    """CLI entry point: query recent bisect tasks, analyze and report."""
    parser = argparse.ArgumentParser(
        description='Bisect 任务批量分析工具',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )

    # Query parameters.
    parser.add_argument(
        '--hours',
        type=int,
        default=24,
        help='查询最近多少小时内的任务'
    )
    parser.add_argument(
        '--bisect-status',
        dest='status',
        help='按状态筛选任务 (如: success, failure, pending)'
    )
    parser.add_argument(
        '--limit',
        type=int,
        default=1000,
        help='返回结果数量限制'
    )

    # Output options.
    parser.add_argument(
        '--export-csv',
        help='导出结果到CSV文件'
    )
    parser.add_argument(
        '--debug',
        action='store_true',
        help='启用调试模式'
    )

    args = parser.parse_args()

    if args.debug:
        logger.setLevel(logging.DEBUG)
        logger.debug("调试模式已启用")

    analyzer = BisectAnalysis()
    try:
        tasks = analyzer.query_bisect_tasks(
            hours=args.hours,
            status=args.status,
            limit=args.limit
        )

        if not tasks:
            logger.info("未找到匹配的bisect任务")
            # Hints for the common empty-result causes.
            logger.info("可能原因:")
            logger.info("1. 数据库中没有bisect任务记录")
            logger.info("2. 时间范围(--hours)设置过短")
            logger.info("3. 状态过滤(--bisect-status)条件太严格")
            logger.info("4. 数据库表结构不匹配")
            logger.info("建议检查:")
            logger.info("a. 确认py_bisect.py是否成功记录任务")
            logger.info("b. 直接查询数据库: SELECT * FROM bisect LIMIT 1")
            return

        logger.info(f"找到 {len(tasks)} 个bisect任务")

        # tasks is non-empty here, so analyze_tasks cannot return None.
        analysis = analyzer.analyze_tasks(tasks)

        # Miss rate needs an extra query against the jobs table.
        analysis['miss_rate'] = analyzer.calculate_miss_rate(tasks)

        print(analyzer.format_analysis(analysis, args.hours))

        if args.export_csv:
            analyzer.export_to_csv(tasks, args.export_csv)
    finally:
        # Bug fix: close the DB connection on every exit path — the
        # original leaked it on the early "no tasks" return.
        analyzer.disconnect()

if __name__ == "__main__":
    main()
