#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
GitLab TimescaleDB数据分析器

提供基于TimescaleDB的高效时序数据分析功能，包括：
- 用户贡献统计
- 项目活跃度分析
- 时间序列趋势分析
- 代码变更统计
- 分支活跃度分析
"""

import os
from dataclasses import dataclass
from datetime import datetime, date
from typing import Dict, List, Optional, Any

import pandas as pd
from loguru import logger

from .models import TimescaleDBManager


@dataclass
class AnalysisConfig:
    """Filter and grouping options shared by all analyzer queries.

    A field left as ``None`` (or an empty list) disables that filter
    dimension entirely — the corresponding SQL condition is simply omitted.
    """
    gitlab_url: Optional[str] = None       # restrict to commits from this GitLab instance URL
    start_date: Optional[date] = None      # inclusive lower bound on commit_time
    end_date: Optional[date] = None        # inclusive upper bound on commit_time
    project_ids: Optional[List[int]] = None    # restrict to these project ids
    branch_names: Optional[List[str]] = None   # restrict to these branches (used only by user/branch queries)
    user_emails: Optional[List[str]] = None    # restrict to these author emails (used only by the user query)
    include_merge_commits: bool = True         # when False, merge commits are filtered out
    group_by_period: str = 'day'  # hour, day, week, month, quarter, year (time-series bucket size)


class GitLabTimescaleAnalyzer:
    """GitLab commit analytics on top of a TimescaleDB ``gitlab_commits`` table.

    Every public method takes an :class:`AnalysisConfig` describing the
    filter window and returns plain ``dict``/``list`` structures so results
    serialize easily. Database failures are logged and reported as empty
    results rather than raised, so callers always receive a usable value.
    """

    # Result columns rendered as 'YYYY-MM-DD HH:MM:SS' strings for
    # JSON-friendly output (see _stringify_times).
    _TIME_FIELDS = ('first_commit_time', 'last_commit_time')

    def __init__(self, db_manager: TimescaleDBManager):
        """
        Initialize the analyzer.

        Args:
            db_manager: Database manager providing ``get_connection()``
                (a context manager yielding a DB-API connection).
        """
        self.db_manager = db_manager

    @staticmethod
    def _build_filters(config: AnalysisConfig,
                       include_branches: bool = False,
                       include_users: bool = False) -> tuple:
        """Translate an AnalysisConfig into ``(where_clause, params)``.

        All values are bound through ``%s`` placeholders, never interpolated
        into the SQL text, so user-supplied filter values cannot inject SQL.

        Args:
            config: Filter settings.
            include_branches: Also apply ``config.branch_names`` (only the
                queries that filter by branch pass True).
            include_users: Also apply ``config.user_emails``.

        Returns:
            ``(where_clause, params)`` — ``where_clause`` is the full
            ``"WHERE ..."`` text, or ``""`` when no filter applies, and
            ``params`` is the matching list of bound values.
        """
        conditions = []
        params = []

        if config.gitlab_url:
            conditions.append("gitlab_url = %s")
            params.append(config.gitlab_url)

        if config.start_date:
            conditions.append("commit_time >= %s")
            params.append(config.start_date)

        if config.end_date:
            conditions.append("commit_time <= %s")
            params.append(config.end_date)

        if config.project_ids:
            placeholders = ','.join(['%s'] * len(config.project_ids))
            conditions.append(f"project_id IN ({placeholders})")
            params.extend(config.project_ids)

        if include_branches and config.branch_names:
            placeholders = ','.join(['%s'] * len(config.branch_names))
            conditions.append(f"branch_name IN ({placeholders})")
            params.extend(config.branch_names)

        if include_users and config.user_emails:
            placeholders = ','.join(['%s'] * len(config.user_emails))
            conditions.append(f"author_email IN ({placeholders})")
            params.extend(config.user_emails)

        if not config.include_merge_commits:
            conditions.append("is_merge_commit = false")

        where_clause = "WHERE " + " AND ".join(conditions) if conditions else ""
        return where_clause, params

    @classmethod
    def _stringify_times(cls, row_dict: Dict) -> Dict:
        """Format the commit-time columns of one row in place.

        datetime-like values become 'YYYY-MM-DD HH:MM:SS'; anything else
        truthy falls back to ``str()``. Missing/None/empty values are left
        untouched.
        """
        for field in cls._TIME_FIELDS:
            value = row_dict.get(field)
            if value:
                row_dict[field] = (value.strftime('%Y-%m-%d %H:%M:%S')
                                   if hasattr(value, 'strftime') else str(value))
        return row_dict

    def _fetch_dicts(self, sql: str, params: List, error_message: str,
                     format_times: bool = True) -> List[Dict]:
        """Execute a query and return its rows as column-name dicts.

        Args:
            sql: Query text with ``%s`` placeholders.
            params: Bound parameter values (may be empty).
            error_message: Full message prefix logged on failure (the
                exception text is appended after ': ').
            format_times: Whether to run :meth:`_stringify_times` on each row.

        Returns:
            List of row dicts; empty list on any database error.
        """
        try:
            with self.db_manager.get_connection() as conn:
                with conn.cursor() as cursor:
                    cursor.execute(sql, params)
                    columns = [desc[0] for desc in cursor.description]
                    rows = [dict(zip(columns, row)) for row in cursor.fetchall()]
                    if format_times:
                        for row_dict in rows:
                            self._stringify_times(row_dict)
                    return rows
        except Exception as e:
            logger.error(f"{error_message}: {e}")
            return []

    def get_user_contribution_stats(self, config: AnalysisConfig) -> List[Dict]:
        """Per-author commit and line-change totals, ordered by commit count.

        Applies every filter in ``config`` including branch and email lists.
        """
        logger.info("获取用户贡献统计...")

        where_clause, params = self._build_filters(
            config, include_branches=True, include_users=True)

        sql = f"""
        SELECT 
            author_email,
            author_name,
            author_username,
            COUNT(*) as total_commits,
            COUNT(DISTINCT project_id) as projects_count,
            COUNT(DISTINCT branch_name) as branches_count,
            SUM(additions) as total_additions,
            SUM(deletions) as total_deletions,
            SUM(total_changes) as total_changes,
            SUM(files_changed) as total_files_changed,
            MIN(commit_time) as first_commit_time,
            MAX(commit_time) as last_commit_time,
            COUNT(CASE WHEN is_merge_commit THEN 1 END) as merge_commits,
            COUNT(CASE WHEN NOT is_merge_commit THEN 1 END) as normal_commits,
            ROUND(AVG(additions), 2) as avg_additions_per_commit,
            ROUND(AVG(deletions), 2) as avg_deletions_per_commit,
            ROUND(AVG(total_changes), 2) as avg_changes_per_commit
        FROM gitlab_commits
        {where_clause}
        GROUP BY author_email, author_name, author_username
        ORDER BY total_commits DESC
        """

        return self._fetch_dicts(sql, params, "获取用户贡献统计失败")

    def get_project_activity_stats(self, config: AnalysisConfig) -> List[Dict]:
        """Per-project commit and contributor totals, ordered by commit count.

        Branch/email filters are not applied here — a project's stats always
        cover all of its branches and authors within the time window.
        """
        logger.info("获取项目活跃度统计...")

        where_clause, params = self._build_filters(config)

        sql = f"""
        SELECT 
            project_id,
            project_name,
            project_path_with_namespace,
            COUNT(*) as total_commits,
            COUNT(DISTINCT branch_name) as branches_count,
            COUNT(DISTINCT author_email) as contributors_count,
            SUM(additions) as total_additions,
            SUM(deletions) as total_deletions,
            SUM(total_changes) as total_changes,
            SUM(files_changed) as total_files_changed,
            MIN(commit_time) as first_commit_time,
            MAX(commit_time) as last_commit_time,
            COUNT(CASE WHEN is_merge_commit THEN 1 END) as merge_commits,
            COUNT(CASE WHEN NOT is_merge_commit THEN 1 END) as normal_commits,
            ROUND(AVG(additions), 2) as avg_additions_per_commit,
            ROUND(AVG(deletions), 2) as avg_deletions_per_commit,
            ROUND(AVG(total_changes), 2) as avg_changes_per_commit
        FROM gitlab_commits
        {where_clause}
        GROUP BY project_id, project_name, project_path_with_namespace
        ORDER BY total_commits DESC
        """

        return self._fetch_dicts(sql, params, "获取项目活跃度统计失败")

    def get_time_series_stats(self, config: AnalysisConfig) -> List[Dict]:
        """Aggregate commit activity bucketed by ``config.group_by_period``.

        Returns rows ordered newest bucket first; ``time_period`` is left as
        the raw database value (not stringified) so callers can keep doing
        datetime arithmetic on it.
        """
        logger.info(f"获取时间序列统计，按{config.group_by_period}分组...")

        # Map the period name to a date_trunc() expression; unknown values
        # silently fall back to daily buckets.
        time_group_functions = {
            'hour': "date_trunc('hour', commit_time)",
            'day': "date_trunc('day', commit_time)",
            'week': "date_trunc('week', commit_time)",
            'month': "date_trunc('month', commit_time)",
            'quarter': "date_trunc('quarter', commit_time)",
            'year': "date_trunc('year', commit_time)"
        }
        time_group_func = time_group_functions.get(
            config.group_by_period, time_group_functions['day'])

        where_clause, params = self._build_filters(config)

        sql = f"""
        SELECT 
            {time_group_func} as time_period,
            COUNT(*) as total_commits,
            COUNT(DISTINCT author_email) as active_contributors,
            COUNT(DISTINCT project_id) as active_projects,
            COUNT(DISTINCT branch_name) as active_branches,
            SUM(additions) as total_additions,
            SUM(deletions) as total_deletions,
            SUM(total_changes) as total_changes,
            SUM(files_changed) as total_files_changed,
            COUNT(CASE WHEN is_merge_commit THEN 1 END) as merge_commits,
            COUNT(CASE WHEN NOT is_merge_commit THEN 1 END) as normal_commits,
            ROUND(AVG(additions), 2) as avg_additions_per_commit,
            ROUND(AVG(deletions), 2) as avg_deletions_per_commit,
            ROUND(AVG(total_changes), 2) as avg_changes_per_commit
        FROM gitlab_commits
        {where_clause}
        GROUP BY {time_group_func}
        ORDER BY time_period DESC
        """

        return self._fetch_dicts(sql, params, "获取时间序列统计失败",
                                 format_times=False)

    def get_branch_activity_stats(self, config: AnalysisConfig) -> List[Dict]:
        """Per-(project, branch) commit totals, ordered by commit count.

        ``active_days`` is the span in days between the branch's first and
        last commit within the window (epoch-seconds delta / 86400).
        """
        logger.info("获取分支活跃度统计...")

        where_clause, params = self._build_filters(config, include_branches=True)

        sql = f"""
        SELECT 
            project_id,
            project_name,
            branch_name,
            COUNT(*) as total_commits,
            COUNT(DISTINCT author_email) as contributors_count,
            SUM(additions) as total_additions,
            SUM(deletions) as total_deletions,
            SUM(total_changes) as total_changes,
            SUM(files_changed) as total_files_changed,
            MIN(commit_time) as first_commit_time,
            MAX(commit_time) as last_commit_time,
            COUNT(CASE WHEN is_merge_commit THEN 1 END) as merge_commits,
            COUNT(CASE WHEN NOT is_merge_commit THEN 1 END) as normal_commits,
            ROUND(AVG(additions), 2) as avg_additions_per_commit,
            ROUND(AVG(deletions), 2) as avg_deletions_per_commit,
            ROUND(AVG(total_changes), 2) as avg_changes_per_commit,
            EXTRACT(EPOCH FROM (MAX(commit_time) - MIN(commit_time))) / 86400 as active_days
        FROM gitlab_commits
        {where_clause}
        GROUP BY project_id, project_name, branch_name
        ORDER BY total_commits DESC
        """

        return self._fetch_dicts(sql, params, "获取分支活跃度统计失败")

    def get_top_contributors(self, config: AnalysisConfig,
                             limit: Optional[int] = None) -> List[Dict]:
        """Top committers by commit count.

        Args:
            config: Filters (branch/email filters are not applied here).
            limit: Maximum number of rows. When None, read from the
                ``GITLAB_DEFAULT_MAX_CONTRIBUTORS`` environment variable,
                falling back to 50.
        """
        if limit is None:
            limit = int(os.getenv('GITLAB_DEFAULT_MAX_CONTRIBUTORS', '50'))

        logger.info(f"获取前{limit}名贡献者...")

        where_clause, params = self._build_filters(config)
        params.append(limit)  # bound to the trailing LIMIT placeholder

        sql = f"""
        SELECT 
            author_email,
            author_name,
            author_username,
            COUNT(*) as total_commits,
            SUM(additions) as total_additions,
            SUM(deletions) as total_deletions,
            SUM(total_changes) as total_changes,
            COUNT(DISTINCT project_id) as projects_count,
            COUNT(DISTINCT branch_name) as branches_count,
            ROUND(AVG(total_changes), 2) as avg_changes_per_commit,
            MIN(commit_time) as first_commit_time,
            MAX(commit_time) as last_commit_time
        FROM gitlab_commits
        {where_clause}
        GROUP BY author_email, author_name, author_username
        ORDER BY total_commits DESC
        LIMIT %s
        """

        return self._fetch_dicts(sql, params, "获取顶级贡献者失败")

    def get_commit_frequency_analysis(self, config: AnalysisConfig) -> Dict[str, Any]:
        """Commit counts bucketed by hour of day, weekday and month.

        Returns ``{'by_hour': [...], 'by_weekday': [...], 'by_month': [...]}``.
        Weekday numbering follows PostgreSQL EXTRACT(DOW): 0 = Sunday.
        On failure, all three lists are empty.
        """
        logger.info("获取提交频率分析...")

        where_clause, params = self._build_filters(config)

        def extract_sql(part: str, alias: str) -> str:
            # One calendar-component bucketing query; `part`/`alias` come
            # from the fixed table below, never from user input.
            return f"""
            SELECT 
                EXTRACT({part} FROM commit_time) as {alias},
                COUNT(*) as commit_count
            FROM gitlab_commits
            {where_clause}
            GROUP BY EXTRACT({part} FROM commit_time)
            ORDER BY {alias}
            """

        try:
            with self.db_manager.get_connection() as conn:
                with conn.cursor() as cursor:
                    analysis = {}
                    for key, part, alias in (('by_hour', 'HOUR', 'hour'),
                                             ('by_weekday', 'DOW', 'weekday'),
                                             ('by_month', 'MONTH', 'month')):
                        cursor.execute(extract_sql(part, alias), params)
                        analysis[key] = [{alias: row[0], 'commit_count': row[1]}
                                         for row in cursor.fetchall()]
                    return analysis

        except Exception as e:
            logger.error(f"获取提交频率分析失败: {e}")
            return {'by_hour': [], 'by_weekday': [], 'by_month': []}

    def get_code_change_analysis(self, config: AnalysisConfig) -> Dict[str, Any]:
        """Whole-window code-change statistics (totals, averages, percentiles).

        Returns a single dict of aggregates, or ``{}`` when the query fails
        or yields nothing.
        """
        logger.info("获取代码变更分析...")

        where_clause, params = self._build_filters(config)

        sql = f"""
        SELECT 
            COUNT(*) as total_commits,
            SUM(additions) as total_additions,
            SUM(deletions) as total_deletions,
            SUM(total_changes) as total_changes,
            SUM(files_changed) as total_files_changed,
            ROUND(AVG(additions), 2) as avg_additions,
            ROUND(AVG(deletions), 2) as avg_deletions,
            ROUND(AVG(total_changes), 2) as avg_changes,
            ROUND(AVG(files_changed), 2) as avg_files_changed,
            MAX(additions) as max_additions,
            MAX(deletions) as max_deletions,
            MAX(total_changes) as max_changes,
            MAX(files_changed) as max_files_changed,
            PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY additions) as median_additions,
            PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY deletions) as median_deletions,
            PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY total_changes) as median_changes,
            PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY total_changes) as p95_changes
        FROM gitlab_commits
        {where_clause}
        """

        # Ungrouped aggregate: the query yields exactly one row on success.
        rows = self._fetch_dicts(sql, params, "获取代码变更分析失败",
                                 format_times=False)
        return rows[0] if rows else {}

    def get_project_health_score(self, project_id: int,
                                 config: AnalysisConfig) -> Dict[str, Any]:
        """Composite 0-100 health score (plus letter grade) for one project.

        Combines three sub-scores:
          - activity: average daily commit count (capped at 100),
          - consistency: 100 minus a penalty proportional to the coefficient
            of variation of daily commits,
          - collaboration: weighted contributor/branch counts.

        The caller's ``config`` is NOT mutated; a project-scoped copy with
        daily grouping is used internally (the previous implementation
        mutated the shared config object).

        Returns:
            Score dict, or ``{'error': ...}`` when data is missing or the
            computation fails.
        """
        logger.info(f"计算项目 {project_id} 的健康度评分...")

        from dataclasses import replace
        scoped = replace(config, project_ids=[project_id], group_by_period='day')

        try:
            project_stats = self.get_project_activity_stats(scoped)
            if not project_stats:
                return {'error': '项目数据不存在'}

            stats = project_stats[0]

            # Daily series drives the activity/consistency metrics.
            time_series = self.get_time_series_stats(scoped)

            health_score = {
                'project_id': project_id,
                'project_name': stats['project_name'],
                'total_commits': stats['total_commits'],
                'contributors_count': stats['contributors_count'],
                'branches_count': stats['branches_count'],
                'activity_score': 0,
                'consistency_score': 0,
                'collaboration_score': 0,
                'overall_score': 0
            }

            # Activity: 10 points per average daily commit, capped at 100.
            if time_series:
                avg_daily_commits = (sum(day['total_commits'] for day in time_series)
                                     / len(time_series))
                health_score['activity_score'] = min(100, avg_daily_commits * 10)

            # Consistency: penalize bursty histories via the coefficient of
            # variation (stdev/mean) of the daily commit counts.
            if len(time_series) > 1:
                import statistics
                daily_commits = [day['total_commits'] for day in time_series]
                std_dev = statistics.stdev(daily_commits)
                mean_commits = statistics.mean(daily_commits)
                cv = std_dev / mean_commits if mean_commits > 0 else 0
                health_score['consistency_score'] = max(0, 100 - cv * 50)

            # Collaboration: contributors weighted double vs. branches.
            collaboration_factor = (stats['contributors_count'] * 2
                                    + stats['branches_count']) / 3
            health_score['collaboration_score'] = min(100, collaboration_factor * 5)

            # Weighted blend: 40% activity, 30% consistency, 30% collaboration.
            health_score['overall_score'] = (
                    health_score['activity_score'] * 0.4 +
                    health_score['consistency_score'] * 0.3 +
                    health_score['collaboration_score'] * 0.3
            )

            overall = health_score['overall_score']
            if overall >= 80:
                health_score['grade'] = 'A'
            elif overall >= 60:
                health_score['grade'] = 'B'
            elif overall >= 40:
                health_score['grade'] = 'C'
            else:
                health_score['grade'] = 'D'

            return health_score

        except Exception as e:
            logger.error(f"计算项目健康度评分失败: {e}")
            return {'error': str(e)}

    def export_analysis_to_dataframe(self, analysis_data: List[Dict]) -> pd.DataFrame:
        """Convert a list of analysis rows into a pandas DataFrame.

        Known time columns are parsed back into datetimes (invalid values
        become NaT). Returns an empty DataFrame for empty input or on error.
        """
        try:
            if not analysis_data:
                return pd.DataFrame()

            df = pd.DataFrame(analysis_data)

            # Re-parse the stringified time columns produced by the queries.
            time_columns = ['first_commit_time', 'last_commit_time', 'time_period']
            for col in time_columns:
                if col in df.columns:
                    df[col] = pd.to_datetime(df[col], errors='coerce')

            return df

        except Exception as e:
            logger.error(f"导出DataFrame失败: {e}")
            return pd.DataFrame()

    def generate_summary_report(self, config: AnalysisConfig) -> Dict[str, Any]:
        """Run every analysis for ``config`` and bundle the results.

        Returns a dict with per-user, per-project, time-series, frequency
        and code-change sections plus a `summary` roll-up (present only when
        user stats exist), or ``{'error': ...}`` on failure.
        """
        logger.info("生成汇总报告...")

        try:
            report = {
                'analysis_period': {
                    'start_date': config.start_date.isoformat() if config.start_date else None,
                    'end_date': config.end_date.isoformat() if config.end_date else None
                },
                'user_stats': self.get_user_contribution_stats(config),
                'project_stats': self.get_project_activity_stats(config),
                'time_series': self.get_time_series_stats(config),
                'top_contributors': self.get_top_contributors(config, 10),
                'frequency_analysis': self.get_commit_frequency_analysis(config),
                'code_change_analysis': self.get_code_change_analysis(config),
                'generated_at': datetime.now().isoformat()
            }

            if report['user_stats']:
                # SUM() aggregates can come back as SQL NULL (None) when the
                # underlying columns are all NULL — treat those as 0.
                report['summary'] = {
                    'total_contributors': len(report['user_stats']),
                    'total_projects': len(report['project_stats']),
                    'total_commits': sum(user['total_commits'] for user in report['user_stats']),
                    'total_additions': sum(user['total_additions'] or 0 for user in report['user_stats']),
                    'total_deletions': sum(user['total_deletions'] or 0 for user in report['user_stats']),
                    'total_changes': sum(user['total_changes'] or 0 for user in report['user_stats'])
                }

            logger.info("汇总报告生成完成")
            return report

        except Exception as e:
            logger.error(f"生成汇总报告失败: {e}")
            return {'error': str(e)}
