#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
GitLab数据收集器

支持从GitLab API收集项目、分支、提交等数据，并存储到TimescaleDB中。
功能包括：
- 全量同步：从头开始收集所有数据
- 增量同步：基于时间戳收集新增数据
- 多分支支持：收集项目下所有分支的数据
- 批量处理：优化大量数据的处理性能
"""

import threading
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from concurrent.futures import TimeoutError as FuturesTimeoutError
from dataclasses import dataclass
from datetime import datetime, timezone
from typing import Dict, List, Optional, Any

import gitlab
from loguru import logger

from .models import TimescaleDBManager


@dataclass
class SyncConfig:
    """Configuration for a sync run.

    Controls whether the run is full or incremental, the time window,
    batching, and which projects are included.
    """
    full_sync: bool = False  # True: resync everything, ignoring stored sync state
    start_time: Optional[datetime] = None  # incremental sync lower bound (overrides stored state)
    end_time: Optional[datetime] = None  # upper bound for the sync window (None = no limit)
    batch_size: int = 50  # commits per DB batch insert; kept small to limit memory use
    max_workers: int = 2  # thread-pool size for per-project sync; kept low to limit contention
    include_merge_commits: bool = True  # whether merge commits are included
    projects_filter: Optional[List[int]] = None  # restrict sync to these project IDs (None = all)


class GitLabTimescaleCollector:
    """GitLab time-series data collector.

    Pulls projects, branches and commits from the GitLab REST API and
    persists them into TimescaleDB through the injected ``db_manager``.
    Supports full and incremental sync (driven by :class:`SyncConfig`)
    and processes multiple projects in parallel with a thread pool.
    """

    def __init__(self, gitlab_url: str, access_token: str, db_manager: TimescaleDBManager):
        """
        Initialize the collector and connect to GitLab immediately.

        Args:
            gitlab_url: Base URL of the GitLab server.
            access_token: Private/personal access token.
            db_manager: Database manager used for all persistence.

        Raises:
            Exception: re-raised from the initial connection attempt.
        """
        self.gitlab_url = gitlab_url
        self.access_token = access_token
        self.db_manager = db_manager
        self.gl = None  # gitlab.Gitlab client; set by _connect_gitlab()
        self._user_cache = {}     # email -> user-info dict (None cached for misses)
        self._project_cache = {}  # project_id -> extracted project-data dict
        self._lock = threading.Lock()  # guards both caches across worker threads

        self._connect_gitlab()

    def _connect_gitlab(self):
        """Create and authenticate the GitLab client; re-raise on failure."""
        try:
            self.gl = gitlab.Gitlab(self.gitlab_url, private_token=self.access_token, keep_base_url=True)
            self.gl.auth()
            logger.info(f"GitLab连接成功: {self.gitlab_url}")
        except Exception as e:
            logger.error(f"GitLab连接失败: {e}")
            raise

    def test_connection(self) -> bool:
        """Return True if both the GitLab API and the database respond."""
        try:
            # GitLab side: self.gl.user is populated by auth()
            user = self.gl.user
            logger.info(f"当前用户: {user.username} ({user.name})")

            # Database side
            if not self.db_manager.test_connection():
                return False

            return True

        except Exception as e:
            logger.error(f"连接测试失败: {e}")
            return False

    def get_user_by_email(self, email: str) -> Optional[Dict]:
        """Look up a GitLab user by email, with caching.

        Negative results are cached as ``None`` so repeated misses do not
        hit the API again. Returns a small dict (id/username/name/email)
        or None.
        """
        if not email:
            return None

        # Fast path: serve from cache (holds None for known misses too)
        with self._lock:
            if email in self._user_cache:
                return self._user_cache[email]

        try:
            # Search users by email; match on primary or public email
            users = self.gl.users.list(search=email, all=True)
            for user in users:
                user_data = user.asdict()
                if user_data.get('email') == email or user_data.get('public_email') == email:
                    user_info = {
                        'id': user_data.get('id'),
                        'username': user_data.get('username'),
                        'name': user_data.get('name'),
                        'email': email
                    }

                    # Cache the hit
                    with self._lock:
                        self._user_cache[email] = user_info

                    return user_info

            # No match found: cache the miss
            with self._lock:
                self._user_cache[email] = None

            return None
        except Exception as e:
            logger.warning(f"通过邮箱获取用户失败 {email}: {e}")
            return None

    def collect_projects(self, projects_filter: Optional[List[int]] = None) -> List[Dict]:
        """Collect project metadata, store it, and return the extracted dicts.

        Args:
            projects_filter: If given, only these project IDs are fetched;
                otherwise all visible projects are collected page by page.

        Returns:
            List of extracted project-data dicts (empty on total failure).
        """
        logger.info("开始收集项目信息...")
        projects_data = []

        try:
            if projects_filter:
                # Fetch only the explicitly requested projects
                for project_id in projects_filter:
                    try:
                        project = self.gl.projects.get(project_id)
                        project_data = self._extract_project_data(project)
                        projects_data.append(project_data)

                        # Persist to the database
                        self.db_manager.insert_project(project_data)

                        # Cache for later _get_project_info() lookups
                        with self._lock:
                            self._project_cache[project_id] = project_data

                        logger.info(f"收集项目: {project.name} (ID: {project_id})")
                    except Exception as e:
                        logger.error(f"收集项目 {project_id} 失败: {e}")
            else:
                # Collect all projects page by page to avoid request timeouts
                logger.info("开始分页收集所有项目...")
                page = 1
                per_page = 50  # projects per page
                total_collected = 0

                while True:
                    try:
                        # Paged fetch with a per-request timeout
                        projects = self.gl.projects.list(
                            page=page,
                            per_page=per_page,
                            simple=False,
                            timeout=30  # seconds
                        )

                        if not projects:
                            logger.info(f"第{page}页没有更多项目，收集完成")
                            break

                        logger.info(f"正在处理第{page}页，共{len(projects)}个项目...")

                        for project in projects:
                            try:
                                project_data = self._extract_project_data(project)
                                projects_data.append(project_data)

                                # Persist to the database
                                self.db_manager.insert_project(project_data)

                                # Cache for later _get_project_info() lookups
                                with self._lock:
                                    self._project_cache[project.id] = project_data

                                total_collected += 1
                                if total_collected % 10 == 0:
                                    logger.info(f"已收集 {total_collected} 个项目...")

                            except Exception as e:
                                logger.error(f"收集项目 {project.id} 失败: {e}")

                        page += 1

                        # Throttle to avoid hammering the API
                        time.sleep(0.1)

                    except Exception as e:
                        logger.error(f"获取第{page}页项目失败: {e}")
                        if "timeout" in str(e).lower() or "timed out" in str(e).lower():
                            logger.error("请求超时，可能是GitLab实例响应缓慢或项目数量过多")
                        # Any page-level error aborts pagination (was two
                        # redundant breaks; behavior unchanged)
                        break

            logger.info(f"项目信息收集完成，共 {len(projects_data)} 个项目")
            return projects_data

        except Exception as e:
            logger.error(f"收集项目信息失败: {e}")
            return []

    def _extract_project_data(self, project) -> Dict:
        """Flatten a python-gitlab Project object into a DB-ready dict."""
        project_dict = project.asdict()

        # Parse timestamp fields into aware datetimes
        created_at = self._parse_datetime(project_dict.get('created_at'))
        updated_at = self._parse_datetime(project_dict.get('updated_at'))
        last_activity_at = self._parse_datetime(project_dict.get('last_activity_at'))

        # Nested namespace info (may be missing)
        namespace = project_dict.get('namespace', {})

        # Nested owner info (may be missing, e.g. group-owned projects)
        owner = project_dict.get('owner', {})

        return {
            'gitlab_url': self.gitlab_url,
            'project_id': project_dict.get('id'),
            'project_name': project_dict.get('name', ''),
            'project_path': project_dict.get('path', ''),
            'project_path_with_namespace': project_dict.get('path_with_namespace', ''),
            'project_description': project_dict.get('description', ''),
            'default_branch': project_dict.get('default_branch', ''),
            'visibility': project_dict.get('visibility', 'private'),
            'web_url': project_dict.get('web_url', ''),
            'ssh_url_to_repo': project_dict.get('ssh_url_to_repo', ''),
            'http_url_to_repo': project_dict.get('http_url_to_repo', ''),
            'namespace_name': namespace.get('name', '') if namespace else '',
            'namespace_path': namespace.get('path', '') if namespace else '',
            'owner_name': owner.get('name', '') if owner else '',
            'owner_username': owner.get('username', '') if owner else '',
            'created_at': created_at,
            'updated_at': updated_at,
            'last_activity_at': last_activity_at,
            'star_count': project_dict.get('star_count', 0),
            'forks_count': project_dict.get('forks_count', 0),
            'issues_enabled': project_dict.get('issues_enabled', False),
            'merge_requests_enabled': project_dict.get('merge_requests_enabled', False),
            'wiki_enabled': project_dict.get('wiki_enabled', False),
            'archived': project_dict.get('archived', False),
            'empty_repo': project_dict.get('empty_repo', False)
        }

    def collect_branches(self, project_id: int) -> List[Dict]:
        """Collect, store and return all branches of a project (paged)."""
        logger.info(f"收集项目 {project_id} 的分支信息...")
        branches_data = []

        try:
            project = self.gl.projects.get(project_id)

            # Page through the branch list
            page = 1
            per_page = 50

            while True:
                try:
                    branches = project.branches.list(page=page, per_page=per_page)

                    if not branches:
                        logger.info(f"第 {page} 页没有更多分支，收集完成")
                        break

                    logger.info(f"正在处理第 {page} 页，共 {len(branches)} 个分支")

                    for branch in branches:
                        try:
                            branch_data = self._extract_branch_data(project_id, branch)
                            branches_data.append(branch_data)

                            # Persist to the database
                            self.db_manager.insert_branch(branch_data)

                            logger.debug(f"收集分支: {branch.name}")
                        except Exception as e:
                            logger.error(f"收集分支 {branch.name} 失败: {e}")

                    page += 1
                    time.sleep(0.1)  # throttle to avoid hammering the API

                except Exception as e:
                    logger.error(f"获取第 {page} 页分支失败: {e}")
                    break

            logger.info(f"项目 {project_id} 分支信息收集完成，共 {len(branches_data)} 个分支")
            return branches_data

        except Exception as e:
            logger.error(f"收集项目 {project_id} 分支信息失败: {e}")
            return []

    def _extract_branch_data(self, project_id: int, branch) -> Dict:
        """Flatten a python-gitlab Branch object into a DB-ready dict."""
        branch_dict = branch.asdict()
        commit = branch_dict.get('commit', {})

        return {
            'gitlab_url': self.gitlab_url,
            'project_id': project_id,
            'branch_name': branch_dict.get('name', ''),
            'branch_commit_sha': commit.get('id', ''),
            'branch_commit_title': commit.get('title', ''),
            'branch_commit_message': commit.get('message', ''),
            'branch_commit_author_name': commit.get('author_name', ''),
            'branch_commit_author_email': commit.get('author_email', ''),
            'branch_commit_date': self._parse_datetime(commit.get('committed_date')),
            'is_default': branch_dict.get('default', False),
            'is_protected': branch_dict.get('protected', False),
            'can_push': branch_dict.get('can_push', False),
            'developers_can_push': branch_dict.get('developers_can_push', False),
            'developers_can_merge': branch_dict.get('developers_can_merge', False)
        }

    def collect_commits(self, project_id: int, branch_name: str,
                        sync_config: SyncConfig) -> int:
        """Collect commits for one branch and batch-insert them.

        Incremental runs start from ``sync_config.start_time`` or, failing
        that, from the branch's stored last-commit time. On success the
        sync-status record is advanced; on failure it is marked 'failed'.

        Returns:
            Number of commits inserted (0 on failure or when nothing new).
        """
        logger.info(f"收集项目 {project_id} 分支 {branch_name} 的提交数据...")

        try:
            project = self.gl.projects.get(project_id)

            # Previously stored sync checkpoint (may be None)
            sync_status = self.db_manager.get_sync_status(self.gitlab_url, project_id, branch_name)

            # Determine the incremental lower bound
            since_time = None
            if not sync_config.full_sync:
                if sync_config.start_time:
                    since_time = sync_config.start_time
                elif sync_status and sync_status.get('last_commit_time'):
                    since_time = sync_status['last_commit_time']

            # Build the commits query
            commit_params = {
                'ref_name': branch_name,
                'per_page': 50,  # paged fetch, 50 commits per page
                'page': 1
            }

            if since_time:
                commit_params['since'] = since_time.isoformat()

            if sync_config.end_time:
                commit_params['until'] = sync_config.end_time.isoformat()

            # Page through commits, buffering batch inserts
            total_processed = 0
            commits_batch = []
            page = 1
            latest_commit = None

            logger.info(f"开始分页收集项目 {project_id} 分支 {branch_name} 的提交...")

            while True:
                try:
                    commit_params['page'] = page
                    commits = project.commits.list(**commit_params)

                    if not commits:
                        logger.info(f"第 {page} 页没有更多提交，收集完成")
                        break

                    logger.info(f"正在处理第 {page} 页，共 {len(commits)} 个提交")

                    # First commit of page 1 is the newest; remember it for
                    # the sync checkpoint
                    if page == 1 and commits:
                        latest_commit = commits[0]

                    for commit in commits:
                        try:
                            commit_data = self._extract_commit_data(project_id, branch_name, commit)
                            if commit_data:
                                commits_batch.append(commit_data)

                                # Flush a full batch
                                if len(commits_batch) >= sync_config.batch_size:
                                    inserted = self.db_manager.batch_insert_commits(commits_batch)
                                    total_processed += inserted
                                    commits_batch = []

                                    logger.info(f"已处理 {total_processed} 个提交")

                        except Exception as e:
                            logger.error(f"处理提交 {commit.id} 失败: {e}")

                    page += 1
                    time.sleep(0.1)  # throttle to avoid hammering the API

                except Exception as e:
                    logger.error(f"获取第 {page} 页提交失败: {e}")
                    break

            # Flush the final partial batch
            if commits_batch:
                inserted = self.db_manager.batch_insert_commits(commits_batch)
                total_processed += inserted

            # Advance the sync checkpoint
            if latest_commit:
                latest_commit_time = self._parse_datetime(latest_commit.committed_date)

                self.db_manager.update_sync_status(
                    gitlab_url=self.gitlab_url,
                    project_id=project_id,
                    branch_name=branch_name,
                    last_commit_time=latest_commit_time,
                    last_commit_sha=latest_commit.id,
                    sync_status='completed',
                    total_commits_synced=total_processed
                )
            elif total_processed == 0:
                # Nothing fetched at all: return early without the
                # completion log below
                logger.info(f"项目 {project_id} 分支 {branch_name} 没有新的提交")
                return 0

            logger.info(f"项目 {project_id} 分支 {branch_name} 提交数据收集完成，共处理 {total_processed} 个提交")
            return total_processed

        except Exception as e:
            logger.error(f"收集项目 {project_id} 分支 {branch_name} 提交数据失败: {e}")

            # Mark the checkpoint as failed
            self.db_manager.update_sync_status(
                gitlab_url=self.gitlab_url,
                project_id=project_id,
                branch_name=branch_name,
                sync_status='failed'
            )

            return 0

    def _extract_commit_data(self, project_id: int, branch_name: str, commit) -> Optional[Dict]:
        """Flatten one commit into a DB-ready dict.

        Returns None if the commit has no committed date or extraction
        fails. NOTE(review): fetches the commit again for its stats — one
        extra API call per commit.
        """
        try:
            commit_dict = commit.asdict()

            # Detailed fetch for line-change stats; best-effort only
            try:
                detailed_commit = commit.manager.gitlab.projects.get(project_id).commits.get(commit.id)
                stats = detailed_commit.stats if hasattr(detailed_commit, 'stats') else {}
            except Exception:  # was a bare except; never swallow SystemExit/KeyboardInterrupt
                stats = {}

            # Parse timestamps
            authored_date = self._parse_datetime(commit_dict.get('authored_date'))
            committed_date = self._parse_datetime(commit_dict.get('committed_date'))

            if not committed_date:
                logger.warning(f"提交 {commit.id} 缺少提交时间，跳过")
                return None

            # Author / committer emails
            author_email = commit_dict.get('author_email', '')
            committer_email = commit_dict.get('committer_email', '')

            # Resolve emails to GitLab users (cached)
            author_user = self.get_user_by_email(author_email) if author_email else None
            committer_user = self.get_user_by_email(committer_email) if committer_email else None

            # Project metadata (cached)
            project_info = self._get_project_info(project_id)

            # Composite key for the hypertable
            project_branch_key = f"{project_id}_{branch_name}"

            # More than one parent means a merge commit
            parent_ids = commit_dict.get('parent_ids', [])
            is_merge_commit = len(parent_ids) > 1

            commit_type = 'merge' if is_merge_commit else 'normal'

            return {
                'gitlab_url': self.gitlab_url,
                'project_branch_key': project_branch_key,
                'commit_time': committed_date,
                'commit_sha': commit_dict.get('id', ''),
                'project_id': project_id,
                'project_name': project_info.get('project_name', ''),
                'project_path_with_namespace': project_info.get('project_path_with_namespace', ''),
                'branch_name': branch_name,
                'commit_title': commit_dict.get('title', ''),
                'commit_message': commit_dict.get('message', ''),
                'commit_short_id': commit_dict.get('short_id', ''),
                'author_name': commit_dict.get('author_name', ''),
                'author_email': author_email,
                'author_username': author_user.get('username', '') if author_user else '',
                'author_user_id': author_user.get('id') if author_user else None,
                'committer_name': commit_dict.get('committer_name', ''),
                'committer_email': committer_email,
                'committer_username': committer_user.get('username', '') if committer_user else '',
                'committer_user_id': committer_user.get('id') if committer_user else None,
                'additions': stats.get('additions', 0),
                'deletions': stats.get('deletions', 0),
                'total_changes': stats.get('total', 0),
                # .get default already yields 0 when 'files' is absent;
                # the old `if 'files' in stats` guard was redundant
                'files_changed': len(stats.get('files', [])),
                'commit_type': commit_type,
                'is_merge_commit': is_merge_commit,
                'parent_ids': parent_ids,
                'authored_date': authored_date,
                'committed_date': committed_date
            }

        except Exception as e:
            logger.error(f"提取提交数据失败 {commit.id}: {e}")
            return None

    def _get_project_info(self, project_id: int) -> Dict:
        """Return cached project data, fetching (and caching) on a miss."""
        with self._lock:
            if project_id in self._project_cache:
                return self._project_cache[project_id]

        try:
            project = self.gl.projects.get(project_id)
            project_data = self._extract_project_data(project)

            with self._lock:
                self._project_cache[project_id] = project_data

            return project_data
        except Exception as e:
            logger.error(f"获取项目信息失败 {project_id}: {e}")
            return {}

    def sync_all_projects(self, sync_config: SyncConfig) -> Dict[str, Any]:
        """Sync every selected project in parallel and return a summary.

        Returns a dict with total_projects / total_branches / total_commits,
        a failed_projects list, and start/end timestamps.
        """
        logger.info("开始同步所有项目数据...")

        start_time = datetime.now()
        results = {
            'total_projects': 0,
            'total_branches': 0,
            'total_commits': 0,
            'failed_projects': [],
            'start_time': start_time,
            'end_time': None
        }

        try:
            # Collect project metadata first
            projects = self.collect_projects(sync_config.projects_filter)
            results['total_projects'] = len(projects)

            if not projects:
                logger.warning("没有找到项目")
                return results

            # Fan out one sync task per project
            with ThreadPoolExecutor(max_workers=sync_config.max_workers) as executor:
                future_to_project = {}

                for project_data in projects:
                    project_id = project_data['project_id']
                    future = executor.submit(self._sync_project, project_id, sync_config)
                    future_to_project[future] = project_id

                # FIX: as_completed's timeout raises from the for-statement
                # itself (concurrent.futures.TimeoutError), which the old
                # per-future `except TimeoutError` could never catch; and
                # result(timeout=...) on an as_completed future can never
                # time out because it is already done. The 30-minute overall
                # deadline now wraps the whole loop.
                try:
                    for future in as_completed(future_to_project, timeout=1800):
                        project_id = future_to_project[future]
                        try:
                            project_result = future.result()
                            results['total_branches'] += project_result['branches']
                            results['total_commits'] += project_result['commits']

                            logger.info(f"项目 {project_id} 同步完成: "
                                        f"分支 {project_result['branches']}, "
                                        f"提交 {project_result['commits']}")

                        except Exception as e:
                            logger.error(f"项目 {project_id} 同步失败: {e}")
                            results['failed_projects'].append({
                                'project_id': project_id,
                                'error': str(e)
                            })

                except FuturesTimeoutError:
                    # Deadline hit: record every unfinished project as failed.
                    # cancel() only stops not-yet-started tasks; the executor's
                    # __exit__ still joins tasks already running.
                    for pending_future, pending_id in future_to_project.items():
                        if not pending_future.done():
                            logger.error(f"项目 {pending_id} 同步超时，跳过")
                            results['failed_projects'].append({
                                'project_id': pending_id,
                                'error': '同步超时'
                            })
                            pending_future.cancel()

            results['end_time'] = datetime.now()
            duration = results['end_time'] - start_time

            logger.info(f"所有项目同步完成！")
            logger.info(f"总计: 项目 {results['total_projects']}, "
                        f"分支 {results['total_branches']}, "
                        f"提交 {results['total_commits']}")
            logger.info(f"耗时: {duration}")

            if results['failed_projects']:
                logger.warning(f"失败项目数: {len(results['failed_projects'])}")

            return results

        except Exception as e:
            logger.error(f"同步所有项目失败: {e}")
            results['end_time'] = datetime.now()
            return results

    def _sync_project(self, project_id: int, sync_config: SyncConfig) -> Dict[str, int]:
        """Sync one project (all branches + their commits); raises on failure."""
        result = {'branches': 0, 'commits': 0}

        try:
            # Branches first
            branches = self.collect_branches(project_id)
            result['branches'] = len(branches)

            # Then the commits of every branch
            for branch_data in branches:
                branch_name = branch_data['branch_name']
                commits_count = self.collect_commits(project_id, branch_name, sync_config)
                result['commits'] += commits_count

            return result

        except Exception as e:
            logger.error(f"同步项目 {project_id} 失败: {e}")
            raise

    def _parse_datetime(self, date_str: Optional[str]) -> Optional[datetime]:
        """Parse an ISO-8601 string into an aware datetime, or None.

        Handles the trailing 'Z' that datetime.fromisoformat() rejects
        before Python 3.11; naive results are assumed UTC.
        """
        if not date_str:
            return None

        try:
            # GitLab timestamps are ISO 8601; map 'Z' to an explicit offset
            if date_str.endswith('Z'):
                date_str = date_str[:-1] + '+00:00'

            dt = datetime.fromisoformat(date_str)

            # Ensure the result is timezone-aware
            if dt.tzinfo is None:
                dt = dt.replace(tzinfo=timezone.utc)

            return dt

        except Exception as e:
            logger.warning(f"解析日期时间失败 '{date_str}': {e}")
            return None
