import requests
import mysql.connector
from mysql.connector import Error
from datetime import datetime
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import List, Dict, Any, Set, Tuple
import threading
from utils.logger import sync_logger
from dateutil import parser
from config.gitlab_config import GITLAB_CONFIG
from config.db_config import DB_CONFIG

# Thread lock for serializing database access across worker threads.
# NOTE(review): db_lock is never acquired anywhere in this file — presumably
# intended to guard the batch-save functions; confirm before relying on it
# (or before removing it).
db_lock = threading.Lock()

def get_connection():
    """Open and return a new MySQL connection built from DB_CONFIG."""
    connection = mysql.connector.connect(**DB_CONFIG)
    return connection

def get_existing_commits(project_id: int) -> Set[str]:
    """Return the set of commit IDs already stored for one project.

    Args:
        project_id: GitLab project ID whose stored commits to look up.

    Returns:
        Set of commit ID strings; an empty set on any database error.
    """
    connection = None
    cursor = None
    try:
        connection = get_connection()
        cursor = connection.cursor()
        cursor.execute(
            "SELECT id FROM commits WHERE project_id = %s",
            (project_id,)
        )
        return {row[0] for row in cursor.fetchall()}
    except Error as e:
        sync_logger.log_error(f"获取已存在提交时发生错误: {e}")
    finally:
        # Close only what was actually created. The original referenced
        # `connection` unconditionally here and raised NameError whenever
        # get_connection() itself failed; it also skipped cursor.close()
        # when the connection had already dropped.
        if cursor is not None:
            cursor.close()
        if connection is not None and connection.is_connected():
            connection.close()
    return set()

def get_existing_projects() -> Set[int]:
    """Return the set of project IDs already stored in the database.

    Returns:
        Set of project IDs; an empty set on any database error.
    """
    connection = None
    cursor = None
    try:
        connection = get_connection()
        cursor = connection.cursor()
        cursor.execute("SELECT id FROM projects")
        return {row[0] for row in cursor.fetchall()}
    except Error as e:
        sync_logger.log_error(f"获取已存在项目时发生错误: {e}")
    finally:
        # Close only what was actually created. The original referenced
        # `connection` unconditionally here and raised NameError whenever
        # get_connection() itself failed.
        if cursor is not None:
            cursor.close()
        if connection is not None and connection.is_connected():
            connection.close()
    return set()

def save_projects_batch(projects: List[Dict[str, Any]]):
    """Insert or refresh a batch of GitLab projects in one statement.

    Uses INSERT ... ON DUPLICATE KEY UPDATE, so new rows are inserted and
    existing rows are updated in the same executemany call. Database errors
    are logged and swallowed (best-effort sync).

    Args:
        projects: Project dicts as returned by the GitLab projects API.
    """
    if not projects:
        return

    with sync_logger.start_timer("批量保存项目数据"):
        connection = None
        cursor = None
        try:
            # The split below is informational only — the upsert handles both
            # new and existing rows, and a non-empty batch always lands in at
            # least one of the two lists (the original had a dead early-return
            # for the "both empty" case).
            existing_projects = get_existing_projects()
            new_projects = [p for p in projects if p['id'] not in existing_projects]
            updated_projects = [p for p in projects if p['id'] in existing_projects]
            sync_logger.log_info(f"发现 {len(new_projects)} 个新项目，{len(updated_projects)} 个已存在项目")

            connection = get_connection()
            cursor = connection.cursor()

            sql = """
                INSERT INTO projects (
                    id, name, description, created_at, updated_at, 
                    last_activity_at, visibility
                ) VALUES (
                    %s, %s, %s, %s, %s, %s, %s
                ) ON DUPLICATE KEY UPDATE
                    name = VALUES(name),
                    description = VALUES(description),
                    updated_at = VALUES(updated_at),
                    last_activity_at = VALUES(last_activity_at),
                    visibility = VALUES(visibility)
            """

            # `.get(key)` truthiness guards against keys that are present but
            # None — the original's `'key' in project` check passed None
            # straight into parser.parse(), which raises TypeError.
            values = [
                (
                    project['id'],
                    project['name'],
                    project.get('description', ''),
                    parser.parse(project['created_at']) if project.get('created_at') else None,
                    parser.parse(project['updated_at']) if project.get('updated_at') else None,
                    parser.parse(project['last_activity_at']) if project.get('last_activity_at') else None,
                    project.get('visibility', '')
                )
                for project in projects
            ]

            cursor.executemany(sql, values)
            connection.commit()
            sync_logger.log_info(f"成功保存 {len(projects)} 个项目到数据库")

        except Error as e:
            sync_logger.log_error(f"批量保存项目数据时发生错误: {e}")
        finally:
            # Close only what was created; the original's finally referenced
            # `connection` even when get_connection() had raised (NameError).
            if cursor is not None:
                cursor.close()
            if connection is not None and connection.is_connected():
                connection.close()

def save_commits_batch(commits: List[Dict[str, Any]], project_id: int):
    """Insert or refresh a batch of commits for one project.

    Uses INSERT ... ON DUPLICATE KEY UPDATE so new and existing commits are
    handled by the same executemany call. Database errors are logged and
    swallowed (best-effort sync).

    Args:
        commits: Commit dicts as returned by the GitLab commits API.
        project_id: GitLab project ID the commits belong to.
    """
    if not commits:
        return

    with sync_logger.start_timer(f"批量保存项目 {project_id} 的提交数据"):
        connection = None
        cursor = None
        try:
            # Informational split only — the upsert below covers both cases,
            # and a non-empty batch always populates at least one list.
            existing_commits = get_existing_commits(project_id)
            new_commits = [c for c in commits if c['id'] not in existing_commits]
            updated_commits = [c for c in commits if c['id'] in existing_commits]
            sync_logger.log_info(f"项目 {project_id} 发现 {len(new_commits)} 个新提交，{len(updated_commits)} 个已存在提交")

            connection = get_connection()
            cursor = connection.cursor()

            sql = """
                INSERT INTO commits (
                    id, project_id, author_name, author_email, title, message,
                    created_at, committed_date, stats_additions, stats_deletions, stats_total
                ) VALUES (
                    %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s
                ) ON DUPLICATE KEY UPDATE
                    author_name = VALUES(author_name),
                    author_email = VALUES(author_email),
                    title = VALUES(title),
                    message = VALUES(message),
                    stats_additions = VALUES(stats_additions),
                    stats_deletions = VALUES(stats_deletions),
                    stats_total = VALUES(stats_total)
            """

            values = []
            for commit in commits:
                # `or {}` tolerates an explicit null stats field; `.get(key)`
                # truthiness avoids passing None into parser.parse().
                stats = commit.get('stats') or {}
                values.append((
                    commit['id'],
                    project_id,
                    commit.get('author_name', ''),
                    commit.get('author_email', ''),
                    commit.get('title', ''),
                    commit.get('message', ''),
                    parser.parse(commit['created_at']) if commit.get('created_at') else None,
                    parser.parse(commit['committed_date']) if commit.get('committed_date') else None,
                    stats.get('additions', 0),
                    stats.get('deletions', 0),
                    stats.get('total', 0)
                ))

            cursor.executemany(sql, values)
            connection.commit()
            sync_logger.log_info(f"成功保存 {len(commits)} 个提交到数据库")

        except Error as e:
            sync_logger.log_error(f"批量保存提交数据时发生错误: {e}")
        finally:
            # The original had no cleanup at all here and leaked one MySQL
            # connection per call — from worker threads, this exhausts the
            # server's connection limit quickly.
            if cursor is not None:
                cursor.close()
            if connection is not None and connection.is_connected():
                connection.close()

def get_all_projects() -> List[Dict[str, Any]]:
    """Fetch every GitLab project by walking the paginated /projects API.

    Pagination stops on an empty page, a non-200 response, or a request
    error; whatever was collected up to that point is returned.
    """
    headers = {'PRIVATE-TOKEN': GITLAB_CONFIG['token']}
    base_url = f"{GITLAB_CONFIG['url']}/api/{GITLAB_CONFIG['api_version']}"
    url = f"{base_url}/projects"  # endpoint is page-independent, hoist it

    with sync_logger.start_timer("获取项目列表"):
        projects: List[Dict[str, Any]] = []
        page = 1

        while True:
            try:
                response = requests.get(
                    url,
                    headers=headers,
                    params={'page': page, 'per_page': GITLAB_CONFIG['per_page']},
                    timeout=GITLAB_CONFIG['timeout'],
                )
                # Guard clauses: bail out on HTTP failure or an empty page.
                if response.status_code != 200:
                    sync_logger.log_error(f"获取项目列表失败: {response.status_code}")
                    break
                page_projects = response.json()
                if not page_projects:
                    break
                projects.extend(page_projects)
                sync_logger.log_info(f"成功获取第 {page} 页项目，{len(page_projects)} 个项目")
                page += 1
            except Exception as e:
                sync_logger.log_error(f"获取项目列表时发生错误: {e}")
                break

        sync_logger.log_info(f"共获取到 {len(projects)} 个项目")
        return projects

def get_project_commits(project_id: int) -> List[Dict[str, Any]]:
    """Fetch all commits of one project by walking the paginated commits API.

    Pagination stops on an empty page, a non-200 response, or a request
    error; whatever was collected up to that point is returned.
    """
    headers = {'PRIVATE-TOKEN': GITLAB_CONFIG['token']}
    base_url = f"{GITLAB_CONFIG['url']}/api/{GITLAB_CONFIG['api_version']}"
    url = f"{base_url}/projects/{project_id}/repository/commits"  # page-independent

    with sync_logger.start_timer(f"获取项目 {project_id} 的提交列表"):
        commits: List[Dict[str, Any]] = []
        page = 1

        while True:
            try:
                response = requests.get(
                    url,
                    headers=headers,
                    params={'page': page, 'per_page': GITLAB_CONFIG['per_page']},
                    timeout=GITLAB_CONFIG['timeout'],
                )
                # Guard clauses: bail out on HTTP failure or an empty page.
                if response.status_code != 200:
                    sync_logger.log_error(f"获取项目 {project_id} 的提交列表失败: {response.status_code}")
                    break
                page_commits = response.json()
                if not page_commits:
                    break
                commits.extend(page_commits)
                sync_logger.log_info(f"项目 {project_id} 成功获取第 {page} 页提交，{len(page_commits)} 个提交")
                page += 1
            except Exception as e:
                sync_logger.log_error(f"获取项目 {project_id} 的提交列表时发生错误: {e}")
                break

        sync_logger.log_info(f"项目 {project_id} 共获取到 {len(commits)} 个提交")
        return commits

def process_project(project: Dict[str, Any]):
    """Fetch and persist the commits of a single project.

    Returns:
        True when fetching and saving completed, False when any step failed.
    """
    project_id = project['id']
    try:
        project_commits = get_project_commits(project_id)
        if project_commits:
            save_commits_batch(project_commits, project_id)
    except Exception as e:
        sync_logger.log_error(f"处理项目 {project_id} 时发生错误: {e}")
        return False
    return True

def sync_all_commits():
    """Entry point: sync all projects, then fan commit syncing out to threads."""
    sync_logger.start_total_timer()

    try:
        # Pull the full project list and upsert it first.
        projects = get_all_projects()
        save_projects_batch(projects)

        # One worker task per project; 5 threads bounds API/DB pressure.
        with ThreadPoolExecutor(max_workers=5) as executor:
            future_map = {
                executor.submit(process_project, project): project
                for project in projects
            }
            for future in as_completed(future_map):
                project = future_map[future]
                try:
                    future.result()
                except Exception as e:
                    sync_logger.log_error(f"处理项目 {project['id']} 时发生异常: {e}")
    finally:
        # Always record total elapsed time, even on failure.
        sync_logger.stop_total_timer()

# Script entry point: run a full projects + commits sync.
if __name__ == "__main__":
    sync_all_commits() 