"""数据库服务"""
import mysql.connector
from mysql.connector import Error
from datetime import datetime
from typing import List, Dict, Any, Set, Tuple, Optional
from config.db_config import DB_CONFIG, GITLAB_DB_CONFIG, JENKINS_DB_CONFIG, FEISHU_DB_CONFIG
from utils.logger import sync_logger
import json

class DBService:
    """MySQL persistence service for synced GitLab data.

    Stores members, projects, project-member relations and commits using
    batched upserts (INSERT ... ON DUPLICATE KEY UPDATE). All DB errors are
    logged via sync_logger rather than raised.
    """

    def __init__(self, db_config=None):
        # Default to the generic config; a service-specific config
        # (GitLab/Jenkins/Feishu) can be injected instead.
        self.config = db_config or DB_CONFIG

    def _parse_datetime(self, date_str: Optional[str]) -> Optional[datetime]:
        """Safely parse an ISO-8601 datetime string.

        Returns None for empty or unparseable input instead of raising.
        """
        if not date_str:
            return None
        try:
            # GitLab timestamps may end in 'Z'; fromisoformat() (pre-3.11)
            # only accepts an explicit numeric UTC offset.
            return datetime.fromisoformat(date_str.replace('Z', '+00:00'))
        except (ValueError, AttributeError):
            return None

    def get_connection(self):
        """Open and return a new MySQL connection from this service's config."""
        return mysql.connector.connect(**self.config)

    def _execute_query(self, query: str, params: tuple = None,
                       full_rows: bool = False) -> Set:
        """Run a SELECT and return its results as a set.

        Args:
            query: SQL text with %s placeholders.
            params: values for the placeholders, if any.
            full_rows: when True, collect each whole row as a tuple; by
                default only the first column of each row is collected.
                (Needed for composite keys such as (project_id, user_id).)

        Returns:
            A set of scalar values (or tuples), or an empty set on any
            database error.
        """
        connection = None
        cursor = None
        try:
            connection = self.get_connection()
            if connection.is_connected():
                cursor = connection.cursor()
                cursor.execute(query, params or ())
                rows = cursor.fetchall()
                if full_rows:
                    return {tuple(row) for row in rows}
                return {row[0] for row in rows}
        except Error as e:
            sync_logger.log_error(f"执行查询时发生错误: {e}")
        finally:
            if cursor:
                cursor.close()
            if connection and connection.is_connected():
                connection.close()
        return set()

    def _batch_save(self, table: str, data: List[Dict[str, Any]],
                    id_field: str, fields: List[str],
                    values_func: callable,
                    existing_ids: Set = None,
                    project_id: int = None) -> None:
        """Upsert a batch of records into `table` in a single round trip.

        Args:
            table: target table name (internal, trusted callers only — it is
                spliced into the SQL text).
            data: records to save; each must contain `id_field`.
            id_field: key used only to report new-vs-existing counts.
            fields: column names, in the order produced by values_func.
            values_func: maps one record dict to a tuple of column values.
            existing_ids: pre-fetched set of existing ids; queried if None.
            project_id: restricts the existing-id lookup to one project.
        """
        if not data:
            return

        with sync_logger.start_timer(f"批量保存{table}数据"):
            connection = None
            cursor = None
            try:
                if existing_ids is None:
                    # Parameterized WHERE clause — never format values into
                    # the SQL text (the old code interpolated project_id).
                    if project_id is not None:
                        existing_ids = self._execute_query(
                            f"SELECT {id_field} FROM {table} WHERE project_id = %s",
                            (project_id,)
                        )
                    else:
                        existing_ids = self._execute_query(
                            f"SELECT {id_field} FROM {table}"
                        )

                # Counts are informational only: the upsert below handles
                # inserts and updates in one statement regardless.
                new_count = sum(1 for item in data if item[id_field] not in existing_ids)
                sync_logger.log_info(f"发现 {new_count} 个新记录，{len(data) - new_count} 个已存在记录")

                connection = self.get_connection()
                if connection.is_connected():
                    cursor = connection.cursor()

                    placeholders = ', '.join(['%s'] * len(fields))
                    update_fields = ', '.join(f"{field} = VALUES({field})" for field in fields)
                    sql = f"""
                        INSERT INTO {table} ({', '.join(fields)})
                        VALUES ({placeholders})
                        ON DUPLICATE KEY UPDATE {update_fields}
                    """

                    # One executemany call for the whole batch.
                    cursor.executemany(sql, [values_func(item) for item in data])
                    connection.commit()
                    sync_logger.log_info(f"成功保存 {len(data)} 条{table}数据到数据库")

            except Error as e:
                sync_logger.log_error(f"批量保存{table}数据时发生错误: {e}")
                # Undo any partially-applied batch so the table stays consistent.
                if connection and connection.is_connected():
                    connection.rollback()
            finally:
                if cursor:
                    cursor.close()
                if connection and connection.is_connected():
                    connection.close()

    def get_existing_members(self) -> Set[int]:
        """Return the set of member ids already stored."""
        return self._execute_query("SELECT id FROM members")

    def get_existing_project_members(self) -> Set[Tuple[int, int]]:
        """Return the set of existing (project_id, user_id) pairs.

        Uses full_rows=True because the key is composite; collecting only the
        first column (as the old code did) would lose the user_id half and
        make membership checks against this set meaningless.
        """
        return self._execute_query(
            "SELECT project_id, user_id FROM project_members",
            full_rows=True
        )

    def get_existing_projects(self) -> Set[int]:
        """Return the set of project ids already stored."""
        return self._execute_query("SELECT id FROM projects")

    def get_existing_commits(self, project_id: int) -> Set[str]:
        """Return the set of commit ids already stored for one project."""
        return self._execute_query(
            "SELECT id FROM commits WHERE project_id = %s",
            (project_id,)
        )

    def save_members_batch(self, members: List[Dict[str, Any]]):
        """Batch-upsert GitLab member records into `members`."""
        fields = [
            'id', 'username', 'name', 'email', 'state', 'created_at',
            'last_activity_at', 'avatar_url', 'web_url', 'is_admin',
            'external', 'organization'
        ]

        def prepare_values(member: Dict[str, Any]) -> tuple:
            # Column order must match `fields` above.
            return (
                member['id'],
                member.get('username', ''),
                member.get('name', ''),
                member.get('email', ''),
                member.get('state', ''),
                self._parse_datetime(member.get('created_at')),
                self._parse_datetime(member.get('last_activity_at')),
                member.get('avatar_url', ''),
                member.get('web_url', ''),
                member.get('is_admin', False),
                member.get('external', False),
                member.get('organization', '')
            )

        self._batch_save('members', members, 'id', fields, prepare_values)

    def save_project_members_batch(self, project_id: int, members: List[Dict[str, Any]]):
        """Batch-upsert project-membership rows for one project."""
        fields = ['project_id', 'user_id', 'access_level', 'created_at', 'expires_at']

        def prepare_values(member: Dict[str, Any]) -> tuple:
            # Column order must match `fields` above.
            return (
                project_id,
                member['id'],
                member.get('access_level', 0),
                self._parse_datetime(member.get('created_at')),
                self._parse_datetime(member.get('expires_at'))
            )

        self._batch_save('project_members', members, 'user_id', fields, prepare_values, project_id=project_id)

    def save_projects_batch(self, projects: List[Dict[str, Any]]):
        """Batch-upsert GitLab project records into `projects`."""
        fields = [
            'id', 'name', 'description', 'created_at', 'updated_at',
            'last_activity_at', 'visibility'
        ]

        def prepare_values(project: Dict[str, Any]) -> tuple:
            # Column order must match `fields` above.
            return (
                project['id'],
                project['name'],
                project.get('description', ''),
                self._parse_datetime(project.get('created_at')),
                self._parse_datetime(project.get('updated_at')),
                self._parse_datetime(project.get('last_activity_at')),
                project.get('visibility', '')
            )

        self._batch_save('projects', projects, 'id', fields, prepare_values)

    def save_commits_batch(self, commits: List[Dict[str, Any]], project_id: int):
        """Batch-upsert commit records for one project."""
        fields = [
            'id', 'project_id', 'author_name', 'author_email', 'title', 'message',
            'created_at', 'committed_date', 'stats_additions', 'stats_deletions', 'stats_total'
        ]

        def prepare_values(commit: Dict[str, Any]) -> tuple:
            # Column order must match `fields` above; stats may be absent.
            return (
                commit['id'],
                project_id,
                commit.get('author_name', ''),
                commit.get('author_email', ''),
                commit.get('title', ''),
                commit.get('message', ''),
                self._parse_datetime(commit.get('created_at')),
                self._parse_datetime(commit.get('committed_date')),
                commit.get('stats', {}).get('additions', 0),
                commit.get('stats', {}).get('deletions', 0),
                commit.get('stats', {}).get('total', 0)
            )

        self._batch_save('commits', commits, 'id', fields, prepare_values, project_id=project_id)

# 飞书数据同步相关函数
# Mapping from Feishu bitable field types to MySQL column types. Anything not
# listed here — including 'text' and 'array' — falls back to TEXT.
_FEISHU_TYPE_MAP = {
    'number': 'FLOAT',
    'datetime': 'DATETIME',
    'checkbox': 'BOOLEAN',
}

def _feishu_column_type(field_type: str) -> str:
    """Return the MySQL column type used to store a Feishu field type."""
    return _FEISHU_TYPE_MAP.get(field_type, 'TEXT')

def init_table(fields: List[Dict[str, Any]]):
    """Create or upgrade the MySQL tables backing the Feishu bitable.

    Ensures the feishu_sync_status bookkeeping table exists, then either
    creates feishu_data with one column per Feishu field, or adds any columns
    missing from an existing feishu_data table. Errors are logged, not raised.

    Args:
        fields: Feishu field descriptors; each dict is expected to carry
            'field_name' and 'type' keys (entries without a name are skipped).
    """
    sync_logger.log_info("开始初始化飞书数据表...")

    connection = None
    cursor = None
    try:
        connection = mysql.connector.connect(**DB_CONFIG)
        if connection.is_connected():
            cursor = connection.cursor()

            # Bookkeeping table for sync runs (see get_sync_status /
            # update_sync_status).
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS feishu_sync_status (
                    id INT AUTO_INCREMENT PRIMARY KEY,
                    last_sync_time DATETIME NOT NULL,
                    sync_count INT DEFAULT 0,
                    status VARCHAR(50) DEFAULT 'success'
                )
            """)

            cursor.execute("SHOW TABLES LIKE 'feishu_data'")
            table_exists = cursor.fetchone()

            if not table_exists:
                # Fresh install: build the full column list and create the table.
                columns = [
                    'id INT AUTO_INCREMENT PRIMARY KEY',
                    'record_id VARCHAR(255) NOT NULL UNIQUE',
                    'created_at DATETIME NOT NULL',
                    'updated_at DATETIME NOT NULL'
                ]

                for field in fields:
                    field_name = field.get('field_name', '')
                    if not field_name:
                        continue
                    # NOTE(review): field_name originates from Feishu and is
                    # spliced into DDL inside backticks — a name containing a
                    # backtick would break the statement; consider validating
                    # upstream.
                    columns.append(
                        f"`{field_name}` {_feishu_column_type(field.get('type', 'text'))}"
                    )

                cursor.execute(f"""
                    CREATE TABLE feishu_data (
                        {', '.join(columns)}
                    )
                """)
                sync_logger.log_info("创建飞书数据表成功")
            else:
                sync_logger.log_info("飞书数据表已存在，检查并更新字段...")

                # Existing table: diff its columns against the Feishu schema.
                cursor.execute("DESCRIBE feishu_data")
                existing_fields = {row[0].lower() for row in cursor.fetchall()}

                # record_id is required for upserts; add it if missing.
                if 'record_id' not in existing_fields:
                    cursor.execute("ALTER TABLE feishu_data ADD COLUMN record_id VARCHAR(255) NOT NULL UNIQUE AFTER id")
                    sync_logger.log_info("添加record_id字段成功")

                for field in fields:
                    field_name = field.get('field_name', '')
                    if not field_name or field_name.lower() in existing_fields:
                        continue
                    # NOTE(review): same identifier-splicing caveat as above.
                    cursor.execute(
                        f"ALTER TABLE feishu_data ADD COLUMN `{field_name}` "
                        f"{_feishu_column_type(field.get('type', 'text'))}"
                    )

                sync_logger.log_info("飞书数据表字段更新完成")

            connection.commit()

        sync_logger.log_info("飞书数据表初始化完成")
    except Error as e:
        sync_logger.log_error(f"初始化飞书数据表时发生错误: {e}")
    finally:
        if cursor:
            cursor.close()
        if connection and connection.is_connected():
            connection.close()

def insert_records(records: List[Dict[str, Any]]) -> int:
    """将飞书多维表记录插入数据库"""
    if not records:
        return 0
    
    inserted_count = 0
    connection = None
    cursor = None
    
    try:
        connection = mysql.connector.connect(**DB_CONFIG)
        if connection.is_connected():
            cursor = connection.cursor()
            
            for record in records:
                record_id = record.get('record_id', '')
                fields = record.get('fields', {})
                
                # 检查记录是否已存在
                cursor.execute("SELECT id FROM feishu_data WHERE record_id = %s", (record_id,))
                existing = cursor.fetchone()
                
                now = datetime.now()
                
                if existing:
                    # 更新现有记录
                    update_pairs = []
                    values = []
                    
                    for field_name, field_value in fields.items():
                        # 处理列表类型的值
                        if isinstance(field_value, list):
                            field_value = json.dumps(field_value, ensure_ascii=False)
                        update_pairs.append(f"`{field_name}` = %s")
                        values.append(field_value)
                    
                    update_pairs.append("updated_at = %s")
                    values.append(now)
                    values.append(record_id)  # FOR WHERE CLAUSE
                    
                    sql = f"""
                        UPDATE feishu_data 
                        SET {', '.join(update_pairs)}
                        WHERE record_id = %s
                    """
                    
                    cursor.execute(sql, tuple(values))
                else:
                    # 插入新记录
                    field_names = list(fields.keys())
                    field_names.append('record_id')
                    field_names.append('created_at')
                    field_names.append('updated_at')
                    
                    values = []
                    for field_value in fields.values():
                        # 处理列表类型的值
                        if isinstance(field_value, list):
                            field_value = json.dumps(field_value, ensure_ascii=False)
                        values.append(field_value)
                    
                    values.append(record_id)
                    values.append(now)
                    values.append(now)
                    
                    placeholders = ', '.join(['%s'] * len(field_names))
                    
                    sql = f"""
                        INSERT INTO feishu_data 
                        (`{'`, `'.join(field_names)}`)
                        VALUES ({placeholders})
                    """
                    
                    cursor.execute(sql, tuple(values))
                    inserted_count += 1
            
            connection.commit()
            sync_logger.log_info(f"成功插入/更新 {len(records)} 条飞书数据记录")
    except Error as e:
        sync_logger.log_error(f"插入飞书数据记录时发生错误: {e}")
    finally:
        if cursor:
            cursor.close()
        if connection and connection.is_connected():
            connection.close()
    
    return inserted_count

def get_sync_status() -> Optional[str]:
    """Return the timestamp of the most recent Feishu sync.

    Formats the latest last_sync_time as 'YYYY-mm-dd HH:MM:SS'. Returns None
    when no sync has been recorded yet or when a database error occurs.
    Creates the feishu_sync_status table on first use so a fresh database
    does not error out.
    """
    connection = None
    cursor = None
    try:
        connection = mysql.connector.connect(**DB_CONFIG)
        if connection.is_connected():
            cursor = connection.cursor()

            # Bootstrap the status table if this is the first run.
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS feishu_sync_status (
                    id INT AUTO_INCREMENT PRIMARY KEY,
                    last_sync_time DATETIME NOT NULL,
                    sync_count INT DEFAULT 0,
                    status VARCHAR(50) DEFAULT 'success'
                )
            """)
            connection.commit()

            # Most recent sync first; a single row is all we need.
            cursor.execute("""
                SELECT last_sync_time FROM feishu_sync_status
                ORDER BY last_sync_time DESC LIMIT 1
            """)
            row = cursor.fetchone()

            return row[0].strftime("%Y-%m-%d %H:%M:%S") if row else None
    except Error as e:
        sync_logger.log_error(f"获取飞书同步状态时发生错误: {e}")
        return None
    finally:
        if cursor:
            cursor.close()
        if connection and connection.is_connected():
            connection.close()

def update_sync_status(status: str = 'success') -> bool:
    """Record a Feishu sync run in the feishu_sync_status table.

    Inserts a new row stamped with the current time and an incremented
    sync_count.

    Args:
        status: outcome label for this run (defaults to 'success').

    Returns:
        True when the row was written, False on any failure. (The original
        code fell through and returned None when the connection was not
        usable, violating its declared bool return type.)
    """
    connection = None
    cursor = None
    try:
        connection = mysql.connector.connect(**DB_CONFIG)
        if connection.is_connected():
            cursor = connection.cursor()

            now = datetime.now()

            # INSERT ... SELECT derives the next sync_count from the current
            # maximum in one statement; IFNULL seeds the very first row.
            cursor.execute("""
                INSERT INTO feishu_sync_status (last_sync_time, sync_count, status)
                SELECT %s, IFNULL(MAX(sync_count), 0) + 1, %s
                FROM feishu_sync_status
            """, (now, status))

            connection.commit()
            sync_logger.log_info(f"已更新飞书同步状态，时间: {now}")
            return True
        # Connection never became usable: report failure explicitly instead
        # of silently returning None.
        return False
    except Error as e:
        sync_logger.log_error(f"更新飞书同步状态时发生错误: {e}")
        return False
    finally:
        if cursor:
            cursor.close()
        if connection and connection.is_connected():
            connection.close()