import pymysql
import json
import logging
from datetime import datetime
from app import db
from app.models.sync_log import SyncLog, SyncLogDetail
from app.models.alert import Alert
from config import Config

# 获取日志记录器
logger = logging.getLogger(__name__)

# 存储正在运行的任务
from app.services.sync_service import running_tasks

def _connect_mysql(datasource):
    """Open a pymysql connection to the given datasource record."""
    return pymysql.connect(
        host=datasource.host,
        port=datasource.port,
        user=datasource.username,
        password=datasource.password,
        database=datasource.database,
    )


def _last_synced_value(target_cursor, task):
    """Return the max incremental-field value already in the target table, or None.

    None means "no watermark" (empty table, all-NULL column, or the target
    table does not exist yet) and triggers a full copy.
    """
    try:
        target_cursor.execute(
            f"SELECT MAX(`{task.incremental_field}`) as max_value FROM `{task.target_table}`"
        )
        result = target_cursor.fetchone()
        if result and result['max_value'] is not None:
            return result['max_value']
    except Exception as e:
        # Expected on the first run when the target table is missing;
        # logged and treated as a full sync rather than aborting.
        logger.error(f"获取目标表最大增量字段值失败: {str(e)}")
    return None


def _ensure_target_table(source_cursor, target_cursor, target_conn, task):
    """Create the target table from the source table's DDL if it does not exist."""
    try:
        target_cursor.execute(f"DESCRIBE `{task.target_table}`")
        return  # table exists, nothing to do
    except pymysql.MySQLError:
        # DESCRIBE failed: assume the table is missing and clone the source DDL.
        # (Narrowed from a bare `except:` so signals like KeyboardInterrupt
        # are not swallowed.)
        pass
    source_cursor.execute(f"SHOW CREATE TABLE `{task.source_table}`")
    create_table_sql = source_cursor.fetchone()['Create Table']
    create_table_sql = create_table_sql.replace(
        f"`{task.source_table}`", f"`{task.target_table}`"
    )
    target_cursor.execute(create_table_sql)
    target_conn.commit()


def _insert_batch(target_cursor, target_conn, task, sync_log, mapped_data):
    """Insert one mapped batch into the target table.

    Returns a (succeeded, failed) row-count pair. A failed batch is rolled
    back so later batches do not accidentally commit its partial rows, and
    the error is recorded as a SyncLogDetail.
    """
    if not mapped_data:
        return 0, 0

    target_fields = list(mapped_data[0].keys())
    placeholders = ', '.join(['%s'] * len(target_fields))
    insert_sql = (
        f"INSERT INTO `{task.target_table}` "
        f"(`{'`, `'.join(target_fields)}`) VALUES ({placeholders})"
    )
    insert_data = [[row.get(field) for field in target_fields] for row in mapped_data]

    try:
        target_cursor.executemany(insert_sql, insert_data)
        target_conn.commit()
        return len(insert_data), 0
    except Exception as e:
        # Roll back the dirty transaction; the original code left it open,
        # so a later commit could persist part of this failed batch.
        try:
            target_conn.rollback()
        except Exception:
            pass
        logger.error(f"插入数据失败: {str(e)}")
        db.session.add(SyncLogDetail(
            log_id=sync_log.id,
            status='failed',
            error_message=str(e),
        ))
        db.session.commit()
        return 0, len(insert_data)


def incremental_sync(task, source, target, sync_log):
    """Incrementally copy new rows from the source table to the target table.

    Rows whose ``task.incremental_field`` value exceeds the maximum already
    present in the target table are read in batches of 1000, remapped
    through the task's JSON field mapping ({source_field: target_field}),
    and inserted. Progress is persisted to ``sync_log`` after each batch;
    on failure an Alert row is created when ``Config.ENABLE_ALERTS`` is set.

    The task can be stopped cooperatively via
    ``running_tasks[task.id]['stop_flag']``; the entry is always removed
    from ``running_tasks`` on exit.

    NOTE(review): table and column names are interpolated into SQL
    (backtick-quoted). They are assumed to come from trusted task
    configuration, not end-user input — confirm upstream validation.
    """
    source_conn = None
    target_conn = None
    try:
        logger.info(f"开始增量同步任务: {task.name}")

        field_mapping = json.loads(task.field_mapping)

        source_conn = _connect_mysql(source)
        target_conn = _connect_mysql(target)
        source_cursor = source_conn.cursor(pymysql.cursors.DictCursor)
        target_cursor = target_conn.cursor(pymysql.cursors.DictCursor)

        # Watermark: highest incremental value already synced (None = copy all).
        last_value = _last_synced_value(target_cursor, task)

        # The watermark value is bound via %s to avoid SQL value injection.
        where_clause = ""
        params = None
        if last_value is not None:
            where_clause = f"WHERE `{task.incremental_field}` > %s"
            params = (last_value,)

        count_sql = f"SELECT COUNT(*) as count FROM `{task.source_table}` {where_clause}"
        source_cursor.execute(count_sql, params)
        total_count = source_cursor.fetchone()['count']

        sync_log.total_records = total_count
        db.session.commit()

        # Nothing new to sync: finish immediately.
        if total_count == 0:
            sync_log.status = 'success'
            sync_log.end_time = datetime.now()
            db.session.commit()
            logger.info(f"增量同步任务完成: {task.name}, 总记录数: 0")
            return

        _ensure_target_table(source_cursor, target_cursor, target_conn, task)

        batch_size = 1000  # rows fetched/inserted per round trip
        success_count = 0
        failed_count = 0

        for offset in range(0, total_count, batch_size):
            # Honor a cooperative stop request from the scheduler/UI.
            if task.id in running_tasks and running_tasks[task.id]['stop_flag']:
                logger.info(f"同步任务被停止: {task.name}")
                break

            # ORDER BY keeps offset pagination stable across batches.
            query_sql = (
                f"SELECT * FROM `{task.source_table}` {where_clause} "
                f"ORDER BY `{task.incremental_field}` LIMIT {offset}, {batch_size}"
            )
            source_cursor.execute(query_sql, params)
            batch_data = source_cursor.fetchall()
            if not batch_data:
                continue

            # Rename source columns to target columns per the configured mapping;
            # unmapped source columns are dropped.
            mapped_data = [
                {
                    target_field: row[source_field]
                    for source_field, target_field in field_mapping.items()
                    if source_field in row
                }
                for row in batch_data
            ]

            inserted, failed = _insert_batch(
                target_cursor, target_conn, task, sync_log, mapped_data
            )
            success_count += inserted
            failed_count += failed

            # Persist running totals so progress is visible mid-sync.
            sync_log.success_records = success_count
            sync_log.failed_records = failed_count
            db.session.commit()

        sync_log.end_time = datetime.now()
        if failed_count > 0:
            sync_log.status = 'failed'
            if Config.ENABLE_ALERTS:
                db.session.add(Alert(
                    task_id=task.id,
                    type='sync_failed',
                    message=f"增量同步任务 {task.name} 失败，共 {total_count} 条记录，成功 {success_count} 条，失败 {failed_count} 条"
                ))
        else:
            sync_log.status = 'success'
        db.session.commit()

        logger.info(f"增量同步任务完成: {task.name}, 总记录数: {total_count}, 成功: {success_count}, 失败: {failed_count}")

    except Exception as e:
        logger.error(f"增量同步任务异常: {str(e)}")

        sync_log.status = 'failed'
        sync_log.end_time = datetime.now()
        sync_log.error_message = str(e)
        db.session.commit()

        if Config.ENABLE_ALERTS:
            db.session.add(Alert(
                task_id=task.id,
                type='sync_error',
                message=f"增量同步任务 {task.name} 发生异常: {str(e)}"
            ))
            db.session.commit()

    finally:
        # Always release MySQL connections and the running-task slot — the
        # original code leaked both connections whenever the sync raised.
        for conn in (source_conn, target_conn):
            if conn is not None:
                try:
                    conn.close()
                except Exception:
                    pass
        if task.id in running_tasks:
            del running_tasks[task.id]