import logging
import time
import threading
from datetime import datetime
from src.models.models import db, DownloadTask, DownloadQueue, get_beijing_time
from src.services.xunlei_nas_downloader import XunleiNasDownloader
from src.services.queue_processor import process_queue_immediately

# Logging setup.
# NOTE(review): basicConfig at import time configures the process-wide root
# logger; if several modules do this, the first call wins — confirm intended.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Global downloader instance.
# NOTE(review): appears unused — get_xunlei_downloader() deliberately builds
# a fresh instance on every call and never reads this; verify before removing.
xunlei_downloader = None

def get_xunlei_downloader():
    """Build and return a fresh XunleiNasDownloader.

    A new instance is constructed on every call (rather than reusing a
    cached one) so the most recently stored credential is always used.
    """
    return XunleiNasDownloader()

def check_duplicate_task(subscription_id, url, exclude_task_id=None):
    """Return True if a DownloadTask with this subscription/url pair exists.

    Args:
        subscription_id: id of the owning subscription.
        url: download URL to look up.
        exclude_task_id: optional task id to ignore — used when checking
            whether an existing task has duplicates other than itself.
    """
    candidates = DownloadTask.query.filter_by(
        subscription_id=subscription_id, url=url
    )
    if exclude_task_id is not None:
        candidates = candidates.filter(DownloadTask.id != exclude_task_id)
    return candidates.first() is not None

def download_task(task, skip_duplicate_check=False):
    """Execute a single download task via the Xunlei NAS API.

    Args:
        task: DownloadTask row to process; its ``status`` and
            ``updated_at`` fields are updated and committed here.
        skip_duplicate_check: when True, skip the duplicate lookup
            (used by retries, which would otherwise match their own row).

    Returns:
        dict with ``success``, ``status`` and ``message`` keys (the dict
        returned by the downloader on the happy path).
    """
    try:
        logger.info(f"开始执行下载任务: {task.id}")

        # Mark and short-circuit duplicates unless the caller opted out.
        if not skip_duplicate_check and check_duplicate_task(task.subscription_id, task.url, exclude_task_id=task.id):
            task.status = 'task_duplicate'
            task.updated_at = get_beijing_time()
            db.session.commit()
            logger.info(f"下载任务 {task.id} 重复，跳过")
            return {
                'success': True,
                'status': 'task_duplicate',
                'message': '任务重复'
            }

        # A fresh downloader is built per call so credentials stay current.
        downloader = get_xunlei_downloader()

        # Submit the URL to the Xunlei NAS API.
        result = downloader.process_download(task.url, task.parent_folder_id)

        # Persist whatever status the downloader reported.
        task.status = result['status']
        task.updated_at = get_beijing_time()
        db.session.commit()

        if result['success']:
            logger.info(f"下载任务 {task.id} 创建成功")
        else:
            logger.error(f"下载任务 {task.id} 创建失败: {result['message']}")

        return result

    except Exception as e:
        # logger.exception keeps the traceback; plain error() would lose it.
        logger.exception(f"执行下载任务失败: {str(e)}")

        # Best-effort: mark the task failed. Roll back first so a session
        # poisoned by the failed attempt does not break this commit.
        try:
            db.session.rollback()
            task.status = 'created_failed'
            task.updated_at = get_beijing_time()
            db.session.commit()
        except Exception:
            # Secondary failure while recording the error state; the
            # original error is already logged, so swallow deliberately
            # but leave the session clean.
            db.session.rollback()

        return {
            'success': False,
            'status': 'created_failed',
            'message': f'执行失败: {str(e)}'
        }

def batch_download_tasks(tasks):
    """Run download_task over every task and collect per-task results.

    Args:
        tasks: iterable of DownloadTask rows.

    Returns:
        dict with success flag, processed count and per-task results;
        on an unexpected error, a failure dict with a message.
    """
    try:
        outcomes = [
            {'task_id': item.id, 'result': download_task(item)}
            for item in tasks
        ]

        logger.info(f"批量下载完成，处理了{len(tasks)}个任务")

        return {
            'success': True,
            'data': {
                'processed_count': len(tasks),
                'results': outcomes
            },
            'message': f'批量下载完成，处理了{len(tasks)}个任务'
        }

    except Exception as e:
        logger.error(f"批量下载失败: {str(e)}")
        return {
            'success': False,
            'message': f'批量下载失败: {str(e)}'
        }

def add_to_download_queue(task_id, priority=0):
    """Ensure the task sits in the download queue with status 'queued'.

    Three cases: no row yet → insert one; row already 'queued' → no-op;
    row in any other state → re-activate it (reset retries and error).

    Returns:
        True on success, False if the database operation failed.
    """
    try:
        queued = DownloadQueue.query.filter_by(task_id=task_id).first()

        if queued is None:
            # Not queued yet: insert a fresh row.
            db.session.add(DownloadQueue(
                task_id=task_id,  # type: ignore
                priority=priority,  # type: ignore
                status='queued'  # type: ignore
            ))
            db.session.commit()
            logger.info(f"任务 {task_id} 已添加到下载队列")
            return True

        if queued.status == 'queued':
            logger.info(f"任务 {task_id} 已在下载队列中")
            return True

        # Row exists but was consumed or failed earlier: re-activate it.
        queued.status = 'queued'
        queued.retry_count = 0  # reset retry counter
        queued.error_message = None  # clear stale error
        queued.updated_at = get_beijing_time()
        db.session.commit()
        logger.info(f"重新激活队列中的任务 {task_id}，状态: {queued.status}")
        return True

    except Exception as e:
        logger.error(f"添加任务到队列失败: {str(e)}")
        db.session.rollback()
        return False

def process_download_queue():
    """Drain the download queue via the shared queue processor.

    Delegates to process_queue_immediately() so we never compete with the
    background processor's own locking.

    Returns:
        Number of queue items processed, or 0 on error.
    """
    try:
        return process_queue_immediately()
    except Exception as e:
        logger.error(f"处理下载队列失败: {str(e)}")
        return 0

def create_download_tasks(subscription, urls):
    """Create DownloadTask rows for every URL of a subscription.

    Duplicate URLs — already present in the database OR repeated within
    *urls* itself — are recorded with status 'task_duplicate' instead of
    being created again. (The in-batch check is needed because new rows
    are not committed until after the loop, so the DB lookup alone cannot
    see a URL added earlier in the same call.) New tasks start as
    'waiting_create' and, when auto_download is enabled, are queued.

    Args:
        subscription: subscription row providing name/directory/folder.
        urls: iterable of download URLs.

    Returns:
        dict with success flag, created/duplicate counts, task ids and a
        message; rolls back and returns a failure dict on error.
    """
    try:
        tasks = []
        duplicate_count = 0
        seen_urls = set()  # URLs already handled earlier in this batch

        for url in urls:
            # Duplicate if committed in the DB or seen earlier in this call.
            if url in seen_urls or check_duplicate_task(subscription.id, url):
                duplicate_count += 1
                logger.info(f"跳过重复任务: {url}")
                # Record the duplicate so it stays visible in the task list.
                duplicate_task = DownloadTask(
                    subscription_id=subscription.id,  # type: ignore
                    name=subscription.name,  # type: ignore
                    url=url,  # type: ignore
                    directory=subscription.download_directory or '/downloads',  # type: ignore
                    parent_folder_id=subscription.parent_folder_id,  # type: ignore
                    status='task_duplicate'  # type: ignore
                )
                db.session.add(duplicate_task)
                continue

            seen_urls.add(url)

            # Create the new task.
            task = DownloadTask(  # type: ignore
                subscription_id=subscription.id,  # type: ignore
                name=subscription.name,  # type: ignore
                url=url,  # type: ignore
                directory=subscription.download_directory or '/downloads',  # type: ignore
                parent_folder_id=subscription.parent_folder_id,  # type: ignore
                status='waiting_create'  # type: ignore
            )
            tasks.append(task)
            db.session.add(task)

        db.session.commit()

        logger.info(f"为订阅 {subscription.name} 创建了 {len(tasks)} 个下载任务，跳过 {duplicate_count} 个重复任务")

        # Queue the new tasks when the subscription auto-downloads.
        if subscription.auto_download:
            for task in tasks:
                add_to_download_queue(task.id)

        return {
            'success': True,
            'data': {
                'created_count': len(tasks),
                'duplicate_count': duplicate_count,
                'task_ids': [task.id for task in tasks]
            },
            'message': f'创建了{len(tasks)}个下载任务，跳过{duplicate_count}个重复任务'
        }

    except Exception as e:
        logger.error(f"创建下载任务失败: {str(e)}")
        db.session.rollback()
        return {
            'success': False,
            'message': f'创建下载任务失败: {str(e)}'
        }

def retry_download_task(task_id):
    """Reset a task to 'waiting_create' and execute it again.

    The duplicate check is skipped on the retry because the task would
    always match its own existing row.

    Args:
        task_id: primary key of the DownloadTask to retry.

    Returns:
        dict with ``success`` and ``message`` keys.
    """
    try:
        task = DownloadTask.query.get(task_id)
        if task is None:
            return {
                'success': False,
                'message': '下载任务不存在'
            }

        # Reset the task state before re-running it.
        task.status = 'waiting_create'
        task.updated_at = get_beijing_time()
        db.session.commit()

        logger.info(f"开始重新下载任务 {task_id}")

        outcome = download_task(task, skip_duplicate_check=True)

        logger.info(f"重新下载任务 {task_id} 完成，结果: {outcome}")

        return {
            'success': outcome['success'],
            'message': outcome['message']
        }

    except Exception as e:
        logger.error(f"重新下载任务失败: {str(e)}")
        return {
            'success': False,
            'message': f'重新下载任务失败: {str(e)}'
        }

def batch_retry_download_tasks(task_ids):
    """Retry every task in *task_ids*, then flush the queue once more.

    Args:
        task_ids: iterable of DownloadTask primary keys.

    Returns:
        dict with success flag, per-task results and the count of items
        the follow-up queue pass processed.
    """
    try:
        outcomes = [
            {'task_id': tid, 'result': retry_download_task(tid)}
            for tid in task_ids
        ]

        # One extra queue pass so anything re-queued above is picked up.
        drained = process_queue_immediately()

        logger.info(f"批量重新下载完成，处理了{len(task_ids)}个任务，队列处理了{drained}个任务")

        return {
            'success': True,
            'data': {
                'processed_count': len(task_ids),
                'queue_processed_count': drained,
                'results': outcomes
            },
            'message': f'批量重新下载完成，处理了{len(task_ids)}个任务，队列处理了{drained}个任务'
        }

    except Exception as e:
        logger.error(f"批量重新下载失败: {str(e)}")
        return {
            'success': False,
            'message': f'批量重新下载失败: {str(e)}'
        }

