# app1/tasks.py
from celery import shared_task
from celery.utils.log import get_task_logger

from app1.utils.helmet_shibie import HelmetShibie
from app1.models import ProcessingTask
from django.core.files import File
import os
from app1.serializer import ShibieSerializer
import logging

@shared_task
def process_video(task_id, file_path):
    """Run helmet detection on an uploaded video and record the outcome.

    Loads the ``ProcessingTask`` row identified by *task_id*, runs
    ``HelmetShibie`` detection over the file at *file_path*, persists the
    detection result through ``ShibieSerializer``, updates the task's
    status/result fields, and always deletes the temporary file afterwards.

    Args:
        task_id: Identifier of the ``ProcessingTask`` row to update.
        file_path: Path to the temporary uploaded video file; it is removed
            in the ``finally`` block regardless of success or failure.
    """
    # Use Celery's task logger as-is. The previous code replaced its
    # handlers on every invocation (logger.handlers = []; addHandler(...)),
    # which clobbers Celery's configured logging globally — get_task_logger
    # already routes records through Celery's logging setup.
    logger = get_task_logger(__name__)

    logger.info("任务开始，task_id: %s, file_path: %s", task_id, file_path)

    task = None
    try:
        # Fetch inside the try block: previously an unhandled DoesNotExist
        # crashed the worker before the finally-cleanup, leaking the temp file.
        task = ProcessingTask.objects.get(task_id=task_id)
        logger.info("任务获取成功: %s", task_id)

        logger.info("开始处理任务 %s，文件路径: %s", task_id, file_path)
        with open(file_path, 'rb') as f:
            file = File(f)
            shibie_duixiang = HelmetShibie(file, task)
            result = shibie_duixiang.shibie()
            logger.info("识别结果: %s", result)  # replaces bare print(result)
            ser = ShibieSerializer(data=result)
            if ser.is_valid():
                ser.save()
                task.status = 'completed'
                task.result = result
                logger.info("任务 %s 完成，结果已保存", task_id)
            else:
                logger.error("序列化失败: %s", ser.errors)
                # NOTE(review): 'failed1'/'failed2' look like debug variants of a
                # single 'failed' status — confirm against the model's choices
                # before normalizing; kept as-is to avoid breaking consumers.
                task.status = 'failed1'
                task.error = str(ser.errors)
    except Exception as e:
        # logger.exception logs at ERROR level with the traceback attached.
        logger.exception("任务 %s 异常: %s", task_id, e)
        if task is not None:
            task.status = 'failed2'
            task.error = str(e)
    finally:
        # Persist whatever state we reached, then always drop the temp file.
        if task is not None:
            task.save()
        if os.path.exists(file_path):
            os.remove(file_path)  # remove temporary upload