# 导入json模块，用于处理JSON数据
# Import the json module for handling JSON data
import json
# 导入logging模块，用于记录日志
# Import the logging module for logging
import logging
# 导入os模块，用于与操作系统交互
# Import the os module for interacting with the operating system
import os
# 导入time模块，用于处理时间相关的操作
# Import the time module for time-related operations
import time
# 从concurrent.futures模块导入ThreadPoolExecutor，用于创建线程池
# Import ThreadPoolExecutor from the concurrent.futures module to create a thread pool
from concurrent.futures import ThreadPoolExecutor

# 从myapp.biz.dataset.exception导入DatasetImportError异常类
# Import the DatasetImportError exception class from myapp.biz.dataset.exception
from myapp.biz.dataset.exception import DatasetImportError
# 从myapp.biz.dataset.importer导入factory，用于构建导入器
# Import factory from myapp.biz.dataset.importer to build importers
from myapp.biz.dataset.importer import factory
# 从myapp.const.base导入SharedMntDir，表示共享挂载目录
# Import SharedMntDir from myapp.const.base, representing the shared mount directory
from myapp.const.base import SharedMntDir
# 从myapp.const.dataset导入数据集相关的常量
# Import dataset-related constants from myapp.const.dataset
from myapp.const.dataset import EnumDatasetStatus, OriginPathPrefix, MsgKeyEvalDatasetCreated, MsgKeyDatasetImporting
# 从myapp.consumer.app导入app和MsgContext
# Import app and MsgContext from myapp.consumer.app
from myapp.consumer.app import app, MsgContext
# 从myapp.models.model_dataset导入Dataset模型
# Import the Dataset model from myapp.models.model_dataset
from myapp.models.model_dataset import Dataset
# 从myapp.models.model_eval_dataset导入EvalDataset模型
# Import the EvalDataset model from myapp.models.model_eval_dataset
from myapp.models.model_eval_dataset import EvalDataset
# 从myapp.utils.region_storage导入StorageMgrFactory，用于创建存储管理器
# Import StorageMgrFactory from myapp.utils.region_storage to create a storage manager
from myapp.utils.region_storage import StorageMgrFactory
# 从myapp.utils.sess导入session_scope，用于管理数据库会话
# Import session_scope from myapp.utils.sess to manage database sessions
from myapp.utils.sess import session_scope

# Module-level logger; all handlers in this module should log through it
# (rather than the root logger) so the app's logging config applies.
log = logging.getLogger(__name__)


@app.task(MsgKeyDatasetImporting)
def dataset_created(ctx: MsgContext, message):
    """Handle a dataset-importing message.

    Imports the raw files of a dataset version into the shared mount,
    reporting progress to the DB while the import runs, optionally syncs
    the result to a non-default region, and finally marks the dataset
    version as succeeded or failed.

    :param ctx: consumer message context (unused here beyond the framework contract)
    :param message: JSON string with keys ``id`` (dataset version id),
        ``username`` and ``x_request_id``
    """
    data = json.loads(message)
    dataset_version_id = data.get('id')
    username = data.get('username')
    reqid = data.get('x_request_id')
    # Use the module logger, not the root logger, so configured
    # handlers/levels apply (the original called logging.info directly).
    log.info(f"dataset_created version_id {dataset_version_id},{reqid}")

    with session_scope() as session:
        # Only pick up versions still in 'processing'; anything else has
        # already been handled (or was never started).
        dataset_version = session.query(Dataset).filter(
            Dataset.id == dataset_version_id,
            Dataset.status == EnumDatasetStatus.processing.value).first()

        if dataset_version is None:
            log.error(f"数据集版本{dataset_version_id}不存在")
            return

        # Copy the fields we need out of the ORM object before the session
        # closes, so we never touch a detached instance later.
        origin_path = dataset_version.origin_path
        label_type = dataset_version.label_type
        data_type = dataset_version.data_type
        changed_by_fk = dataset_version.changed_by_fk
        dataset_region = dataset_version.region

    importer = factory.build_importer(data_type, label_type)
    # origin_path is a comma-separated list of relative source paths.
    files_path = [os.path.join(SharedMntDir, OriginPathPrefix, item)
                  for item in origin_path.split(",")]

    err_msg = ''
    result = None
    dataset_path = f"{username}/dataset/{dataset_version_id}/"
    try:
        # Run the import in a single worker thread so this thread can poll
        # importer.progress and push updates to the DB.
        with ThreadPoolExecutor(max_workers=1) as executor:
            last_progress = 0
            future = executor.submit(importer.import_to, files_path,
                                     os.path.join(SharedMntDir, dataset_path))
            while not future.done():
                time.sleep(1)
                progress = int(importer.progress * 100)
                # Only write to the DB when progress actually advances.
                if progress > last_progress:
                    with session_scope() as session:
                        session.query(Dataset).filter(
                            Dataset.id == dataset_version_id,
                            Dataset.status == EnumDatasetStatus.processing.value).update(
                            {"progress": progress, "changed_by_fk": changed_by_fk})
                        session.commit()
                    last_progress = progress

            # .result() re-raises any exception from the worker thread.
            result = future.result()

    except DatasetImportError as e:
        # Known import failure: surface the importer's own message to the user.
        log.error("数据集导入失败")
        err_msg = e.message
    except Exception:
        # Unknown failure: log the traceback, show a generic message.
        log.error("数据集导入失败", exc_info=True)
        err_msg = "系统处理发生错误"

    # Sync to the target region only when the import succeeded. The original
    # code uploaded the directory even after a failed import, shipping a
    # partial/broken dataset to the remote region.
    if not err_msg and dataset_region != 'default':
        try:
            StorageMgrFactory.region(dataset_region).upload_dir(
                os.path.join(SharedMntDir, dataset_path),
                f"{username}/dataset/{dataset_version_id}"
            )
        except Exception:
            log.error("数据集同步失败", exc_info=True)
            err_msg = "系统处理发生错误（同步）"

    if err_msg:
        # Mark the version as failed and stop.
        with session_scope() as session:
            session.query(Dataset).filter(
                Dataset.id == dataset_version_id,
                Dataset.status == EnumDatasetStatus.processing.value).update(
                {"status": EnumDatasetStatus.failed.value, "err_msg": err_msg,
                 "changed_by_fk": changed_by_fk})
            session.commit()
        return

    # Success path: record entry count, labeled flag and final path.
    labeled = 1 if result.labeled else 0
    with session_scope() as session:
        session.query(Dataset).filter(
            Dataset.id == dataset_version_id,
            Dataset.status == EnumDatasetStatus.processing.value).update(
            {"status": EnumDatasetStatus.succeed.value, 'dataset_path': dataset_path,
             "err_msg": err_msg, 'entries_num': str(result.num),
             'labeled': labeled,
             "changed_by_fk": changed_by_fk})
        session.commit()


@app.task(MsgKeyEvalDatasetCreated)
def eval_dataset_created(ctx: MsgContext, message):
    """Handle an eval-dataset-created message.

    Imports the raw files of an evaluation dataset into the shared mount,
    reporting progress to the DB while the import runs, then marks the
    dataset as succeeded or failed.

    :param ctx: consumer message context (unused here beyond the framework contract)
    :param message: JSON string with keys ``id`` (eval dataset id) and ``username``
    """
    data = json.loads(message)
    dataset_id = data.get('id')
    username = data.get('username')

    with session_scope() as session:
        # Only pick up datasets still in 'processing'.
        dataset = session.query(EvalDataset).filter(
            EvalDataset.id == dataset_id,
            EvalDataset.status == EnumDatasetStatus.processing.value).first()

        if dataset is None:
            log.error(f"数据集版本{dataset_id}不存在")
            return

        # Copy fields out of the ORM object before the session closes.
        origin_path = dataset.origin_path
        label_type = dataset.label_type
        data_type = dataset.data_type
        changed_by_fk = dataset.changed_by_fk

    importer = factory.build_importer(data_type, label_type)
    # origin_path is a comma-separated list of relative source paths.
    files_path = [os.path.join(SharedMntDir, OriginPathPrefix, item)
                  for item in origin_path.split(",")]

    err_msg = ''
    result = None
    dataset_path = f"{username}/eval_dataset/{dataset_id}/"
    try:
        # Run the import in a single worker thread so this thread can poll
        # importer.progress and push updates to the DB.
        with ThreadPoolExecutor(max_workers=1) as executor:
            last_progress = 0
            future = executor.submit(importer.import_to, files_path,
                                     os.path.join(SharedMntDir, dataset_path))
            while not future.done():
                time.sleep(1)
                progress = int(importer.progress * 100)
                # Only write to the DB when progress actually advances.
                if progress > last_progress:
                    with session_scope() as session:
                        session.query(EvalDataset).filter(
                            EvalDataset.id == dataset_id,
                            EvalDataset.status == EnumDatasetStatus.processing.value).update(
                            {"progress": progress, "changed_by_fk": changed_by_fk})
                        session.commit()
                    last_progress = progress

            # .result() re-raises any exception from the worker thread.
            result = future.result()

    except DatasetImportError as e:
        # Known import failure: surface the importer's own message to the user.
        log.error("数据集导入失败")
        err_msg = e.message
    except Exception:
        # Unknown failure: log the traceback, show a generic message.
        log.error("数据集导入失败", exc_info=True)
        err_msg = "系统处理发生错误"

    if err_msg:
        # Mark the dataset as failed and stop.
        with session_scope() as session:
            session.query(EvalDataset).filter(
                EvalDataset.id == dataset_id,
                EvalDataset.status == EnumDatasetStatus.processing.value).update(
                {"status": EnumDatasetStatus.failed.value, "err_msg": err_msg,
                 "changed_by_fk": changed_by_fk})
            session.commit()
        return

    # Success path: record entry count and final path.
    # NOTE(review): the sibling dataset_created stores str(result.num) while
    # this stores the raw value — confirm EvalDataset.entries_num's column type.
    with session_scope() as session:
        session.query(EvalDataset).filter(
            EvalDataset.id == dataset_id,
            EvalDataset.status == EnumDatasetStatus.processing.value).update(
            {"status": EnumDatasetStatus.succeed.value, 'dataset_path': dataset_path,
             "err_msg": err_msg, 'entries_num': result.num,
             "changed_by_fk": changed_by_fk})
        session.commit()