#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2023/12/6 8:27
# @Author  : FuKai ZHANG
# @File    : loader_case.py
# @Description :加载所有测试用例到用例池
from concurrent.futures import ThreadPoolExecutor, as_completed
from config.setting import ensure_path_sep
from utils.file_operation.get_case_yaml_analysis import CaseData
from utils.file_operation.get_all_file_path import get_all_files
from utils.logging_tools.log_control import INFO,ERROR
from config.global_config import config
from utils.cache_tools.cache_control import CacheHandler




def process_file(file_path):
    """
    Parse a single test-case file and write every case it contains to the cache.

    Any exception raised while parsing or caching is logged and swallowed, so a
    broken file never aborts the overall loading run — that file's cases are
    simply skipped.
    """
    try:
        # Each parsed entry maps case_id -> case_data; flatten all of them
        # into the shared cache pool.
        for entry in CaseData(file_path).case_data_analysis():
            for case_id, case_data in entry.items():
                CacheHandler.set_case_data_cache(name=case_id, value=case_data)
    except Exception as e:
        ERROR.error(f"处理文件 {file_path} 时发生错误: {e}")


def load_all_case_data_to_cache():
    """
    Load all test cases into the case cache pool.

    Discovers every YAML case file under ``test_case_data`` and caches each
    case it contains, either with a thread pool (when
    ``config.Multi_threaded_load`` is truthy) or sequentially. If a file
    contains an error, none of that file's cases are loaded (the error is
    logged inside ``process_file``).

    Fix: the completion log line was duplicated verbatim in both the
    multi-threaded and single-threaded branches; it is now emitted once
    after either path finishes.
    """
    # Collect all candidate case files (project helper; YAML-only switch on).
    file_paths = get_all_files(file_path=ensure_path_sep("\\test_case_data"), yaml_data_switch=True)
    INFO.info(f"发现的测试用例文件数: {len(file_paths)}")
    if config.Multi_threaded_load:
        # Multi-threaded loading: case parsing is I/O-bound, so threads help.
        INFO.info("启用多线程加载模式")
        max_workers = 10  # cap on concurrent worker threads
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            futures = [executor.submit(process_file, fp) for fp in file_paths]
            # process_file swallows its own errors, but guard against
            # failures in the executor machinery itself.
            for future in as_completed(futures):
                try:
                    future.result()
                except Exception as e:
                    ERROR.error(f"线程任务执行失败: {e}")
    else:
        # Single-threaded loading: process files one by one, in order.
        INFO.info("启用单线程加载模式")
        for file_path in file_paths:
            process_file(file_path)
    INFO.info("所有测试用例已加载到缓存池！")


if __name__ == '__main__':
    # Script entry point: populate the case cache pool once when run directly.
    load_all_case_data_to_cache()
