


import json
import shutil
import time
import uuid
from loguru import logger
from clients import GerritClient, DifyClient
from config import TEMP_FILE_DIR
from parser import CodeChangeParser
class WorkflowExecutor:
    """Runs Gerrit change patches through the Dify workflow and aggregates results."""

    def __init__(self, dify_client: DifyClient, gerrit_client: GerritClient):
        """
        Args:
            dify_client (DifyClient): client used to invoke workflow runs.
            gerrit_client (GerritClient): client used to fetch change patches.
        """
        self.dify_client = dify_client
        self.gerrit_client = gerrit_client
        self.code_change_parser = CodeChangeParser()

    def _merge_results(self, results, sub_results):
        """
        Merge list-valued entries of ``sub_results`` into ``results`` in place.

        Non-list values in ``sub_results`` are skipped.  If anything goes
        wrong mid-merge, ``results`` is rolled back to its previous content.

        Args:
            results (dict): accumulator dict, mutated in place.
            sub_results (dict): partial results to fold in.
        """
        # Snapshot for rollback.  Lists are copied element-wise: a plain
        # shallow dict.copy() would share the list objects, so a failure
        # after an extend() on an existing key would NOT be undone.
        backup = {k: list(v) if isinstance(v, list) else v
                  for k, v in results.items()}
        try:
            logger.info(f'sub_results : {sub_results}')
            for key, value in sub_results.items():
                # Only list payloads are mergeable; ignore scalar metadata.
                if not isinstance(value, list):
                    continue
                # First occurrence of a key starts from an empty list.
                results.setdefault(key, []).extend(value)
        # Roll back to the pre-merge snapshot on any failure.
        except Exception as e:
            logger.error(f'merge sub_results error {e}')
            results.clear()
            results.update(backup)

    def _safe_rmtree(self, path):
        """Remove a directory tree, logging instead of raising on failure."""
        try:
            shutil.rmtree(path)
        except Exception as e:
            logger.error(f"shutil.rmtree error: {e}, dir: {path}")

    def _collect_results(self, result_path, check_result):
        """
        Parse every JSON file under ``result_path`` and merge the findings
        into ``check_result`` (mutated in place).  Errors are logged, never
        raised, so one bad file cannot abort the whole run.
        """
        # Per-request container, merged into the total only once complete.
        single_result = dict()
        try:
            for file in result_path.iterdir():
                if not file.is_file():
                    continue
                node = json.loads(file.read_text())
                logger.info(node)
                self._merge_results(single_result, node)
            self._merge_results(check_result, single_result)
        except Exception as e:
            logger.error(f"merge error: {e}")

    def submit_workflow_run(self, change_id, options):
        """
        Fetch the patch for ``change_id``, split it into analysable segments
        and run each segment through the Dify workflow, aggregating findings.

        Args:
            change_id (str): Gerrit change ID.
            options (dict): extra workflow parameters; not mutated (a per-run
                copy is made before adding run-specific keys).

        Returns:
            dict: {'success': bool, 'message': str, 'result': dict | None}
        """
        logger.info(f"options: {options}")
        # Fetch the patch for this change.
        patch = self.gerrit_client.get_change_patch(change_id)
        if patch is None:
            logger.error("patch is null")
            return {'success': False, 'message': "None", 'result': None}

        # Split the patch's modifications into per-request segments.
        req_list = self.code_change_parser.parse_code_change(patch, change_id, self.gerrit_client)

        # None signals a parsing failure (distinct from "nothing to do").
        if req_list is None:
            return {'success': False, 'message': "None", 'result': None}

        # Empty list: nothing worth analysing, but not an error.
        if not req_list:
            logger.info("nothing to analyse")
            return {'success': True, 'message': "nothing to analyse", 'result': None}

        # Aggregated findings across all requests.
        check_result = dict()

        # Run each segment through the workflow in turn.
        for res, new_header_flag in req_list:

            # Dedicated result directory (timestamp + change_id + uuid to avoid clashes).
            result_dir = f'{int(time.time())}_{change_id}_{uuid.uuid4().hex}'
            result_path = TEMP_FILE_DIR / result_dir

            # Defensive check: should never collide, but skip rather than clobber.
            if result_path.exists():
                logger.error(f'result_path: {result_path} exists')
                continue
            result_path.mkdir()
            result_path.chmod(0o777)

            # Copy so the caller's options dict is not mutated across runs.
            run_options = dict(options)
            run_options['new_h_flag'] = 1 if new_header_flag else 0
            run_options['result_dir'] = result_dir

            # Workflow invocation; blocks until completion.
            output = self.dify_client.workflow_run(res, run_options)

            # Workflow error handling.  BUGFIX: this is a failure path
            # (logged as error), so report success=False — previously it
            # returned success=True with an error message.
            if output is None:
                logger.error(f"workflow_run return:{output}")
                self._safe_rmtree(result_path)
                return {'success': False, 'message': "workflow return error", 'result': None}
            if output == 'workflow time out':
                self._safe_rmtree(result_path)
                # 'result' key added so all returns share the same shape.
                return {'success': False, 'message': "workflow time out", 'result': None}

            # Merge every JSON file the workflow wrote, then clean up.
            self._collect_results(result_path, check_result)
            self._safe_rmtree(result_path)

        if not check_result:
            return {'success': True, 'message': "There is no bug in this change.", 'result': None}
        return {'success': True, 'message': "success", 'result': check_result}


class GerritDetectExecutor:
    """Placeholder executor for Gerrit-based detection; carries no state yet."""

    def __init__(self):
        # Intentionally empty — reserved for future configuration.
        pass