import json
import logging
from typing import Any, Generic, Optional, TypeVar, List

from fastapi import FastAPI, Query
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel

from app.config import config
from calc_manager.algorithm.algo import (
    Algorithm, AlgorithmInfo, OperatorComponentToOperatorInfoAdapter)
from calc_manager.algorithm.graph import AlgoGraph
from calc_manager.algorithm.param import ParameterMappingService
from calc_manager.file import FileInfo, FileOperatorHelper
from calc_manager.operator import Operator, OperatorInfo
from calc_manager.base.fields import FieldInfo, STYLE_SELECT
from calc_manager.repo.sh_res_repo import (ShDcAlgoRepository,
                                           ShDcOperatorRepository)
from calc_manager.res_helper import ResourceHelper
import calc_manager
from calc_manager.ray.ray_manager import RayManager
from calc_manager.ray.remote_task import RayAlgoOperatorTaskFactory, RayAlgoSingleTaskFactory
from calc_manager.executor.exec import build_algo_tasks, build_algo_single_task
#from calc_manager.ray.ray_client_job import get_ray_task_factory_from_pickle_b64, get_ray_task_factory_to_pickle_b64
import traceback
from calc_manager.base.plugins import PluginInfo
from calc_manager.plugins.plugins_manager import PluginManager
from calc_manager.base.fields import REQUIRED_KEY, OPTIONAL_KEY
from app.task_manager import TaskManager, TaskStatus
import signal
import sys
import asyncio
import time

#TODO: check log
from calc_manager.res_helper.bdf_op2_structure_helper import BDFOP2Helper

# Module-level logger for this service.
logger = logging.getLogger(__name__)
# Root logging config: DEBUG is very verbose — presumably intentional for
# development; consider raising to INFO for production (TODO confirm).
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s.%(msecs)03d - %(name)s - %(levelname)s - %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S',
    handlers=[
        logging.StreamHandler(sys.stdout),
        #logging.FileHandler('app.log', encoding='utf-8')
    ]
)

# Optional: Set specific log levels for noisy libraries
#logging.getLogger('urllib3').setLevel(logging.WARNING)
#logging.getLogger('fastapi').setLevel(logging.INFO)

# Generic payload type for BaseResponse[T].
T = TypeVar("T")

import os
# Service endpoints sourced from app.config (env-var fallbacks shown in comments).
res_uri = config.res_endpoint # os.environ.get('RES_URI', "http://osdev.xuelangyun.com:30080/dc-res-backend/")
ray_endpoint = config.ray_endpoint # os.environ.get('RAY_ENDPOINT', "ray://10.130.9.3:10001")
bdf_op2_structure_uri = config.bdf_op2_endpoint

#only one worker
# Single-worker TaskManager: at most one algorithm invocation runs at a time.
task_manager = TaskManager(res_uri=res_uri, max_workers=1)

app = FastAPI()

# Add CORS middleware configuration
# NOTE(review): wildcard origins combined with allow_credentials=True is
# rejected by browsers for credentialed requests and is a security smell —
# confirm whether a concrete origin whitelist should be used instead.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Allows all origins
    allow_credentials=True,
    allow_methods=["*"],  # Allows all methods
    allow_headers=["*"],  # Allows all headers
)

## plugin register area
# Built-in plugin descriptors. Each PluginInfo names the plugin class, the
# module it lives in, the constructor parameter it is injected as, and the
# fields callers must/may supply (REQUIRED_KEY vs OPTIONAL_KEY).
plugin_info = PluginInfo(
    plugin_name="csv_inner",
    plugin_class_name='CsvPlugin',
    plugin_module_name='calc_manager.plugins_inner.csv_inner',
    plugin_class_param_name='csv_plugin',
    plugin_description='csv test',
    plugin_fields={
        "field1": FieldInfo(name="osspath", type="str", optional_field=REQUIRED_KEY, default_val=None, description="osspath"),
    }
)
# Reads a BDF model made of several OSS objects plus a named entry file.
plugin_bdf_reader = PluginInfo(
    plugin_name="bdf_reader_inner",
    plugin_class_name='BDFReader',
    plugin_module_name='calc_manager.plugins_inner.bdf_reader_inner',
    plugin_class_param_name='bdf_reader',
    plugin_description='bdf reader',
    plugin_fields={
        "field1": FieldInfo(name="oss_url_list", type="list[str]", optional_field=REQUIRED_KEY, default_val=None, description="osspath list"),
        "field2": FieldInfo(name="entry_file_name", type="str", optional_field=REQUIRED_KEY, default_val=None, description="entry file name"),
        "field3": FieldInfo(name="local_path", type="str", optional_field=OPTIONAL_KEY, default_val=None, description="osspath"),
    }
)
# Reads a single OP2 result file from OSS.
plugin_op2_reader = PluginInfo(
    plugin_name="op2_reader_inner",
    plugin_class_name='OP2Reader',
    plugin_module_name='calc_manager.plugins_inner.op2_reader_inner',
    plugin_class_param_name='op2_reader',
    plugin_description='op2 reader',
    plugin_fields={
        "field1": FieldInfo(name="oss_url", type="str", optional_field=REQUIRED_KEY, default_val=None, description="osspath"),
        "field2": FieldInfo(name="local_path", type="str", optional_field=OPTIONAL_KEY, default_val=None, description="osspath"),
    }
)
# Reads a material CSV file from OSS.
plugin_mat_csv_reader = PluginInfo(
    plugin_name="mat_csv_inner",
    plugin_class_name='MatCSVReader',
    plugin_module_name='calc_manager.plugins_inner.mat_csv_inner',
    plugin_class_param_name='mat_csv_reader',
    plugin_description='mat csv reader',
    plugin_fields={
        "field1": FieldInfo(name="oss_url", type="str", optional_field=REQUIRED_KEY, default_val=None, description="osspath"),
        "field2": FieldInfo(name="local_path", type="str", optional_field=OPTIONAL_KEY, default_val=None, description="osspath"),
    }
)

plugin_bdf_transformer = PluginInfo(
    plugin_name="bdf_transformer_inner",
    plugin_class_name='BDFTransformer',
    plugin_module_name='calc_manager.plugins_inner.bdf_transformer_inner',
    plugin_class_param_name='bdf_transformer',
    plugin_description='bdf transforeme ',
    plugin_fields={
        "field1": FieldInfo(name="oss_url", type="str", optional_field=REQUIRED_KEY, default_val=None, description="osspath"),
        "field2": FieldInfo(name="local_path", type="str", optional_field=OPTIONAL_KEY, default_val=None, description="osspath"),
    }
)

# The remaining plugins take no caller-supplied fields.
plugin_material = PluginInfo(
    plugin_name="material_plugin",
    plugin_class_name='MaterialPlugin',
    plugin_module_name='calc_manager.plugins_inner.material_inner',
    plugin_class_param_name='material_plugin',
    plugin_description='material plugin ',
    plugin_fields={}
)

plugin_storage = PluginInfo(
    plugin_name="storage_plugin",
    plugin_class_name='StoragePlugin',
    plugin_module_name='calc_manager.plugins_inner.storage_plugin',
    plugin_class_param_name='storage_plugin',
    plugin_description='storage plugin ',
    plugin_fields={}
)

plugin_bdf = PluginInfo(
    plugin_name="bdf_plugin",
    plugin_class_name='BDFPlugin',
    plugin_module_name='calc_manager.plugins_inner.bdf_inner',
    plugin_class_param_name='bdf_plugin',
    plugin_description='bdf plugin ',
    plugin_fields={}
)

plugin_op2 = PluginInfo(
    plugin_name="op2_plugin",
    plugin_class_name='OP2Plugin',
    plugin_module_name='calc_manager.plugins_inner.op2_inner',
    plugin_class_param_name='op2_plugin',
    plugin_description='op2 plugin ',
    plugin_fields={}
)
## plugin register area


pm = PluginManager()
# Register every built-in plugin with the shared manager (order preserved).
for _plugin in (
    plugin_info,
    plugin_bdf_reader,
    plugin_op2_reader,
    plugin_mat_csv_reader,
    plugin_bdf_transformer,
    plugin_material,
    plugin_storage,
    plugin_bdf,
    plugin_op2,
):
    pm.register_plugin(_plugin)

class BaseResponse(BaseModel, Generic[T]):
    """Generic envelope returned by every endpoint in this service."""
    # Whether the operation succeeded.
    success: bool
    # Human-readable description of the outcome.
    message: str
    # Payload of type T; None when the operation failed.
    data: Optional[T] = None


class OperatorData(BaseModel):
    """Parameters and results extracted from a single operator."""
    # Identifier of the operator (may be empty for ad-hoc helper lookups).
    operator_id: str
    # Extracted input parameters of the operator.
    params: dict
    # Extracted result description of the operator.
    result: dict


@app.get("/operators/{operatorid}/params")
async def get_operator(operatorid: str):
    """
    Return the parameters and results of a single operator.

    Args:
        operatorid (str): Unique identifier of the operator to look up.

    Returns:
        BaseResponse[OperatorData]: ``success``/``message`` plus, on success,
        an OperatorData carrying the operator id, its extracted params and
        its extracted result. Failures (repository errors, unknown id,
        extraction errors) are reported via ``success=False``.
    """
    repo = ShDcOperatorRepository(res_uri, FileOperatorHelper.extract_path)
    try:
        found = repo.get_op_by_id(operatorid)
        if not found:
            return BaseResponse[OperatorData](
                success=False, message="Operator not found.", data=None
            )
    except Exception as exc:
        return BaseResponse[OperatorData](success=False, message=str(exc), data=None)

    try:
        operator = Operator(found)
        payload = OperatorData(
            operator_id=operatorid,
            params=operator.extract_params(),
            result=operator.extract_result(),
        )
    except Exception as exc:
        logger.error("error in get_operator : %s", exc)
        return BaseResponse[OperatorData](success=False, message=str(exc), data=None)

    return BaseResponse[OperatorData](
        success=True,
        message="Operator details retrieved successfully.",
        data=payload,
    )


@app.get("/algos/{algoid}/params")
async def get_algo_params(algoid: str, check_mapping: Optional[bool] = Query(False)):
    """
    Retrieve the parameters and results of a specific algorithm by its ID.

    Fetches the algorithm from ShDcAlgoRepository, builds its graph, and
    extracts all parameters and results from its constituent operators.

    Args:
        algoid (str): The unique identifier of the algorithm.
        check_mapping (Optional[bool]): If True, performs additional checks on
                                        mapping filter types. Defaults to False.

    Returns:
        BaseResponse[str]: A response object containing:
            - success (bool): Indicates if the operation was successful.
            - message (str): A descriptive message about the operation result.
            - data (Optional[str]): A JSON string containing all parameters and
                                    results of the algorithm's operators.
    """
    algo_repo = ShDcAlgoRepository(res_uri)
    try:
        algo_info = algo_repo.get_algo_by_id(algoid)
        if not algo_info:
            # Fix: this endpoint looks up an algorithm but previously reported
            # "Operator not found." wrapped in BaseResponse[OperatorData],
            # inconsistent with the BaseResponse[str] used everywhere else here.
            return BaseResponse[str](
                success=False, message="Algorithm not found.", data=None
            )
    except Exception as e:
        return BaseResponse[str](success=False, message=str(e), data=None)

    try:
        algo_graph = AlgoGraph(algo_info.graph_json, "start", "end")
        op_repo = ShDcOperatorRepository(res_uri, FileOperatorHelper.extract_path)
        pms = ParameterMappingService([])  # no need to use mapping
        a = Algorithm(
            algo_info,
            OperatorComponentToOperatorInfoAdapter(op_repo),
            pms,
            algo_graph,
        )
        a.set_plugin_manager(pm)

        op_dict = a.get_all_operators_by_id()
        # Optionally validate mapping filter types before extraction.
        if check_mapping:
            a.check_mapping_filter_types(op_dict)

        ret = json.dumps(a.get_all_params_results(op_dict), default=custom_serializer)

    except Exception as e:
        logger.error("error in get_algo_params : %s", e)
        return BaseResponse[str](success=False, message=str(e), data=None)

    return BaseResponse[str](
        success=True, message="Operator details retrieved successfully.", data=ret
    )


@app.get("/algos/{algoid}/invokeparams")
async def get_algo_invoke_params(algoid: str, check_mapping: Optional[bool] = Query(False)):
    """
    Retrieve the input parameters required to invoke a specific algorithm.

    Fetches the algorithm, builds its graph, and determines the input
    parameters (with select-style option data attached) plus the plugin
    parameters needed to invoke it.

    Args:
        algoid (str): The unique identifier of the algorithm.
        check_mapping (Optional[bool]): If True, performs additional checks on
                                        mapping filter types. Defaults to False.

    Returns:
        BaseResponse[str]: A response object containing:
            - success (bool): Indicates if the operation was successful.
            - message (str): A descriptive message about the operation result.
            - data (Optional[str]): A JSON string with keys "params" (invoke
                                    parameters) and "plugin" (plugin parameters).
    """
    algo_repo = ShDcAlgoRepository(res_uri)
    try:
        algo_info = algo_repo.get_algo_by_id(algoid)
        if not algo_info:
            # Fix: previously reported "Operator not found." in
            # BaseResponse[OperatorData] for an algorithm lookup failure.
            return BaseResponse[str](
                success=False, message="Algorithm not found.", data=None
            )
    except Exception as e:
        return BaseResponse[str](success=False, message=str(e), data=None)

    try:
        algo_graph = AlgoGraph(algo_info.graph_json, "start", "end")
        op_repo = ShDcOperatorRepository(res_uri, FileOperatorHelper.extract_path)
        pms = ParameterMappingService([])  # no need to use mapping
        a = Algorithm(
            algo_info,
            OperatorComponentToOperatorInfoAdapter(op_repo),
            pms,
            algo_graph,
        )
        a.set_plugin_manager(pm)

        # Fetch the operator map once (the original fetched it twice when
        # check_mapping was set) and forward the flag directly instead of
        # re-deriving it through a redundant if/else.
        op_dict = a.get_all_operators_by_id()
        checked = bool(check_mapping)
        if checked:
            a.check_mapping_filter_types(op_dict)

        param_res = a.get_algo_input_params_with_mapping_filter(op_dict, checked)
        param_res = build_param_input_style(param_res)
        plugin_res = a.get_algo_plugin_params(op_dict)
        ret_dict = {"params": param_res, "plugin": plugin_res}
        ret = json.dumps(ret_dict, default=custom_serializer)
        logger.info("get_algo_invoke_params : %s", ret)

    except Exception as e:
        error_message = f"Error in invoke params: {str(e)}"
        logger.error("%s\n%s", error_message, traceback.format_exc())
        return BaseResponse[str](success=False, message=error_message, data=None)

    return BaseResponse[str](
        success=True, message="Operator details retrieved successfully.", data=ret
    )


def build_param_input_style(params: List[FieldInfo]) -> List[FieldInfo]:
    """
    Attach select-style option data to well-known invoke parameters, in place.

    Parameters whose name starts with ``bdf_model`` get the available BDF
    models as options; names starting with ``op2_fp`` get the available OP2
    files. All other parameters are left untouched.

    Args:
        params: Field descriptors to decorate (mutated and returned).

    Returns:
        The same list, with ``style``/``option_data`` set on matching fields.
    """
    bdf_op2_helper = BDFOP2Helper(bdf_op2_structure_uri)
    # Fetch each option list from the helper service at most once, instead of
    # once per matching parameter as before (each call is a remote lookup).
    # Assumes get_bdf_info()/get_op2_info() are read-only lookups — TODO confirm.
    bdf_options = None
    op2_options = None
    for param in params:
        if param.name.startswith('bdf_model'):
            if bdf_options is None:
                bdf_options = [
                    {
                        "id": data['id'],
                        "name": data['model_name'],
                        "value": data['model_fp'],
                        "oss_path": data['oss_path'],
                    }
                    for data in bdf_op2_helper.get_bdf_info()
                ]
            param.style = STYLE_SELECT
            # Shallow-copy so each param keeps its own list, as before.
            param.option_data = list(bdf_options)
        elif param.name.startswith('op2_fp'):
            if op2_options is None:
                op2_options = [
                    {
                        "id": data['id'],
                        "name": data['file_name'],
                        "value": data['fp'],
                        "oss_path": data['oss_path'],
                    }
                    for data in bdf_op2_helper.get_op2_info()
                ]
            param.style = STYLE_SELECT
            param.option_data = list(op2_options)
    return params


class AlgoInvoke(BaseModel):
    """Request body for POST /algos/{algoid}/invoke."""
    # Parameters forwarded as **kwargs to the Algorithm constructor.
    params: Any
    # Whether to perform type checking before invocation.
    types_check: Optional[bool] = False


@app.post("/algos/{algoid}/invoke")
async def algo_invoke(algoid: str, request: AlgoInvoke):
    """
    Asynchronously invoke an algorithm with the provided parameters.

    The actual work runs in the TaskManager's worker; this endpoint only
    schedules it and returns a task id for polling via GET /tasks/{task_id}.

    Args:
        algoid (str): The unique identifier of the algorithm to invoke
        request (AlgoInvoke): The request body containing:
            - params (Any): Parameters for algorithm execution
            - types_check (Optional[bool]): Whether to perform type checking

    Returns:
        BaseResponse[str]: A response containing:
            - success (bool): Operation success status
            - message (str): Description of the operation result
            - data (Optional[str]): Task ID for tracking the algorithm execution

    Raises (inside the scheduled task):
        ValueError: If the algorithm is not found
    """
    def run_algo():
        algo_repo = ShDcAlgoRepository(res_uri)
        algo_info = algo_repo.get_algo_by_id(algoid)
        if not algo_info:
            raise ValueError("Algorithm not found")

        # Build the algorithm exactly once. The original constructed the
        # graph/repos/Algorithm twice with identical arguments (before and
        # after RayManager init) and discarded the first instance; building
        # it up front also means construction errors surface before any Ray
        # resources are allocated.
        algo_graph = AlgoGraph(algo_info.graph_json, "start", "end")
        op_repo = ShDcOperatorRepository(res_uri)
        mappings = algo_graph.get_all_node_mappings()
        pms = ParameterMappingService(mappings)
        a = Algorithm(
            algo_info,
            OperatorComponentToOperatorInfoAdapter(op_repo),
            pms,
            algo_graph,
            **request.params,
        )
        a.set_plugin_manager(pm)

        # Initialize Ray; the runtime_env ships calc_manager plus the
        # operators' working directory and the DB/OSS credentials the remote
        # workers need.
        operators_working_dir = FileOperatorHelper.extract_path
        manager = RayManager(
            ray_endpoint,
            {
                "logging_level": logging.INFO,
                "runtime_env": {
                    "py_modules": [calc_manager],
                    "working_dir": operators_working_dir,
                    "env_vars": {
                        "DB_HOST": config.db_host,
                        "DB_PORT": f'{config.db_port}',
                        "DB_USER": config.db_user,
                        "DB_PASSWORD": config.db_password,
                        "DB_DBNAME": config.db_dbname,
                        "OSS_TYPE": config.oss_type,
                        "OSS_ENDPOINT": config.oss_endpoint,
                        "OSS_ACCESS_KEY": config.oss_access_key,
                        "OSS_SECRET_KEY": config.oss_secret_key,
                        "OSS_SECURE": f'{config.oss_secure}',
                        "OSS_BUCKET": config.oss_bucket,
                        "DATA_ENGINE_ENDPOINT": config.data_engine_endpoint,
                        "DATA_ENGINE_API_TOKEN": config.data_engine_api_token
                    }
                },
            },
        )

        try:
            # Whole algorithm runs as one remote task (per-operator task
            # fan-out via RayAlgoOperatorTaskFactory was previously disabled).
            ray_task_factory = RayAlgoSingleTaskFactory(a)
            ray_task = build_algo_single_task(ray_task_factory, a)
            ray_result, ctx = manager.invoke(ray_task)
            return ray_result
        finally:
            # Always release the Ray connection, even on failure.
            manager.shutdown()

    # Create task using TaskManager
    try:
        task_id = task_manager.create_task(run_algo, algoid)
    except RuntimeError as e:
        return BaseResponse[str](
            success=False,
            message=str(e),
            data=None
        )

    return BaseResponse[str](
        success=True,
        message="Algorithm invocation started",
        data=task_id
    )

@app.get("/tasks/{task_id}")
async def get_task_status(task_id: str):
    """
    Return the current status and (if finished) the result of a task.

    Args:
        task_id (str): Identifier returned by POST /algos/{algoid}/invoke.

    Returns:
        BaseResponse[dict]: On success, ``data`` holds task_id, status,
        ISO-formatted start_time, end_time (None while running), result and
        error; on an unknown id, ``success`` is False.
    """
    info = task_manager.get_task_info(task_id)
    if not info:
        return BaseResponse[dict](
            success=False,
            message="Task not found",
            data=None
        )

    finished_at = info.end_time.isoformat() if info.end_time else None
    payload = {
        "task_id": info.task_id,
        "status": info.status.value,
        "start_time": info.start_time.isoformat(),
        "end_time": finished_at,
        "result": info.result,
        "error": info.error,
    }
    return BaseResponse[dict](
        success=True,
        message="Task status retrieved successfully",
        data=payload
    )

@app.delete("/tasks/{task_id}")
async def delete_task(task_id: str):
    """
    Cancel (if running) and delete a task.

    Args:
        task_id (str): The unique identifier of the task to delete

    Returns:
        BaseResponse[dict]: A response containing:
            - success (bool): Operation success status
            - message (str): Description of the operation result
            - data (Optional[dict]): Contains the task status observed before
              cancellation/deletion
    """
    task_info = task_manager.get_task_info(task_id)
    if not task_info:
        return BaseResponse[dict](
            success=False,
            message="Task not found",
            data=None
        )

    # If task is running, try to cancel it first
    was_running = task_info.status == TaskStatus.RUNNING
    if was_running:
        if not task_manager.cancel_task(task_id):
            return BaseResponse[dict](
                success=False,
                message="Failed to cancel running task",
                data=None
            )

    # Bug fix: the original ignored delete_task()'s return value and always
    # reported success even when the deletion failed.
    if not task_manager.delete_task(task_id):
        return BaseResponse[dict](
            success=False,
            message=f"Failed to delete task {task_id}",
            data=None
        )

    return BaseResponse[dict](
        success=True,
        message=f"Task {task_id} has been cancelled and deleted" if was_running
                else f"Task {task_id} has been deleted",
        data={"status": task_info.status.value}
    )

# TODO: move helper to other module
@app.get("/helper/operator/params")
async def helper_get_params(operator_url: str, operator_file_name: str):
    """
    Retrieve the parameters and results of an operator from a helper URL.

    This endpoint downloads an operator file from a given URL, extracts its
    parameters and results, and returns them in a structured format.

    Args:
        operator_url (str): The URL where the operator file can be downloaded.
        operator_file_name (str): The name of the operator file to process.

    Returns:
        BaseResponse[OperatorData]: A response object containing:
            - success (bool): Indicates if the operation was successful.
            - message (str): A descriptive message about the operation result.
            - data (Optional[OperatorData]): The operator data if successfully extracted, including:
                - operator_id (str): Empty string in this case.
                - params (dict): The extracted parameters of the operator.
                - result (dict): The extracted results of the operator.

    Raises:
        Exception: If there's an error downloading, extracting, or processing the operator file.
    """
    try:
        extract_path = "/tmp/sh-computing/helpers/param/"
        download_path = "/tmp/sh-computing/helpers/param/"
        res_helper = ResourceHelper(res_uri)
        file_path = res_helper.get_file_signature(operator_url)
        file_info = FileInfo(file_path, {})
        file_helper = FileOperatorHelper(
            file_info=file_info, extract_path=extract_path, download_path=download_path
        )
        extract_dir = file_helper.download_and_extract()
        info = {
            "operator_id": "temp-id",
            "operator_name": "temp",
            "operator_path": extract_dir + "/" + operator_file_name,
            "operator_func_entrypoint": "no",
            "operator_workingdir": extract_path,
        }
        opinfo = OperatorInfo(**info)

        logger.info("get operator from helper uri : %s", opinfo)
        o = Operator(opinfo)
        params = o.extract_params()
        result = o.extract_result()
        operator_data = OperatorData(operator_id="", params=params, result=result)

    except Exception as e:
        return BaseResponse[OperatorData](success=False, message=str(e), data=None)

    return BaseResponse[OperatorData](
        success=True,
        message="Operator details retrieved successfully.",
        data=operator_data,
    )


class AlgoGraphJson(BaseModel):
    """Request body for POST /helper/algo/params."""
    # Algorithm graph as a JSON-serializable structure.
    data: Any
    # If True, validate mapping filter types before extraction.
    check_mapping: Optional[bool] = False


@app.post("/helper/algo/params")
async def helper_algo_params(request: AlgoGraphJson):
    """
    Extract all operator params/results for an ad-hoc algorithm graph.

    Builds a temporary Algorithm ("test") directly from the graph JSON in the
    request body — no repository lookup of the algorithm itself — then
    extracts every operator's parameters and results.

    Args:
        request (AlgoGraphJson): Body with ``data`` (the graph as JSON-able
            structure) and ``check_mapping`` (optional mapping-filter-type
            validation, default False).

    Returns:
        BaseResponse[str]: ``data`` is a JSON string of all params/results on
        success; any graph/extraction error is reported via ``success=False``.
    """
    graph_json = json.dumps(request.data)
    try:
        graph = AlgoGraph(graph_json, "start", "end")
        info = AlgorithmInfo(algo_name="test", graph_json=graph_json)
        repo = ShDcOperatorRepository(res_uri, FileOperatorHelper.extract_path)
        algorithm = Algorithm(
            info,
            OperatorComponentToOperatorInfoAdapter(repo),
            ParameterMappingService([]),  # no need to use mapping
            graph,
        )
        algorithm.set_plugin_manager(pm)

        operators = algorithm.get_all_operators_by_id()
        if request.check_mapping:
            algorithm.check_mapping_filter_types(operators)

        payload = json.dumps(
            algorithm.get_all_params_results(operators), default=custom_serializer
        )

    except Exception as exc:
        logger.error("error in helper_algo_params : %s", str(exc))
        return BaseResponse[str](success=False, message=str(exc), data=None)

    return BaseResponse[str](
        success=True, message="Operator details retrieved successfully.", data=payload
    )


def custom_serializer(obj):
    """``json.dumps`` default hook: serialize FieldInfo via its pydantic JSON dump.

    Raises TypeError for any other type, as the json protocol requires.
    """
    if not isinstance(obj, FieldInfo):
        raise TypeError(f"Object of type {obj.__class__.__name__} is not JSON serializable")
    return json.loads(obj.model_dump_json())

# Add signal handler setup
should_exit = False  # guards against re-entrant shutdown handling

def signal_handler(sig, frame):
    """
    Handle shutdown signals gracefully by cleaning up resources.

    Initiates a graceful shutdown sequence:
    1. Sets the shutdown flag
    2. Cleans up task manager resources
    3. Waits briefly for cleanup to complete
    4. Forces exit

    Args:
        sig: Signal number
        frame: Current stack frame
    """
    global should_exit
    if should_exit:  # already shutting down — ignore repeated signals
        return

    should_exit = True
    logger.info("Received shutdown signal, cleaning up...")
    try:
        task_manager.shutdown()
        time.sleep(3)  # give in-flight tasks a moment to clean up
    except Exception as e:
        # Lazy %-formatting instead of an f-string (logging best practice);
        # message text unchanged.
        logger.error("Error during shutdown: %s", e)
    finally:
        # os._exit forces termination and sidesteps asyncio teardown issues.
        # (The redundant in-function `import os` was removed; os is imported
        # at module level.)
        os._exit(0)

# Register signal handlers
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)

# Cleanup hook run on normal application shutdown.
# NOTE(review): @app.on_event is deprecated in recent FastAPI in favor of
# lifespan handlers — consider migrating when upgrading.
@app.on_event("shutdown")
async def shutdown_event():
    """Cleanup when the application shuts down"""
    logger.info("Running shutdown event handler...")
    task_manager.shutdown()
