from abc import ABC
import copy
import json
import os
import traceback
import uuid
import warnings
from datetime import datetime
from pathlib import Path
from types import MappingProxyType, UnionType
from typing import Any, Callable, Literal, Mapping, Type, TypeVar, Union, overload

import graphviz
import numpy
import pandas
from pydantic import BaseModel, HttpUrl, field_validator

from ..dataclass.gdimData import GdimJsonTable, GdimMinIOFile, GdimTemplate, SimpleJsonTable
from ..dataclass.geoProfiles import BoreForCadDraw, BoreForPlanDraw, MaterialTable, MultiProfile1D, Profile1D, Secction3D, SectionForCadDraw
from ..dataclass.geoStructures import BasicCompositePile, BasicPile, RecFoundation
from ..dataclass.results import ComplexResult, CoordinateSystem, DocData, SingleResult
from ..dataclass.tables import TableCollection, TableData, TableSeries
from .pipeData import ArrayAttributeSchema, AttributeInfo, AttributesInfoResponse, BooleanAttributeSchema, DynamicPortsInfo, ExecutionContext, FloatAttributeSchema, IntegerAttributeSchema, MapData, ModuleData, MultiResultsDict, NestedRangeModel, ObjectAttributeSchema, PipelineAttribute, PipelineData, PipelineResult, PipelineRunDataItem, PipelineStep, RangeModel, ResultsDict, StepsManager, StringAttributeSchema, UIAttributeSchema
from .portTypes import PortType, get_port_type_hint_to_port_type_mapping, infer_port_types_from_union_args

GRAPHVIZ_AVAILABLE: bool

T = TypeVar('T', bound='PipeModule')

PortDataType: Any

def status_manage(cls: Type[T]) -> Type[T]:
    """
    Class decorator for PipeModule subclasses that preserves docstrings and metadata.
    
    This decorator wraps the __init__, set_cal_params, and execute methods of PipeModule 
    subclasses to provide common functionality while preserving class docstrings.
    
    The decorator:
    1. Adds auto-execution after initialization if auto_run is True
    2. Converts numpy arrays to lists in set_cal_params return values
    3. Tracks execution status and timestamps
    
    Parameters
    ----------
    cls : Type[PipeModule]
        The class to be decorated (must be a subclass of PipeModule)
        
    Returns
    -------
    Type[PipeModule]
        The decorated class with wrapped methods
        
    Raises
    ------
    TypeError
        If the decorated class is not a subclass of PipeModule
    """
    ...

def module_decorator(auto_create_ports: bool = True, status_manage: bool = True) -> Callable[Any, Any]:
    """
    Decorator for PipeModule subclasses that handles both auto-create ports and status management.
    
    This decorator must be used with parentheses and supports parameter customization.
    
    Parameters
    ----------
    auto_create_ports : bool, default True
        If True, automatically creates ports from type annotations using PortTypeHint types.
        Port direction is determined by naming convention:
        - Attributes starting with "Input" are treated as input ports
        - Attributes starting with "Output" are treated as output ports
    
    status_manage : bool, default True
        If True, wraps the `__init__`, `set_cal_params`, `update_ui_schema` and `execute` methods to provide:
        1. Auto-execution after initialization if `auto_run` is True
        2. Set `values_range` from `set_cal_params` return values
        3. Set `ui_schema` from `update_ui_schema` return values
        4. Set `executed` from `execute` return values
        5. Set `executed_time` from `execute` return values
    
    Returns
    -------
    Callable[[Type[PipeModule]], Type[PipeModule]]
        A decorator function that returns the decorated class
    
    Examples
    --------
    # Use both features (default)
    @module_decorator()
    class MyModule(PipeModule):
        InputData: PortTypeHint.TableData
        OutputResult: PortTypeHint.SingleResult
    
    # Only auto-create ports
    @module_decorator(status_manage=False)
    class MyModule(PipeModule):
        InputData: PortTypeHint.TableData
    
    # Only status management
    @module_decorator(auto_create_ports=False)
    class MyModule(PipeModule):
        pass
    """
    ...

class PipelineExecutionError(Exception):
    """
    Custom exception for pipeline execution errors.
    
    This exception provides enhanced context for debugging pipeline failures,
    including module information, operation details, and preserved tracebacks.
    
    Attributes
    ----------
    module_name : str | None
        Name of the module where the error occurred
    module_class : str | None
        Class name of the module where the error occurred
    operation : str
        The operation that was being performed when the error occurred
    original_error : Exception
        The original exception that caused this pipeline error
    context : dict | None
        Additional context information about the error
    """
    def __init__(self, operation: str, original_error: Exception, module_name: Any = None, module_class: Any = None, context: Any = None):
        """
        Initialize PipelineExecutionError.
        
        Parameters
        ----------
        operation : str
            Description of the operation that failed (e.g., "running module", "adding module")
        original_error : Exception
            The original exception that caused this pipeline error
        module_name : str, optional
            Name of the module where the error occurred
        module_class : str, optional
            Class name of the module where the error occurred
        context : dict, optional
            Additional context information (e.g., parameters, step info)
        """
        ...
    def get_detailed_info(self) -> dict:
        """
        Get detailed information about the error for debugging.
        
        Returns
        -------
        dict
            Dictionary containing all available error context
        """
        ...
    def print_detailed_error(self):
        """Print a detailed error report for debugging."""
        ...

class PortReference:
    """A reference to a specific port of a module for IDE-friendly port access."""
    def __init__(self, module: Any, port_name: str, port_direction: Literal[Any, Any], expected_data_type: Any = None):
        """
        Initialize PortReference.
        
        Parameters
        ----------
        module : PipeModule
            The module that owns this port
        
        port_name : str
            The name of the port
        
        port_direction : Literal["input", "output"]
            The direction of port ('input' or 'output')
        
        expected_data_type : Type[PortDataType] | str | None
            The expected data type for this port (for IDE hints)
        """
        ...
    @property
    def data(self) -> PortDataType:
        """Get the actual data from the port."""
        ...
    def __rshift__(self, other: Any) -> Any:
        """Enable >> operator for connecting ports: parent.OutputPort >> child.InputPort"""
        ...
    def __or__(self, other: Any) -> Any:
        """Enable | operator for chaining connections: port1 >> port2 | port3 >> port4"""
        ...
    def __repr__(self) -> str:
        ...

class ChainedConnection:
    """Represents a chain of port connections for batch linking."""
    def __init__(self, connections: list):
        ...
    def __or__(self, other) -> Any:
        """Continue chaining with | operator"""
        ...
    def to_links(self) -> list[Any]:
        """Convert chained connections to Link objects."""
        ...

class _PortDataCheck(BaseModel):
    SingleValue: int | float | str | None
    Number: int | float | None
    NumberArray: Any
    GeneralArray: Any
    FilePath: Path | str | None
    FilesPath: list[Any] | None
    NumberTable: Any
    GeneralTable: Any
    HttpUrl: HttpUrl | None
    Token: tuple[Any, Any, Any] | None
    TableData: Any
    TableSeries: Any
    TableCollection: Any
    SingleResult: Any
    ComplexResult: Any
    TableRowMask: Any
    TablesRowMask: Any
    Attributes: dict[str, Any] | None
    JsonObject: dict | None
    DocData: Any
    General: Any
    ResultsDict: Any
    MultiResultsDict: Any
    CoordinateSystem: Any
    SingleResultList: Any
    TableDataList: Any
    TableCollectionList: Any
    Profile1D: Any
    MultiProfile1D: Any
    MaterialTable: Any
    MaterialTableCollection: Any
    BoreForCadDraw: Any
    BoreForPlanDraw: Any
    RecFoundation: Any
    Pile: Any
    CompositePile: Any
    GeoSection: Any
    GeoSection3D: Any
    GeoSections: Any
    GeoSections3D: Any
    GdimTemplate: Any
    GdimJsonTable: Any
    SimpleJsonTables: Any
    GdimFile: Any
    GdimFiles: Any
    GdimTemplateOld: dict[str, Any] | None
    @field_validator
    def validate_number_array(cls, v):
        ...
    @field_validator
    def validate_general_array(cls, v):
        ...
    @field_validator
    def validate_number_table(cls, v):
        ...
    @field_validator
    def validate_general_table(cls, v):
        ...
    @field_validator
    def validate_gdim_template(cls, v):
        ...
    @field_validator
    def validate_gdim_json_table(cls, v):
        ...
    @field_validator
    def validate_simple_json_tables(cls, v):
        ...
    @field_validator
    def validate_gdim_file(cls, v):
        ...
    @field_validator
    def validate_gdim_files(cls, v):
        ...
    @field_validator
    def validate_table_data(cls, v):
        ...
    @field_validator
    def validate_table_series(cls, v):
        ...
    @field_validator
    def validate_table_collection(cls, v):
        ...
    @field_validator
    def validate_single_result(cls, v):
        ...
    @field_validator
    def validate_single_result_list(cls, v):
        ...
    @field_validator
    def validate_table_data_list(cls, v):
        ...
    @field_validator
    def validate_table_collection_list(cls, v):
        ...
    @field_validator
    def validate_complex_result(cls, v):
        ...
    @field_validator
    def validate_table_row_mask(cls, v):
        ...
    @field_validator
    def validate_tables_row_mask(cls, v):
        ...
    @field_validator
    def validate_doc_data(cls, v):
        ...
    @field_validator
    def validate_general(cls, v):
        ...
    @field_validator
    def validate_results_dict(cls, v):
        ...
    @field_validator
    def validate_multi_results_dict(cls, v):
        ...
    @field_validator
    def validate_profile1d(cls, v):
        ...
    @field_validator
    def validate_multi_profile1d(cls, v):
        ...
    @field_validator
    def validate_material_table(cls, v):
        ...
    @field_validator
    def validate_material_table_collection(cls, v):
        ...
    @field_validator
    def validate_bore_for_cad_draw(cls, v):
        ...
    @field_validator
    def validate_bore_for_plan_draw(cls, v):
        ...
    @field_validator
    def validate_rec_foundation(cls, v):
        ...
    @field_validator
    def validate_pile(cls, v):
        ...
    @field_validator
    def validate_composite_pile(cls, v):
        ...
    @field_validator
    def validate_geo_section(cls, v):
        ...
    @field_validator
    def validate_geo_section_3d(cls, v):
        ...
    @field_validator
    def validate_geo_sections(cls, v):
        ...
    @field_validator
    def validate_geo_sections_3d(cls, v):
        ...
    @field_validator
    def validate_coordinate_system(cls, v):
        ...

class Port:
    def __init__(self, ptype: Any = ..., data: Any = None, pdoc: Any = None) -> Any:
        """
        Initialize the Port object.
        
        Parameters
        ----------
        ptype : PortType | list[PortType], default: PortType.Field
            The data type of in or out of the port which can be one type or several types.
        
        data : Any, default: None
            The data in or out of the port.
        
        pdoc : str, default: None
            The description of the port.
        """
        ...
    @property
    def data(self) -> Any:
        ...
    @data.setter
    def data(self, value: Any) -> Any:
        ...

class PipeModule(ABC):
    """
    This is an abstract class used as a blueprint for all pipe network modules.
    
    Attributes
    ----------
    mname : str
        The name of the module.
    auto_run : bool
        If it's True, the module will be executed automatically.
    executed : bool
        If it's True, the module has been executed.
    executed_time : datetime | None
        The time when the module was executed.
    values_range : dict[str, RangeModel | NestedRangeModel]
        The range of the values of the module attributes. Will be deprecated in the future.
    ui_schema : dict[str, UIAttributeSchema] | None
        The UI schema of the module.
    dynamic_ports_in_type : PortType | list[PortType] | None
        The type of the dynamic input ports.
    dynamic_ports_out_type : PortType | list[PortType] | None
        The type of the dynamic output ports.
    title: str | None
        The title of the module.
    description: str | None
        The description of the module.
    """
    def __init__(self, mname: Any = None, auto_run: bool = True) -> Any:
        """
        Initialize the PipelineModule object.
        
        Parameters
        ----------
        mname : str, default: BaseModule
            The name of the module.
        
        auto_run : bool, default: False
            If it's True, the module will be executed automatically.
        """
        ...
    @property
    def title(self) -> Any:
        ...
    @property
    def description(self) -> Any:
        ...
    def execute(self) -> Any:
        """Define the executing method here"""
        ...
    def set_cal_params(self, reset: bool = False) -> Any:
        """
        Define the method to set the value of the calculation parameters according to input data.
        
        Parameters
        ----------
        reset : bool, default: False
            If it's True, the value of calculation parameters of the module will be reset according to the input data.
        
        Returns
        -------
        dict[str, RangeModel | NestedRangeModel]
            Values range of each parameter of the module will be returned.
            Example: `{"param1": RangeModel(vtype="int", minmax=(1, 10)), "param2": RangeModel(vtype="float", minmax=(1.0, 10.0))}`
        """
        ...
    def update_ui_schema(self, reset: bool = False) -> Any:
        """
        Set the UI schema of the module.
        
        Parameters
        ----------
        reset : bool, default: False
            If it's True, the value of module attributes will be reset according to the input data.
        
        Returns
        -------
        dict[str, UIAttributeSchema] | None
            UI schema of the module will be returned.
            Example: `{"param1": StringAttributeSchema(vtype="string", minmax=(1, 10)), "param2": IntegerAttributeSchema(vtype="int", minmax=(1, 10))}`
        """
        ...
    @property
    def pipeline(self) -> Union[Any, Any]:
        ...
    @property
    def _ports_in(self) -> Mapping[str, Port]:
        ...
    @_ports_in.setter
    def _ports_in(self, value: Any) -> Any:
        ...
    @property
    def _ports_out(self) -> Mapping[str, Port]:
        ...
    @_ports_out.setter
    def _ports_out(self, value: Any) -> Any:
        ...
    @property
    def dynamic_ports_in(self) -> dict[str, Port]:
        ...
    @property
    def dynamic_ports_out(self) -> dict[str, Port]:
        ...
    @property
    def reserved_ports_in(self) -> Mapping[str, Port]:
        ...
    @property
    def reserved_ports_out(self) -> Mapping[str, Port]:
        ...
    def user_defined_ports_in_keys(self) -> list[str]:
        """Return the keys of all user-defined input ports, exclude `reserved_ports_in`"""
        ...
    def user_defined_ports_out_keys(self) -> list[str]:
        """Return the keys of all user-defined output ports, exclude `reserved_ports_out`"""
        ...
    def all_ports_in_keys(self) -> list[str]:
        """Return the keys of all kinds of input port"""
        ...
    def all_ports_out_keys(self) -> list[str]:
        """Return the keys of all kinds of output port"""
        ...
    @property
    def InputAttributes(self) -> PortReference:
        """Get a PortReference for the InputAttributes port to enable linking with >> operator."""
        ...
    @InputAttributes.setter
    def InputAttributes(self, value: Any) -> Any:
        """
        Set the InputAttributes port data.
        
        Supports both direct data assignment and PortReference assignment.
        """
        ...
    @property
    def OutputAttributes(self) -> PortReference:
        """Get a PortReference for the OutputAttributes port to enable linking with >> operator."""
        ...
    def add_dynamic_ports_in(self, key: str, pdoc: Any = None) -> Any:
        """
        Add a dynamic input port with proper naming convention and property creation.
        
        Parameters
        ----------
        key : str
            The name of the port. Must start with 'Input' to follow naming convention.
        
        pdoc : str, optional
            The documentation/description of the port.
        
        Raises
        ------
        RuntimeError
            If dynamic_ports_in_type is not defined.
        ValueError
            If port name doesn't start with 'Input' or port already exists.
        """
        ...
    def add_dynamic_ports_out(self, key: str, pdoc: Any = None) -> Any:
        """
        Add a dynamic output port with proper naming convention and property creation.
        
        Parameters
        ----------
        key : str
            The name of the port. Must start with 'Output' to follow naming convention.
        pdoc : str, optional
            The documentation/description of the port.
        
        Raises
        ------
        RuntimeError
            If dynamic_ports_out_type is not defined.
        ValueError
            If port name doesn't start with 'Output' or port already exists.
        """
        ...
    def remove_dynamic_ports_in(self, pname: str) -> Any:
        """
        Remove a dynamic input port and its associated property.
        
        Parameters
        ----------
        pname : str
            The name of the port to remove.
        
        Raises
        ------
        KeyError
            If the port doesn't exist.
        """
        ...
    def remove_dynamic_ports_out(self, pname: str) -> Any:
        """
        Remove a dynamic output port and its associated property.
        
        Parameters
        ----------
        pname : str
            The name of the port to remove.
        
        Raises
        ------
        KeyError
            If the port doesn't exist.
        """
        ...
    def __setitem__(self, key: str, value: Any) -> Any:
        ...
    def __getitem__(self, key: str) -> Any:
        ...
    def __getattr__(self, name: str) -> PortReference:
        """
        Handle attribute access for dynamic ports.
        
        This method is called when an attribute lookup hasn't found the attribute
        in the usual places (i.e., it's not an instance attribute nor is it found
        in the class tree for self).
        """
        ...
    def __setattr__(self, name: str, value: Any) -> Any:
        """
        Handle attribute assignment for dynamic ports.
        
        This method is called for all attribute assignments, so we need to
        handle both dynamic ports and normal attributes.
        """
        ...
    def keys(self) -> dict[str, Any]:
        ...
    def values(self) -> dict[str, Any]:
        ...
    def items(self) -> dict[str, Any]:
        ...
    @property
    def ports_info(self) -> dict[str, Any]:
        """
        Get information of all ports
        
        Returns
        -------
        Dictionary
            Example:
            ```
            {"input_ports": {"port_name1": port_type1, "port_name2": port_type2},
            "output_ports": {"port_name": port_type}
            }
            ```
        """
        ...
    def get_cal_params(self) -> dict[str, Any]:
        """
        Get the calculation parameters of the module.
        
        Example
        -------
        {"param_name": param_value}
        """
        ...
    def dump_module_params(self) -> ModuleData:
        """
        Dump the module parameters to a ModuleData object (json).
        
        Returns
        -------
        ModuleData
            The module data.
        """
        ...
    def restore_module_params(self, module_data: ModuleData) -> Any:
        """
        Restore the module params of the module from a ModuleData object (Pydantic Model).
        
        Parameters
        ----------
        module_data : ModuleData
            The module data.
        """
        ...
    def get_workspace(self, module_workspace: Any, pipeline_workspace_as_first: bool = True) -> Any:
        """
        Get the real workspace path from the module or pipeline.
        
        Parameters
        ----------
        module_workspace: str | Path | None
            The workspace set by the parameter of the module.
        
        pipeline_workspace_as_first: bool, default: True
            Whether to use the pipeline workspace as the first choice.
        
        Note
        ----
        If both module_workspace and pipeline_workspace are None, the current working directory will be used.
        """
        ...
    def get_token(self, input_token: Any, proj_id_is_must: bool = True) -> Any:
        """
        Get the token from the input token or pipeline.
        
        Parameters
        ----------
        input_token: tuple[str | None, int | str | None, str | None] | None
            The input token. The first element is the token, the second element is the project id, the third element is the host.
            If any element is None, the pipeline's attribute will be used.
        
        proj_id_is_must: bool, default: True
            Whether the project id is must.
            If True, if proj_id is None, return None.
            If False, if proj_id is None, return the token, proj_id and host.
        """
        ...

class LinkList(list):
    """A list subclass that supports the | operator for chaining Link objects."""
    def __or__(self, other: Any) -> Any:
        """Enable | operator for chaining multiple links."""
        ...

class Link:
    """A link to link the output port of parent module and input port of child module."""
    def __init__(self, parent: Any, child: Any, port_out: Any = None, port_in: Any = None, lname: Any = None) -> Any:
        """
        Initialize the Link object.
        
        Parameters
        ----------
        parent: PipeModule | PortReference
            The parent module or a PortReference for the output port.
        
        child: PipeModule | PortReference
            The child module or a PortReference for the input port.
        
        port_out: str, default: None
            The output port name of the parent module. When it's None, the first output port will be used.
            Ignored if parent is a PortReference.
        
        port_in: str, default: None
            The input port name of the child module. When it's None, the first input port will be used.
            Ignored if child is a PortReference.
        """
        ...
    def __eq__(self, other: Any) -> bool:
        ...
    def __hash__(self) -> int:
        ...
    def __or__(self, other: Any) -> LinkList:
        """Enable | operator for combining multiple links: link1 | link2"""
        ...
    def connect(connection: tuple) -> Any:
        """
        Create a Link from a tuple of PortReference objects.
        
        Parameters
        ----------
        connection : tuple
            A tuple of (output_port_reference, input_port_reference)
        
        Returns
        -------
        Link
            The created link
        
        Example
        -------
        >>> link = Link.connect((parent.OutputPort, child.InputPort))
        """
        ...
    def chain(*connections) -> list[Any]:
        """
        Create multiple Links from chained connections.
        
        Parameters
        ----------
        *connections : PortReference or Link
            Chain of connections using >> operator or existing Link objects
        
        Returns
        -------
        list[Link]
            List of created links
        
        Example
        -------
        >>> links = Link.chain(
        ...     parent.OutputPort >> child.InputPort,
        ...     child.OutputResult >> final.InputData
        ... )
        """
        ...

class PipeLine:
    """
    Pipeline system connecting together modules and adding a messaging system.
    
    The Pipeline simplifies the configuration and execution of interconnected modules.
    It provides several features:
    
    1. Module management: Register, configure, and execute processing modules
    2. Parameter binding: Create high-level attributes that control module parameters
    3. Multi-binding attributes: Create a single attribute that controls multiple module
       parameters (across different modules)
    4. Event system: Support for module events and message passing
    5. Pipeline state management: Save/load configuration and results
    
    Attributes
    ----------
    modules : _ModuleSet
        Collection of modules registered with this pipeline
    
    links : set[Link]
        Set of links connecting modules in the pipeline
    
    children : dict[str, list[Link]]
        Dictionary mapping module names to their outgoing links
    
    parents : dict[str, list[Link]]
        Dictionary mapping module names to their incoming links
    
    attributes : dict[str, PipelineAttribute]
        Dictionary of high-level attributes for controlling module parameters
    
    start_input : dict[str, dict[str, Any]] | None
        Input data for start modules' input ports
    
    app_name : str | None
        Name of the pipeline application
    
    app_title : str | None
        Title of the pipeline application
    
    app_version : str | None
        Version of the pipeline application
    
    local_functions_path : str | Path | None
        Path to local functions file for custom operations
    
    workspace : str | Path | None
        Working directory for saving pipeline output files
    
    json_to_db : dict[str, tuple[str, str, str | None]]
        Database connection mappings for JSON data
    
    hide_output_files : dict[str, str] | None
        Controls which module output files to exclude from results when running by `gdi-server`
    
    steps_enabled : bool
        Whether step-by-step execution is enabled. If it's True, the pipeline can be executed step by step.
    
    steps_manager : StepsManager | None
        Manager for controlling pipeline execution steps
    
    current_step : str | None
        Currently executing step name
    
    last_step : bool
        Whether the current step is the last step
    
    executed_steps : list[str]
        History of executed steps
    
    execution_context : ExecutionContext | None
        Current execution context and state
    """
    def __init__(self, app_name: Any = None, app_title: Any = None, app_version: Any = None, steps_enabled: bool = True, start_input: Any = None, local_functions_path: Any = None) -> Any:
        """
        Initialize the PipeLine object.
        
        Parameters
        ----------
        app_name: str | None, default: None
            The name of the app.
        
        app_title: str | None, default: None
            The title of the app.
        
        app_version: str | None, default: None
            The version of the app.
        
        start_input: dict[str, dict[str, Any]], default: None
            The input data for input ports of all the start modules.
            Example: `{"module_name": {"port_name": port_data}}`
        """
        ...
    @property
    def module_names(self) -> list[str]:
        ...
    @property
    def link_names(self) -> list[str]:
        ...
    @property
    def attributes(self) -> dict[str, PipelineAttribute]:
        ...
    @property
    def steps(self) -> list[PipelineStep]:
        ...
    @steps.setter
    def steps(self, value: list[PipelineStep]) -> Any:
        ...
    @property
    def start_modules(self) -> list[PipeModule]:
        ...
    @property
    def start_modules_names(self) -> list[str]:
        ...
    @property
    def end_modules_names(self) -> list[str]:
        ...
    @property
    def end_modules(self) -> list[PipeModule]:
        ...
    def add_link(self, link: Link, reset_child: bool = True) -> Any:
        """
        Add a link for the pipeline. The parent and child module will also be added if they are not in the pipeline.
        
        Parameters
        ----------
        link : Link
            The link object.
        
        reset_child : bool, default: True
            If it's True, the value of calculation parameters of the child module will be reset according to the input data.
        
        Returns
        -------
        dict[str, UIAttributeSchema] | None
            UI schema of each parameter of child module will be returned.
        """
        ...
    def add_links(self, links: list[Link], reset_child: bool = True) -> Any:
        """Add several links for the network."""
        ...
    def get_link(self, lname: str) -> Any:
        """Get a link by its name."""
        ...
    def remove_link(self, link: Any) -> Any:
        """
        Remove a link from the pipelines, but the module will not be removed.
        link: Link or str
            If it's str, it should be the name of the link.
        """
        ...
    def get_below_links(self, module: PipeModule) -> list[Link]:
        """Get all the links below the module."""
        ...
    def get_above_links(self, module: PipeModule) -> list[Link]:
        """Get all the links above the module."""
        ...
    def run(self, from_module: Any = None, return_results: Any = True) -> PipelineResult:
        """
        Run the pipelines from the start modules or a specified module.
        
        Parameters
        ----------
        from_module: PipeModule | None, default: None
            If it's None, it will run from all the start modules.
            Or it will run from a specified module.
        
        return_results: bool | dict[str, str | list[str]], default: True
            If True, the results of the end modules of all output ports data will be returned.
            If False, no results will be returned.
            If a dictionary, the keys are the module names and the values are the output port name or a list of output port names to return.
        
        Returns
        -------
        PipelineResult
            The result of the end modules in the pipeline will be returned.
        """
        ...
    def add_module(self, module: PipeModule) -> Any:
        """Add a module to the pipeline."""
        ...
    def get_module(self, mname: str) -> Any:
        """Get a module by its name."""
        ...
    def remove_module(self, module: Any, with_modules_below: bool = False) -> Any:
        """
        Remove a module or with modules below, and all related links.
        
        Parameters
        ----------
        module: PipeModule or str
            If it's `str`, it should be the name of the module.
        
        with_modules_below: bool, default: False
            If True, the removed module and all the modules below will be removed.
        """
        ...
    def save_pipeline(self, file: str | Path | None = None) -> PipelineData | None:
        """
        Save the pipeline data to a .pipe file and return a PipelineData object.
        
        Parameters
        ----------
        file : str, Path, None
            The file path and name of the pipe file to be saved.
            If it's None, no file will be saved.
        
        Returns
        -------
        PipelineData | None
            The pipeline data. If the pipeline is empty, return None.
        """
        ...
    def restore_pipeline(self, file: str | Path | PipelineData, silent: bool = False) -> Any:
        """
        Restore the pipeline from a .pipe file (JSON format), a URL, or a PipeLineData object.
        
        Parameters
        ----------
        file : str, Path, PipelineData
            The file path and name of the .pipe file to be read, a URL to download the .pipe file from,
            or a PipeLineData object.
        
        silent : bool, default: False
            If True, suppresses printed messages (default: False)
        """
        ...
    def add_step(self, step: PipelineStep, step_index: int | None = None) -> Any:
        """
        Add a new step definition for controlled execution.
        
        Parameters
        ----------
        step : PipelineStep
            The step definition to add.
        
        step_index : int | None, default: None
            The index of this step in the execution steps which is used to insert the step at a specific position.
            If it's None, it will be added to the end of the execution steps.
        
        Note
        ----
        If `steps_manager` is not initialized, it will be initialized with the execution mode and initial step.
        """
        ...
    def remove_step(self, step_name: str | int) -> Any:
        """
        Remove a step definition from the pipeline.
        
        Parameters
        ----------
        step_name : str | int
            Name or index of the step to remove
        """
        ...
    def get_steps(self) -> list[PipelineStep]:
        """
        Get all defined execution steps.
        
        Returns
        -------
        list[PipelineStep]
            List of execution steps in order.
        """
        ...
    def enable_steps(self, enabled: bool = True) -> Any:
        """
        Enable or disable step-based execution.
        
        Parameters
        ----------
        enabled : bool, default: True
            Whether to enable step-based execution.
        """
        ...
    def run_single_step(self, step: str | int) -> PipelineResult:
        """
        Run a single step by its name or index.

        Parameters
        ----------
        step : str | int
            Name or index of the step to execute.

        Returns
        -------
        PipelineResult
            The result of executing the selected step.
        """
        ...
    def reset_steps(self) -> Any:
        """
        Reset the execution steps.

        NOTE(review): presumably rewinds step-based execution back to the
        initial step — confirm exact reset semantics in the implementation.
        """
        ...
    def set_all_modules_auto_run(self, auto_run: bool = True) -> Any:
        """
        Set all modules to auto run.

        Parameters
        ----------
        auto_run : bool, default: True
            The auto-run flag applied to every module in the pipeline.
        """
        ...
    def run_with_steps(self, **kwargs) -> PipelineResult:
        """
        Run the pipeline according to defined execution steps linear or with flow control.
        
        Parameters
        ----------
        **kwargs
            Attribute names and values to set before running.
        
        Returns
        -------
        PipelineResult
            The result of the current step execution.
        
        Raises
        ------
        ValueError
            If step-based execution is not enabled or if no steps are defined.
        
        Example
        -------
        # Run step by step
        step1_result = pipeline.run_with_steps(depth=10.5)
        step2_result = pipeline.run_with_steps(depth=11.5)
        final_result = pipeline.run_with_steps(depth=12.5)
        """
        ...
    def add_attribute(self, attr_name: str, module_name: str, param_name: str, attr_title: str | None = None, ui_schema_overrides: dict[str, Any] | None = None) -> Any:
        """
        Add an attribute to the pipeline.
        
        Parameters
        ----------
        attr_name : str
            The name of the attribute at pipeline level.
        
        module_name : str
            The name of the module that owns this attribute.
        
        param_name : str
            The name of the parameter in the module.
        
        attr_title : str, optional
            The title of this attribute which is used for users to understand.
            If None, `attr_name` will be used.
        
        ui_schema_overrides : dict[str, Any], optional
            Dictionary of UIAttributeSchema parameters to override. Keys should be valid UIAttributeSchema field names
            (e.g., 'default', 'minimum', 'maximum', 'selections', 'units', 'required', 'visible', etc.).
            This is the preferred way to customize attribute properties.
            If None, the original UI schema from the module will be used as-is.
        
        Examples
        --------
        Basic usage with default UI schema:
        
        >>> pipeline.add_attribute(
        ...     attr_name="pile_length",
        ...     module_name="PileModule",
        ...     param_name="length",
        ...     attr_title="桩长"
        ... )
        
        Override default value and range using UI schema:
        
        >>> pipeline.add_attribute(
        ...     attr_name="pile_diameter",
        ...     module_name="PileModule",
        ...     param_name="diameter",
        ...     ui_schema_overrides={
        ...         "default": 1.2,
        ...         "minimum": 0.5,
        ...         "maximum": 3.0,
        ...         "units": Units.m
        ...     }
        ... )
        
        Customize selections using UI schema:
        
        >>> pipeline.add_attribute(
        ...     attr_name="soil_type",
        ...     module_name="SoilModule",
        ...     param_name="type",
        ...     ui_schema_overrides={
        ...         "selections": ["clay", "sand", "gravel"],
        ...         "selections_name": ["黏土", "砂土", "砾石"],
        ...         "default": "clay"
        ...     }
        ... )
        
        Set UI properties using UI schema:
        
        >>> pipeline.add_attribute(
        ...     attr_name="advanced_param",
        ...     module_name="AdvancedModule",
        ...     param_name="param",
        ...     ui_schema_overrides={
        ...         "visible": False,  # Hide from UI
        ...         "readonly": True,  # Make read-only
        ...     }
        ... )
        
        Notes
        -----
        - The module must have `ui_schema` or `values_range`(legacy) defined and contain the specified parameter
        - The original RangeModel/NestedRangeModel will be converted to UIAttributeSchema automatically
        - Invalid field names for the UI schema type will raise ValueError
        """
        ...
    def get_attribute_value(self, attr_name: str) -> Any:
        """
        Get the value of a pipeline attribute.
        
        Parameters
        ----------
        attr_name : str
            The name of the attribute to get.
        
        Returns
        -------
        Any
            The current value of the attribute.
        
        Raises
        ------
        KeyError
            If the attribute doesn't exist.
        """
        ...
    def set_attributes(self, **kwargs) -> Any:
        """
        Set multiple attribute values for the pipeline at once.
        
        Parameters
        ----------
        **kwargs : dict
            Dictionary of attribute names and their values to set.
            Each key should be an attribute name and each value is the value to set.
        
        Raises
        ------
        KeyError
            If any attribute doesn't exist in the pipeline.
        ValueError
            If any value is not valid for its attribute.
        
        Examples
        --------
        >>> pipeline.set_attributes(token="abc123", proj_id=42, bore_number="ZK1")
        """
        ...
    def remove_attribute(self, attr_name: str) -> Any:
        """
        Remove an attribute from the pipeline.
        
        Parameters
        ----------
        attr_name : str
            The name of the attribute to remove.
        """
        ...
    def get_attribute_info(self) -> AttributesInfoResponse:
        """
        Get detailed information about all attributes including their UI schemas.
        
        Returns
        -------
        AttributesInfoResponse
            Response model containing all attribute information objects.
        """
        ...
    def add_dict_attribute(self, attr_name: str, module_name: str, param_name: str, key_name: str, attr_title: str | None = None) -> Any:
        """
        Add an attribute to the pipeline that maps to a specific key in a dictionary parameter.
        
        This method works with dictionary parameters that follow the dict[str, Any] format.
        The parameter must have an ObjectAttributeSchema defined in the module's ui_schema,
        or a NestedRangeModel with vtype="dict" in the module's values_range (legacy support).
        
        Parameters
        ----------
        attr_name : str
            The name of the attribute at pipeline level.
        
        module_name : str
            The name of the module.
        
        param_name : str
            The name of the parameter in the module that is a dictionary.
            This parameter MUST have an ObjectAttributeSchema in ui_schema or
            a NestedRangeModel with vtype="dict" in values_range (legacy).
        
        key_name : str
            The dictionary key to access within the parameter's dictionary.
            This key MUST be defined in the ObjectAttributeSchema.properties or
            NestedRangeModel.fields (legacy).
        
        attr_title : str, optional
            The title of this attribute which is used for users to understand.
            If None, the title from the field schema will be used.
        
        Raises
        ------
        ValueError
            If the parameter doesn't have an ObjectAttributeSchema in ui_schema or
            a NestedRangeModel with vtype="dict" in values_range.
            If the key_name is not defined in the schema properties/fields.
            If the parameter is not a dictionary.
        
        Note
        ----
        Requirements:
        - The parameter must be defined in module.ui_schema with an ObjectAttributeSchema, OR
        - The parameter must be defined in module.values_range with a NestedRangeModel (vtype="dict") [legacy]
        - The key_name must be defined in the schema properties/fields
        - The parameter must be a dictionary (dict[str, Any])
        
        Example
        -------
        # Module must define ObjectAttributeSchema in get_ui_schema():
        ui_schema["data"] = ObjectAttributeSchema(
            title="Data Collection",
            properties={"x": StringAttributeSchema(title="X Value")}
        )
        
        # Or legacy NestedRangeModel in set_cal_params():
        values_range["data"] = NestedRangeModel(
            vtype="dict",
            title="Data Collection",
            fields={"x": RangeModel(vtype="str", title="X Value")}
        )
        
        # Then use the method:
        pipeline.add_dict_attribute("x_value", "my_module", "data", "x")
        """
        ...
    def add_multi_binding_attribute(self, attr_name: str, bindings: list[tuple[str, str, str | None, int | None]], attr_title: str | None = None, ui_schema_overrides: dict[str, Any] | None = None) -> Any:
        """
        Add an attribute that binds to multiple parameters across different modules.
        
        This method creates a single pipeline attribute that controls multiple module parameters.
        When the attribute value is set, all bound parameters will be updated with the same value.
        The parameters can be regular module parameters, keys in dictionary parameters, or items in list[dict] parameters.
        
        Parameters
        ----------
        attr_name : str
            The name of the attribute at pipeline level.
        
        bindings : list[tuple[str, str, str | None, int | None]]
            List of binding tuples, each containing:
            - module_name: name of the module
            - param_name: name of the parameter in the module
            - dict_key: key in the dictionary parameter (if parameter is a dict, otherwise None)
            - list_index: index in list[dict] parameter (if parameter is list[dict], otherwise None)
        
        attr_title : str, optional
            The title of the attribute for display purposes. If None, attr_name will be used.
        
        ui_schema_overrides : dict[str, Any], optional
            Dictionary of UIAttributeSchema parameters to override. Keys should be valid UIAttributeSchema field names
            (e.g., 'default', 'minimum', 'maximum', 'selections', 'units', 'required', 'visible', etc.).
            This allows customization of the UI schema derived from the first binding.
        
        Example
        -------
        # Bind a single attribute to parameters in multiple modules
        pipeline.add_multi_binding_attribute(
            "shared_threshold",
            [
                ("detection_module", "threshold", None, None),                    # Regular parameter
                ("filtering_module", "params", "min_threshold", None),           # Dictionary parameter
                ("config_module", "filters", "filter_value", 0)                 # List[dict] parameter (first item)
            ],
            attr_title="Detection Threshold",
            ui_schema_overrides={
                "minimum": 0.0,
                "maximum": 1.0,
                "units": Units.m
            }
        )
        
        # When the attribute is set, all parameters will be updated
        pipeline.set_attribute("shared_threshold", 0.7)  # Updates all parameters to 0.7
        """
        ...
    def add_json_to_db(self, name: str, data_type: Literal["port", "pipeline_attr", "module_attr"] = "port", module_name: str | None = None) -> Any:
        """
        Add data from various sources (output ports, pipeline attributes, module attributes) to database.
        
        This method supports adding different types of data using a smart key naming strategy
        for uniqueness and easy identification.
        
        Parameters
        ----------
        name : str
            - For ports: the output port name
            - For pipeline attributes: the direct pipeline attribute name (e.g., 'workspace', 'app_name')
            - For module attributes: the module parameter/attribute name
        
        data_type : str, default: "port"
            The type of data to add:
            - "port": Output port data (requires module_name)
            - "pipeline_attr": Direct pipeline attribute value (e.g., workspace, app_name)
            - "module_attr": Module attribute/parameter value (requires module_name)
        
        module_name : str, optional
            The name of the module. Required for "port" and "module_attr" data types.
        
        Raises
        ------
        ValueError
            If required parameters are missing, module doesn't exist, or data source is not found.
        
        Examples
        --------
        # Add output port data
        >>> pipeline.add_json_to_db("OutputData", "port", "MyModule")
        
        # Add pipeline attribute value
        >>> pipeline.add_json_to_db("workspace", "pipeline_attr")
        
        # Add module attribute/parameter value
        >>> pipeline.add_json_to_db("threshold", "module_attr", "FilterModule")
        
        Key Naming Strategy
        -------------------
        - Output ports: "module_name@port_name"
        - Pipeline attributes: "pipeline@attr_name" (e.g., "pipeline@workspace")
        - Module attributes: "module_name#attr_name"
        """
        ...
    def get_json_to_db(self) -> list[PipelineRunDataItem]:
        """
        Get structured data items from various sources that were marked for database storage.
        
        This method retrieves data from output ports, pipeline attributes, and module
        attributes that were previously marked with add_json_to_db() and returns them
        as structured PipelineRunDataItem objects with full metadata for database storage.
        
        Returns
        -------
        list[PipelineRunDataItem]
            List of structured data items, each containing:
            - name: The data item name (port_name, attr_name, or param_name)
            - data_type: Source type ("port", "pipeline_attr", "module_attr")
            - module_name: Module name (None for pipeline attributes)
            - value_type: Type of the data value (e.g., "TableData", "str", "int")
            - data: The actual serialized data content
            - timestamp: ISO format timestamp when data was captured
        
        Note
        ----
        For output ports, if a module hasn't been executed, the data field will be None.
        FastAPI's CustomJSONEncoder handles serialization of complex objects automatically.
        """
        ...
    def add_pipeline_attr_to_db(self, attr_name: str) -> Any:
        """
        Convenience method to add a direct pipeline attribute to database storage.
        
        Parameters
        ----------
        attr_name : str
            The name of the direct pipeline attribute to add (e.g., 'workspace', 'app_name', 'app_title').
        
        Example
        -------
        >>> pipeline.add_pipeline_attr_to_db("workspace")
        >>> pipeline.add_pipeline_attr_to_db("app_name")
        """
        ...
    def add_module_attr_to_db(self, module_name: str, attr_name: str) -> Any:
        """
        Convenience method to add a module attribute/parameter to database storage.
        
        Parameters
        ----------
        module_name : str
            The name of the module.
        attr_name : str
            The name of the module attribute/parameter to add.
        
        Example
        -------
        >>> pipeline.add_module_attr_to_db("FilterModule", "threshold")
        """
        ...
    def add_port_to_db(self, module_name: str, port_name: str) -> Any:
        """
        Convenience method to add a module output port to database storage.
        
        Parameters
        ----------
        module_name : str
            The name of the module.
        port_name : str
            The name of the output port to add.
        
        Example
        -------
        >>> pipeline.add_port_to_db("MyModule", "OutputData")
        """
        ...
    def get_output_files(self) -> dict[str, dict[str, str | Path | list[str | Path] | GdimMinIOFile | list[GdimMinIOFile] | None]]:
        """
        Get all file paths and file objects from output ports of modules with file-related port types.
        
        This method checks all modules in the pipeline and collects file data
        from output ports that have PortType.FilePath, PortType.FilesPath, PortType.GdimFile,
        or PortType.GdimFiles. Useful for knowing what files were generated after running
        the pipeline or individual steps.
        
        Returns
        -------
        dict[str, dict[str, str | Path | list[str | Path] | GdimMinIOFile | list[GdimMinIOFile] | None]]
            Dictionary mapping module names to their file outputs.
            Format: {module_name: {port_name: file_data}}
            - For PortType.FilePath: file_data is a single str or Path
            - For PortType.FilesPath: file_data is a list[str | Path]
            - For PortType.GdimFile: file_data is a single GdimMinIOFile object
            - For PortType.GdimFiles: file_data is a list[GdimMinIOFile] objects
            Only includes modules that have file-related output ports with actual file data.
            Excludes outputs specified in hide_output_files.
        
        Example
        -------
        >>> files = pipeline.get_output_files()
        >>> print(files)
        {'report_module': {'OutputFile': '/path/to/report.pdf'},
         'batch_module': {'OutputFiles': ['/path/to/file1.csv', '/path/to/file2.csv']},
         'upload_module': {'GdimFile': GdimMinIOFile(fileId='123', originalFilename='data.xlsx', ...)},
         'batch_upload_module': {'GdimFiles': [GdimMinIOFile(...), GdimMinIOFile(...)]}}
        """
        ...
    def draw_pipeline(self, filename: str | None = None, format: str = "svg", show_execution_status: bool = True, show_port_names: bool = False, show_steps: bool = False, show_flow_control: bool = False, layout: str = "TB", node_style: dict | None = None, edge_style: dict | None = None, view: bool = False) -> str | None:
        """
        Draw a visual representation of the pipeline showing module connections and execution status.
        
        This method creates a directed graph visualization of the pipeline using Graphviz.
        Modules are shown as nodes with different colors based on execution status,
        and links are shown as edges between modules.
        
        Parameters
        ----------
        filename : str, optional
            Output filename (without extension). If None, uses app_name or generates one.
        format : str, default: 'svg'
            Output format ('png', 'svg', 'pdf', 'dot', etc.)
        show_execution_status : bool, default: True
            Whether to color-code modules based on their execution status
        show_port_names : bool, default: False
            Whether to show port names on the edges
        show_steps : bool, default: False
            Whether to show step information and group modules by steps
        show_flow_control : bool, default: False
            Whether to show flow control transitions between steps (requires show_steps=True)
        layout : str, default: 'TB'
            Graph layout direction ('TB'=top-bottom, 'LR'=left-right, 'BT'=bottom-top, 'RL'=right-left)
        node_style : dict, optional
            Custom node styling options
        edge_style : dict, optional
            Custom edge styling options
        view : bool, default: False
            Whether to automatically open the generated file
        
        Returns
        -------
        str | None
            Path to the generated file, or None if visualization is not available
        
        Raises
        ------
        ImportError
            If graphviz is not installed
        
        Examples
        --------
        >>> pipeline.draw_pipeline('my_pipeline', format='svg', view=True)
        >>> pipeline.draw_pipeline(show_port_names=True, layout='LR')
        >>> pipeline.draw_pipeline(show_steps=True, show_flow_control=True)
        """
        ...
    def draw_execution_flow(self, filename: str | None = None, format: str = "svg", highlight_path: list[str] | None = None, view: bool = False) -> str | None:
        """
        Draw the execution flow of the pipeline with time-based information.
        
        This creates a specialized view focusing on the execution order and timing,
        useful for understanding pipeline performance and bottlenecks.
        
        Parameters
        ----------
        filename : str, optional
            Output filename (without extension). If None, generates one.
        format : str, default: 'svg'
            Output format ('png', 'svg', 'pdf', 'dot', etc.)
        highlight_path : list[str], optional
            List of module names to highlight as a specific execution path
        view : bool, default: False
            Whether to automatically open the generated file
        
        Returns
        -------
        str | None
            Path to the generated file, or None if visualization is not available
        """
        ...
    def get_pipeline_stats(self) -> dict:
        """
        Get statistics about the pipeline.

        Returns
        -------
        dict
            Pipeline statistics.
            NOTE(review): the exact keys are not visible from this stub —
            confirm the schema against the implementation.
        """
        ...
    def get_table_requirements(self) -> dict[str, set[str]]:
        """
        Get all essential table and field requirements across all GdimTableReader modules.
        
        Returns
        -------
        dict[str, set[str]]
            Dictionary with table names as keys and sets of required field names as values.
            Combines requirements from all GdimTableReader modules and normalizes to 'name' format.
            Automatically excludes tables and fields marked as optional via validation exclusions.
        """
        ...
    def get_pipeline_requirements(self) -> set[str]:
        """
        Get all unique pipeline dependencies across all GdimPipelineDbReader modules.
        
        Returns
        -------
        set[str]
            Set of unique pipeline names that this pipeline depends on, excluding excluded pipelines.
        """
        ...
    def get_gdim_app_requirements(self) -> set[str]:
        """
        Get all unique GDIM app dependencies across all modules with `_related_gdim_app`.
        
        Returns
        -------
        set[str]
            Set of unique GDIM app names that this pipeline depends on.
            Note: GDIM app requirements cannot be excluded from validation.
        """
        ...
    def set_validation_exclusions(self, exclusions: dict[str, list[str] | None] | None = None) -> Any:
        """
        Set the validation exclusions for tables and fields.
        
        Parameters
        ----------
        exclusions : dict[str, list[str] | None] | None, default: None
            Dictionary mapping table names to field lists to exclude.
            - If value is None: exclude the entire table from validation
            - If value is list of strings: exclude only specific fields from that table
            - If None: clear all exclusions
        
        Examples
        --------
        # Exclude entire tables
        pipeline.set_validation_exclusions({
            'optional_table': None,  # Exclude entire table
            'logging_table': None    # Exclude entire table
        })
        
        # Exclude specific fields
        pipeline.set_validation_exclusions({
            'main_table': ['optional_field1', 'optional_field2'],  # Exclude specific fields
            'config_table': ['debug_flag']  # Exclude specific field
        })
        
        # Mixed exclusions
        pipeline.set_validation_exclusions({
            'optional_table': None,  # Exclude entire table
            'main_table': ['optional_field']  # Exclude specific field
        })
        """
        ...
    def add_validation_exclusion(self, table_name: str, field_names: list[str] | None = None) -> Any:
        """
        Add validation exclusions for a specific table and/or fields.
        
        Parameters
        ----------
        table_name : str
            Name of the table to exclude
        field_names : list[str] | None, default: None
            List of field names to exclude from the table.
            If None, the entire table will be excluded.
        """
        ...
    def remove_validation_exclusion(self, table_name: str, field_names: list[str] | None = None) -> Any:
        """
        Remove validation exclusions for a specific table and/or fields.
        
        Parameters
        ----------
        table_name : str
            Name of the table to remove exclusions for
        field_names : list[str] | None, default: None
            List of field names to remove from exclusions.
            If None, all exclusions for the table will be removed.
        """
        ...
    def get_validation_exclusions(self) -> dict[str, set[str] | None]:
        """
        Get current validation exclusions.
        
        Returns
        -------
        dict[str, set[str] | None]
            Copy of current validation exclusions
        """
        ...
    def clear_validation_exclusions(self) -> Any:
        """
        Clear all validation exclusions.

        After this call, no tables or fields are excluded from validation.
        """
        ...
    def set_pipeline_exclusions(self, exclusions: list[str] | None = None) -> Any:
        """
        Set the pipeline validation exclusions.
        
        Parameters
        ----------
        exclusions : list[str] | None, default: None
            List of pipeline names to exclude from validation.
            If None, clear all pipeline exclusions.
        
        Examples
        --------
        # Exclude specific pipelines
        pipeline.set_pipeline_exclusions(['optional_pipeline', 'debug_pipeline'])
        
        # Clear all pipeline exclusions
        pipeline.set_pipeline_exclusions(None)
        """
        ...
    def add_pipeline_exclusion(self, pipeline_names: list[str] | str) -> Any:
        """
        Add pipeline validation exclusions.
        
        Parameters
        ----------
        pipeline_names : list[str] | str
            Pipeline name(s) to exclude from validation.
        """
        ...
    def remove_pipeline_exclusion(self, pipeline_names: list[str] | str) -> Any:
        """
        Remove pipeline validation exclusions.
        
        Parameters
        ----------
        pipeline_names : list[str] | str
            Pipeline name(s) to remove from exclusions.
        """
        ...
    def get_pipeline_exclusions(self) -> set[str]:
        """
        Get current pipeline validation exclusions.
        
        Returns
        -------
        set[str]
            Copy of current pipeline exclusions
        """
        ...
    def clear_pipeline_exclusions(self) -> Any:
        """
        Clear all pipeline validation exclusions.

        After this call, no pipelines are excluded from validation.
        """
        ...
