# reader modules in gdi-engine
import copy
import warnings
from pathlib import Path
from typing import Any, Literal

import numpy as np
import pandas as pd

from ..connectors.fileConnector import MDBHandler
from ..connectors.gdimConnector import (
    GdimTableData,
    download_gdim_file,
    get_bores_coordinate,
    get_geo_params_table_data,
    get_geo_params_table_title_name_map,
    get_pipeline_json_to_db,
    get_project_info,
    get_project_info_structure,
    get_project_roles,
    get_report_text_library_list,
    get_survey_statistics_table_data,
    get_survey_statistics_table_title_name_map,
    get_table_data,
    get_tpl_structure,
    log_in,
    log_in_zhengyuan,
)
from ..connectors.skglConnector import (
    get_monitor_data,
    get_monitor_name_title_map,
    get_yesterday_data,
)
from ..dataclass import GDIDataQualityWarning, GDIWarning
from ..dataclass.gdimData import GdimMinIOFile, GdimTemplate
from ..dataclass.results import CoordinateSystem, SingleResult, UnitResult
from ..dataclass.tables import FieldMetadata, TableCollection, TableData
from ..dataclass.terminologies import DataTypes, GeneralProps, GeoResults, Units
from ..dataTransformers.dataCleanAlgorithm import format_dataframe_type
from ..pipeline.pipeData import (
    BooleanAttributeSchema,
    FileAttributeSchema,
    MultiResultsDict,
    PipelineRunDataItem,
    RangeModel,
    ResultsDict,
    StringAttributeSchema,
    UIAttributeSchema,
)
from ..pipeline.pipeline import (
    PipeModule,
    Port,
    PortReference,
    module_decorator,
    status_manage,
)
from ..pipeline.portTypes import PortType, PortTypeHint


##########################
# GDIM Basic Readers
##########################
@module_decorator()
class GetGdimToken(PipeModule):
    """Get user token and project id from Gdim."""

    OutputToken: PortReference[PortTypeHint.Token]

    _port_docs = {"OutputToken": "The token of the user."}

    def __init__(
        self,
        mname: str | None = None,
        auto_run: bool = True,
        user_name: str | None = None,
        password: str | None = None,
        login_by_token: bool = False,
        token: str | None = None,
        proj_id: int | str | None = None,
        host: str | None = None,
        gdim: bool = True,
        platform: str | None = None,
    ) -> None:
        """Initialize GetGdimToken object.

        Parameters
        ----------
        user_name: str | None, default: None
            Username for login. If None, will try to read from .env file (GDIM_USERNAME)

        password: str | None, default: None
            Password for login. If None, will try to read from .env file (GDIM_PASSWORD)

        login_by_token: bool, default: False
            Whether to login by token. If True, user_name and password will be ignored.

        token: str | None, default: None
            The token of the user.
            If not None, user_name and password will be ignored no matter what the value of login_by_token is.

        proj_id: int | str | None, default: None
            The project id.

        gdim: bool, default: True
            Whether the project is a GDIM project.

        host: str | None, default: None
            The host of the platform.

        platform: str | None, default: None
            The platform name.
        """
        super().__init__(mname, auto_run)

        self.user_name = user_name
        self.password = password
        self.token = token
        self.login_by_token = login_by_token
        self.proj_id = proj_id
        self.host = host
        self.gdim = gdim
        self.platform = platform

        self._title = "获取GDIM token"
        self._description = "获取GDIM平台token和项目id"

    def update_ui_schema(self, reset: bool = False) -> dict[str, UIAttributeSchema]:
        """Expose ``token`` and ``proj_id`` as hidden UI attributes."""
        return {
            "token": StringAttributeSchema(
                title="用户token",
                default=self.token,
                visible=False,
            ),
            "proj_id": StringAttributeSchema(
                title="项目id",
                default=self.proj_id,
                visible=False,
            ),
        }

    def set_cal_params(self, reset: bool = True) -> dict[str, RangeModel]:
        """Expose ``token`` and ``proj_id`` as hidden calculation parameters."""
        return {
            "token": RangeModel(
                vtype="str", default=self.token, title="用户token", visible=False
            ),
            "proj_id": RangeModel(
                vtype=("int", "str"),
                default=self.proj_id,
                title="项目id",
                visible=False,
            ),
        }

    def execute(self) -> PortTypeHint.Token | None:
        """Resolve a ``(token, proj_id, host)`` triple and publish it on OutputToken.

        Logs in with credentials when no token is available; otherwise reuses
        the explicit token or the pipeline-held token. Returns None (and clears
        the output port) when a login ``ValueError`` occurs or no token can be
        resolved. Note that an unsupported ``platform`` raises ``ValueError``
        inside the ``try`` and is therefore also handled by returning None.
        """
        if self.token is None and not self.login_by_token:
            try:
                if self.platform == "zhengyuan":
                    # Warning: the cid is fixed in current version
                    token = log_in_zhengyuan(
                        user_name=self.user_name,
                        password=self.password,
                        cid=4,
                        host=self.host,
                    )
                elif self.platform is None:
                    token = log_in(
                        user_name=self.user_name,
                        password=self.password,
                        host=self.host,
                        gdim=self.gdim,
                    )
                else:
                    raise ValueError(f"Invalid platform: {self.platform}")
            except ValueError:
                # Login failure (or invalid platform): emit no token instead
                # of crashing the pipeline. Other exceptions propagate.
                self._ports_out["OutputToken"].data = None
                return None
        else:
            if self.token is None:
                # login_by_token without an explicit token: fall back to the
                # token held by the owning pipeline, if any.
                if self.pipeline and self.pipeline.gdim_token:
                    token = self.pipeline.gdim_token
                else:
                    self._ports_out["OutputToken"].data = None
                    return None
            else:
                token = self.token

        # proj_id / host fall back to the pipeline-level values when unset.
        if self.proj_id is None and self.pipeline:
            proj_id = self.pipeline.gdim_proj_id
        else:
            proj_id = self.proj_id

        if self.host is None and self.pipeline:
            host = self.pipeline.host
        else:
            host = self.host

        self._ports_out["OutputToken"].data = (token, proj_id, host)
        return (token, proj_id, host)


@module_decorator()
class GetGdimFile(PipeModule):
    """Get a file object from Gdim."""

    OutputGdimFile: PortReference[PortTypeHint.GdimFile]

    _port_docs = {"OutputGdimFile": "The file object of Gdim."}

    def __init__(
        self,
        mname: str | None = None,
        auto_run: bool = True,
        gdim_file: dict | None = None,
        host: str | None = None,
    ) -> None:
        """Initialize GetGdimFile object.

        Parameters
        ----------
        gdim_file: dict | None, default: None
            Raw dict describing the gdim file object; see ``GdimMinIOFile``
            for the expected structure.

        host: str | None, default: None
            The host of the platform.
            If None, the host in the config file will be used.
        """
        super().__init__(mname=mname, auto_run=auto_run)

        self.gdim_file = gdim_file
        self.host = host

        self._title = "获取GDIM文件"
        self._description = "从GDIM文件服务器获取文件对象"

    def update_ui_schema(self, reset: bool = False) -> dict[str, UIAttributeSchema]:
        """Expose the file payload as a file-upload UI attribute."""
        schema: dict[str, UIAttributeSchema] = {
            "gdim_file": FileAttributeSchema(title="GDIM文件数据"),
        }
        return schema

    def set_cal_params(self, reset: bool = True) -> dict[str, RangeModel]:
        """Expose the file payload as a file-widget calculation parameter."""
        file_param = RangeModel(
            vtype="dict",
            default=self.gdim_file,
            title="Gdim文件数据",
            widget="file",
        )
        return {"gdim_file": file_param}

    def execute(self) -> PortTypeHint.GdimFile | None:
        """Validate the configured dict into a ``GdimMinIOFile`` and publish it."""
        out_port = self._ports_out["OutputGdimFile"]
        if self.gdim_file is None:
            # Nothing to fetch: clear the port and bail out.
            out_port.data = None
            return None

        # Deep-copy so the caller-supplied dict is never mutated.
        payload = copy.deepcopy(self.gdim_file)
        payload["host"] = self.host

        minio_file = GdimMinIOFile.model_validate(payload)
        out_port.data = minio_file
        return minio_file


@module_decorator()
class GdimTemplateReader(PipeModule):
    """Read the structure of a Gdim template by template id or project id."""

    InputToken: PortReference[PortTypeHint.Token]
    OutputTemplate: PortReference[PortTypeHint.GdimTemplate]

    _port_docs = {
        "InputToken": "The token of the user.",
        "OutputTemplate": "The structure of the template.",
    }

    def __init__(
        self,
        mname: str | None = None,
        auto_run: bool = True,
        tpl_id: str | int | None = None,
        get_app_info: bool = False,
        token: str | None = None,
        proj_id: str | int | None = None,
        host: str | None = None,
    ) -> None:
        """Initialize GdimTemplateReader object.

        Parameters
        ----------
        tpl_id: str | int | None, default: None
            The id of the template.
            If not None, proj_id will be ignored.

        get_app_info: bool, default: False
            Whether to get the application information.

        token: str | None, default: None
            The token of the user.

        proj_id: str | int | None, default: None
            The id of the project.

        host: str | None, default: None
            The host of the platform.
            If None, the host in the config file will be used.

        Ports
        -----
        InputToken: PortReference[PortTypeHint.Token]
            The token of the user.

        OutputTemplate: PortReference[PortTypeHint.GdimTemplate]
            The structure of the template.
        """
        super().__init__(mname=mname, auto_run=auto_run)

        # An explicit token pre-seeds the input port, bypassing upstream modules.
        if token is not None:
            self.InputToken = (token, proj_id, host)  # type: ignore

        self.tpl_id = tpl_id
        self.get_app_info = get_app_info

        self._title = "获取GDIM模板结构"
        self._description = "获取GDIM模板结构，包括表结构和应用结构"

    def update_ui_schema(self, reset: bool = False) -> dict[str, UIAttributeSchema]:
        """Expose ``tpl_id`` and ``get_app_info`` as hidden UI attributes."""
        return {
            "tpl_id": StringAttributeSchema(
                title="模板id",
                visible=False,
            ),
            "get_app_info": BooleanAttributeSchema(
                title="是否获取应用结构",
                default=False,
                visible=False,
            ),
        }

    def execute(self) -> PortTypeHint.GdimTemplate | None:
        """Fetch the template structure and publish it on OutputTemplate."""
        raw_token = self._ports_in["InputToken"].data

        # A project id is only mandatory when no template id was supplied.
        resolved = self.get_token(raw_token, proj_id_is_must=not self.tpl_id)
        if resolved is None:
            self._ports_out["OutputTemplate"].data = None
            return None
        token, proj_id, host = resolved

        structure = get_tpl_structure(
            token,
            tpl_id=self.tpl_id,
            proj_id=proj_id,
            host=host,
            get_app_info=self.get_app_info,
        )

        self._ports_out["OutputTemplate"].data = structure
        return structure


@module_decorator()
class GdimTableReader(PipeModule):
    """Read the data of several tables in a Gdim project."""

    InputToken: PortReference[PortTypeHint.Token]
    OutputTables: PortReference[PortTypeHint.TableCollection]
    OutputTemplate: PortReference[PortTypeHint.GdimTemplate]

    _port_docs = {
        "InputToken": "The token of the user.",
        "OutputTables": "The data of the tables.",
        "OutputTemplate": "The structure of the template.",
    }

    def __init__(
        self,
        mname: str = "GdimTableReader",
        auto_run: bool = True,
        token: str | None = None,
        proj_id: int | str | None = None,
        host: str | None = None,
        table_fields: dict[str, list[str]] | list[str] | str | None = None,
        format_dict: dict[str, dict[str, str]] | None = None,
        main_table_name: str | None = None,
        sub_table_names: list[str] | None = None,
        main_key: str | None = None,
        main_key_values: dict[str, Any] | None = None,
        keep_gdim_id: bool = False,
        table_collection_name: str | None = None,
        missing_error_type: Literal["error", "warning", "gdi_warning"] = "error",
        empty_error_type: Literal["warning", "gdi_warning"] = "warning",
        gdim: bool = True,
    ) -> None:
        """Initialize GdimTableReader object.

        Parameters
        ----------
        token: str | None, default: None
            The token of the user.

        proj_id: int | str | None, default: None
            The id of the project.

        host: str | None, default: None
            If it's None, default value will be used, for example: "https://gdim.kulunsoft.com"

        table_fields: dict[str, list[str]] | list[str] | str | None, default: None
            If a dict, key can be either table name or title, value can be either field names or titles.
            The system automatically detects whether the provided keys are names or titles.
            Example:
                `{'标惯表':['标贯深度','贯入深度'], '地层表':['x坐标','y坐标']}`
                `{'kl_boreholes':['x_coords','y_coords'], 'kl_cpt_table':['depth','elevation']}`
            If a list of strings or a string, it's the name or title of the tables. All the data in the tables will be read.
            If None, all the data in all tables will be read.

        format_dict: dict[str, dict[str, str]] | None, default: None
            Key can be either table name or title, value is the format dict with field names/titles and their formats.
            The system automatically detects whether the provided keys are names or titles.
            Example:
                `{'标惯表':{'标贯深度':float, '贯入深度':float}}`

        main_table_name: str | None, default: None
            The name or title of the main table. The system automatically detects whether it's a name or title.

        sub_table_names: list[str] | None, default: None
            The names or titles of the sub tables. The system automatically detects whether they are names or titles.

        main_key: str | None, default: None
            The name or title of the main key. The system automatically detects whether it's a name or title.

        main_key_values: dict[str, Any] | None, default: None
            The main key values of the parent table. Keys are table names and values are main key values.
            It's used when the table has a parent table.
            Only valid when gdim is True.

        keep_gdim_id: bool, default: False
            Whether to keep the gdim_id column in the output table.
            Only valid when gdim is True.

        table_collection_name: str | None, default: None
            The name of the table collection.

        missing_error_type: Literal["error", "warning", "gdi_warning"], default: "error"
            The type of the error when the table or field is not found.
            If `error`, a `KeyError` will be raised.
            If `warning`, a `UserWarning` will be printed in the console.
            If `gdi_warning`, a `GDIWarning` will be printed in the console and show a warning in GDIM.

        empty_error_type: Literal["warning", "gdi_warning"], default: "warning"
            The type of the error when the table data is empty.
            If `warning`, a `UserWarning` will be printed in the console.
            If `gdi_warning`, a `GDIWarning` will be printed in the console and show a warning in GDIM.

        gdim: bool, default: True
            Whether to get the data from GDIM. If False, the data is from GBIM.
        """
        super().__init__(mname=mname, auto_run=auto_run)

        # An explicit token pre-seeds the input port, bypassing upstream modules.
        if token is not None:
            self.InputToken = (token, proj_id, host)  # type: ignore

        self.table_fields = table_fields
        self.format_dict = format_dict
        self.main_table_name = main_table_name
        self.sub_table_names = sub_table_names
        self.main_key = main_key
        self.main_key_values = main_key_values
        self.keep_gdim_id = keep_gdim_id
        self.table_collection_name = table_collection_name
        self.missing_error_type = missing_error_type
        self.empty_error_type = empty_error_type
        self.gdim = gdim

        self._title = "获取GDIM表数据"
        self._description = "获取GDIM表数据，包括表数据和表结构"

    def _report_missing(self, message: str) -> None:
        """Handle a missing table/field according to ``missing_error_type``.

        Raises ``KeyError`` for "error", warns (appending the "ignored" suffix)
        for the warning types, and raises ``ValueError`` for an unknown type.
        """
        if self.missing_error_type == "error":
            raise KeyError(message)
        elif self.missing_error_type == "warning":
            warnings.warn(f"{message}，忽略", UserWarning)
        elif self.missing_error_type == "gdi_warning":
            warnings.warn(f"{message}，忽略", GDIWarning)
        else:
            raise ValueError(f"Invalid error type: {self.missing_error_type}")

    def _report_empty(self, table_title: str) -> None:
        """Warn about an empty table according to ``empty_error_type``."""
        if self.empty_error_type == "warning":
            warnings.warn(f"表 '{table_title}' 数据为空，不读取。", UserWarning)
        elif self.empty_error_type == "gdi_warning":
            warnings.warn(f"表 '{table_title}' 数据为空，不读取。", GDIWarning)
        else:
            raise ValueError(f"Invalid error type: {self.empty_error_type}")

    def _resolve_table_fields(
        self, tpl_structure: GdimTemplate
    ) -> tuple[dict[str, list[str]], dict[str, Any] | None]:
        """Normalize ``table_fields`` and ``main_key_values`` to canonical names.

        User-supplied keys may be either table/field names or titles; the
        template structure is used to resolve them. Returns a pair
        ``(table_fields, main_key_values)`` keyed by table names.
        """
        if self.main_key_values:
            # Re-keyed copy: user keys may be titles, get_table_data needs names.
            main_key_values: dict[str, Any] | None = {}
        else:
            main_key_values = self.main_key_values

        table_fields: dict[str, list[str]] = {}
        if self.table_fields is None:
            # Read every field of every table in the template.
            for table_name, table_metadata in tpl_structure.tables.items():
                table_fields[table_name] = list(table_metadata.fields_metadata.keys())
        elif isinstance(self.table_fields, (str, list)):
            # Whole tables requested by name or title; missing ones are
            # skipped (or raise) per missing_error_type.
            table_names = (
                [self.table_fields]
                if isinstance(self.table_fields, str)
                else self.table_fields
            )
            for table_name in table_names:
                table_metadata = tpl_structure.get_table_metadata(table_name)
                if table_metadata:
                    table_fields[table_metadata.name] = list(
                        table_metadata.fields_metadata.keys()
                    )
                else:
                    self._report_missing(f"读取的表 '{table_name}' 不存在")
        else:
            # Dict of table -> fields, both possibly given as titles.
            for table_key, field_keys in self.table_fields.items():
                table_metadata = tpl_structure.get_table_metadata(table_key)
                if not table_metadata:
                    self._report_missing(f"读取的表 '{table_key}' 不存在")
                    continue
                field_names = []
                for field_key in field_keys:
                    field_metadata = table_metadata.get_field_metadata(field_key)
                    if field_metadata:
                        field_names.append(field_metadata.name)
                    else:
                        self._report_missing(
                            f"读取的表字段 '{field_key}' 在表 `{table_key}` 中不存在"
                        )
                table_fields[table_metadata.name] = field_names
                if self.main_key_values and table_key in self.main_key_values:
                    main_key_values[table_metadata.name] = self.main_key_values[
                        table_key
                    ]
        return table_fields, main_key_values

    def _resolve_format_dict(self, table_data_frame: TableData) -> dict:
        """Map user-supplied format keys (titles or names) to field names.

        Looks up the table first by title, then by name; within the table,
        each field key is resolved first as a title, then as a column name.
        Unresolvable field keys are silently dropped (original behavior).
        """
        if not self.format_dict:
            return {}
        if table_data_frame.title in self.format_dict:
            raw_formats = self.format_dict[table_data_frame.title]
        elif table_data_frame.name in self.format_dict:
            raw_formats = self.format_dict[table_data_frame.name]
        else:
            return {}

        field_title_to_name = table_data_frame.title_to_name
        resolved: dict = {}
        for field_key, field_format in raw_formats.items():
            if field_key in field_title_to_name:
                # It's a field title
                resolved[field_title_to_name[field_key]] = field_format
            elif field_key in table_data_frame.columns:
                # It's already a field name
                resolved[field_key] = field_format
        return resolved

    def execute(self) -> PortTypeHint.TableCollection | None:
        """Read the configured tables into a ``TableCollection``.

        Publishes the collection on OutputTables and the template structure
        on OutputTemplate; returns None when no token can be resolved.
        """
        input_token = self.get_token(self._ports_in["InputToken"].data)
        if input_token is None:
            self._ports_out["OutputTables"].data = None
            return None
        token, proj_id, host = input_token

        # Get project template structure (no app info needed here).
        tpl_structure: GdimTemplate = get_tpl_structure(
            user_token=token,
            proj_id=proj_id,
            host=host,
            gdim=self.gdim,
            get_app_info=False,
        )

        table_fields, main_key_values = self._resolve_table_fields(tpl_structure)

        # The key in tables_data is the table name.
        # Fix: use the resolved token/proj_id/host locals here. The previous
        # code referenced self.token/self.proj_id/self.host, which are never
        # assigned in __init__ and raised AttributeError at runtime.
        tables_data: dict[str, GdimTableData] = get_table_data(
            user_token=token,
            proj_id=proj_id,
            table_fields=table_fields,
            main_key_values=main_key_values,
            tpl_structure=tpl_structure,
            host=host,
            gdim=self.gdim,
        )

        # Convert tables_data to TableCollection
        table_collection = TableCollection(name=self.table_collection_name)
        for table_data in tables_data.values():
            if not table_data.data:
                self._report_empty(table_data.table_metadata.title)
                continue
            table_metadata = table_data.table_metadata
            # GBIM templates carry no table_type; treat them as regular tables.
            table_type = table_metadata.table_type if self.gdim else "regular"
            table_data_frame = TableData(
                table_data.data,
                name=table_metadata.name,  # type: ignore
                title=table_metadata.title,
                description=table_metadata.description,
                table_type=table_type,
                fields_meta=table_metadata.fields_metadata,
            )
            if self.gdim:
                # GDIM rows carry an internal "id" column: rename it to
                # gdim_id when requested, otherwise drop it.
                if self.keep_gdim_id:
                    table_data_frame.rename_columns({"id": "gdim_id"}, inplace=True)
                else:
                    table_data_frame.drop(columns=["id"], inplace=True)

            # Format the table data
            table_format_dict = self._resolve_format_dict(table_data_frame)
            if table_format_dict:
                table_data_frame = format_dataframe_type(
                    table_data_frame, table_format_dict
                )

            # Flag the main/sub tables and resolve the main key to a field name.
            main_table = False
            main_key = None
            sub_table = False
            if self.main_table_name:
                if self.main_table_name in (
                    table_data_frame.name,
                    table_data_frame.title,
                ):
                    main_table = True
                if self.main_key:
                    main_table_metadata = tpl_structure.get_table_metadata(
                        self.main_table_name
                    )
                    main_key_metadata = main_table_metadata.get_field_metadata(
                        self.main_key
                    )
                    main_key = (
                        main_key_metadata.name if main_key_metadata else self.main_key
                    )
            if self.sub_table_names and (
                table_data_frame.name in self.sub_table_names
                or table_data_frame.title in self.sub_table_names
            ):
                sub_table = True

            table_collection.add_table(
                table_data_frame,  # type: ignore
                main_table=main_table,
                main_key=main_key,
                sub_table=sub_table,
            )

        self._ports_out["OutputTables"].data = table_collection
        self._ports_out["OutputTemplate"].data = tpl_structure

        return table_collection


@status_manage
class GdimBoresCoordinateReader(PipeModule):
    """Read the coordinate information of bores from Gdim with 'bore_table (钻孔一览表)'.
    The output table includes column - ProfileNumber, XCoordinate, YCoordinate, Longitude, Latitude.
    """

    def __init__(
        self,
        mname: str | None = None,
        auto_run: bool = True,
        host: str | None = None,
        proj_id: int | None = None,
        token: str | None = None,
        gdim: bool = True,
    ) -> None:
        """Initialize GdimBoresCoordinateReader object.

        Parameters
        ----------
        host: str | None, default: None
            If it's None, default value will be used, for example: "https://gdim.kulunsoft.com"

        proj_id: int | None, default: None
            The id of the project.

        token: str | None, default: None
            The token of the user.

        gdim: bool, default: True
            Whether to get the data from GDIM. If False, the data is from GBIM.
        """
        super().__init__(mname=mname, auto_run=auto_run)

        # Pre-seed the input port only when credentials were supplied directly.
        if token is None and proj_id is None:
            data = None
        else:
            data = (token, proj_id)

        self._ports_in = {
            "InputToken": Port(
                ptype=PortType.Token,
                data=data,
                pdoc="The token of the user.",
            )
        }
        self._ports_out = {
            "OutputTable": Port(
                ptype=PortType.TableData,
                pdoc="The data of the bores coordinate information.",
            )
        }

        self.host = host
        self.proj_id = proj_id
        self.token = token
        self.gdim = gdim  # TODO: not implemented yet

    @property
    def OutputTable(self) -> PortTypeHint.TableData | None:
        """The bores-coordinate table produced by the last ``execute`` run."""
        return self._ports_out["OutputTable"].data

    def set_cal_params(self, reset: bool = True) -> dict[str, RangeModel]:
        """Expose connection settings as calculation parameters."""
        return {
            "host": RangeModel(vtype="str", default=self.host, title="平台网址"),
            "proj_id": RangeModel(vtype="int", default=self.proj_id, title="项目id"),
            "token": RangeModel(vtype="str", default=self.token, title="用户token"),
            "gdim": RangeModel(
                vtype="bool", default=self.gdim, title="是否从GDIM获取数据"
            ),
        }

    def execute(self) -> PortTypeHint.TableData | None:
        """Fetch bore coordinates, standardize column names, and publish them.

        Returns the resulting ``TableData`` (also set on OutputTable), or None
        when no token/project id is available.
        """
        input_token = self._ports_in["InputToken"].data

        if input_token:
            self.token, self.proj_id = input_token
        if self.proj_id is None or self.token is None:
            self._ports_out["OutputTable"].data = None
            return None

        bores_coordinate = get_bores_coordinate(
            pid=self.proj_id, token=self.token, host=self.host
        )
        # Map the raw API labels to the engine's standard terminology.
        # NOTE(review): if get_bores_coordinate returns a pandas DataFrame, a
        # positional mapper renames the *index*; confirm whether columns=
        # is intended here.
        bores_coordinate.rename(
            {
                "boreholeNo": GeoResults.BoreNumber,
                "coordinateX": GeneralProps.XCoordinate,
                "coordinateY": GeneralProps.YCoordinate,
                "longitude": GeneralProps.Longitude,
                "latitude": GeneralProps.Latitude,
            },
            inplace=True,
        )
        fields_metadata = [
            FieldMetadata(
                name=GeoResults.BoreNumber,
                title=GeoResults.BoreNumber.title,
                unit=Units.UNITLESS,
            ),
            FieldMetadata(
                name=GeneralProps.XCoordinate,
                title=GeneralProps.XCoordinate.title,
                unit=Units.m,
            ),
            FieldMetadata(
                name=GeneralProps.YCoordinate,
                title=GeneralProps.YCoordinate.title,
                unit=Units.m,
            ),
            FieldMetadata(
                name=GeneralProps.Longitude,
                title=GeneralProps.Longitude.title,
                unit=Units.DEGREE,
            ),
            FieldMetadata(
                name=GeneralProps.Latitude,
                title=GeneralProps.Latitude.title,
                unit=Units.DEGREE,
            ),
        ]

        table_data = TableData(
            bores_coordinate,
            name="bores_coordinate",
            title="钻孔坐标",
            fields_meta=fields_metadata,
        )

        self._ports_out["OutputTable"].data = table_data
        # Fix: return the result so the annotated return type holds, matching
        # the other reader modules in this file.
        return table_data

@status_manage
class GdimPipelineDbReader(PipeModule):
    """Read the data stored by pipeline (json_to_db) from the Gdim.

    This module retrieves data that was previously stored using the enhanced add_json_to_db
    functionality, which can include:
    - Output port data from modules (complex objects like TableData, TableCollection, SingleResult)
    - Module attributes/parameters (individual values like str, int, float, bool, etc.)
    - Pipeline attributes (workspace, app_name, etc.)
    - Module output attributes dictionaries

    The data is automatically deserialized using auto_deserialize to restore the original
    object types where applicable, while preserving individual attribute values unchanged.
    """

    def __init__(
        self,
        mname: str | None = "GdimPipelineDbReader",
        auto_run: bool = True,
        host: str | None = None,
        proj_id: int | None = None,
        token: str | None = None,
        pipeline_name: str | list[str] | None = None,
        run_selector: int | str = "latest",
    ) -> None:
        """Initialize GdimPipelineDbReader object.

        Parameters
        ----------
        host: str | None, default: None
            If it's None, default value will be used, for example: "https://gdim.kulunsoft.com"

        proj_id: int | None, default: None
            The id of the project.

        token: str | None, default: None
            The token of the user.

        pipeline_name: str | list[str] | None, default: None
            The name(s) of the pipeline(s) used in the Gdim APP.

        run_selector: int | str, default: "latest"
            Which pipeline run to select when pipeline_name is a single string.
            This parameter is ignored when pipeline_name is a list (multiple pipelines).
            Available options:
            - "latest": Use the most recent run (default)
            - "first": Use the first run
            - "all": Return all runs as MultiResultsDict (even for single pipeline)
            - int: Use the run at the specified index (0-based)
        """
        super().__init__(mname=mname, auto_run=auto_run)

        # Pre-fill the input port only when at least one credential is given.
        if token is None and proj_id is None:
            data = None
        else:
            data = (token, proj_id)

        self._ports_in = {
            "InputToken": Port(
                ptype=PortType.Token,
                data=data,
                pdoc="The token of the user.",
            )
        }
        self._ports_out = {
            "OutputResultsDict": Port(
                ptype=[PortType.ResultsDict, PortType.MultiResultsDict],
                pdoc="The data from pipeline json_to_db including output ports, module attributes, and pipeline attributes.",
            )
        }

        self.host = host
        self.proj_id = proj_id
        self.token = token
        self.pipeline_name = pipeline_name
        self.run_selector = run_selector

    @property
    def InputToken(self) -> None:
        raise AttributeError("Property 'InputToken' is write-only.")

    @InputToken.setter
    def InputToken(self, value: PortTypeHint.Token) -> None:
        self["InputToken"] = value

    @property
    def OutputResultsDict(
        self,
    ) -> PortTypeHint.ResultsDict | PortTypeHint.MultiResultsDict | None:
        """Get the results dictionary containing data from pipeline json_to_db.

        Returns
        -------
        dict or None
            Dictionary with keys following json_to_db naming convention:
            - "module@port": Output port data (TableData, TableCollection, SingleResult, etc.)
            - "pipeline@attr": Pipeline attribute values (str, int, bool, Path, etc.)
            - "module#param": Module parameter values (any type)
            Values are automatically deserialized where applicable.
        """
        return self._ports_out["OutputResultsDict"].data

    def set_cal_params(self, reset: bool = True) -> dict[str, RangeModel]:
        return {
            "host": RangeModel(vtype="str", default=self.host, title="平台网址"),
            "proj_id": RangeModel(vtype="int", default=self.proj_id, title="项目id"),
            "token": RangeModel(vtype="str", default=self.token, title="用户token"),
            "pipeline_name": RangeModel(
                vtype=("str", "list"),
                default=self.pipeline_name,
                title="选择pipeline",
                list_type="str",
            ),
            "run_selector": RangeModel(
                vtype=("str", "int"),
                default=self.run_selector,
                choices=("latest", "first", "all"),
                title="运行选择器",
            ),
        }

    @staticmethod
    def _format_runs(runs_data) -> list[list[PipelineRunDataItem]]:
        """Convert raw json_to_db run dicts into PipelineRunDataItem lists."""
        return [
            [PipelineRunDataItem(**item) for item in run_data]
            for run_data in runs_data
        ]

    def execute(
        self,
    ) -> PortTypeHint.ResultsDict | PortTypeHint.MultiResultsDict | None:
        """Execute the reader to fetch and deserialize pipeline json_to_db data.

        Returns
        -------
        ResultsDict | MultiResultsDict | None
            Return type depends on pipeline_name and run_selector:

            **When pipeline_name is str (single pipeline):**
            - ResultsDict: When run_selector is "latest", "first", or int
            - MultiResultsDict: When run_selector is "all"

            **When pipeline_name is list[str] (multiple pipelines):**
            - MultiResultsDict: Always returns all runs for all pipelines
              (run_selector parameter is ignored)

            **When no data or authentication fails:**
            - None: When no data is found or authentication fails

        The data is automatically deserialized based on metadata, and source_info
        provides detailed information about data origins including value types.
        """
        input_token = self._ports_in["InputToken"].data

        if input_token:
            self.token, self.proj_id = input_token
        if self.proj_id is None or self.token is None or self.pipeline_name is None:
            self._ports_out["OutputResultsDict"].data = None
            return None

        if isinstance(self.pipeline_name, list):
            # Multiple pipelines requested - run_selector is ignored.
            # Always return all runs for all pipelines as MultiResultsDict.
            formatted_pipelines_data = {}
            for pipeline_name in self.pipeline_name:
                try:
                    runs_data = get_pipeline_json_to_db(
                        token=self.token,
                        pid=self.proj_id,
                        pipelineName=pipeline_name,
                        host=self.host,
                    )
                except Exception as e:
                    # A single failing pipeline must not abort the whole read;
                    # warn (file convention: warnings.warn, not print) and
                    # record an empty runs list instead.
                    warnings.warn(
                        f"Failed to fetch data for pipeline '{pipeline_name}': {e}",
                        GDIWarning,
                    )
                    runs_data = []
                formatted_pipelines_data[pipeline_name] = self._format_runs(runs_data)

            result = MultiResultsDict.from_pipeline_runs(formatted_pipelines_data)
            self._ports_out["OutputResultsDict"].data = result
            return result

        # Single pipeline requested.
        runs_data = get_pipeline_json_to_db(
            token=self.token,
            pid=self.proj_id,
            pipelineName=self.pipeline_name,
            host=self.host,
        )

        if not runs_data:
            self._ports_out["OutputResultsDict"].data = None
            return None

        formatted_runs_data = self._format_runs(runs_data)

        if self.run_selector == "all":
            # Return all runs as MultiResultsDict even for a single pipeline.
            result = MultiResultsDict.from_pipeline_runs(
                {self.pipeline_name: formatted_runs_data}
            )
        else:
            # Return the selected run as ResultsDict.
            result = ResultsDict.from_multiple_runs(
                formatted_runs_data, self.run_selector
            )
        self._ports_out["OutputResultsDict"].data = result
        return result


@module_decorator()
class FileDownloader(PipeModule):
    """Download the file from the GDIM platform or a url."""

    InputFileUrl: PortReference[PortTypeHint.HttpUrl | PortTypeHint.GdimFile]
    OutputFile: PortReference[PortTypeHint.FilePath]

    _port_docs = {
        "InputFileUrl": "The url of the file.",
        "OutputFile": "The saved file.",
    }

    def __init__(
        self,
        mname: str | None = "FileDownloader",
        auto_run: bool = True,
        file_url: PortTypeHint.HttpUrl | PortTypeHint.GdimFile | dict | None = None,
        output_path: str | Path | None = None,
        output_name: str | None = None,
        pipeline_workspace_as_output_path: bool = True,
    ) -> None:
        """Initialize FileDownloader object.

        Parameters
        ----------
        file_url: PortTypeHint.HttpUrl | PortTypeHint.GdimFile | dict | None, default: None
            A GdimFile object or a dict makes the file download from the GDIM
            platform; a plain url makes it download from that url.

        output_path: str | Path | None, default: None
            The directory used to save the file.
            If the pipeline is not found or its 'workspace' attribute is None,
            the current working directory will be used.

        output_name: str | None, default: None
            The name for the downloaded file.
            If it's None, the file name will be extracted from the file url.

        pipeline_workspace_as_output_path: bool, default: True
            If True, the pipeline's 'workspace' attribute is tried first as the
            output path; if False, the 'output_path' parameter is tried first.

        Note
        ----
        When the data in InputFileUrl is not None, it overrides self.file_url.
        """
        super().__init__(mname=mname, auto_run=auto_run)

        if file_url is not None:
            self.InputFileUrl = file_url

        self.file_url = file_url
        self.output_path = output_path
        self.output_name = output_name
        self.pipeline_workspace_as_output_path = pipeline_workspace_as_output_path

        self._title = "下载文件"
        self._description = "下载文件，包括从给定的地址下载文件和从GDIM平台下载文件"

    def update_ui_schema(self, reset: bool = False) -> dict[str, UIAttributeSchema]:
        schema = {
            "file_url": FileAttributeSchema(
                title="下载地址",
                upload=False,
            ),
        }
        return schema

    def execute(self) -> PortTypeHint.FilePath | None:
        # The input port, when populated, takes precedence over the attribute.
        port_url: PortTypeHint.HttpUrl | PortTypeHint.GdimFile | None = (
            self._ports_in["InputFileUrl"].data
        )
        if port_url:
            self.file_url = port_url

        if self.file_url is None:
            self._ports_out["OutputFile"].data = None
            return None

        # Resolve the concrete download url from whichever form was supplied.
        if isinstance(self.file_url, PortTypeHint.GdimFile):
            resolved_url = self.file_url.get_download_url()
        elif isinstance(self.file_url, dict):
            resolved_url = GdimMinIOFile(**self.file_url).get_download_url()
        else:
            resolved_url = self.file_url

        target_dir = Path(
            self.get_workspace(
                self.output_path, self.pipeline_workspace_as_output_path
            )
        )
        # Without an explicit name, let the downloader derive it from the url.
        target = target_dir / self.output_name if self.output_name else target_dir

        saved_file = download_gdim_file(resolved_url, target)

        self._ports_out["OutputFile"].data = saved_file
        return saved_file


##########################
# GDIM APP Readers
##########################
@module_decorator()
class GdimAppProjectInfoReader(PipeModule):
    """Read the data from Project Information APP."""

    InputToken: PortReference[PortTypeHint.Token]
    OutputProjectInfo: PortReference[PortTypeHint.SingleResult]
    OutputCoordinateSystem: PortReference[PortTypeHint.CoordinateSystem]

    _port_docs = {
        "InputToken": "The token of the user.",
        "OutputProjectInfo": "The data of the project information.",
        "OutputCoordinateSystem": "The coordinate system of the project.",
    }

    def __init__(
        self,
        mname: str = "GdimAppProjectInfoReader",
        auto_run: bool = True,
        token: str | None = None,
        proj_id: int | None = None,
        host: str | None = None,
        gdim: bool = True,
    ) -> None:
        """Initialize GdimAppProjectInfoReader object.

        Parameters
        ----------
        token: str | None, default: None
            The token of the user.

        proj_id: int | None, default: None
            The id of the project.

        host: str | None, default: None
            If it's None, default value will be used, for example: "https://gdim.kulunsoft.com"

        gdim: bool, default: True
            Whether to get the data from GDIM. If False, the data is from GBIM.

        Ports
        -----
        InputToken: PortTypeHint.Token
            The token of the user.

        OutputProjectInfo: PortTypeHint.SingleResult
            The data of the project information.

        OutputCoordinateSystem: PortTypeHint.CoordinateSystem
            The coordinate system of the project.
        """
        super().__init__(mname=mname, auto_run=auto_run)

        if token is not None:
            self.InputToken = (token, proj_id, host)  # type: ignore

        self.gdim = gdim
        self._related_gdim_app = "projectInformation"

        self._title = "获取GDIM项目信息"
        self._description = "获取GDIM项目信息，包括项目信息和坐标系统"

    def execute(self) -> PortTypeHint.SingleResult | None:
        """Fetch the project information and publish it on the output ports."""
        input_token: PortTypeHint.Token | None = self._ports_in["InputToken"].data

        input_token = self.get_token(input_token)
        if input_token is None:
            self._ports_out["OutputProjectInfo"].data = None
            self._ports_out["OutputCoordinateSystem"].data = None
            return None
        token, proj_id, host = input_token

        proj_info_data = get_project_info(
            pid=proj_id, token=token, host=host, gdim=self.gdim
        )

        if self.gdim:
            return self._read_gdim_project_info(proj_info_data, token, proj_id, host)
        # NOTE(review): OutputCoordinateSystem is left untouched in GBIM mode,
        # matching the original behavior — confirm this is intended.
        return self._read_gbim_project_info(proj_info_data)

    @staticmethod
    def _address_results(project_address) -> list[UnitResult]:
        """Split a GDIM project address object into its four UnitResults."""
        if project_address is None:
            province = city = district = address = None
        else:
            province = project_address.province
            city = project_address.city
            district = project_address.district
            address = project_address.address
        address_parts = [
            ("projectAddressProvince", "项目地址-省", province),
            ("projectAddressCity", "项目地址-市", city),
            ("projectAddressDistrict", "项目地址-区", district),
            ("projectAddressDetail", "项目地址-详细", address),
        ]
        return [
            UnitResult(name=name, title=title, unit=Units.UNITLESS, value=value)
            for name, title, value in address_parts
        ]

    def _read_gdim_project_info(
        self, proj_info_data, token, proj_id, host
    ) -> PortTypeHint.SingleResult:
        """Build the project-info result and coordinate system from GDIM data."""
        proj_info_structure = get_project_info_structure(
            pid=proj_id, token=token, host=host
        )
        project_info_list = []
        # Walk the project-info structure; well-known fields get special
        # handling, everything else is treated as a customer field.
        for field_meta in proj_info_structure.fields_metadata.values():
            if field_meta.name == "projectAddress":
                project_info_list.extend(
                    self._address_results(proj_info_data.projectAddress)
                )
            elif field_meta.name == "projectName":
                project_info_list.append(
                    UnitResult(
                        name="projectName",
                        title=field_meta.title,
                        unit=Units.UNITLESS,
                        value=proj_info_data.projectName,
                    )
                )
            elif field_meta.name == "dataTemplateId":
                project_info_list.append(
                    UnitResult(
                        name="dataTemplateId",
                        title=field_meta.title,
                        unit=Units.UNITLESS,
                        value=proj_info_data.dataTemplateId,
                    )
                )
            elif field_meta.name != "dataTemplateGroupId":
                # Customer fields (dataTemplateId is already handled above).
                project_info_list.append(
                    UnitResult(
                        name=field_meta.name,
                        title=field_meta.title,
                        unit=field_meta.unit,
                        value=proj_info_data.customFields.get(field_meta.name),
                    )
                )

        # Process coordinate system.
        coordinate_system = CoordinateSystem(
            name=proj_info_data.coordinateSystem,
            zoneMethod=proj_info_data.zoneMethod,
            zoneNumber=proj_info_data.zoneNumber,
            centralMeridian=proj_info_data.centralMeridian,
            refPointLongitude=proj_info_data.refPointLongitude,
            refPointLatitude=proj_info_data.refPointLatitude,
            refPointX=proj_info_data.refPointX,
            refPointY=proj_info_data.refPointY,
            elevationDatum=proj_info_data.elevationDatum,
            yAxisDirection=proj_info_data.yAxisDirection,
        )

        project_info_result = SingleResult(result=project_info_list)

        self._ports_out["OutputProjectInfo"].data = project_info_result
        self._ports_out["OutputCoordinateSystem"].data = coordinate_system
        return project_info_result

    def _read_gbim_project_info(self, proj_info_data) -> PortTypeHint.SingleResult:
        """Build the project-info result from GBIM data (dict-shaped payload)."""
        extra = proj_info_data.get("extra_data")

        def _as_float(raw):
            # Numeric GBIM fields arrive as strings; empty string/None -> None.
            return float(raw) if raw else None

        result_list = [
            UnitResult(
                name="project_name",
                title="工程名称",
                unit=Units.UNITLESS,
                value=proj_info_data.get("project_name"),
            ),
            UnitResult(
                name="project_number",
                title="工程编号",
                unit=Units.UNITLESS,
                value=proj_info_data.get("project_no"),
            ),
            UnitResult(
                name="survey_stage",
                title="勘察阶段",
                unit=Units.UNITLESS,
                value=extra.get("kl_explore_stage"),
            ),
            UnitResult(
                name="landform_unit",
                title="地貌单元",
                unit=Units.UNITLESS,
                value=extra.get("kl_landform_unit"),
            ),
            UnitResult(
                name="longitude",
                title="经度",
                unit=Units.UNITLESS,
                value=extra.get("longitude"),
            ),
            UnitResult(
                name="latitude",
                title="纬度",
                unit=Units.UNITLESS,
                value=extra.get("latitude"),
            ),
            UnitResult(
                name="survey_unit",
                title="勘察单位",
                unit=Units.UNITLESS,
                value=extra.get("kl_explore_unit"),
            ),
            UnitResult(
                name="construction_unit",
                title="建设单位",
                unit=Units.UNITLESS,
                value=extra.get("kl_construction_unit"),
            ),
            UnitResult(
                name="water_level_range",
                title="地下水位季节性变化幅度",
                unit=Units.m,
                value=extra.get("kl_groundwater_seasonal_range"),
            ),
            UnitResult(
                name="recent_highest_water_level",
                title="近3~5年最高水位",
                unit=Units.m,
                value=_as_float(extra.get("kl_highest_water_level_recently")),
            ),
            UnitResult(
                name="highest_water_level",
                title="历史最高水位",
                unit=Units.m,
                value=_as_float(extra.get("kl_highest_water_level_in_history")),
            ),
            UnitResult(
                name="anti_floating_water_level",
                title="抗浮设防水位",
                unit=Units.m,
                value=_as_float(extra.get("kl_anti_floating_water_level")),
            ),
            # String value: a range such as "100-200" is allowed, so no cast.
            UnitResult(
                name="site_equivalent_shear_wave",
                title="场地等效剪切波速",
                unit=Units.m_s,
                value=extra.get("kl_site_eq_shear_wave_velocity"),
            ),
            UnitResult(
                name="site_overburden_thickness",
                title="场地覆盖层厚度",
                unit=Units.m,
                value=_as_float(extra.get("kl_dov")),
            ),
        ]

        location = extra.get("kl_project_location")
        if location:
            project_address_value = (
                location.get("province")
                + location.get("town")
                + location.get("county")
            )
            detail_address_value = location.get("detailedAddress")
        else:
            project_address_value = None
            detail_address_value = None
        project_address = UnitResult(
            name="project_address", title="项目地址", value=project_address_value
        )
        # NOTE: "detaile_address" keeps the historical (misspelled) result name
        # for backward compatibility with downstream consumers.
        detail_address = UnitResult(
            name="detaile_address", title="详细地址", value=detail_address_value
        )
        result_list.extend([project_address, detail_address])

        result = SingleResult(result=result_list)
        self._ports_out["OutputProjectInfo"].data = result
        return result


@status_manage
class GdimAppGeoParamsReader(PipeModule):
    """Read the data from the Gdim APP - Geo Parameters Table (岩土参数建议值表)."""

    def __init__(
        self,
        mname: str | None = None,
        auto_run: bool = True,
        host: str | None = None,
        proj_id: int | None = None,
        token: str | None = None,
        fields: list[str] | None = None,
        format_dict: dict[str, str] | None = None,
        names_type: Literal["name", "title"] = "title",
        column_name: Literal["name", "title"] = "name",
        gdim: bool = True,
    ) -> None:
        """Initialize GdimAppGeoParamsReader object.

        Parameters
        ----------
        host: str | None, default: None
            If it's None, default value will be used, for example: "https://gdim.kulunsoft.com"

        proj_id: int | None, default: None
            The id of the project.

        token: str | None, default: None
            The token of the user.

        fields: list[str] | None, default: None
            The field name list.
            If `names_type` is `title`, the field title will be used.
            If `names_type` is `name`, the field name will be used.
            Example: `['地层编号','岩土名称']`
            Note: if the port "InputFields" is not None, the parameter "fields" will be overwritten.

        format_dict: dict[str, str] | None, default: None
            The format dict.
            If `names_type` is `title`, the key is the field title.
            If `names_type` is `name`, the key is the field name.
            Example: `{'地层编号': 'str', '岩土名称': 'str'}`

        names_type: Literal["name", "title"], default: "title"
            The type of the field name.
            If it's `title`, the title will be used which is user-friendly.
            If it's `name`, the name will be used which is used in the database.

        column_name: Literal["name", "title"], default: "name"
            The type of the column name in the output table.
            If it's `title`, the title will be used which is user-friendly.
            If it's `name`, the name will be used which is used in the database.

        gdim: bool, default: True
            Whether to get the data from GDIM. If False, the data is from GBIM.
        """
        super().__init__(mname=mname, auto_run=auto_run)

        # Pre-fill the input port only when at least one credential is given.
        if token is None and proj_id is None:
            data = None
        else:
            data = (token, proj_id)

        self._ports_in = {
            "InputToken": Port(
                ptype=PortType.Token,
                data=data,
                pdoc="The token of the user.",
            ),
            "InputFields": Port(
                ptype=PortType.GeneralArray,
                data=fields,
                pdoc="The fields of the geo parameters table.",
            ),
        }
        self._ports_out = {
            "OutputTable": Port(
                ptype=PortType.TableData, pdoc="The data of the geo parameters table."
            )
        }

        self.host = host
        self.proj_id = proj_id
        self.token = token
        self.fields = fields
        self.format_dict = format_dict
        self.names_type = names_type
        self.column_name = column_name
        self.gdim = gdim  # TODO: not implemented yet

    @property
    def InputToken(self) -> None:
        raise AttributeError("Property 'InputToken' is write-only.")

    @InputToken.setter
    def InputToken(self, value: PortTypeHint.Token) -> None:
        self["InputToken"] = value

    @property
    def InputFields(self) -> None:
        raise AttributeError("Property 'InputFields' is write-only.")

    @InputFields.setter
    def InputFields(self, value: PortTypeHint.GeneralArray) -> None:
        self["InputFields"] = value

    @property
    def OutputTable(self) -> PortTypeHint.TableData | None:
        return self._ports_out["OutputTable"].data

    def set_cal_params(self, reset: bool = True) -> dict[str, RangeModel]:
        return {
            "host": RangeModel(vtype="str", default=self.host, title="平台网址"),
            "proj_id": RangeModel(vtype="int", default=self.proj_id, title="项目id"),
            "token": RangeModel(vtype="str", default=self.token, title="用户token"),
            "fields": RangeModel(vtype="list", default=self.fields, title="字段列表"),
            "names_type": RangeModel(
                vtype="str",
                default=self.names_type,
                choices=("name", "title"),
                title="字段名类型",
            ),
            "column_name": RangeModel(
                vtype="str",
                default=self.column_name,
                choices=("name", "title"),
                title="列名类型",
            ),
            "gdim": RangeModel(
                vtype="bool", default=self.gdim, title="是否从GDIM获取数据"
            ),
        }

    def execute(self) -> PortTypeHint.TableData | None:
        """Fetch the geo parameters table, filter and format it, then publish it."""
        input_token = self._ports_in["InputToken"].data
        input_fields = self._ports_in["InputFields"].data

        if input_token:
            self.token, self.proj_id = input_token
        if (
            self.proj_id is None
            or self.token is None
            or (self.fields is None and input_fields is None)
        ):
            self._ports_out["OutputTable"].data = None
            return None

        # Data arriving on the fields port overrides the constructor value.
        if input_fields:
            self.fields = input_fields

        title_name_map = get_geo_params_table_title_name_map(
            pid=self.proj_id, token=self.token, host=self.host
        )
        if self.names_type == "title":
            field_names = [title_name_map[field] for field in self.fields]
        else:
            field_names = self.fields

        table_data = get_geo_params_table_data(
            pid=self.proj_id, token=self.token, host=self.host
        )
        table_data_frame = TableData(
            table_data, name="geo_params_table", title="岩土参数建议值表"
        )
        if table_data_frame.empty:
            warnings.warn("岩土参数建议值表中没有数据！", GDIDataQualityWarning)
            self._ports_out["OutputTable"].data = None
            return None

        # Keep only the requested fields; missing ones become NaN columns.
        for field_name in field_names:
            if field_name not in table_data_frame.columns:
                table_data_frame[field_name] = np.nan
        table_data_frame = table_data_frame[field_names]
        # Add fields metadata (reverse map: database name -> display title).
        name_title_map = {v: k for k, v in title_name_map.items()}
        fields_meta = [
            FieldMetadata(name=field, title=name_title_map[field])
            for field in field_names
        ]
        table_data_frame.update_fields_meta(fields_meta)

        # Format the table data; when keys are titles, translate them to names
        # first (unknown titles are silently dropped).
        if self.format_dict:
            if self.names_type == "title":
                title_to_name = table_data_frame.title_to_name
                format_dict = {
                    title_to_name[k]: v
                    for k, v in self.format_dict.items()
                    if k in title_to_name
                }
            else:
                format_dict = self.format_dict
            table_data_frame = format_dataframe_type(table_data_frame, format_dict)

        if self.column_name == "title":
            table_data_frame.rename_columns_to_titles()

        self._ports_out["OutputTable"].data = table_data_frame
        return table_data_frame


@status_manage
class GdimAppSurveyStatReader(PipeModule):
    """Read the data from the Gdim APP - Survey Statistics."""

    def __init__(
        self,
        mname: str | None = None,
        auto_run: bool = True,
        host: str | None = None,
        proj_id: int | None = None,
        token: str | None = None,
        stat_type: (
            Literal[
                "地层表",
                "常规试验表",
                "标贯",
                "岩石试验",
                "动探",
                "双桥静探",
                "物理力学指标统计表",
            ]
            | None
        ) = None,
        format_dict: dict[str, str] | None = None,
        column_name: Literal["name", "title"] = "name",
        gdim: bool = True,
    ) -> None:
        """Initialize GdimAppSurveyStatReader object.

        Parameters
        ----------
        host: str | None, default: None
            If it's None, the default value will be used, for example: "https://gdim.kulunsoft.com"

        proj_id: int | None, default: None
            The id of the project.

        token: str | None, default: None
            The token of the user.

        stat_type: Literal["地层表", "常规试验表", "标贯", "岩石试验", "动探", "双桥静探", "物理力学指标统计表"] | None, default: None
            The type of the survey statistics.
            If it's "物理力学指标统计表", the name of each table is the layer number.

        format_dict: dict[str, str] | None, default: None
            The format dict for column formatting.
            Key is the column name, value is the column format.
            Example: `{'depth': 'float', 'count': 'int'}`
            Note: Only existing columns will be formatted, non-existing columns will be ignored.

        column_name: Literal["name", "title"], default: "name"
            The type of the column name in the output table.
            If it's `title`, the title will be used which is user-friendly.
            If it's `name`, the name will be used which is used in the database.

        gdim: bool, default: True
            Whether to get the data from GDIM. If False, the data is from GBIM.
        """
        super().__init__(mname=mname, auto_run=auto_run)

        # Seed the input port only when credentials were passed in directly.
        if token is None and proj_id is None:
            data = None
        else:
            data = (token, proj_id)

        self._ports_in = {
            "InputToken": Port(
                ptype=PortType.Token,
                data=data,
                pdoc="The token of the user.",
            )
        }
        self._ports_out = {
            "OutputTables": Port(
                ptype=PortType.TableCollection,
                pdoc="The data of the survey statistics tables.",
            )
        }

        self.host = host
        self.proj_id = proj_id
        self.token = token
        self.stat_type = stat_type
        self.format_dict = format_dict
        self.column_name = column_name
        self.gdim = gdim  # TODO: not implemented yet

    @property
    def InputToken(self) -> None:
        raise AttributeError("Property 'InputToken' is write-only.")

    @InputToken.setter
    def InputToken(self, value: PortTypeHint.Token) -> None:
        self["InputToken"] = value

    @property
    def OutputTables(self) -> PortTypeHint.TableCollection | None:
        return self._ports_out["OutputTables"].data

    def set_cal_params(self, reset: bool = True) -> dict[str, RangeModel]:
        return {
            "host": RangeModel(vtype="str", default=self.host, title="平台网址"),
            "proj_id": RangeModel(vtype="int", default=self.proj_id, title="项目id"),
            "token": RangeModel(vtype="str", default=self.token, title="用户token"),
            "stat_type": RangeModel(
                vtype="str",
                default=self.stat_type,
                # Keep in sync with the Literal accepted by __init__ (the
                # previous tuple was missing "双桥静探" and "物理力学指标统计表").
                choices=(
                    "地层表",
                    "常规试验表",
                    "标贯",
                    "岩石试验",
                    "动探",
                    "双桥静探",
                    "物理力学指标统计表",
                ),
                title="统计类型",
            ),
            "format_dict": RangeModel(
                vtype="dict", default=self.format_dict, title="列格式字典"
            ),
            "column_name": RangeModel(
                vtype="str",
                default=self.column_name,
                choices=("name", "title"),
                title="列名类型",
            ),
            "gdim": RangeModel(
                vtype="bool", default=self.gdim, title="是否从GDIM获取数据"
            ),
        }

    def execute(self) -> PortTypeHint.TableCollection | None:
        """Fetch the survey-statistics tables and publish them on ``OutputTables``.

        Returns
        -------
        PortTypeHint.TableCollection | None
            The collection of tables, or None when token/project/stat_type are missing.
        """
        input_token = self._ports_in["InputToken"].data

        if input_token:
            self.token, self.proj_id = input_token
        if self.proj_id is None or self.token is None or self.stat_type is None:
            self._ports_out["OutputTables"].data = None
            return None

        title_name_map = get_survey_statistics_table_title_name_map(
            pid=self.proj_id, token=self.token, host=self.host
        )
        # "物理力学指标统计表" is a pseudo statistic type served under the fixed id 0.
        if self.stat_type == "物理力学指标统计表":
            stat_type_id = 0
        else:
            stat_type_id = title_name_map[self.stat_type]
        tables_data = get_survey_statistics_table_data(
            pid=self.proj_id, ufid=stat_type_id, token=self.token, host=self.host
        )

        table_collection = TableCollection()
        for table_name, (table_data, field_titles) in tables_data.items():
            column_names = table_data.columns
            # The name will be corrected in TableData using the column name
            field_metadata = [
                FieldMetadata(name=column_names[idx], title=field_title)
                for idx, field_title in enumerate(field_titles)
            ]
            table_data_frame = TableData(
                table_data,
                name=table_name,
                title=table_name,
                fields_meta=field_metadata,
            )

            # Format only the columns that actually exist in the current table.
            if self.format_dict:
                table_format_dict = {
                    col_name: col_format
                    for col_name, col_format in self.format_dict.items()
                    if col_name in table_data_frame.columns
                }
                if table_format_dict:
                    table_data_frame = format_dataframe_type(
                        table_data_frame, table_format_dict
                    )

            table_collection.add_table(table_data_frame)

        if self.column_name == "title":
            table_collection.rename_columns_to_titles()

        self._ports_out["OutputTables"].data = table_collection
        # Fix: the collection was previously assigned to the port but never returned.
        return table_collection


@status_manage
class GdimAppReportTextReader(PipeModule):
    """Read the data from a Gdim APP - Report Text."""

    def __init__(
        self,
        mname: str = "GdimAppReportTextReader",
        auto_run: bool = True,
        host: str | None = None,
        proj_id: int | None = None,
        token: str | None = None,
        read_document: bool = False,
        gdim: bool = True,
    ) -> None:
        """Initialize GdimAppReportTextReader object.

        Parameters
        ----------
        host: str | None, default: None
            If it's None, the default value will be used, for example: "https://gdim.kulunsoft.com"

        proj_id: int | None, default: None
            The id of the project.

        token: str | None, default: None
            The token of the user.

        read_document: bool, default: False
            If False, only text content will be read.
            If True, only documents and templates (.docx files) will be read.

        gdim: bool, default: True
            Whether to get the data from GDIM. If False, the data is from GBIM.
        """
        super().__init__(mname=mname, auto_run=auto_run)

        # Seed the input port only when credentials were passed in directly.
        if token is None and proj_id is None:
            data = None
        else:
            data = (token, proj_id)

        self._ports_in = {
            "InputToken": Port(
                ptype=PortType.Token,
                data=data,
                pdoc="The token of the user.",
            )
        }
        self._ports_out = {
            "OutputSingleResult": Port(
                ptype=PortType.SingleResult, pdoc="The data of the report text."
            )
        }

        self.host = host
        self.proj_id = proj_id
        self.token = token
        self.read_document = read_document
        self.gdim = gdim  # TODO: Not implemented yet

    @property
    def InputToken(self) -> None:
        raise AttributeError("Property 'InputToken' is write-only.")

    @InputToken.setter
    def InputToken(self, value: PortTypeHint.Token) -> None:
        self["InputToken"] = value

    @property
    def OutputSingleResult(self) -> PortTypeHint.SingleResult | None:
        return self._ports_out["OutputSingleResult"].data

    def set_cal_params(self, reset: bool = True) -> dict[str, RangeModel] | None:
        return {
            "host": RangeModel(vtype="str", default=self.host, title="平台网址"),
            "proj_id": RangeModel(vtype="int", default=self.proj_id, title="项目id"),
            "token": RangeModel(vtype="str", default=self.token, title="用户token"),
            "gdim": RangeModel(
                vtype="bool", default=self.gdim, title="是否从GDIM获取数据"
            ),
        }

    @staticmethod
    def _path_unit_results(
        group: str,
        paths: list,
        names: list,
        description: Any,
    ) -> list[UnitResult]:
        """Build one UnitResult per (path, name) pair, prefixing each name with the group."""
        results = []
        for path, name in zip(paths, names):
            full_name = group + "_" + name
            results.append(
                UnitResult(
                    name=full_name,
                    title=full_name,
                    value=path,
                    description=description,
                )
            )
        return results

    def execute(self) -> PortTypeHint.SingleResult | None:
        """Read the report text library and publish it on ``OutputSingleResult``.

        Returns
        -------
        PortTypeHint.SingleResult | None
            Either the text entries or the document/template paths of every
            group, or None when token/project are missing.
        """
        input_token = self._ports_in["InputToken"].data

        if input_token:
            self.token, self.proj_id = input_token
        if self.proj_id is None or self.token is None:
            self._ports_out["OutputSingleResult"].data = None
            return None

        res_data = get_report_text_library_list(token=self.token, host=self.host)

        unit_result_list: list[UnitResult] = []
        if self.read_document:
            # All the documents/templates of every group are merged together
            # into a single flat list.
            for group, content in res_data.items():
                documents = content["document"]
                templates = content["template"]
                if documents["documents_path"]:
                    unit_result_list.extend(
                        self._path_unit_results(
                            group,
                            documents["documents_path"],
                            documents["documents_name"],
                            DataTypes.FilePath,
                        )
                    )
                if templates["templates_path"]:
                    unit_result_list.extend(
                        self._path_unit_results(
                            group,
                            templates["templates_path"],
                            templates["templates_name"],
                            DataTypes.TemplatePath,
                        )
                    )
        else:
            # Plain text entries only; empty texts are skipped.
            for group, content in res_data.items():
                if content["text"]:
                    unit_result_list.append(
                        UnitResult(
                            name=group,
                            title=group,
                            value=content["text"],
                            description=DataTypes.Text,
                        )
                    )
        result = SingleResult(result=unit_result_list)

        self._ports_out["OutputSingleResult"].data = result
        return result


@status_manage
class GdimAppCoordinateSystemReader(PipeModule):
    """Read the data from a Gdim APP - Coordinate System.
    Only used for GBIM, for GDIM platform, this module is not available.
    """

    def __init__(
        self,
        mname: str | None = None,
        auto_run: bool = True,
        host: str | None = None,
        proj_id: int | None = None,
        token: str | None = None,
        gdim: bool = True,
    ) -> None:
        """Initialize GdimAppCoordinateSystemReader object.

        Parameters
        ----------
        host: str | None, default: None
            If it's None, the default value will be used, for example: "https://gdim.kulunsoft.com"

        proj_id: int | None, default: None
            The id of the project.

        token: str | None, default: None
            The token of the user.

        gdim: bool, default: True
            Whether to get the data from GDIM. If False, the data is from GBIM.
        """

        super().__init__(mname=mname, auto_run=auto_run)

        # Seed the input port only when credentials were passed in directly.
        if token is None and proj_id is None:
            data = None
        else:
            data = (token, proj_id)

        self._ports_in = {
            "InputToken": Port(
                ptype=PortType.Token,
                data=data,
                pdoc="The token of the user.",
            )
        }
        self._ports_out = {
            "OutputCoordinateSystem": Port(
                ptype=PortType.CoordinateSystem,
                pdoc="The data of the coordinate system.",
            )
        }

        self.host = host
        self.proj_id = proj_id
        self.token = token
        self.gdim = gdim  # TODO: Not implemented yet

    @property
    def OutputCoordinateSystem(self) -> PortTypeHint.CoordinateSystem | None:
        return self._ports_out["OutputCoordinateSystem"].data

    @property
    def InputToken(self) -> None:
        raise AttributeError("Property 'InputToken' is write-only.")

    @InputToken.setter
    def InputToken(self, value: PortTypeHint.Token) -> None:
        self["InputToken"] = value

    def set_cal_params(self, reset: bool = True) -> dict[str, RangeModel] | None:
        # No user-tunable parameters for this module.
        return None

    def execute(self) -> PortTypeHint.CoordinateSystem | None:
        """Read the project's Y-axis orientation and publish a CoordinateSystem.

        Returns
        -------
        PortTypeHint.CoordinateSystem | None
            The coordinate system, or None when token/project are missing.

        Note
        ----
        The previous return annotation (``SingleResult``) was wrong: this
        method returns a ``CoordinateSystem``.
        """
        input_token = self._ports_in["InputToken"].data

        if input_token:
            self.token, self.proj_id = input_token
        if self.proj_id is None or self.token is None:
            self._ports_out["OutputCoordinateSystem"].data = None
            return None

        res_data = get_project_info(pid=self.proj_id, token=self.token, host=self.host)

        # Platform encoding: 0 -> Y axis points North, 1 -> Y axis points East.
        y_direction = res_data.get("horizontal_y")
        if y_direction == 0:
            y_direction = "N"
        elif y_direction == 1:
            y_direction = "E"
        output = CoordinateSystem(y_direction=y_direction)

        self._ports_out["OutputCoordinateSystem"].data = output
        return output


@status_manage
class GdimAppMemberManagerReader(PipeModule):
    """Read the data from the Gdim APP - MemberManager."""

    def __init__(
        self,
        mname: str = "GdimAppMemberManagerReader",
        auto_run: bool = True,
        host: str | None = None,
        proj_id: int | None = None,
        token: str | None = None,
        gdim: bool = True,
    ) -> None:
        """Initialize GdimAppMemberManagerReader object.

        Parameters
        ----------
        host: str | None, default: None
            If it's None, the default value will be used, for example: "https://gdim.kulunsoft.com"

        proj_id: int | None, default: None
            The id of the project.

        token: str | None, default: None
            The token of the user.

        gdim: bool, default: True
            Whether to get the data from GDIM. If False, the data is from GBIM.
        """
        super().__init__(mname=mname, auto_run=auto_run)

        # Seed the input port only when credentials were passed in directly.
        if token is None and proj_id is None:
            data = None
        else:
            data = (token, proj_id)

        self._ports_in = {
            "InputToken": Port(
                ptype=PortType.Token,
                data=data,
                pdoc="The token of the user.",
            )
        }
        self._ports_out = {
            "OutputSingleResult": Port(
                ptype=PortType.SingleResult,
                # Fix: the previous pdoc was copied from the report-text reader.
                pdoc="The roles of the project members.",
            )
        }

        self.host = host
        self.proj_id = proj_id
        self.token = token
        self.gdim = gdim

    @property
    def OutputSingleResult(self) -> PortTypeHint.SingleResult | None:
        return self._ports_out["OutputSingleResult"].data

    def set_cal_params(self, reset: bool = True) -> dict[str, RangeModel]:
        return {
            "host": RangeModel(vtype="str", default=self.host, title="平台网址"),
            "proj_id": RangeModel(vtype="int", default=self.proj_id, title="项目id"),
            "token": RangeModel(vtype="str", default=self.token, title="用户token"),
            "gdim": RangeModel(
                vtype="bool", default=self.gdim, title="是否从GDIM获取数据"
            ),
        }

    def execute(self) -> PortTypeHint.SingleResult | None:
        """Read the project member roles and publish them on ``OutputSingleResult``.

        Returns
        -------
        PortTypeHint.SingleResult | None
            One UnitResult per (role, user) pair, grouped by role name,
            or None when token/project are missing or no roles are returned.
        """
        input_token = self._ports_in["InputToken"].data

        if input_token:
            self.token, self.proj_id = input_token
        if self.proj_id is None or self.token is None:
            self._ports_out["OutputSingleResult"].data = None
            return None

        roles_data = get_project_roles(
            user_token=self.token, proj_name=self.proj_id, host=self.host
        )

        if roles_data is None:
            self._ports_out["OutputSingleResult"].data = None
            return None

        # Deduplicated set of role names across all users.
        role_names_set = {
            role for role_list in roles_data.values() for role in role_list
        }

        # Convert to UnitResult objects: one entry per (role, user) pair,
        # grouped by role name.
        role_unit_results = []
        for role_name in role_names_set:
            for user_name, role_list in roles_data.items():
                if role_name in role_list:
                    role_unit_results.append(
                        UnitResult(
                            name=role_name,
                            title=role_name,
                            unit=Units.UNITLESS,
                            value=user_name,
                        )
                    )

        result = SingleResult(result=role_unit_results)
        self._ports_out["OutputSingleResult"].data = result

        return result


######################
# File readers
######################


@module_decorator()
class MdbReader(PipeModule):
    """Read the data from a .mdb file."""

    InputFile: PortReference[PortTypeHint.FilePath]
    OutputTables: PortReference[PortTypeHint.TableCollection]

    _port_docs = {
        "InputFile": "The path to the mdb file.",
        "OutputTables": "The tables of the mdb file.",
    }

    def __init__(
        self,
        mname: str | None = "MdbReader",
        auto_run: bool = True,
        file: PortTypeHint.FilePath | None = None,
    ) -> None:
        """Initialize MdbReader object.

        Parameters
        ----------
        file: PortTypeHint.FilePath | None, default: None
            The path to the mdb file.

        Note
        ----
        If the data in InputFile is not None, the self.file will be overwritten by the data of the input port.
        """
        super().__init__(mname=mname, auto_run=auto_run)

        if file is not None:
            self.InputFile = file

        self.file = file

        self._title = "读取MDB文件"
        self._description = "读取MDB文件，包括MDB文件中的所有表数据和字段信息"

    def update_ui_schema(self, reset: bool = False) -> dict[str, UIAttributeSchema]:
        return {
            "file": FileAttributeSchema(
                title="文件路径",
            ),
        }

    def execute(self) -> PortTypeHint.TableCollection | None:
        """Read all tables from the configured .mdb file.

        Returns
        -------
        PortTypeHint.TableCollection | None
            The tables of the file, or None when no file is set or reading fails.
        """
        input_file: PortTypeHint.FilePath | None = self._ports_in["InputFile"].data

        # The input port, when connected, takes precedence over the attribute.
        if input_file:
            self.file = input_file

        if self.file is None:
            self._ports_out["OutputTables"].data = None
            return None

        # MDBHandler.read_mdb returns None on failure; pass that through as-is.
        tables_mdb = mdb_handler = MDBHandler().read_mdb(self.file)

        self._ports_out["OutputTables"].data = tables_mdb
        return tables_mdb


##########################
# SKGL Platform readers
##########################
@status_manage
class SkglMonitorReader(PipeModule):
    """Read the monitor data from skgl platform."""

    def __init__(
        self,
        mname: str | None = None,
        auto_run: bool = True,
        host: str | None = None,
        proj_id: int | None = None,
        token: str | None = None,
        group: Literal["雨水情监测", "安全监测"] = "雨水情监测",
        monitor_type: str | None = None,
        type_name: Literal["name", "title"] = "title",
        column_name: Literal["name", "title"] = "name",
    ) -> None:
        """Initialize SkglMonitorReader object.

        Parameters
        ----------
        host: str | None, default: None
            If it's None, the default value will be used, for example: "https://gdim.kulunsoft.com"

        proj_id: int | None, default: None
            The id of the project.

        token: str | None, default: None
            The token of the user.

        group: Literal["雨水情监测", "安全监测"], default: "雨水情监测"
            The group of the monitor data.

        monitor_type: str | None, default: None
            The type of the monitor data.

        type_name: Literal["name", "title"], default: "title"
            The type of the monitor type name.
            If it's `title`, the input name of the monitor type is the title.
            If it's `name`, the input name of the monitor type is the name.

        column_name: Literal["name", "title"], default: "name"
            The type of the column name in the output table.
            If it's `title`, the title will be used which is user-friendly.
            If it's `name`, the name will be used which is used in the database.
        """
        super().__init__(mname=mname, auto_run=auto_run)

        # Seed the input port only when credentials were passed in directly.
        # Note: this module's token port also carries the host.
        if token is None and proj_id is None:
            data = None
        else:
            data = (token, proj_id, host)

        self._ports_in = {
            "InputToken": Port(
                ptype=PortType.Token,
                data=data,
                pdoc="The token of the user.",
            )
        }
        self._ports_out = {
            "OutputTable": Port(
                ptype=PortType.TableData, pdoc="The data of the monitoring table."
            )
        }

        self.host = host
        self.proj_id = proj_id
        self.token = token
        self.group = group
        self.monitor_type = monitor_type
        self.type_name = type_name
        self.column_name = column_name

    @property
    def InputToken(self) -> None:
        raise AttributeError("Property 'InputToken' is write-only.")

    @InputToken.setter
    def InputToken(self, value: PortTypeHint.Token | None) -> None:
        self._ports_in["InputToken"].data = value

    @property
    def OutputTable(self) -> PortTypeHint.TableData | None:
        return self._ports_out["OutputTable"].data

    def set_cal_params(self, reset: bool = True) -> dict[str, RangeModel]:
        return {
            "host": RangeModel(vtype="str", default=self.host, title="平台网址"),
            "proj_id": RangeModel(vtype="int", default=self.proj_id, title="项目id"),
            "token": RangeModel(vtype="str", default=self.token, title="用户token"),
            "group": RangeModel(
                vtype="str",
                choices=("雨水情监测", "安全监测"),
                default=self.group,
                title="组名",
            ),
            "monitor_type": RangeModel(
                vtype="str", default=self.monitor_type, title="监测类型"
            ),
            "column_name": RangeModel(
                vtype="str",
                default=self.column_name,
                choices=("name", "title"),
                title="列名类型",
            ),
        }

    def execute(self) -> PortTypeHint.TableData | None:
        """Fetch the monitor table for the configured group/type.

        Returns
        -------
        PortTypeHint.TableData | None
            The monitoring table, or None when token/project/monitor_type are missing.

        Raises
        ------
        ValueError
            If the monitor type is not found in the selected group.
        """
        input_token = self._ports_in["InputToken"].data

        if input_token:
            self.token, self.proj_id, self.host = input_token
        if self.proj_id is None or self.token is None or self.monitor_type is None:
            self._ports_out["OutputTable"].data = None
            return None

        # Get the fields name/title mapping for the selected group.
        name_title_map = get_monitor_name_title_map(
            user_token=self.token, host=self.host
        )
        name_title_map = name_title_map[self.group]
        group_name = name_title_map.group_name

        # Resolve the monitor type by name or title (depending on type_name).
        lookup_attr = "name" if self.type_name == "name" else "title"
        table_metadata = next(
            (
                item
                for item in name_title_map.types
                if getattr(item, lookup_attr) == self.monitor_type
            ),
            None,
        )
        if table_metadata is None:
            raise ValueError(
                f"The monitor type '{self.monitor_type}' is not found in the group '{self.group}'."
            )
        # The API always takes the database name of the type.
        monitor_type_name = table_metadata.name

        monitor_data = get_monitor_data(
            user_token=self.token,
            pid=self.proj_id,
            group=group_name,
            monitor_type=monitor_type_name,
            host=self.host,
        )

        # Create the table data with field metadata from the type definition.
        fields_meta = [
            FieldMetadata(name=item.name, title=item.title)
            for item in table_metadata.fields
        ]
        table_data = TableData(monitor_data, name=table_metadata.name, title=table_metadata.title, fields_meta=fields_meta)  # type: ignore

        if self.column_name == "title":
            table_data.rename_columns_to_titles()

        self._ports_out["OutputTable"].data = table_data
        return table_data


@status_manage
class SkglYesterdayReader(PipeModule):
    """Read the monitor data and management data of yesterday from skgl platform."""

    def __init__(
        self,
        mname: str | None = None,
        auto_run: bool = True,
        host: str | None = None,
        proj_id: int | None = None,
        token: str | None = None,
    ) -> None:
        """Initialize SkglYesterdayReader object.

        Parameters
        ----------
        host: str | None, default: None
            If it's None, the default value will be used, for example: "https://gdim.kulunsoft.com"

        proj_id: int | None, default: None
            The id of the project.

        token: str | None, default: None
            The token of the user.
        """
        # Call super() first, consistent with the other reader modules.
        super().__init__(mname=mname, auto_run=auto_run)

        # Seed the input port only when credentials were passed in directly.
        # Note: this module's token port also carries the host.
        if token is None and proj_id is None:
            data = None
        else:
            data = (token, proj_id, host)

        self._ports_in = {
            "InputToken": Port(
                ptype=PortType.Token,
                data=data,
                pdoc="The token of the user.",
            )
        }
        self._ports_out = {
            "OutputTableList": Port(
                ptype=PortType.TableDataList,
                pdoc="Yesterday's data of the monitoring and management in list format.",
            ),
            "OutputTables": Port(
                ptype=PortType.TableCollection,
                pdoc="Yesterday's data of the monitoring and management in collection format.",
            ),
        }

        self.host = host
        self.proj_id = proj_id
        self.token = token

    @property
    def InputToken(self) -> None:
        raise AttributeError("Property 'InputToken' is write-only.")

    @InputToken.setter
    def InputToken(self, value: PortTypeHint.Token | None) -> None:
        self._ports_in["InputToken"].data = value

    @property
    def OutputTableList(self) -> PortTypeHint.TableDataList | None:
        return self._ports_out["OutputTableList"].data

    @property
    def OutputTables(self) -> PortTypeHint.TableCollection | None:
        return self._ports_out["OutputTables"].data

    def set_cal_params(self, reset: bool = True) -> dict[str, RangeModel]:
        return {
            "host": RangeModel(
                vtype="str", default=self.host, title="平台网址", visible=False
            ),
            "proj_id": RangeModel(
                vtype="int", default=self.proj_id, title="项目id", visible=False
            ),
            "token": RangeModel(
                vtype="str", default=self.token, title="用户token", visible=False
            ),
        }

    def execute(self) -> PortTypeHint.TableCollection | None:
        """Fetch yesterday's monitoring and management data.

        Returns
        -------
        PortTypeHint.TableCollection | None
            A collection with the "monitor" and "management" tables,
            or None when token/project are missing.
        """
        input_token = self._ports_in["InputToken"].data

        if input_token:
            self.token, self.proj_id, self.host = input_token
        if self.proj_id is None or self.token is None:
            self._ports_out["OutputTableList"].data = None
            self._ports_out["OutputTables"].data = None
            return None

        monitor_data, management_data = get_yesterday_data(
            user_token=self.token, pid=self.proj_id, host=self.host
        )

        # Known columns of each frame; an unknown column would raise KeyError
        # here — assumes the connector only ever returns these columns.
        monitor_fields_title = {
            "time": "提醒时间",
            "type": "事项类型",
            "info": "情况描述",
        }
        monitor_fields_meta = [
            FieldMetadata(name=column, title=monitor_fields_title[column])
            for column in monitor_data.columns
        ]
        monitor_table = TableData(monitor_data, name="monitor", title="监测数据", fields_meta=monitor_fields_meta)  # type: ignore

        management_fields_title = {"time": "巡检时间", "info": "检查情况"}
        management_fields_meta = [
            FieldMetadata(name=column, title=management_fields_title[column])
            for column in management_data.columns
        ]
        management_table = TableData(management_data, name="management", title="巡检数据", fields_meta=management_fields_meta)  # type: ignore

        res = TableCollection()
        res.add_table(monitor_table)
        res.add_table(management_table)
        self._ports_out["OutputTableList"].data = [monitor_table, management_table]
        self._ports_out["OutputTables"].data = res
        return res


@module_decorator()
class CsvReader(PipeModule):
    """Read a csv file into a TableData."""

    OutputTable: PortReference[PortTypeHint.TableData]

    _port_docs = {
        "OutputTable": "The output TableData of the .csv file.",
    }

    def __init__(
        self,
        mname: str = "CsvReader",
        auto_run: bool = True,
        file: str | Path | None = None,
        sep: str = ",",
        header: int | list[int] | str | None = "infer",
        index_col: int | str | list[int] | list[str] | None = None,
        usecols: list[int] | list[str] | None = None,
        dtype: dict[str, str] | str | None = None,
        skiprows: int | list[int] | None = None,
        nrows: int | None = None,
        na_values: str | list[str] | dict[str, str | list[str]] | None = None,
        encoding: str | None = "auto",
    ) -> None:
        """Initialize CsvReader object.

        Parameters
        ----------
        file: str | Path | None
            The path to the .csv file.

        sep: str, default ","
            Delimiter to use for separating fields.

        header: int, list of int, str, default "infer"
            Row number(s) to use as the column names, and the start of the data.
            - int: Row number to use as column names (0-indexed). Use 0 for first row.
            - list[int]: Multiple rows to use for multi-level column names, e.g., [0, 1].
            - "infer": Automatically detect if first row contains column names.
            - None: No header row, columns will be named 0, 1, 2, etc.

        index_col: int, str, sequence of int/str, None, default None
            Column(s) to use as the row labels of the DataFrame.

        usecols: list-like or callable, optional
            Return a subset of the columns.

        dtype: Type name or dict of column -> type, optional
            Data type for data or columns. Available types:
            - Basic types: 'str', 'int', 'float', 'bool'
            - Pandas types: 'Int64', 'Float64', 'string', 'boolean'
            - NumPy types: 'int32', 'int64', 'float32', 'float64', 'object'
            - Category: 'category'
            - DateTime: 'datetime64[ns]'
            - Examples:
              - Single type: 'str' (apply to all columns)
              - Dict format: {'col1': 'int', 'col2': 'float', 'col3': 'str'}

        skiprows: list-like, int or callable, optional
            Line numbers to skip (0-indexed) or number of lines to skip.
            - int: Number of lines to skip from the beginning of file, e.g., 3 (skip first 3 lines)
            - list[int]: Specific line numbers to skip (0-indexed), e.g., [0, 2, 5] (skip lines 1, 3, and 6)
            - callable: Function that takes line number and returns True if line should be skipped
            - Examples:
              - Skip first 2 lines: 2
              - Skip specific lines: [0, 3, 7] (skip lines 1, 4, and 8)
              - Skip header and footer: lambda x: x in [0, 1] or x > 100

        nrows: int, optional
            Number of rows of file to read.

        na_values: scalar, str, list-like, or dict, optional
            Additional strings to recognize as NA/NaN (missing values).
            - str: Single value to treat as NaN, e.g., 'NULL', 'N/A', '无数据'
            - list[str]: Multiple values to treat as NaN, e.g., ['NULL', 'N/A', '', '无数据']
            - dict[str, str|list[str]]: Column-specific NA values
            - Examples:
              - Single NA value: 'NULL'
              - Multiple NA values: ['NULL', 'N/A', '', '无数据', '缺失']
              - Column-specific: {'age': ['unknown', '未知'], 'score': ['absent', '缺考']}

        encoding: str, optional
            Encoding to use when reading the file.
            - 'auto': Automatically detect encoding (tries utf-8-sig, utf-8, gbk, gb2312, latin-1, cp1252)
            - 'utf-8': UTF-8 encoding (default)
            - 'gbk': GBK encoding (common for Chinese files)
            - 'gb2312': GB2312 encoding (simplified Chinese)
            - 'latin-1': Latin-1 encoding
            - Or any other standard encoding name

        Ports
        -----
        OutputTable: PortTypeHint.TableData
            The output TableData of the .csv file.
        """
        super().__init__(mname=mname, auto_run=auto_run)

        self.file = file
        self.sep = sep
        self.header = header
        self.index_col = index_col
        self.usecols = usecols
        self.dtype = dtype
        self.skiprows = skiprows
        self.nrows = nrows
        self.na_values = na_values
        self.encoding = encoding

    def _detect_encoding(self, file_path: Path) -> str:
        """Detect file encoding by trying common encodings.

        Parameters
        ----------
        file_path : Path
            Path to the CSV file

        Returns
        -------
        str
            The detected encoding name
        """
        # "utf-8-sig" is tried before plain "utf-8" so a BOM-prefixed file is
        # reported as utf-8-sig and the BOM does not leak into the first
        # column name. "utf-8-sig" also decodes BOM-less UTF-8, so plain
        # UTF-8 files still read correctly. "latin-1" decodes any byte
        # sequence, so it acts as a near-universal fallback late in the list.
        encodings_to_try = ["utf-8-sig", "utf-8", "gbk", "gb2312", "latin-1", "cp1252"]

        for encoding in encodings_to_try:
            try:
                with open(file_path, "r", encoding=encoding) as f:
                    # Decoding a small sample (up to 10 lines) is enough to
                    # reject a wrong encoding in the vast majority of cases,
                    # without re-reading the whole file per candidate.
                    for _ in range(10):
                        if not f.readline():
                            break
                return encoding
            except (UnicodeDecodeError, UnicodeError):
                continue

        # If all fail, return utf-8 as fallback.
        return "utf-8"

    def update_ui_schema(self, reset: bool = False) -> dict[str, UIAttributeSchema]:
        return {
            "file": FileAttributeSchema(title="选择文件", extension="csv"),
        }

    def _build_read_csv_params(self, file: Path) -> dict[str, Any]:
        """Assemble keyword arguments for ``pandas.read_csv``.

        Only options that differ from read_csv's own defaults are included,
        so pandas' native behavior is preserved for everything left unset.
        """
        params: dict[str, Any] = {
            "filepath_or_buffer": file,
            "sep": self.sep,
        }

        if self.header != "infer":
            params["header"] = self.header

        if self.index_col is not None:
            params["index_col"] = self.index_col

        if self.usecols is not None:
            params["usecols"] = self.usecols

        if self.dtype is not None:
            params["dtype"] = self.dtype

        if self.skiprows is not None:
            # Drop no-op values (0 or an empty list) instead of passing them on.
            if isinstance(self.skiprows, int) and self.skiprows > 0:
                params["skiprows"] = self.skiprows
            elif isinstance(self.skiprows, list) and len(self.skiprows) > 0:
                params["skiprows"] = self.skiprows

        if self.nrows is not None and self.nrows > 0:
            params["nrows"] = self.nrows

        if self.na_values is not None:
            params["na_values"] = self.na_values

        if self.encoding is not None and self.encoding.strip():
            encoding_value = self.encoding.strip()
            if encoding_value.lower() == "auto":
                # Auto-detect encoding by probing a sample of the file.
                params["encoding"] = self._detect_encoding(file)
            else:
                params["encoding"] = encoding_value

        return params

    def execute(self) -> PortTypeHint.TableData | None:
        """Read the configured csv file and publish it on ``OutputTable``.

        Returns
        -------
        PortTypeHint.TableData | None
            The loaded table, or None when no file is configured.

        Raises
        ------
        FileNotFoundError
            If the configured path does not exist.
        """
        if self.file is None:
            self._ports_out["OutputTable"].data = None
            return None

        file = Path(self.file)
        if not file.exists():
            raise FileNotFoundError(f"文件 {file} 不存在.")

        # Read the CSV file using pandas with only the explicitly-set options.
        df = pd.read_csv(**self._build_read_csv_params(file))

        # Convert to TableData
        table_data = TableData(
            df,
            name=f"csv_data_{file.stem}",
            title=f"{file.name}",
            description=f"Data loaded from CSV file: {file.name}",
        )

        # Set the output port data
        self._ports_out["OutputTable"].data = table_data
        return table_data
