# Import the base Model class from Flask-AppBuilder.
from flask_appbuilder import Model
# Import column types from SQLAlchemy to define the database schema.
from sqlalchemy import Column, Integer, String

# Import a custom audit mixin for tracking creation/modification times and users.
from myapp.models.helpers import AuditMixinNullable


# Expose the shared SQLAlchemy MetaData object from Flask-AppBuilder's
# declarative base. All models registered on `Model` attach their table
# definitions here, so migration/DDL tooling can import it from this module.
metadata = Model.metadata


# Define the EvalDataset class, which maps to the 'eval_dataset' table.
# This model represents a dataset used for evaluation purposes.
class EvalDataset(Model, AuditMixinNullable):
    """ORM model for the ``eval_dataset`` table: a dataset used for evaluation.

    Tracks the dataset's raw and processed storage paths plus its import
    status/progress. ``AuditMixinNullable`` presumably adds created/changed
    audit columns — confirm in ``myapp.models.helpers``.
    """

    __tablename__ = 'eval_dataset'

    # Auto-incrementing surrogate primary key.
    id = Column(Integer, primary_key=True, autoincrement=True)
    # Human-readable dataset name (DB comment "名称" = "name").
    name = Column(String(200), nullable=False, default='', comment='名称')
    # Kind of data in the dataset: image/audio/txt/multiple/other.
    # NOTE(review): Python-side default is 'other' but the DB server_default
    # is '' — rows inserted outside the ORM get an empty type. Looks
    # accidental; confirm before relying on either value.
    data_type = Column(
        String(200),
        nullable=False,
        server_default='',
        default='other',
        comment='数据类型，image/audio/txt/multiple/other',
    )
    # Annotation mode: image_caption / image_classification /
    # object_detection / image_division.
    # Fixed typo in the stored DB column comment: 'mage_caption' ->
    # 'image_caption' (all sibling values are 'image_*').
    label_type = Column(
        String(200),
        nullable=False,
        server_default='',
        default='',
        comment='标注模式，image_caption，image_classification，object_detection，image_division',
    )
    # Where the dataset came from: upload (user-provided) or internal.
    source = Column(
        String(100),
        nullable=False,
        default='',
        comment='数据集来源，upload，internal',
    )
    # Free-text description of the dataset.
    describe = Column(String(2000), nullable=False, default='')

    # Error message recorded when import/processing fails ("错误信息").
    err_msg = Column(String(200), nullable=False, server_default='', comment='错误信息')
    # Storage path of the original, unprocessed upload ("原始文件地址").
    origin_path = Column(
        String(200), nullable=False, default='', server_default='', comment='原始文件地址'
    )
    # Storage path of the processed dataset in the standard format expected
    # by the evaluation pipeline.
    dataset_path = Column(
        String(200),
        nullable=False,
        default='',
        server_default='',
        comment='可用于评测的数据集地址（标准格式）',
    )
    # Lifecycle status: uploading / processing / succeed / failed.
    status = Column(
        String(200),
        nullable=False,
        default='',
        comment='数据集状态，uploading、processing、succeed、failed',
    )
    # Import/processing progress ("导入进度") — presumably a percentage 0-100;
    # the schema does not enforce a range.
    progress = Column(Integer, nullable=False, default=0, comment='导入进度')
    # Total number of records in the dataset ("记录数量"); nullable.
    entries_num = Column(Integer, nullable=True, default=0, comment='记录数量')
    # Geographical/logical region where the dataset is stored ("地区").
    region = Column(String(100), nullable=False, default='default', server_default='default', comment='地区')
