"""
数据同步相关数据模型
"""

from datetime import datetime, timezone, timedelta
from enum import Enum
from typing import Optional, Dict, Any

# Timezone helper: Shanghai-local "now" used for audit timestamp defaults
from app.utils.timezone_utils import get_shanghai_now
from sqlalchemy import (
    Column, BigInteger, String, Text, Integer, Boolean, DateTime,
    ForeignKey, JSON, DECIMAL, Index
)
from sqlalchemy.orm import relationship

from app.core.database import Base


class SyncType(str, Enum):
    """How a sync task copies data: everything, or only new changes.

    Subclasses ``str`` so members compare equal to their raw string
    values and can be stored directly in string database columns.
    """

    FULL = "full"                # full synchronization of the source table
    INCREMENTAL = "incremental"  # incremental synchronization driven by a watermark field


class TargetType(str, Enum):
    """Supported sync destinations.

    ``str`` mixin: members are usable wherever their plain string
    value is expected (e.g. a ``String`` column).
    """

    CSV = "csv"      # write rows to a CSV file
    DORIS = "doris"  # write rows to a Doris table
    KAFKA = "kafka"  # publish rows to a Kafka topic


class ScheduleType(str, Enum):
    """How a sync task gets triggered.

    ``str`` mixin keeps members interchangeable with their raw values.
    """

    MANUAL = "manual"  # started explicitly by a user
    CRON = "cron"      # started on a cron schedule


class ExecutionStatus(str, Enum):
    """Lifecycle states of a single sync execution.

    ``str`` mixin keeps members interchangeable with their raw values.
    """

    PENDING = "pending"      # queued, not yet started
    RUNNING = "running"      # currently executing
    SUCCESS = "success"      # finished without error
    FAILED = "failed"        # finished with an error
    PAUSED = "paused"        # temporarily suspended
    CANCELLED = "cancelled"  # aborted before completion


class ExecutionType(str, Enum):
    """Origin of a sync execution.

    ``str`` mixin keeps members interchangeable with their raw values.
    """

    MANUAL = "manual"        # launched by a user action
    SCHEDULED = "scheduled"  # launched by the scheduler


class IncrementalFieldType(str, Enum):
    """Kind of column used as the incremental-sync watermark.

    ``str`` mixin keeps members interchangeable with their raw values.
    """

    TIMESTAMP = "timestamp"  # watermark is a timestamp column
    ID = "id"                # watermark is an auto-increment id column


class LogLevel(str, Enum):
    """Severity levels for sync log records.

    Values are uppercase to match conventional logging level names.
    """

    DEBUG = "DEBUG"
    INFO = "INFO"
    WARNING = "WARNING"
    ERROR = "ERROR"


class SyncTask(Base):
    """Data-synchronization task definition.

    One row describes a configured sync job: where data comes from
    (source datasource + table, or a custom SQL query), where it goes
    (CSV file, Doris table or Kafka topic), how it is copied (full or
    incremental with a watermark field, in batches) and when it runs
    (manual trigger or cron schedule).  Tasks are soft-deleted via
    ``is_deleted``/``deleted_at``/``deleted_by`` instead of being
    physically removed.
    """
    __tablename__ = "dp_sync_tasks"

    id = Column(BigInteger, primary_key=True, autoincrement=True, comment='同步任务ID')
    name = Column(String(100), unique=True, nullable=False, comment='任务名称（唯一标识）')
    display_name = Column(String(100), nullable=False, comment='显示名称')
    description = Column(Text, comment='任务描述')

    # Source configuration.  RESTRICT: a datasource referenced by a
    # task cannot be deleted while the task exists.
    source_datasource_id = Column(BigInteger, ForeignKey('dp_datasources.id', ondelete='RESTRICT'), nullable=False, comment='源数据源ID')
    source_table_name = Column(String(200), nullable=False, comment='源表名')
    source_query = Column(Text, comment='自定义查询SQL（可选）')

    # Target configuration (which of these is required depends on target_type)
    target_type = Column(String(20), nullable=False, comment='目标类型')
    target_datasource_id = Column(BigInteger, ForeignKey('dp_datasources.id', ondelete='RESTRICT'), comment='目标数据源ID（doris/kafka时必填）')
    target_table_name = Column(String(200), comment='目标表名')
    target_path = Column(String(500), comment='目标路径（csv文件路径或kafka topic）')

    # CSV output options (only meaningful when target_type is "csv")
    csv_delimiter = Column(String(10), default=',', comment='CSV分隔符')
    csv_quote_char = Column(String(5), default='"', comment='CSV引用字符')
    csv_escape_char = Column(String(5), default='\\', comment='CSV转义字符')
    csv_line_terminator = Column(String(10), default='\n', comment='CSV行终止符')
    csv_encoding = Column(String(20), default='utf-8', comment='CSV文件编码')
    csv_include_header = Column(Boolean, default=True, comment='是否包含表头')
    csv_null_value = Column(String(20), default='', comment='NULL值表示方式')
    csv_date_format = Column(String(50), default='%Y-%m-%d %H:%M:%S', comment='日期时间格式')

    # Sync behaviour.  Use .value so the column default is the plain
    # string "full" rather than the enum member (avoids any
    # driver-dependent str() coercion of Enum objects).
    sync_type = Column(String(20), nullable=False, default=SyncType.FULL.value, comment='同步类型')
    incremental_field = Column(String(100), comment='增量字段名（时间戳或自增ID）')
    incremental_field_type = Column(String(20), comment='增量字段类型')
    incremental_start_value = Column(String(255), comment='增量起始值（用户可指定）')
    last_incremental_value = Column(String(255), comment='最后增量值（系统自动更新）')
    batch_size = Column(Integer, default=1000, comment='批处理大小')

    # Scheduling (.value for the same reason as sync_type above)
    schedule_type = Column(String(20), default=ScheduleType.MANUAL.value, comment='调度类型')
    cron_expression = Column(String(100), comment='Cron表达式')

    # Denormalized execution status, updated as executions run
    is_active = Column(Boolean, default=True, comment='是否启用')
    last_execution_id = Column(BigInteger, comment='最后执行记录ID')
    last_execution_time = Column(DateTime, comment='最后执行时间')
    last_execution_status = Column(String(20), comment='最后执行状态')
    next_execution_time = Column(DateTime, comment='下次执行时间')

    # Soft-delete bookkeeping
    is_deleted = Column(Boolean, default=False, comment='是否已删除')
    deleted_at = Column(DateTime, comment='删除时间')
    deleted_by = Column(BigInteger, ForeignKey('dp_users.id', ondelete='SET NULL'), comment='删除者ID')

    # Audit fields (Shanghai-local timestamps)
    created_by = Column(BigInteger, ForeignKey('dp_users.id', ondelete='SET NULL'), comment='创建者ID')
    updated_by = Column(BigInteger, ForeignKey('dp_users.id', ondelete='SET NULL'), comment='更新者ID')
    created_at = Column(DateTime, default=get_shanghai_now, comment='创建时间')
    updated_at = Column(DateTime, default=get_shanghai_now, onupdate=get_shanghai_now, comment='更新时间')

    # Relationships.  Two distinct FKs point at DataSource, so each
    # relationship must name its foreign_keys explicitly.
    source_datasource = relationship("DataSource", foreign_keys=[source_datasource_id], backref="source_sync_tasks")
    target_datasource = relationship("DataSource", foreign_keys=[target_datasource_id], backref="target_sync_tasks")
    executions = relationship("SyncExecution", back_populates="task", cascade="all, delete-orphan")
    field_mappings = relationship("SyncFieldMapping", back_populates="task", cascade="all, delete-orphan")

    # User relationships for the audit / soft-delete columns
    creator = relationship("User", foreign_keys=[created_by], backref="created_sync_tasks")
    updater = relationship("User", foreign_keys=[updated_by], backref="updated_sync_tasks")
    deleter = relationship("User", foreign_keys=[deleted_by], backref="deleted_sync_tasks")

    # Secondary indexes for the common lookup/filter paths
    __table_args__ = (
        Index('idx_dp_sync_tasks_name', 'name'),
        Index('idx_dp_sync_tasks_source_datasource', 'source_datasource_id'),
        Index('idx_dp_sync_tasks_target_datasource', 'target_datasource_id'),
        Index('idx_dp_sync_tasks_is_active', 'is_active'),
        Index('idx_dp_sync_tasks_is_deleted', 'is_deleted'),
        Index('idx_dp_sync_tasks_created_at', 'created_at'),
        Index('idx_dp_sync_tasks_next_execution', 'next_execution_time'),
    )

    def __repr__(self):
        return f"<SyncTask(id={self.id}, name='{self.name}', sync_type='{self.sync_type}')>"


class SyncExecution(Base):
    """One run of a :class:`SyncTask`.

    Tracks lifecycle status, per-run record counters, timing, error
    details, and checkpoint data for resuming interrupted runs.
    Rows are removed when the owning task is deleted
    (FK ``ondelete='CASCADE'``).
    """
    __tablename__ = "dp_sync_executions"

    id = Column(BigInteger, primary_key=True, autoincrement=True, comment='执行记录ID')
    task_id = Column(BigInteger, ForeignKey('dp_sync_tasks.id', ondelete='CASCADE'), nullable=False, comment='同步任务ID')
    execution_type = Column(String(20), nullable=False, comment='执行类型')

    # Lifecycle state.  Use .value so the column default is the plain
    # string "pending" rather than the enum member (avoids any
    # driver-dependent str() coercion of Enum objects).
    status = Column(String(20), nullable=False, default=ExecutionStatus.PENDING.value, comment='执行状态')
    progress_percentage = Column(DECIMAL(5, 2), default=0.00, comment='进度百分比')

    # Per-run record counters
    total_records = Column(BigInteger, default=0, comment='总记录数')
    processed_records = Column(BigInteger, default=0, comment='已处理记录数')
    success_records = Column(BigInteger, default=0, comment='成功记录数')
    failed_records = Column(BigInteger, default=0, comment='失败记录数')

    # Timing
    started_at = Column(DateTime, comment='开始时间')
    completed_at = Column(DateTime, comment='完成时间')
    execution_time_ms = Column(BigInteger, comment='执行时间（毫秒）')

    # Failure details
    error_message = Column(Text, comment='错误消息')
    error_details = Column(JSON, comment='错误详情')

    # Parameters the run was started with
    execution_params = Column(JSON, comment='执行参数')

    # Resume/checkpoint support for interrupted runs
    checkpoint_data = Column(JSON, comment='断点数据')
    last_processed_value = Column(String(255), comment='最后处理的值（用于增量同步）')

    # Audit fields (Shanghai-local timestamps)
    created_by = Column(BigInteger, ForeignKey('dp_users.id', ondelete='SET NULL'), comment='执行者ID')
    created_at = Column(DateTime, default=get_shanghai_now, comment='创建时间')
    updated_at = Column(DateTime, default=get_shanghai_now, onupdate=get_shanghai_now, comment='更新时间')

    # Relationships
    task = relationship("SyncTask", back_populates="executions")
    logs = relationship("SyncLog", back_populates="execution", cascade="all, delete-orphan")
    executor = relationship("User", foreign_keys=[created_by], backref="executed_sync_tasks")

    # Secondary indexes for listing/filtering executions
    __table_args__ = (
        Index('idx_dp_sync_executions_task_id', 'task_id'),
        Index('idx_dp_sync_executions_status', 'status'),
        Index('idx_dp_sync_executions_started_at', 'started_at'),
        Index('idx_dp_sync_executions_created_at', 'created_at'),
    )

    def __repr__(self):
        return f"<SyncExecution(id={self.id}, task_id={self.task_id}, status='{self.status}')>"


class SyncLog(Base):
    """Log entry emitted during a sync execution.

    Each row belongs to one :class:`SyncExecution` and is removed with
    it (FK ``ondelete='CASCADE'`` plus ``delete-orphan`` on the parent
    relationship).  ``batch_number`` ties a message to a processing
    batch when applicable.
    """
    __tablename__ = "dp_sync_logs"

    id = Column(BigInteger, primary_key=True, autoincrement=True, comment='日志ID')
    execution_id = Column(BigInteger, ForeignKey('dp_sync_executions.id', ondelete='CASCADE'), nullable=False, comment='执行记录ID')
    log_level = Column(String(20), nullable=False, comment='日志级别')  # expected values: LogLevel members
    log_message = Column(Text, nullable=False, comment='日志消息')
    log_details = Column(JSON, comment='日志详情')  # optional structured payload
    batch_number = Column(Integer, comment='批次号')
    created_at = Column(DateTime, default=get_shanghai_now, comment='创建时间')  # Shanghai-local timestamp

    # Parent execution (bidirectional via SyncExecution.logs)
    execution = relationship("SyncExecution", back_populates="logs")

    # Secondary indexes for fetching/filtering an execution's logs
    __table_args__ = (
        Index('idx_dp_sync_logs_execution_id', 'execution_id'),
        Index('idx_dp_sync_logs_level', 'log_level'),
        Index('idx_dp_sync_logs_created_at', 'created_at'),
    )

    def __repr__(self):
        return f"<SyncLog(id={self.id}, execution_id={self.execution_id}, level='{self.log_level}')>"


class SyncFieldMapping(Base):
    """Source-to-target column mapping for a sync task.

    Each row maps one source field to one target field, optionally with
    a transform expression.  A (task_id, source_field) pair is unique,
    so a source column can be mapped at most once per task; rows are
    removed with their task (FK ``ondelete='CASCADE'``).
    """
    __tablename__ = "dp_sync_field_mappings"

    id = Column(BigInteger, primary_key=True, autoincrement=True, comment='映射ID')
    task_id = Column(BigInteger, ForeignKey('dp_sync_tasks.id', ondelete='CASCADE'), nullable=False, comment='同步任务ID')
    source_field = Column(String(100), nullable=False, comment='源字段名')
    target_field = Column(String(100), nullable=False, comment='目标字段名')
    field_type = Column(String(50), comment='字段类型')
    transform_expression = Column(Text, comment='转换表达式')  # semantics defined by the sync engine — not evaluated here
    is_key_field = Column(Boolean, default=False, comment='是否为主键字段')
    sort_order = Column(Integer, default=0, comment='排序顺序')  # output column ordering

    # Audit fields (Shanghai-local timestamps)
    created_at = Column(DateTime, default=get_shanghai_now, comment='创建时间')
    updated_at = Column(DateTime, default=get_shanghai_now, onupdate=get_shanghai_now, comment='更新时间')

    # Parent task (bidirectional via SyncTask.field_mappings)
    task = relationship("SyncTask", back_populates="field_mappings")

    # Indexes plus the unique (task_id, source_field) constraint
    __table_args__ = (
        Index('idx_dp_sync_field_mappings_task_id', 'task_id'),
        Index('idx_dp_sync_field_mappings_sort_order', 'sort_order'),
        Index('uk_task_source_field', 'task_id', 'source_field', unique=True),
    )

    def __repr__(self):
        return f"<SyncFieldMapping(id={self.id}, task_id={self.task_id}, source_field='{self.source_field}')>"
