"""
数据血缘相关的数据模型
"""

from sqlalchemy import Column, BigInteger, String, Text, DateTime, Integer, Enum, JSON, Boolean, DECIMAL, ForeignKey
from sqlalchemy.sql import func
from sqlalchemy.orm import relationship
from app.core.database import Base
from datetime import datetime
import enum
from app.utils.timezone_utils import get_shanghai_now


# Scope of a lineage parse run: re-parse everything, or only changes.
TaskType = enum.Enum("TaskType", [("FULL", "FULL"), ("INCREMENTAL", "INCREMENTAL")])
TaskType.__doc__ = "Task type enumeration: FULL or INCREMENTAL parse."


# Lifecycle states of a parse task, in chronological order.
ParseStatus = enum.Enum(
    "ParseStatus",
    [(name, name) for name in ("PENDING", "RUNNING", "SUCCESS", "FAILED")],
)
ParseStatus.__doc__ = "Parse status enumeration."





# Scope of a Neo4j sync: push everything, or only the delta.
SyncType = enum.Enum("SyncType", [("FULL", "FULL"), ("INCREMENTAL", "INCREMENTAL")])
SyncType.__doc__ = "Sync type enumeration: FULL or INCREMENTAL sync."


# Kind of graph mutation performed during a sync.
OperationType = enum.Enum(
    "OperationType",
    [(name, name) for name in ("CREATE", "UPDATE", "DELETE", "MERGE")],
)
OperationType.__doc__ = "Operation type enumeration."


# Outcome of a sync attempt; PARTIAL means some operations failed.
SyncStatus = enum.Enum(
    "SyncStatus",
    [(name, name) for name in ("SUCCESS", "FAILED", "PARTIAL")],
)
SyncStatus.__doc__ = "Sync status enumeration."





# Kind of entity a lineage-graph node represents.
NodeType = enum.Enum("NodeType", [(name, name) for name in ("TABLE", "COLUMN", "TASK")])
NodeType.__doc__ = "Node type enumeration."


# Kind of edge between two lineage-graph nodes.
RelationshipType = enum.Enum(
    "RelationshipType",
    [(name, name) for name in ("FLOWS_TO", "MAPS_TO", "CONTAINS", "DEPENDS_ON")],
)
RelationshipType.__doc__ = "Relationship type enumeration."


class ParseTask(Base):
    """Parse-task table (dp_parse_tasks).

    One row per FlinkSQL lineage-parsing run: the SQL that was parsed,
    its status and result, graph-size counters, and timing information.
    """
    __tablename__ = 'dp_parse_tasks'
    
    id = Column(BigInteger, primary_key=True, autoincrement=True, comment='任务ID')
    task_name = Column(String(200), nullable=False, comment='任务名称')
    # ID of the task in the external Dinky system; no ForeignKey here —
    # presumably it lives in another service's database (TODO confirm).
    dinky_task_id = Column(BigInteger, comment='Dinky任务ID')
    # FULL vs INCREMENTAL parse; defaults to incremental.
    task_type = Column(Enum(TaskType), default=TaskType.INCREMENTAL, comment='解析类型：全量/增量')
    sql_content = Column(Text, comment='FlinkSQL内容')
    # PENDING -> RUNNING -> SUCCESS/FAILED (see ParseStatus).
    parse_status = Column(Enum(ParseStatus), default=ParseStatus.PENDING, comment='解析状态')
    # JSON blob with parse details; exact schema not visible in this file.
    parse_result = Column(JSON, comment='解析结果详情')
    error_message = Column(Text, comment='错误信息')
    # Size of the lineage graph produced by the parse.
    nodes_count = Column(Integer, default=0, comment='解析出的节点数量')
    edges_count = Column(Integer, default=0, comment='解析出的边数量')
    execution_time_ms = Column(BigInteger, comment='执行时间(毫秒)')
    # Last time this task's results were synced to Neo4j.
    last_sync_time = Column(DateTime, comment='最后同步到Neo4j的时间')
    started_at = Column(DateTime, comment='解析开始时间')
    completed_at = Column(DateTime, comment='解析完成时间')
    # Timestamps use Shanghai wall-clock time via get_shanghai_now, not UTC.
    created_at = Column(DateTime, default=get_shanghai_now, comment='创建时间')
    updated_at = Column(DateTime, default=get_shanghai_now, onupdate=get_shanghai_now, comment='更新时间')
    
    # One-to-many: sync attempts recorded for this task.
    sync_logs = relationship("LineageSyncLog", back_populates="task")
    
    def __repr__(self):
        return f"<ParseTask(id={self.id}, task_name='{self.task_name}')>"





class LineageSyncLog(Base):
    """Lineage sync-log table (dp_lineage_sync_logs).

    Records each attempt to push parsed lineage data into Neo4j, with
    counters, the operations performed, and the outcome.
    """
    __tablename__ = 'dp_lineage_sync_logs'
    
    id = Column(BigInteger, primary_key=True, autoincrement=True, comment='同步ID')
    # Parent parse task; ON DELETE SET NULL so logs outlive their task.
    task_id = Column(BigInteger, ForeignKey('dp_parse_tasks.id', ondelete='SET NULL'), comment='关联任务ID')
    sync_type = Column(Enum(SyncType), comment='同步类型')
    operation_type = Column(Enum(OperationType), comment='操作类型')
    affected_nodes = Column(Integer, default=0, comment='影响节点数')
    affected_edges = Column(Integer, default=0, comment='影响边数')
    # JSON record of the Neo4j operations executed during this sync.
    neo4j_operations = Column(JSON, comment='Neo4j操作记录')
    # SUCCESS / FAILED / PARTIAL (see SyncStatus).
    sync_status = Column(Enum(SyncStatus), comment='同步状态')
    error_message = Column(Text, comment='错误信息')
    execution_time_ms = Column(BigInteger, comment='执行时间(毫秒)')
    # Hash of the synced payload; per the column comment, used to detect
    # unchanged data on incremental syncs.
    sync_data_hash = Column(String(64), comment='同步数据哈希值(用于增量检测)')
    # Shanghai wall-clock time (see get_shanghai_now), not UTC.
    created_at = Column(DateTime, default=get_shanghai_now, comment='创建时间')
    
    # Many-to-one back to the owning ParseTask.
    task = relationship("ParseTask", back_populates="sync_logs")
    
    def __repr__(self):
        return f"<LineageSyncLog(id={self.id}, sync_status='{self.sync_status}')>"





class LineageNodeCache(Base):
    """Lineage node-cache table (dp_lineage_nodes_cache).

    Relational mirror of graph nodes (tables / columns / tasks) that are
    stored in Neo4j, keyed by a unique string node_id.
    """
    __tablename__ = 'dp_lineage_nodes_cache'
    
    id = Column(BigInteger, primary_key=True, autoincrement=True, comment='缓存ID')
    # Unique string identity of the node; presumably matches the Neo4j
    # node key — TODO confirm against the sync code.
    node_id = Column(String(255), nullable=False, unique=True, comment='节点唯一标识')
    node_type = Column(Enum(NodeType), nullable=False, comment='节点类型')
    node_name = Column(String(200), nullable=False, comment='节点名称')
    # Qualified-name parts; only the parts relevant to node_type are set.
    database_name = Column(String(100), comment='数据库名')
    schema_name = Column(String(100), comment='模式名')
    table_name = Column(String(100), comment='表名')
    column_name = Column(String(100), comment='字段名')
    node_properties = Column(JSON, comment='节点属性')
    # Soft-delete / visibility flag.
    is_active = Column(Boolean, default=True, comment='是否激活')
    # NOTE(review): plain BigInteger with no ForeignKey, unlike the task_id
    # columns on the sync-log and edge-cache tables — confirm if intentional.
    last_updated_task_id = Column(BigInteger, comment='最后更新的任务ID')
    # Shanghai wall-clock time (see get_shanghai_now), not UTC.
    created_at = Column(DateTime, default=get_shanghai_now, comment='创建时间')
    updated_at = Column(DateTime, default=get_shanghai_now, onupdate=get_shanghai_now, comment='更新时间')
    
    def __repr__(self):
        return f"<LineageNodeCache(id={self.id}, node_id='{self.node_id}')>"


class LineageEdgeCache(Base):
    """Lineage edge-cache table (dp_lineage_edges_cache).

    Relational mirror of graph edges stored in Neo4j: a typed relationship
    between two node_ids, with the SQL fragment that produced it.
    """
    __tablename__ = 'dp_lineage_edges_cache'
    
    id = Column(BigInteger, primary_key=True, autoincrement=True, comment='缓存ID')
    edge_id = Column(String(255), nullable=False, unique=True, comment='边唯一标识')
    # Endpoint node_ids; string references into dp_lineage_nodes_cache.node_id
    # (no ForeignKey declared on them in this file).
    source_node_id = Column(String(255), nullable=False, comment='源节点ID')
    target_node_id = Column(String(255), nullable=False, comment='目标节点ID')
    relationship_type = Column(Enum(RelationshipType), nullable=False, comment='关系类型')
    # Parse task that produced this edge; ON DELETE SET NULL keeps the edge.
    task_id = Column(BigInteger, ForeignKey('dp_parse_tasks.id', ondelete='SET NULL'), comment='关联任务ID')
    sql_fragment = Column(Text, comment='SQL片段')
    transformation_logic = Column(Text, comment='转换逻辑')
    edge_properties = Column(JSON, comment='边属性')
    # Soft-delete / visibility flag.
    is_active = Column(Boolean, default=True, comment='是否激活')
    # Shanghai wall-clock time (see get_shanghai_now), not UTC.
    created_at = Column(DateTime, default=get_shanghai_now, comment='创建时间')
    updated_at = Column(DateTime, default=get_shanghai_now, onupdate=get_shanghai_now, comment='更新时间')
    
    # Many-to-one to ParseTask (one-way; no back_populates on ParseTask).
    task = relationship("ParseTask")
    
    def __repr__(self):
        return f"<LineageEdgeCache(id={self.id}, edge_id='{self.edge_id}')>"


# Backward-compatibility aliases: older import sites use these names.
DataLineage = ParseTask
LineageNode = LineageNodeCache
LineageEdge = LineageEdgeCache
