"""
数据血缘同步服务
负责将解析的血缘数据同步到Neo4j图数据库
"""

import hashlib
import json
from datetime import datetime
from typing import Any, Dict, List, Optional

from sqlalchemy.orm import Session

from app.core.database import get_db
from app.core.logger import logger
from app.core.neo4j_client import neo4j_client
from app.models.lineage import (
    ParseTask, LineageSyncLog,
    LineageNodeCache, LineageEdgeCache,
    SyncCheckpoint,
    SyncType, OperationType, SyncStatus
)
from app.utils.timezone_utils import get_shanghai_now


class LineageSyncService:
    """Synchronizes parsed lineage data into Neo4j.

    Responsibilities:
      * push table/column/task nodes and their relationships to Neo4j via
        ``neo4j_client.batch_sync_lineage``;
      * record every attempt in ``LineageSyncLog`` and skip syncs whose
        payload hash matches the last successful one;
      * mirror synced nodes/edges into the relational cache tables
        (``LineageNodeCache`` / ``LineageEdgeCache``).
    """

    def __init__(self):
        # Shared module-level Neo4j client instance.
        self.neo4j_client = neo4j_client

    def sync_task_lineage(self, task_id: int, db: Session, force_full: bool = False) -> Dict:
        """Sync one parse task's lineage result to Neo4j.

        Args:
            task_id: Primary key of the ``ParseTask`` to sync.
            db: Active SQLAlchemy session; committed on success/failure,
                rolled back on unexpected errors.
            force_full: When True, perform a FULL sync even if the payload
                hash matches the last successful sync.

        Returns:
            Result dict with ``success`` and ``message``; successful syncs
            additionally report ``sync_type``, ``affected_nodes``,
            ``affected_edges`` and ``execution_time_ms``.
        """
        try:
            task = db.query(ParseTask).filter(ParseTask.id == task_id).first()
            if not task:
                raise ValueError(f"任务不存在: {task_id}")

            # Without a parse result there is nothing to sync incrementally;
            # a forced full sync still proceeds (it pushes an empty payload).
            if not force_full and task.parse_result is None:
                return {
                    'success': False,
                    'message': '任务尚未解析完成，无法同步'
                }

            sync_type = SyncType.FULL if force_full else SyncType.INCREMENTAL

            # Content fingerprint used to detect "nothing changed" syncs.
            data_hash = self._calculate_data_hash(task.parse_result)

            # Incremental mode: skip when the payload equals the last
            # successfully synced payload for this task.
            if sync_type == SyncType.INCREMENTAL:
                last_sync = db.query(LineageSyncLog).filter(
                    LineageSyncLog.task_id == task_id,
                    LineageSyncLog.sync_status == SyncStatus.SUCCESS
                ).order_by(LineageSyncLog.created_at.desc()).first()

                if last_sync and last_sync.sync_data_hash == data_hash:
                    return {
                        'success': True,
                        'message': '数据未变更，无需同步',
                        'sync_type': 'SKIP'
                    }

            # Create the audit row before touching Neo4j so failures stay
            # traceable; flush so it gets a primary key within this txn.
            sync_log = LineageSyncLog(
                task_id=task_id,
                sync_type=sync_type,
                operation_type=OperationType.MERGE,
                sync_data_hash=data_hash
            )
            db.add(sync_log)
            db.flush()

            start_time = get_shanghai_now()

            # Normalize the parse result into the node/edge payload the
            # Neo4j client expects.
            lineage_data = self._prepare_lineage_data(task.parse_result)

            neo4j_result = self.neo4j_client.batch_sync_lineage(lineage_data)

            end_time = get_shanghai_now()
            execution_time = int((end_time - start_time).total_seconds() * 1000)

            if neo4j_result.get('success'):
                # Mark the audit row as successful and record metrics.
                sync_log.sync_status = SyncStatus.SUCCESS
                sync_log.affected_nodes = len(lineage_data.get('tables', [])) + len(lineage_data.get('columns', []))
                sync_log.affected_edges = len(lineage_data.get('relationships', []))
                sync_log.execution_time_ms = execution_time
                sync_log.neo4j_operations = neo4j_result.get('results', [])

                task.last_sync_time = end_time

                # Mirror the synced nodes/edges into the relational cache.
                self._update_cache(lineage_data, task_id, db)

                db.commit()

                logger.info(f"任务 {task_id} 血缘同步成功")

                return {
                    'success': True,
                    'message': '血缘同步成功',
                    'sync_type': sync_type.value,
                    'affected_nodes': sync_log.affected_nodes,
                    'affected_edges': sync_log.affected_edges,
                    'execution_time_ms': execution_time
                }
            else:
                # Neo4j rejected the batch: persist the failure for auditing.
                sync_log.sync_status = SyncStatus.FAILED
                sync_log.error_message = neo4j_result.get('error', '未知错误')
                sync_log.execution_time_ms = execution_time

                db.commit()

                logger.error(f"任务 {task_id} 血缘同步失败: {sync_log.error_message}")

                return {
                    'success': False,
                    'message': f'血缘同步失败: {sync_log.error_message}',
                    'sync_type': sync_type.value
                }

        except Exception as e:
            # NOTE: the rollback also discards the pending sync_log row, so
            # unexpected errors leave no audit record — only this log line.
            db.rollback()
            logger.error(f"血缘同步异常: {str(e)}")
            return {
                'success': False,
                'message': f'血缘同步异常: {str(e)}'
            }

    def incremental_sync_from_dinky(self, db: Session) -> Dict:
        """Incrementally sync lineage from Dinky based on a stored checkpoint.

        Currently a stub: it only validates that the checkpoint row exists
        and reports its timestamp; querying Dinky for changed tasks is TODO.
        """
        try:
            checkpoint = db.query(SyncCheckpoint).filter(
                SyncCheckpoint.checkpoint_name == 'dinky_incremental_sync'
            ).first()

            if not checkpoint:
                return {
                    'success': False,
                    'message': '检查点不存在，请先初始化'
                }

            # TODO: query the Dinky database for tasks created/updated after
            # the checkpoint and sync each of them here.

            logger.info("增量同步检查完成")

            return {
                'success': True,
                'message': '增量同步完成',
                'processed_tasks': 0,
                # Guard: a freshly initialized checkpoint may have no time yet.
                'last_sync_time': checkpoint.last_sync_time.isoformat() if checkpoint.last_sync_time else None
            }

        except Exception as e:
            logger.error(f"增量同步异常: {str(e)}")
            return {
                'success': False,
                'message': f'增量同步异常: {str(e)}'
            }

    def _calculate_data_hash(self, data: Optional[Dict]) -> str:
        """Return a stable MD5 fingerprint of *data* ("" when empty/None).

        MD5 is used purely as a change-detection fingerprint, not for
        security; ``sort_keys=True`` makes the hash key-order independent.
        """
        if not data:
            return ""

        json_str = json.dumps(data, sort_keys=True, ensure_ascii=False)
        return hashlib.md5(json_str.encode('utf-8')).hexdigest()

    def _prepare_lineage_data(self, parse_result: Optional[Dict]) -> Dict:
        """Normalize a raw parse result into the Neo4j batch payload.

        Missing fields are filled with defaults so downstream code can rely
        on every key being present. Returns a dict with ``tables``,
        ``columns``, ``tasks`` and ``relationships`` lists.
        """
        if not parse_result:
            return {'tables': [], 'columns': [], 'tasks': [], 'relationships': []}

        lineage_data = {
            'tables': [],
            'columns': [],
            'tasks': [],
            'relationships': []
        }

        # Table nodes.
        for table_info in parse_result.get('tables', []):
            lineage_data['tables'].append({
                'id': table_info.get('id', ''),
                'name': table_info.get('name', ''),
                'database': table_info.get('database', ''),
                'schema': table_info.get('schema', ''),
                'table_type': table_info.get('type', 'TABLE'),
                'connector_type': table_info.get('connector_type', ''),
                'connector_config': table_info.get('connector_config', {})
            })

        # Column nodes.
        for column_info in parse_result.get('columns', []):
            lineage_data['columns'].append({
                'id': column_info.get('id', ''),
                'name': column_info.get('name', ''),
                'data_type': column_info.get('data_type', ''),
                'is_primary_key': column_info.get('is_primary_key', False),
                'is_nullable': column_info.get('is_nullable', True)
            })

        # Task nodes (all produced by Flink SQL parsing).
        for task_info in parse_result.get('tasks', []):
            lineage_data['tasks'].append({
                'id': task_info.get('id', ''),
                'name': task_info.get('name', ''),
                'sql_content': task_info.get('sql_content', ''),
                'task_type': 'FLINK_SQL'
            })

        # Edges between nodes.
        for rel_info in parse_result.get('relationships', []):
            lineage_data['relationships'].append({
                'source_id': rel_info.get('source_id', ''),
                'target_id': rel_info.get('target_id', ''),
                'relationship_type': rel_info.get('type', 'FLOWS_TO'),
                'task_id': rel_info.get('task_id', ''),
                'sql_fragment': rel_info.get('sql_fragment', ''),
                'operation_type': rel_info.get('operation_type', ''),
                'transformation': rel_info.get('transformation', ''),
                'access_type': rel_info.get('access_type', '')
            })

        return lineage_data

    def _update_cache(self, lineage_data: Dict, task_id: int, db: Session) -> None:
        """Upsert synced nodes/edges into the relational cache tables.

        Does not commit — the caller owns the transaction. Re-raises on
        failure so the caller can roll back.
        """
        try:
            # Upsert table nodes keyed by node_id.
            for table in lineage_data.get('tables', []):
                cache_node = db.query(LineageNodeCache).filter(
                    LineageNodeCache.node_id == table['id']
                ).first()

                if cache_node:
                    cache_node.node_name = table['name']
                    cache_node.database_name = table['database']
                    cache_node.schema_name = table['schema']
                    cache_node.node_properties = table
                    cache_node.last_updated_task_id = task_id
                    cache_node.updated_at = get_shanghai_now()
                else:
                    db.add(LineageNodeCache(
                        node_id=table['id'],
                        node_type='TABLE',
                        node_name=table['name'],
                        database_name=table['database'],
                        schema_name=table['schema'],
                        table_name=table['name'],
                        node_properties=table,
                        last_updated_task_id=task_id
                    ))

            # Upsert edges keyed by "<source>-<target>-<type>".
            for rel in lineage_data.get('relationships', []):
                edge_id = f"{rel['source_id']}-{rel['target_id']}-{rel['relationship_type']}"

                cache_edge = db.query(LineageEdgeCache).filter(
                    LineageEdgeCache.edge_id == edge_id
                ).first()

                if cache_edge:
                    cache_edge.sql_fragment = rel.get('sql_fragment', '')
                    cache_edge.transformation_logic = rel.get('transformation', '')
                    cache_edge.edge_properties = rel
                    cache_edge.task_id = task_id
                    cache_edge.updated_at = get_shanghai_now()
                else:
                    db.add(LineageEdgeCache(
                        edge_id=edge_id,
                        source_node_id=rel['source_id'],
                        target_node_id=rel['target_id'],
                        relationship_type=rel['relationship_type'],
                        task_id=task_id,
                        sql_fragment=rel.get('sql_fragment', ''),
                        transformation_logic=rel.get('transformation', ''),
                        edge_properties=rel
                    ))

            logger.info(f"任务 {task_id} 缓存更新完成")

        except Exception as e:
            logger.error(f"更新缓存失败: {str(e)}")
            raise


# Module-level singleton shared by all importers of this module.
lineage_sync_service = LineageSyncService()
