from fastapi import FastAPI, Depends, HTTPException, status, WebSocket, WebSocketDisconnect
from fastapi.middleware.cors import CORSMiddleware
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from models import Base, DTSTask, DTSTaskRuntime, DTSTaskLog
from schemas import DTSTaskCreate, DTSTaskUpdate, DTSTaskResponse, DTSTaskRuntimeResponse, DatabaseConfig, TaskOptions, MultiDatabaseConfig, DTSMultiTaskCreate
import os
import json
from dotenv import load_dotenv
from typing import List, Optional
from datetime import datetime
from celery import Celery

# Load environment variables from a local .env file (if present)
load_dotenv()

# Redis connection settings, used as both the Celery broker and result backend
redis_host = os.getenv("REDIS_HOST", "localhost")
redis_port = os.getenv("REDIS_PORT", "6379")
redis_password = os.getenv("REDIS_PASSWORD", "")
redis_db = os.getenv("REDIS_DB", "0")

# Build the Redis URL; include the password segment only when one is configured
if redis_password:
    redis_url = f"redis://:{redis_password}@{redis_host}:{redis_port}/{redis_db}"
else:
    redis_url = f"redis://{redis_host}:{redis_port}/{redis_db}"

# Celery instance used to dispatch worker tasks and receive their results
celery_app = Celery(
    "dts-backend",
    broker=redis_url,
    backend=redis_url
)

# Celery configuration: JSON-only serialization, UTC timestamps
celery_app.conf.update(
    task_serializer="json",
    accept_content=["json"],
    result_serializer="json",
    timezone="UTC",
    enable_utc=True,
)

# Create the FastAPI application instance
app = FastAPI(
    title="DTS-Plus API",
    description="异构数据库迁移与同步平台 API",
    version="0.1.0"
)

# CORS middleware so the web frontend (served from another origin) can call the API
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # NOTE(review): wildcard origin — restrict to concrete domains in production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Database configuration — defaults to a local SQLite file for development.
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///./dts.db")
# BUGFIX: "check_same_thread" is a SQLite-only flag (needed because FastAPI may
# use the session from different threads). Passing it to any other driver
# (MySQL, PostgreSQL, ...) raises a TypeError, so only supply it for SQLite URLs.
_connect_args = {"check_same_thread": False} if DATABASE_URL.startswith("sqlite") else {}
engine = create_engine(DATABASE_URL, connect_args=_connect_args)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# Create any ORM-mapped tables that do not exist yet
Base.metadata.create_all(bind=engine)

# Dependency: per-request database session
def get_db():
    """FastAPI dependency that yields a SQLAlchemy session and always closes it."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()

@app.get("/")
def read_root():
    """Root endpoint: simple service banner."""
    return {"message": "DTS-Plus API"}

@app.get("/health")
def health_check():
    """Liveness probe used by deployment health checks."""
    return {"status": "healthy"}

def task_to_response(task: DTSTask) -> DTSTaskResponse:
    """Convert a DTSTask ORM row into the API response model.

    NOTE(review): this reads ``task.source`` / ``task.target`` /
    ``task.options_dict``, while the create endpoints persist JSON strings into
    ``source_config`` / ``target_config`` / ``options`` — presumably the model
    exposes parsed properties under these names; confirm against models.py.
    """
    return DTSTaskResponse(
        id=task.id,
        name=task.name,
        type=task.type,
        status=task.status,
        source=DatabaseConfig(**task.source),
        target=DatabaseConfig(**task.target),
        options=TaskOptions(**task.options_dict) if task.options_dict else None,
        created_at=task.created_at,
        updated_at=task.updated_at
    )

# List every task
@app.get("/api/v1/tasks", response_model=List[DTSTaskResponse])
def get_tasks(db: Session = Depends(get_db)):
    """Return all tasks in the database as response models."""
    try:
        return [task_to_response(row) for row in db.query(DTSTask).all()]
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"获取任务列表失败: {str(e)}")

# Database connectivity probes, one helper per supported engine.
# Each probe returns {"success": bool, "message": str} and never raises.

def _probe_mysql(config: DatabaseConfig) -> dict:
    """Attempt a short-timeout MySQL connection."""
    try:
        import pymysql
        connection = pymysql.connect(
            host=config.host,
            port=config.port,
            user=config.user,
            password=config.password,
            database=config.database if config.database else None,
            connect_timeout=10
        )
        connection.close()
        return {"success": True, "message": "MySQL连接成功"}
    except ImportError:
        return {"success": False, "message": "缺少pymysql驱动，请安装pymysql"}
    except Exception as e:
        return {"success": False, "message": f"MySQL连接失败: {str(e)}"}

def _probe_postgresql(config: DatabaseConfig) -> dict:
    """Attempt a short-timeout PostgreSQL connection (default db: postgres)."""
    try:
        import psycopg2
        connection = psycopg2.connect(
            host=config.host,
            port=config.port,
            user=config.user,
            password=config.password,
            database=config.database if config.database else "postgres",
            connect_timeout=10
        )
        connection.close()
        return {"success": True, "message": "PostgreSQL连接成功"}
    except ImportError:
        return {"success": False, "message": "缺少psycopg2驱动，请安装psycopg2"}
    except Exception as e:
        return {"success": False, "message": f"PostgreSQL连接失败: {str(e)}"}

def _probe_mongodb(config: DatabaseConfig) -> dict:
    """Attempt a MongoDB connection (10s server-selection timeout)."""
    try:
        import pymongo
        client = pymongo.MongoClient(
            f"mongodb://{config.user}:{config.password}@{config.host}:{config.port}/",
            serverSelectionTimeoutMS=10000
        )
        client.server_info()  # forces the actual connection attempt
        client.close()
        return {"success": True, "message": "MongoDB连接成功"}
    except ImportError:
        return {"success": False, "message": "缺少pymongo驱动，请安装pymongo"}
    except Exception as e:
        return {"success": False, "message": f"MongoDB连接失败: {str(e)}"}

def _probe_redis(config: DatabaseConfig) -> dict:
    """Attempt a Redis PING."""
    try:
        import redis
        client = redis.Redis(
            host=config.host,
            port=config.port,
            password=config.password if config.password else None,
            socket_connect_timeout=10
        )
        client.ping()
        client.close()
        return {"success": True, "message": "Redis连接成功"}
    except ImportError:
        return {"success": False, "message": "缺少redis驱动，请安装redis"}
    except Exception as e:
        return {"success": False, "message": f"Redis连接失败: {str(e)}"}

# Dispatch table: database type -> probe helper
_CONNECTION_PROBES = {
    "mysql": _probe_mysql,
    "postgresql": _probe_postgresql,
    "mongodb": _probe_mongodb,
    "redis": _probe_redis,
}

@app.post("/api/v1/database/test-connection")
def test_database_connection(config: DatabaseConfig):
    """Probe connectivity for the database described by *config*.

    Always returns {"success": bool, "message": str}; unsupported types and
    unexpected errors are reported in the message rather than raised.
    """
    probe = _CONNECTION_PROBES.get(config.type)
    if probe is None:
        return {"success": False, "message": "不支持的数据库类型"}
    try:
        return probe(config)
    except Exception as e:
        return {"success": False, "message": f"连接失败: {str(e)}"}

# List the databases available on a server
@app.post("/api/v1/database/list-databases")
def list_databases(config: DatabaseConfig):
    """Return the database names visible on the configured server.

    Raises HTTP 500 with a driver-specific detail message when the lookup fails.
    """
    try:
        if config.type == "mysql":
            try:
                import pymysql
                connection = pymysql.connect(
                    host=config.host,
                    port=config.port,
                    user=config.user,
                    password=config.password,
                    connect_timeout=10
                )
                cursor = connection.cursor()
                cursor.execute("SHOW DATABASES")
                databases = [row[0] for row in cursor.fetchall()]
                cursor.close()
                connection.close()
                return {"databases": databases}
            except ImportError:
                raise HTTPException(status_code=500, detail="缺少pymysql驱动，请安装pymysql")
            except Exception as e:
                raise HTTPException(status_code=500, detail=f"MySQL获取数据库列表失败: {str(e)}")

        elif config.type == "postgresql":
            try:
                import psycopg2
                connection = psycopg2.connect(
                    host=config.host,
                    port=config.port,
                    user=config.user,
                    password=config.password,
                    database="postgres",
                    connect_timeout=10
                )
                cursor = connection.cursor()
                # Exclude template databases from the listing
                cursor.execute("SELECT datname FROM pg_database WHERE datistemplate = false")
                databases = [row[0] for row in cursor.fetchall()]
                cursor.close()
                connection.close()
                return {"databases": databases}
            except ImportError:
                raise HTTPException(status_code=500, detail="缺少psycopg2驱动，请安装psycopg2")
            except Exception as e:
                raise HTTPException(status_code=500, detail=f"PostgreSQL获取数据库列表失败: {str(e)}")

        elif config.type == "mongodb":
            try:
                import pymongo
                client = pymongo.MongoClient(
                    f"mongodb://{config.user}:{config.password}@{config.host}:{config.port}/",
                    serverSelectionTimeoutMS=10000
                )
                databases = client.list_database_names()
                client.close()
                return {"databases": databases}
            except ImportError:
                raise HTTPException(status_code=500, detail="缺少pymongo驱动，请安装pymongo")
            except Exception as e:
                raise HTTPException(status_code=500, detail=f"MongoDB获取数据库列表失败: {str(e)}")

        elif config.type == "redis":
            # Redis has no named databases, only numbered slots (16 by default)
            databases = [f"db{i}" for i in range(16)]
            return {"databases": databases}

        else:
            return {"databases": []}

    except HTTPException:
        # BUGFIX: let the specific HTTPExceptions raised above propagate as-is.
        # Previously they were caught by the generic handler below and re-wrapped,
        # losing the driver-specific detail message.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"获取数据库列表失败: {str(e)}")

# Task management API
@app.post("/api/v1/tasks/migrate", response_model=DTSTaskResponse)
def create_migration_task(task: DTSTaskCreate, db: Session = Depends(get_db)):
    """Persist a new migration task together with its runtime record."""
    try:
        new_task = DTSTask(
            name=task.name,
            type="migrate",
            source_config=json.dumps(task.source.model_dump()),
            target_config=json.dumps(task.target.model_dump()),
            options=json.dumps(task.options.model_dump()) if task.options else None
        )
        db.add(new_task)
        db.commit()
        db.refresh(new_task)

        # Every task gets a companion runtime row tracking execution state
        db.add(DTSTaskRuntime(task_id=new_task.id))
        db.commit()

        return task_to_response(new_task)
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"创建任务失败: {str(e)}")

def _migration_db_config(cfg: MultiDatabaseConfig, database: str) -> dict:
    """Build the per-database connection dict stored on a migrate task."""
    return {
        "type": cfg.type,
        "host": cfg.host,
        "port": cfg.port,
        "user": cfg.user,
        "password": cfg.password,
        "database": database
    }

def _add_migration_task(db: Session, name: str, source_cfg: dict,
                        target_cfg: dict, options_json) -> DTSTask:
    """Add one migrate task plus its runtime row to the session (no commit)."""
    db_task = DTSTask(
        name=name,
        type="migrate",
        source_config=json.dumps(source_cfg),
        target_config=json.dumps(target_cfg),
        options=options_json
    )
    db.add(db_task)
    db.flush()  # assign the primary key so the runtime row can reference it
    db.add(DTSTaskRuntime(task_id=db_task.id))
    return db_task

@app.post("/api/v1/tasks/migrate/batch", response_model=List[DTSTaskResponse])
def create_batch_migration_tasks(task: DTSMultiTaskCreate, db: Session = Depends(get_db)):
    """Create migration tasks for multiple databases in one atomic batch.

    When the source and target database lists have equal length they are
    paired positionally (source[i] -> target[i]); otherwise a full cross
    product of source x target databases is created (the frontend warns the
    user that ordering cannot be guaranteed in that case).

    BUGFIX: the whole batch is committed once at the end, so a mid-batch
    failure rolls back every task. The original committed per task, which made
    its rollback handler ineffective and could leave a partial batch behind.
    """
    try:
        options_json = json.dumps(task.options.model_dump()) if task.options else None

        if len(task.source.databases) == len(task.target.databases):
            # Equal counts: migrate positionally, source[i] -> target[i]
            pairs = list(zip(task.source.databases, task.target.databases))
        else:
            # Unequal counts: every source database to every target database
            pairs = [(s, t) for s in task.source.databases for t in task.target.databases]

        created = [
            _add_migration_task(
                db,
                f"{task.name} - {source_db} → {target_db}",
                _migration_db_config(task.source, source_db),
                _migration_db_config(task.target, target_db),
                options_json,
            )
            for source_db, target_db in pairs
        ]
        db.commit()
        return [task_to_response(t) for t in created]
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"创建批量任务失败: {str(e)}")

# Worker callback endpoint: report task completion/failure
@app.post("/api/v1/tasks/{task_id}/update-status")
async def update_task_status(task_id: int, result: dict, db: Session = Depends(get_db)):
    """Update a task's status (called by the worker when a job finishes).

    *result* is expected to carry ``status`` ("completed"/"failed") and an
    optional ``message``; any other status leaves the task status untouched.
    """
    try:
        task = db.query(DTSTask).filter(DTSTask.id == task_id).first()
        if task is None:
            raise HTTPException(status_code=404, detail="任务不存在")

        # Map the worker-reported result onto the task status
        if result.get("status") == "completed":
            task.status = "COMPLETED"
        elif result.get("status") == "failed":
            task.status = "FAILED"
            # Record the failure reason on the runtime row for later inspection
            runtime = db.query(DTSTaskRuntime).filter(DTSTaskRuntime.task_id == task_id).first()
            if runtime:
                runtime.last_error = result.get("message", "任务执行失败")

        db.commit()

        # Push the new status to any WebSocket subscribers of this task.
        # BUGFIX: the endpoint is now async and awaits the notification directly.
        # The original was a sync endpoint (run in FastAPI's threadpool) calling
        # asyncio.create_task(), which raises RuntimeError because no event loop
        # is running in that thread — the notification never went out and the
        # request returned a spurious 500 after the commit had already happened.
        await _notify_task_update(task_id, {
            "type": "status_update",
            "task_id": task_id,
            "status": task.status,
            "message": result.get("message", "")
        })

        return {"message": "任务状态更新成功"}
    except HTTPException:
        # BUGFIX: re-raise the 404 unchanged instead of wrapping it into a 500
        raise
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"更新任务状态失败: {str(e)}")

@app.get("/api/v1/tasks/{task_id}", response_model=DTSTaskResponse)
def get_task(task_id: int, db: Session = Depends(get_db)):
    """Fetch one task by id; 404 when it does not exist."""
    try:
        task = db.query(DTSTask).filter(DTSTask.id == task_id).first()
        if task is None:
            raise HTTPException(status_code=404, detail="任务不存在")
        return task_to_response(task)
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"获取任务详情失败: {str(e)}")

@app.post("/api/v1/tasks/{task_id}/start")
def start_task(task_id: int, db: Session = Depends(get_db)):
    """Dispatch a task to the Celery worker and mark it RUNNING.

    For migrate tasks the options select exactly one worker task, in this
    precedence: full (schema+data, no incremental) > schema-only > data-only >
    incremental sync.

    NOTE(review): if migrate_schema, migrate_data and enable_incremental are
    all false, no worker task is enqueued but the status is still set to
    RUNNING; likewise enable_incremental is ignored whenever schema or data
    migration is also requested — confirm this precedence is intended.
    """
    try:
        task = db.query(DTSTask).filter(DTSTask.id == task_id).first()
        if task:
            # Decode the stored JSON configs into the worker payload
            import json
            source_config = json.loads(task.source_config)
            target_config = json.loads(task.target_config)
            options = json.loads(task.options) if task.options else {}
            
            # Pick the Celery task based on task type and options
            if task.type == "migrate":
                # Which parts of the migration were requested (defaults: both)
                migrate_schema = options.get("migrate_schema", True)
                migrate_data = options.get("migrate_data", True)
                enable_incremental = options.get("enable_incremental", False)
                
                # Make the sibling dts-worker package importable
                # NOTE(review): sys.path mutation at request time is fragile;
                # consider packaging dts-worker or configuring PYTHONPATH instead
                import sys
                import os
                worker_path = os.path.join(os.path.dirname(__file__), '..', 'dts-worker')
                if worker_path not in sys.path:
                    sys.path.append(worker_path)
                
                # Enqueue the matching worker task
                if migrate_schema and migrate_data and not enable_incremental:
                    # Full migration (schema + data)
                    from tasks import migrate_full_task
                    migrate_full_task.delay(
                        task_id=task.id,
                        source_config=source_config,
                        target_config=target_config
                    )
                elif migrate_schema:
                    # Schema-only migration
                    from tasks import migrate_schema_task
                    migrate_schema_task.delay(
                        task_id=task.id,
                        source_config=source_config,
                        target_config=target_config
                    )
                elif migrate_data:
                    # Data-only migration, optionally filtered by a condition
                    from tasks import migrate_data_task
                    filter_condition = options.get("filter_condition", None)
                    migrate_data_task.delay(
                        task_id=task.id,
                        source_config=source_config,
                        target_config=target_config,
                        filter_condition=filter_condition
                    )
                elif enable_incremental:
                    # Incremental synchronization
                    from tasks import sync_incremental_task
                    sync_incremental_task.delay(
                        task_id=task.id,
                        source_config=source_config,
                        target_config=target_config
                    )
            
            # Mark the task as running
            task.status = "RUNNING"
            db.commit()
            return {"message": "任务启动成功"}
        else:
            raise HTTPException(status_code=404, detail="任务不存在")
    except HTTPException:
        raise
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"启动任务失败: {str(e)}")

@app.post("/api/v1/tasks/{task_id}/pause")
def pause_task(task_id: int, db: Session = Depends(get_db)):
    """Mark a task as PAUSED; 404 when it does not exist."""
    try:
        task = db.query(DTSTask).filter(DTSTask.id == task_id).first()
        if task is None:
            raise HTTPException(status_code=404, detail="任务不存在")
        task.status = "PAUSED"
        db.commit()
        return {"message": "任务暂停成功"}
    except HTTPException:
        raise
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"暂停任务失败: {str(e)}")

@app.post("/api/v1/tasks/{task_id}/resume")
def resume_task(task_id: int, db: Session = Depends(get_db)):
    """Put a paused task back into the RUNNING state; 404 when it does not exist."""
    try:
        task = db.query(DTSTask).filter(DTSTask.id == task_id).first()
        if task is None:
            raise HTTPException(status_code=404, detail="任务不存在")
        task.status = "RUNNING"
        db.commit()
        return {"message": "任务恢复成功"}
    except HTTPException:
        raise
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"恢复任务失败: {str(e)}")

@app.delete("/api/v1/tasks/{task_id}")
def delete_task(task_id: int, db: Session = Depends(get_db)):
    """Delete a task by id; 404 when it does not exist."""
    try:
        task = db.query(DTSTask).filter(DTSTask.id == task_id).first()
        if task is None:
            raise HTTPException(status_code=404, detail="任务不存在")
        db.delete(task)
        db.commit()
        return {"message": "任务删除成功"}
    except HTTPException:
        raise
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"删除任务失败: {str(e)}")

# WebSocket connection registry: task_id -> list of live WebSocket connections
active_connections = {}

@app.websocket("/api/v1/ws/task/{task_id}")
async def websocket_endpoint(websocket: WebSocket, task_id: int):
    """WebSocket endpoint that streams status updates for a single task."""
    await websocket.accept()
    # Register this socket under its task id
    if task_id not in active_connections:
        active_connections[task_id] = []
    active_connections[task_id].append(websocket)
    try:
        # Ignore inbound messages; just hold the connection open
        while True:
            await websocket.receive_text()
    except WebSocketDisconnect:
        # Unregister, and drop the task entry once its last socket is gone
        active_connections[task_id].remove(websocket)
        if not active_connections[task_id]:
            del active_connections[task_id]

# Broadcast a message to every WebSocket subscribed to a task
async def _notify_task_update(task_id: int, message: dict):
    """Send *message* (JSON-encoded) to all WebSocket connections of *task_id*.

    Connections that fail to receive are dropped; the registry entry for the
    task is removed once no connections remain.
    """
    if task_id not in active_connections:
        return

    payload = json.dumps(message)

    # Iterate over a snapshot so dead connections can be removed while looping
    for ws in list(active_connections[task_id]):
        try:
            await ws.send_text(payload)
        except Exception:
            # Sending failed — treat the connection as dead and unregister it
            if ws in active_connections[task_id]:
                active_connections[task_id].remove(ws)

    # Drop the registry entry when the last connection is gone
    if not active_connections[task_id]:
        del active_connections[task_id]

# Run a development server when this module is executed directly
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
