from fastapi import FastAPI, WebSocket, WebSocketDisconnect
import json
import asyncio
import os
import sys
from typing import Dict

# 添加项目根目录到路径
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

from core.processing_chain import DocumentGenerationChain
from tools.ollama_manager import OllamaManager
from tools.vector_store import VectorStoreManager
from data.loaders import CodeLoader
from data.splitters import CodeSplitter

# FastAPI application exposing the code-documentation-generation WebSocket API.
app = FastAPI(title="代码文档生成WebSocket API")


# 连接管理器
class ConnectionManager:
    """Registry of live WebSocket connections, keyed by client id."""

    def __init__(self):
        # client_id -> accepted WebSocket connection.
        self.active_connections: Dict[str, WebSocket] = {}

    async def connect(self, websocket: WebSocket, client_id: str):
        """Complete the WebSocket handshake and register the connection."""
        await websocket.accept()
        self.active_connections[client_id] = websocket

    def disconnect(self, client_id: str):
        """Drop the connection for *client_id*; unknown ids are a no-op."""
        self.active_connections.pop(client_id, None)

    async def send_message(self, client_id: str, message: str):
        """Send a text frame to *client_id*; silently skip unknown clients."""
        connection = self.active_connections.get(client_id)
        if connection is not None:
            await connection.send_text(message)


# Single module-level connection registry shared by all endpoint handlers.
manager = ConnectionManager()


async def stream_process_file(client_id: str, file_path: str, file_content: str, model_name: str):
    """Process a single source file and stream progress/results to the client.

    Emits JSON text frames over the client's WebSocket, each with a "type" field:
    "start" (processing began), "progress" (status with a 0.0-1.0 fraction),
    "result" (final generated documentation), or "error" (failure description).

    Args:
        client_id: Key of the target connection in the module-level ``manager``.
        file_path: Path of the file being processed (echoed back in messages,
            and used to pick a splitter by extension).
        file_content: Raw source text to document.
        model_name: Name of the Ollama model used for generation.
    """
    # BUG FIX: ``Document`` was referenced here but never imported anywhere in
    # this module, so every call raised NameError. Imported lazily because the
    # class moved between langchain releases.
    try:
        from langchain_core.documents import Document
    except ImportError:  # older langchain layout
        from langchain.schema import Document

    async def _send(payload: dict) -> None:
        # Serialize and push one JSON frame to this client.
        await manager.send_message(client_id, json.dumps(payload))

    try:
        await _send({"type": "start", "file": file_path})

        # Resolve the LLM backing the generation chain.
        ollama_manager = OllamaManager()
        llm = ollama_manager.get_model(model_name)

        # Per-client temporary vector store.
        # NOTE(review): this directory is never cleaned up after processing —
        # consider deleting it when the task finishes.
        temp_db_path = f"vectordb/ws_{client_id}"
        vector_store_manager = VectorStoreManager()
        db = vector_store_manager.init_chroma(persist_directory=temp_db_path)

        # Split the file with a splitter chosen by its extension.
        _, ext = os.path.splitext(file_path)
        splitter = CodeSplitter.create_splitter(ext)
        chunks = splitter.split_text(file_content)

        await _send({
            "type": "progress",
            "file": file_path,
            "progress": 0.2,
            "message": "正在准备文档...",
        })

        # Wrap each chunk as a Document and index it in the vector store.
        doc_chunks = [
            Document(
                page_content=chunk,
                metadata={"source": file_path, "chunk_id": i},
            )
            for i, chunk in enumerate(chunks)
        ]
        vector_store_manager.add_documents(doc_chunks)

        await _send({
            "type": "progress",
            "file": file_path,
            "progress": 0.4,
            "message": "正在处理文档...",
        })

        # Build the generation chain and run it over the whole file content.
        doc_chain = DocumentGenerationChain(llm, db)
        doc_chain.setup_chain()
        result = doc_chain.process_file(file_content)

        await _send({
            "type": "result",
            "file": file_path,
            "content": result,
            "progress": 1.0,
        })

    except Exception as e:
        # Report the failure to the client instead of letting the background
        # task die silently.
        await _send({
            "type": "error",
            "file": file_path,
            "error": str(e),
        })


@app.websocket("/ws/{client_id}")
async def websocket_endpoint(websocket: WebSocket, client_id: str):
    """Per-client WebSocket endpoint.

    Protocol (JSON text frames from the client):
      {"type": "process_file", "file_path": ..., "content": ..., "model_name"?: ...}
          -> starts background documentation generation; replies are streamed
             by ``stream_process_file`` ("start"/"progress"/"result"/"error").
      {"type": "ping"} -> answered with {"type": "pong"} (heartbeat).
    Any other message type is ignored.
    """
    await manager.connect(websocket, client_id)
    # Hold strong references to spawned tasks: asyncio only keeps a weak
    # reference to tasks, so an unreferenced task can be garbage-collected
    # before it finishes.
    pending_tasks: set = set()
    try:
        while True:
            data = await websocket.receive_text()
            message = json.loads(data)

            if message["type"] == "process_file":
                # Process asynchronously so the receive loop stays responsive.
                task = asyncio.create_task(
                    stream_process_file(
                        client_id,
                        message["file_path"],
                        message["content"],
                        message.get("model_name", "codellama"),
                    )
                )
                pending_tasks.add(task)
                task.add_done_callback(pending_tasks.discard)

            elif message["type"] == "ping":
                # Heartbeat check.
                await manager.send_message(client_id, json.dumps({"type": "pong"}))

    except WebSocketDisconnect:
        manager.disconnect(client_id)
    except Exception as e:
        # Best-effort error report: the socket may already be unusable, so a
        # narrow Exception catch (was a bare ``except:``) guards the send.
        try:
            await manager.send_message(
                client_id,
                json.dumps({
                    "type": "error",
                    "error": str(e),
                })
            )
        except Exception:
            pass
        finally:
            manager.disconnect(client_id)


if __name__ == "__main__":
    # uvicorn is imported lazily here so importing this module (e.g. from
    # tests or another ASGI host) does not require it.
    import uvicorn

    # Serve on all interfaces; port 8001 to avoid clashing with the main API.
    uvicorn.run(app, host="0.0.0.0", port=8001)
