import asyncio
import json
import aiohttp
from fastapi import WebSocket, WebSocketDisconnect
from fastapi.responses import HTMLResponse
import logging

logger = logging.getLogger(__name__)

class WebSocketManager:
    """Registry of active WebSocket connections with per-client and broadcast sends."""

    def __init__(self) -> None:
        # Every connection that has completed the accept handshake and not yet disconnected.
        self.active_connections: list[WebSocket] = []

    async def connect(self, websocket: WebSocket) -> None:
        """Accept the WebSocket handshake and register the connection."""
        await websocket.accept()
        self.active_connections.append(websocket)

    def disconnect(self, websocket: WebSocket) -> None:
        """Unregister the connection; safe to call more than once (idempotent)."""
        if websocket in self.active_connections:
            self.active_connections.remove(websocket)

    async def send_personal_message(self, message: str, websocket: WebSocket) -> None:
        """Send *message* to a single client."""
        await websocket.send_text(message)

    async def broadcast(self, message: str) -> None:
        """Send *message* to every connected client.

        Fix: a client that dropped without a clean close used to raise out of
        the loop, so later clients never received the message and the dead
        socket stayed registered. Failed sends are now logged and the dead
        connection is pruned, while the remaining clients still get *message*.
        """
        # Iterate a snapshot because disconnect() mutates active_connections.
        for connection in list(self.active_connections):
            try:
                await connection.send_text(message)
            except Exception as e:
                logging.getLogger(__name__).warning(
                    "broadcast failed, removing dead connection: %s", e
                )
                self.disconnect(connection)

# Module-level singleton shared by all handlers in this file to track live connections.
websocket_manager = WebSocketManager()

async def _relay_ollama_line(websocket: WebSocket, line: str):
    """Parse one NDJSON line from Ollama and forward its text content to the client.

    Malformed lines (or a send failure) are logged and skipped so a single bad
    record does not tear down the streaming session.
    """
    try:
        ollama_response = json.loads(line)
        # Streaming records look like {"message": {"content": "..."}, "done": bool, ...}.
        if "message" in ollama_response and "content" in ollama_response["message"]:
            content = ollama_response["message"]["content"]
            if content:
                await websocket.send_text(json.dumps({
                    "type": "chunk",
                    "content": content
                }))
    except Exception as e:
        logger.error(f"处理Ollama响应时出错: {str(e)}")
        # Continue with the next line; do not interrupt the connection.


async def _proxy_chat_to_ollama(websocket: WebSocket, ollama_config,
                                ollama_base_url: str, ollama_timeout, messages):
    """Forward one chat request to Ollama and stream the reply back over *websocket*.

    Sends "chunk" records as they arrive, then a final {"type": "done"}.
    On a non-200 response an "error" record is sent instead.
    """
    ollama_request = {
        "model": ollama_config.model,
        "messages": messages,
        "stream": True  # request an NDJSON stream of partial replies
    }

    ollama_chat_url = f"{ollama_base_url}/api/chat"
    logger.debug(f"转发请求到Ollama: {ollama_chat_url}")

    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=ollama_timeout)) as session:
        async with session.post(ollama_chat_url, json=ollama_request) as response:
            if response.status != 200:
                error_content = await response.text()
                logger.error(f"Ollama API调用失败: {response.status}, {error_content}")
                await websocket.send_text(json.dumps({
                    "type": "error",
                    "content": f"Ollama API调用失败: {response.status}"
                }))
                return

            # Accumulate raw BYTES and split on newlines before decoding.
            # Fix: the original decoded each chunk independently, so a chunk
            # boundary inside a multi-byte UTF-8 character (common with CJK
            # text) raised UnicodeDecodeError and dropped data.
            buffer = b""
            async for chunk in response.content.iter_any():
                if not chunk:
                    continue
                buffer += chunk
                while b"\n" in buffer:
                    raw_line, buffer = buffer.split(b"\n", 1)
                    try:
                        line = raw_line.decode("utf-8").strip()
                    except Exception as e:
                        logger.error(f"处理流式数据时出错: {str(e)}")
                        continue
                    if line:
                        await _relay_ollama_line(websocket, line)

            # Fix: flush a final record that arrived without a trailing
            # newline — the original left it in the buffer and dropped it.
            try:
                tail = buffer.decode("utf-8").strip()
            except Exception as e:
                logger.error(f"处理流式数据时出错: {str(e)}")
                tail = ""
            if tail:
                await _relay_ollama_line(websocket, tail)

            # Signal the client that this reply is complete.
            await websocket.send_text(json.dumps({"type": "done"}))
            logger.debug("WebSocket聊天会话已完成")


async def handle_websocket_chat(websocket: WebSocket, config_manager):
    """Serve one WebSocket chat session, proxying each request to Ollama.

    Protocol: the client sends JSON ``{"messages": [...]}``; the server
    streams back ``{"type": "chunk", "content": ...}`` records followed by
    ``{"type": "done"}``, or a ``{"type": "error", ...}`` record on failure.
    The loop runs until the client disconnects.

    Args:
        websocket: routed FastAPI WebSocket to serve (not yet accepted).
        config_manager: exposes ``llm_providers.ollama`` with ``base_url``,
            ``timeout`` and ``model`` — assumed constant for the session.
    """
    await websocket_manager.connect(websocket)
    logger.info("WebSocket客户端已连接")

    ollama_config = config_manager.llm_providers.ollama
    # Read connection settings once up front.
    ollama_base_url = ollama_config.base_url
    ollama_timeout = ollama_config.timeout

    try:
        while True:
            # Wait for the next client request.
            data = await websocket.receive_text()
            logger.debug(f"接收到客户端消息: {data}")

            try:
                request_data = json.loads(data)
                messages = request_data.get("messages", [])

                if not messages:
                    await websocket.send_text(json.dumps({
                        "type": "error",
                        "content": "缺少messages参数"
                    }))
                    continue

                await _proxy_chat_to_ollama(
                    websocket, ollama_config, ollama_base_url, ollama_timeout, messages
                )

            except json.JSONDecodeError as e:
                logger.error(f"JSON解析错误: {str(e)}")
                await websocket.send_text(json.dumps({
                    "type": "error",
                    "content": f"JSON解析错误: {str(e)}"
                }))
            except Exception as e:
                # Report internal failures (including Ollama connection errors
                # raised out of the proxy helper) but keep the session alive.
                logger.error(f"处理WebSocket消息时出错: {str(e)}")
                await websocket.send_text(json.dumps({
                    "type": "error",
                    "content": f"内部错误: {str(e)}"
                }))

    except WebSocketDisconnect:
        logger.info("WebSocket客户端已断开连接")
    except Exception as e:
        logger.error(f"WebSocket连接异常: {str(e)}")
    finally:
        # Always unregister the connection, whatever ended the session.
        websocket_manager.disconnect(websocket)