import httpx
import json
import uuid
import time
import logging
from typing import Dict, Any, AsyncGenerator, Union

from fastapi import Request, HTTPException
from fastapi.responses import StreamingResponse, JSONResponse

from app.providers.base_provider import BaseProvider
from app.providers.worker_manager import worker_manager, Worker
from app.core.config import settings

logger = logging.getLogger(__name__)

class WritifyAIProvider(BaseProvider):
    """
    Writify.ai provider (v2.2 - production-grade stream/non-stream separation).

    Adapts OpenAI-style chat completion requests to Writify.ai's WordPress
    "AI Engine" chat endpoint. The upstream is always asked for a stream
    (see `_prepare_request_parts`); that stream is then either relayed as
    OpenAI-compatible SSE chunks or accumulated into a single JSON response.
    """

    # Upstream chat endpoint (WordPress AI Engine plugin route).
    API_URL = "https://writify.ai/wp-json/mwai-ui/v1/chats/submit"

    async def chat_completion(self, request_data: Dict[str, Any], original_request: Request) -> Union[StreamingResponse, JSONResponse]:
        """
        Dispatch an OpenAI-style chat request to Writify.ai.

        Args:
            request_data: Parsed OpenAI-compatible request body.
            original_request: Incoming FastAPI request (currently unused;
                kept for provider-interface compatibility).

        Returns:
            A StreamingResponse (SSE) when ``stream`` is truthy, otherwise a
            JSONResponse containing a complete ``chat.completion`` object.

        Raises:
            HTTPException: 400 for malformed client input (no user message),
                502 when the upstream request fails.
        """
        worker = await worker_manager.get_worker()
        try:
            model = request_data.get("model", settings.DEFAULT_MODEL)
            is_stream = request_data.get("stream", False)

            headers, payload = self._prepare_request_parts(request_data, model, worker)

            if is_stream:
                # NOTE: once we return, failures inside the generator occur
                # after the response headers are sent; they are reported
                # in-band as SSE error chunks, not via the except below.
                return StreamingResponse(
                    self._stream_response_generator(self.API_URL, headers, payload, model),
                    media_type="text/event-stream",
                )
            return await self._non_stream_response(self.API_URL, headers, payload, model)

        except ValueError as e:
            # Client-side problem (e.g. no user message): not the worker's
            # fault, so do not retire it, and report 400 rather than 502.
            raise HTTPException(status_code=400, detail=str(e)) from e
        except Exception as e:
            logger.error(f"使用 Worker {worker} 处理请求失败: {e}", exc_info=True)
            # Assume the credentials went stale / got blocked; rotate them out.
            await worker_manager.retire_worker(worker)
            raise HTTPException(status_code=502, detail=f"上游服务请求失败: {e}") from e

    def _prepare_request_parts(self, request_data: Dict[str, Any], model: str, worker: Worker) -> tuple[Dict, Dict]:
        """
        Build the upstream request headers and JSON payload.

        Args:
            request_data: OpenAI-compatible request body.
            model: Writify bot id to target (sent as ``botId``).
            worker: Credential holder supplying the session cookie and
                WordPress nonce.

        Returns:
            ``(headers, payload)`` ready for the upstream POST.

        Raises:
            ValueError: If no message with role ``user`` is present.
        """
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/141.0.0.0 Safari/537.36",
            "Referer": "https://writify.ai/tool/ai-chat/",
            "Origin": "https://writify.ai",
            "Cookie": worker.cookie,
            "X-WP-Nonce": worker.nonce,
            "Content-Type": "application/json",
        }

        messages = request_data.get("messages", [])
        # The upstream consumes a single "new message"; take the most recent
        # user turn while still forwarding the full history.
        last_user_message = next(
            (msg["content"] for msg in reversed(messages) if msg["role"] == "user"),
            "",
        )
        if not last_user_message:
            raise ValueError("请求中未找到用户消息。")

        payload = {
            "botId": model,
            "customId": None,
            "session": f"session-{uuid.uuid4()}",
            "chatId": f"chat-{uuid.uuid4()}",
            "contextId": 1000012232,  # presumably a fixed site context id — TODO confirm
            "messages": messages,
            "newMessage": last_user_message,
            "newFileId": None,
            # Always request a stream upstream so both response modes can
            # share the same SSE parsing path.
            "stream": True,
        }
        return headers, payload

    @staticmethod
    def _extract_sse_payload(line: str) -> Union[Dict[str, Any], None]:
        """
        Parse one upstream SSE line.

        Returns the decoded JSON object for a ``data:`` line, or ``None``
        for non-data lines, empty payloads, and unparseable JSON (the
        latter is logged and skipped).
        """
        if not line.startswith("data:"):
            return None
        data_str = line[len("data:"):].strip()
        if not data_str:
            return None
        try:
            return json.loads(data_str)
        except json.JSONDecodeError:
            logger.warning(f"无法解析 SSE 数据: {data_str}")
            return None

    async def _stream_response_generator(self, url: str, headers: Dict, payload: Dict, model: str) -> AsyncGenerator[str, None]:
        """
        Relay the upstream SSE stream as OpenAI ``chat.completion.chunk`` SSE.

        Emits a role chunk first, then one content chunk per upstream
        ``live`` event, then a stop chunk and the ``[DONE]`` sentinel.
        Failures are reported in-band (as an error chunk) because the HTTP
        headers have already been sent by the time this generator runs.
        """
        chat_id = f"chatcmpl-{uuid.uuid4().hex}"
        try:
            async with httpx.AsyncClient(timeout=settings.API_REQUEST_TIMEOUT) as client:
                async with client.stream("POST", url, headers=headers, json=payload) as response:
                    response.raise_for_status()

                    # Announce the assistant role before any content deltas.
                    role_chunk = {"id": chat_id, "object": "chat.completion.chunk", "created": int(time.time()), "model": model, "choices": [{"index": 0, "delta": {"role": "assistant"}, "finish_reason": None}]}
                    yield f"data: {json.dumps(role_chunk)}\n\n"

                    async for line in response.aiter_lines():
                        event = self._extract_sse_payload(line)
                        if event is None:
                            continue
                        if event.get("type") == "live":
                            delta_chunk = {"id": chat_id, "object": "chat.completion.chunk", "created": int(time.time()), "model": model, "choices": [{"index": 0, "delta": {"content": event.get("data", "")}, "finish_reason": None}]}
                            yield f"data: {json.dumps(delta_chunk)}\n\n"
                        elif event.get("type") == "end":
                            break

            # Emit the terminating stop chunk and the SSE done sentinel.
            final_chunk = {"id": chat_id, "object": "chat.completion.chunk", "created": int(time.time()), "model": model, "choices": [{"index": 0, "delta": {}, "finish_reason": "stop"}]}
            yield f"data: {json.dumps(final_chunk)}\n\n"
            yield "data: [DONE]\n\n"

        except (httpx.RequestError, httpx.HTTPStatusError) as e:
            # BUG FIX: raise_for_status() raises HTTPStatusError, which is
            # NOT a RequestError subclass — previously an upstream 4xx/5xx
            # crashed the generator without a graceful error chunk.
            logger.error(f"请求上游 API 时发生网络错误: {e}")
            error_chunk = {"id": chat_id, "object": "chat.completion.chunk", "created": int(time.time()), "model": model, "choices": [{"index": 0, "delta": {"content": f"\n\n[ERROR: {e}]"}, "finish_reason": "error"}]}
            yield f"data: {json.dumps(error_chunk)}\n\n"
            yield "data: [DONE]\n\n"

    async def _non_stream_response(self, url: str, headers: Dict, payload: Dict, model: str) -> JSONResponse:
        """
        Consume the upstream SSE stream fully and return one JSON completion.

        Errors propagate to the caller (`chat_completion`), which retires
        the worker and converts them to an HTTPException.
        """
        full_content = ""
        async with httpx.AsyncClient(timeout=settings.API_REQUEST_TIMEOUT) as client:
            async with client.stream("POST", url, headers=headers, json=payload) as response:
                response.raise_for_status()
                async for line in response.aiter_lines():
                    event = self._extract_sse_payload(line)
                    if event is None:
                        continue
                    if event.get("type") == "live":
                        full_content += event.get("data", "")
                    elif event.get("type") == "end":
                        break

        # Token accounting is not available from the upstream; report zeros.
        return JSONResponse({
            "id": f"chatcmpl-{uuid.uuid4().hex}",
            "object": "chat.completion",
            "created": int(time.time()),
            "model": model,
            "choices": [{"index": 0, "message": {"role": "assistant", "content": full_content.strip()}, "finish_reason": "stop"}],
            "usage": {"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0}
        })
