import asyncio
import base64
import hashlib
import json
import time
import traceback
import uuid
from typing import Any, AsyncGenerator, Dict, Optional, Union

import httpx
from fastapi import Request
from fastapi.responses import JSONResponse, StreamingResponse
from loguru import logger

from app.core.config import settings
from app.providers.base_provider import BaseProvider
from app.utils.sse_utils import create_chat_completion_chunk, create_sse_data

class InkeepProvider(BaseProvider):
    """OpenAI-compatible chat provider backed by the Inkeep API.

    The Inkeep endpoint is protected by an Altcha-style proof-of-work (PoW)
    challenge: before each chat request we fetch a challenge, brute-force the
    nonce whose SHA-256(salt + nonce) equals the challenge hash, and send the
    base64-encoded solution in the ``x-inkeep-challenge-solution`` header.
    Responses are always streamed back as OpenAI-style SSE chunks.
    """

    # Base URL of the Inkeep OpenAI-compatible API.
    BASE_URL_V1 = "https://api.inkeep.com/v1"

    def __init__(self):
        # Created lazily in initialize(); stays None until then.
        self.client: Optional[httpx.AsyncClient] = None

    async def initialize(self):
        """Create the shared async HTTP client. Call once before first use."""
        self.client = httpx.AsyncClient(timeout=settings.API_REQUEST_TIMEOUT)

    async def close(self):
        """Dispose of the HTTP client if it was created."""
        if self.client:
            await self.client.aclose()

    def _require_client(self) -> httpx.AsyncClient:
        """Return the initialized client, failing fast with a clear error
        instead of an opaque AttributeError on ``None``."""
        if self.client is None:
            raise RuntimeError("InkeepProvider is not initialized; call initialize() first.")
        return self.client

    async def chat_completion(self, request_data: Dict[str, Any], original_request: Request) -> Union[StreamingResponse, JSONResponse]:
        """Entry point for a chat completion request.

        The Inkeep API is stream-oriented, so every request is routed to the
        streaming handler regardless of the client's ``stream`` flag. Any
        failure before streaming starts is returned as a JSON 500 error.
        """
        try:
            logger.info("检测到 Inkeep 聊天任务，开始处理...")
            # Inkeep API 主要通过流式工作，这里统一引导至流式处理
            return await self._handle_stream_task(request_data)
        except Exception as e:
            logger.error(f"处理 Inkeep 任务时出错: {type(e).__name__}: {e}")
            traceback.print_exc()
            return JSONResponse(content={"error": {"message": f"处理任务时出错: {e}", "type": "provider_error"}}, status_code=500)

    def _solve_pow(self, algorithm: str, challenge: str, max_number: int, salt: str) -> int:
        """Brute-force the Altcha-style PoW challenge.

        Finds the nonce ``n`` in [0, max_number] such that
        SHA-256(salt + str(n)) hex-digest equals *challenge* (the target hash
        is the 'challenge' field of the server payload, NOT 'signature').

        Raises:
            ValueError: if *algorithm* is not SHA-256.
            RuntimeError: if no nonce in range solves the challenge.

        NOTE: CPU-bound and synchronous — callers on the event loop must run
        it in a worker thread (see _get_pow_response).
        """
        start_time = time.time()
        logger.info(f"   [PoW] 开始计算挑战, 范围: {max_number}...")

        if algorithm.upper() != 'SHA-256':
            raise ValueError(f"不支持的哈希算法: {algorithm}")

        for number in range(max_number + 1):
            data_to_hash = f"{salt}{number}".encode('utf-8')
            h = hashlib.sha256(data_to_hash).hexdigest()
            if h == challenge:
                end_time = time.time()
                logger.success(f"   [PoW] 挑战成功! Nonce: {number}, 耗时: {(end_time - start_time)*1000:.2f}ms")
                return number

        raise RuntimeError("未能在范围内找到 PoW 解决方案。")

    async def _get_pow_response(self) -> str:
        """Fetch a PoW challenge, solve it, and return the base64-encoded
        JSON solution expected in the ``x-inkeep-challenge-solution`` header.
        """
        client = self._require_client()
        headers = self._prepare_headers()

        response = await client.get(f"{self.BASE_URL_V1}/challenge", headers=headers)
        response.raise_for_status()
        challenge_data = response.json()
        logger.info(f"   [PoW] 收到质询数据: {challenge_data}")

        # Run the brute-force loop (up to ~50k SHA-256 hashes) in a worker
        # thread so it does not block the event loop for other requests.
        answer = await asyncio.to_thread(
            self._solve_pow,
            challenge_data["algorithm"],
            challenge_data["challenge"],  # the PoW target is 'challenge', not 'signature'
            challenge_data.get("maxnumber", 50000),
            challenge_data["salt"],
        )

        # Echo the full challenge payload back with the solved nonce attached.
        pow_response_data = {**challenge_data, "number": answer}
        return base64.b64encode(json.dumps(pow_response_data).encode('utf-8')).decode('utf-8')

    async def _handle_stream_task(self, request_data: Dict[str, Any]) -> StreamingResponse:
        """Solve the PoW challenge, then open the upstream SSE stream.

        PoW failures are reported as an SSE error stream (status 500) rather
        than a plain JSON body, so streaming clients still get a well-formed
        event stream ending in [DONE].
        """
        headers = self._prepare_headers()
        model_name_for_client = request_data.get("model", settings.DEFAULT_MODEL)

        try:
            pow_response_header = await self._get_pow_response()
            headers["x-inkeep-challenge-solution"] = pow_response_header
        except Exception as e:
            logger.error(f"   [Error] 获取或解决 PoW 挑战失败: {e}")
            traceback.print_exc()
            async def error_stream():
                error_payload = {
                    "error": {
                        "message": f"Failed to solve Inkeep PoW challenge: {e}",
                        "type": "authentication_error",
                        "code": "pow_failed"
                    }
                }
                yield f"data: {json.dumps(error_payload)}\n\n"
                yield "data: [DONE]\n\n"
            return StreamingResponse(error_stream(), media_type="text/event-stream", status_code=500)

        payload = self._prepare_payload(request_data)

        logger.info(f"   [Request] 正在向模型 '{model_name_for_client}' 发送流式请求...")
        return StreamingResponse(
            self._stream_generator(f"{self.BASE_URL_V1}/chat/completions", headers, payload, model_name_for_client),
            media_type="text/event-stream"
        )

    async def _stream_generator(self, url: str, headers: Dict, payload: Dict, model_name: str) -> AsyncGenerator[str, None]:
        """Proxy the upstream SSE stream, re-emitting OpenAI-style chunks.

        Malformed SSE lines are logged and skipped. On any failure the stream
        still terminates cleanly with a finish chunk and [DONE].
        """
        request_id = f"chatcmpl-{uuid.uuid4()}"
        finish_sent = False  # did upstream already deliver a finish_reason?
        try:
            client = self._require_client()
            async with client.stream("POST", url, headers=headers, json=payload) as response:
                try:
                    response.raise_for_status()
                except httpx.HTTPStatusError as e:
                    # Read the error body while the stream context is still
                    # open — after 'async with' exits it can no longer be read.
                    error_content = await e.response.aread()
                    logger.error(f"   [Error] 流式请求失败，状态码: {e.response.status_code}, 响应: {error_content.decode()}")
                else:
                    async for line in response.aiter_lines():
                        if not line.startswith("data:"):
                            continue
                        content = line[len("data:"):].strip()
                        if content == "[DONE]":
                            break
                        try:
                            data = json.loads(content)
                            choice = data.get("choices", [{}])[0]
                            delta = choice.get("delta", {})
                            finish_reason = choice.get("finish_reason")
                        except (json.JSONDecodeError, IndexError) as e:
                            logger.warning(f"   [Warning] SSE 数据解析失败: {e}, 内容: {content}")
                            continue

                        # Forward only chunks that carry content or a finish signal.
                        if delta or finish_reason:
                            if finish_reason:
                                finish_sent = True
                            chunk = create_chat_completion_chunk(
                                request_id=request_id,
                                model=model_name,
                                delta=delta,
                                finish_reason=finish_reason
                            )
                            yield create_sse_data(chunk)

        except Exception as e:
            logger.error(f"   [Error] 流式生成器发生未知错误: {e}")
            traceback.print_exc()

        finally:
            # Synthesize a terminal "stop" chunk only when upstream never sent
            # a finish_reason, so clients don't see a duplicate finish chunk.
            if not finish_sent:
                final_chunk = create_chat_completion_chunk(request_id, model_name, {}, "stop")
                yield create_sse_data(final_chunk)
            logger.info("   [Stream] 流式传输结束。")
            yield "data: [DONE]\n\n"

    def _prepare_headers(self) -> Dict[str, str]:
        """Build the browser-mimicking headers the Inkeep endpoint expects.

        Raises:
            ValueError: if INKEEP_BEARER_TOKEN is not configured.
        """
        if not settings.INKEEP_BEARER_TOKEN:
            raise ValueError("INKEEP_BEARER_TOKEN 未配置。")

        return {
            "Accept": "application/json, text/plain, */*",
            "Content-Type": "application/json",
            "Authorization": settings.INKEEP_BEARER_TOKEN,
            "Origin": "https://docs.claude.com",
            "Referer": "https://docs.claude.com/",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/141.0.0.0 Safari/537.36",
            "x-stainless-arch": "unknown",
            "x-stainless-helper-method": "stream",
            "x-stainless-lang": "js",
            "x-stainless-os": "Unknown",
            "x-stainless-package-version": "4.78.1",
            "x-stainless-retry-count": "0",
            "x-stainless-runtime": "browser:chrome",
            "x-stainless-runtime-version": "141.0.0"
        }

    def _prepare_payload(self, request_data: Dict[str, Any]) -> Dict[str, Any]:
        """Translate the incoming request into the upstream payload.

        ``stream`` is always forced on (the provider only streams); other
        OpenAI sampling params are deliberately not forwarded.
        """
        payload = {
            "stream": True,
            "model": request_data.get("model", settings.DEFAULT_MODEL),
            "messages": request_data.get("messages", []),
        }
        # tools / tool_choice are forwarded only when tools are present,
        # since tool_choice is meaningless without them.
        if "tools" in request_data and request_data["tools"]:
            payload["tools"] = request_data["tools"]
            if "tool_choice" in request_data:
                payload["tool_choice"] = request_data["tool_choice"]
        return payload

    async def get_models(self) -> JSONResponse:
        """Return the configured model list in OpenAI /v1/models format."""
        model_data = {
            "object": "list",
            "data": [
                {"id": name, "object": "model", "created": int(time.time()), "owned_by": "inkeep"}
                for name in settings.KNOWN_MODELS
            ]
        }
        return JSONResponse(content=model_data)
