import asyncio
import time
import random
import logging
import threading
from typing import Optional
from contextlib import asynccontextmanager
import aiohttp
from dataclasses import dataclass
from enum import Enum
from app.core.config import settings  # 新增

logger = logging.getLogger(__name__)

class APIType(Enum):
    """Coze API traffic classes, each throttled on its own channel."""
    UPLOAD = "upload"   # image-upload channel (noted as 10 QPS; actual limits come from settings)
    GENERAL = "general" # general API channel (noted as 200 QPS; actual limits come from settings)

@dataclass
class RateLimitConfig:
    """Rate-limit parameters for one API channel."""
    # Steady-state requests per second replenished into the token bucket.
    qps: float
    # Bucket capacity: how many requests may be issued in a burst.
    burst: int
    # Maximum simultaneous in-flight requests (semaphore size).
    max_concurrent: int

class TokenBucket:
    """Asynchronous token bucket rate limiter.

    Tokens accrue continuously at ``qps`` per second up to a ceiling of
    ``burst``; each ``acquire()`` consumes one token, sleeping until one
    is available.
    """

    def __init__(self, qps: float, burst: int):
        self.qps = qps
        self.burst = burst
        self.tokens = float(burst)  # start with a full bucket
        # Use a monotonic clock: time.time() can jump on NTP/wall-clock
        # adjustments, which would stall refills or over-credit tokens.
        self.last_refill = time.monotonic()
        self._lock = asyncio.Lock()

    async def acquire(self) -> None:
        """Block until a token is available, then consume it.

        The lock is held for the whole wait, so concurrent callers are
        served one at a time in lock-acquisition order.
        """
        async with self._lock:
            await self._refill()
            while self.tokens < 1.0:
                # Estimated time until a full token accrues; if qps is 0
                # the bucket never refills, so poll at 100ms intervals.
                wait_time = (1.0 - self.tokens) / self.qps if self.qps > 0 else 0.1
                await asyncio.sleep(max(0.0, wait_time))
                await self._refill()
            self.tokens -= 1.0

    async def _refill(self) -> None:
        """Credit tokens for the time elapsed since the last refill."""
        now = time.monotonic()
        elapsed = now - self.last_refill
        if elapsed <= 0:
            return
        # Cap at burst so idle periods cannot accumulate unbounded credit.
        self.tokens = min(float(self.burst), self.tokens + elapsed * self.qps)
        self.last_refill = now

class CozeRateLimiter:
    """Dual-channel rate limiter for Coze API calls.

    Keeps an independent token bucket, concurrency semaphore and aiohttp
    session for each channel (upload vs. general). All asyncio primitives
    and HTTP sessions are created lazily inside a running event loop and
    are rebuilt whenever the running loop differs from the one they were
    created on.
    """

    def __init__(self):
        # QPS, burst and max-concurrency for the upload and general
        # channels are taken from configurable application settings.
        self.upload_config = RateLimitConfig(
            qps=settings.coze_upload_qps,
            burst=settings.coze_upload_burst,
            max_concurrent=settings.coze_upload_max_concurrency,
        )
        self.general_config = RateLimitConfig(
            qps=settings.coze_general_qps,
            burst=settings.coze_general_burst,
            max_concurrent=settings.coze_general_max_concurrency,
        )

        # Lazy initialisation: avoid creating asyncio primitives in a
        # context where no event loop is running.
        self.upload_bucket = None
        self.general_bucket = None

        self.upload_semaphore = None
        self.general_semaphore = None

        self._upload_session: Optional[aiohttp.ClientSession] = None
        self._general_session: Optional[aiohttp.ClientSession] = None

        # Remember the event loop the primitives belong to; all loop-bound
        # resources are rebuilt if the loop changes between calls.
        self._loop: Optional[asyncio.AbstractEventLoop] = None

    async def ensure_initialized(self):
        """Ensure asyncio primitives and sessions are initialised inside a Task on the running event loop."""
        # Detect whether the running event loop has changed since the
        # primitives were created.
        current_loop = asyncio.get_running_loop()
        if self._loop is None:
            self._loop = current_loop
        elif self._loop is not current_loop:
            # Event loop switched: close the old sessions and reset every
            # primitive bound to the previous loop so they are recreated.
            await self.close()
            self.upload_bucket = None
            self.general_bucket = None
            self.upload_semaphore = None
            self.general_semaphore = None
            self._upload_session = None
            self._general_session = None
            self._loop = current_loop

        if self.upload_bucket is None:
            self.upload_bucket = TokenBucket(self.upload_config.qps, self.upload_config.burst)
        if self.general_bucket is None:
            self.general_bucket = TokenBucket(self.general_config.qps, self.general_config.burst)
        if self.upload_semaphore is None:
            self.upload_semaphore = asyncio.Semaphore(self.upload_config.max_concurrent)
        if self.general_semaphore is None:
            self.general_semaphore = asyncio.Semaphore(self.general_config.max_concurrent)
        # Sessions are created on demand in get_session().

    async def get_session(self, api_type: APIType) -> aiohttp.ClientSession:
        """Return (creating if needed) the aiohttp session for the given channel.

        A new session is built when none exists yet or the previous one
        was closed (e.g. after an event-loop switch or connector failure).
        """
        await self.ensure_initialized()
        if api_type == APIType.UPLOAD:
            if self._upload_session is None or self._upload_session.closed:
                connector = aiohttp.TCPConnector(
                    limit=self.upload_config.max_concurrent,  # keep the pool no larger than the concurrency cap
                    limit_per_host=self.upload_config.max_concurrent,
                    enable_cleanup_closed=True,
                    ssl=False  # NOTE(review): disables TLS certificate verification — confirm this is intentional
                )
                timeout = aiohttp.ClientTimeout(
                    total=settings.coze_upload_total_timeout,
                    connect=settings.coze_upload_connect_timeout,
                )
                self._upload_session = aiohttp.ClientSession(
                    connector=connector,
                    timeout=timeout,
                    headers={"User-Agent": "CozeAPI-Upload/1.0"},
                )
                logger.info(f"创建新的上传会话，并发限制: {self.upload_config.max_concurrent}")
            return self._upload_session
        else:
            if self._general_session is None or self._general_session.closed:
                connector = aiohttp.TCPConnector(
                    limit=self.general_config.max_concurrent,  # keep the pool no larger than the concurrency cap
                    limit_per_host=self.general_config.max_concurrent,
                    enable_cleanup_closed=True,
                    ssl=False  # NOTE(review): disables TLS certificate verification — confirm this is intentional
                )
                timeout = aiohttp.ClientTimeout(
                    total=settings.coze_general_total_timeout,
                    connect=settings.coze_general_connect_timeout,
                )
                self._general_session = aiohttp.ClientSession(
                    connector=connector,
                    timeout=timeout,
                    headers={"User-Agent": "CozeAPI-General/1.0"},
                )
                logger.info(f"创建新的常规会话，并发限制: {self.general_config.max_concurrent}")
            return self._general_session

    @asynccontextmanager
    async def request(self, api_type: APIType, method: str, url: str, **kwargs):
        """Perform a rate-limited HTTP request on the given channel.

        Consumes one bucket token first, then waits for a concurrency
        slot, then yields the live aiohttp response. The response is
        released when the context exits, so callers must consume the
        body inside the ``async with`` block.
        """
        await self.ensure_initialized()
        bucket = self.upload_bucket if api_type == APIType.UPLOAD else self.general_bucket
        semaphore = self.upload_semaphore if api_type == APIType.UPLOAD else self.general_semaphore

        await bucket.acquire()
        async with semaphore:
            session = await self.get_session(api_type)
            async with session.request(method, url, **kwargs) as response:
                yield response

    async def close(self):
        """Close both channel sessions if they are open.

        Session references are not reset here; get_session() checks
        ``.closed`` and rebuilds as needed.
        """
        if self._upload_session and not self._upload_session.closed:
            await self._upload_session.close()
        if self._general_session and not self._general_session.closed:
            await self._general_session.close()

class RetryableRequestError(Exception):
    """Request failed with a transient error (429/5xx/network) even after retries."""
    pass

class NonRetryableRequestError(Exception):
    """Request failed with a client error (4xx) that retrying will not fix."""
    pass

async def coze_request_with_retry(
    limiter: CozeRateLimiter,
    api_type: APIType,
    method: str,
    url: str,
    max_retries: int = 3,
    base_delay: float = 1.0,
    jitter: bool = True,
    **request_kwargs,
):
    last_exception = None
    for attempt in range(max_retries + 1):
        try:
            async with limiter.request(api_type, method, url, **request_kwargs) as response:
                if response.status == 200:
                    return response
                elif response.status == 429:
                    if attempt == max_retries:
                        raise RetryableRequestError(f"429 after {max_retries} retries")
                    retry_after = response.headers.get("Retry-After")
                    delay = float(retry_after) if retry_after else base_delay * (2 ** attempt)
                    if jitter:
                        delay += random.uniform(0, delay * 0.1)
                    logger.warning(f"429 rate limited, retry in {delay:.2f}s (attempt {attempt+1}/{max_retries+1})")
                    await asyncio.sleep(delay)
                    continue
                elif 500 <= response.status < 600:
                    if attempt == max_retries:
                        raise RetryableRequestError(f"Server {response.status} after {max_retries} retries")
                    delay = base_delay * (2 ** attempt)
                    if jitter:
                        delay += random.uniform(0, delay * 0.1)
                    logger.warning(f"Server {response.status}, retry in {delay:.2f}s (attempt {attempt+1}/{max_retries+1})")
                    await asyncio.sleep(delay)
                    continue
                elif 400 <= response.status < 500:
                    text = await response.text()
                    raise NonRetryableRequestError(f"Client {response.status}: {text}")
                else:
                    text = await response.text()
                    raise RetryableRequestError(f"Unexpected {response.status}: {text}")
        except (aiohttp.ClientError, asyncio.TimeoutError) as e:
            last_exception = e
            
            # 特殊处理连接器关闭错误
            if "Connector is closed" in str(e) or "Connection pool is closed" in str(e):
                logger.warning(f"检测到连接器关闭错误，重建会话: {e}")
                # 强制重建会话
                if api_type == APIType.UPLOAD:
                    if limiter._upload_session:
                        await limiter._upload_session.close()
                        limiter._upload_session = None
                else:
                    if limiter._general_session:
                        await limiter._general_session.close()
                        limiter._general_session = None
            
            if attempt == max_retries:
                raise RetryableRequestError(f"Network error after {max_retries} retries: {e}")
            delay = base_delay * (2 ** attempt)
            if jitter:
                delay += random.uniform(0, delay * 0.1)
            logger.warning(f"Network error, retry in {delay:.2f}s (attempt {attempt+1}/{max_retries+1}): {e}")
            await asyncio.sleep(delay)
            continue
    raise RetryableRequestError(f"Request failed after retries: {last_exception}")

# Thread-local storage: each thread keeps its own limiter instance.
_thread_local = threading.local()

def get_global_limiter() -> CozeRateLimiter:
    """Return the calling thread's limiter, creating it lazily on first use."""
    limiter = getattr(_thread_local, 'limiter', None)
    if limiter is None:
        limiter = CozeRateLimiter()
        _thread_local.limiter = limiter
    return limiter

async def close_global_limiter():
    """Close and discard the calling thread's limiter, if one was created."""
    limiter = getattr(_thread_local, 'limiter', None)
    if limiter:
        await limiter.close()
        _thread_local.limiter = None