diff --git a/.env.example b/.env.example new file mode 100644 index 0000000000000000000000000000000000000000..b489ea9847aa1804b3f1de1403cc50bf5f6f4461 --- /dev/null +++ b/.env.example @@ -0,0 +1,107 @@ +# NVIDIA NIM Config +NVIDIA_NIM_API_KEY="" + + +# All Claude model requests are mapped to these models, plain model is fallback +# Format: provider_type/model/name +# Valid provider: "nvidia_nim" +MODEL_OPUS= +MODEL_SONNET= +MODEL_HAIKU= +MODEL="nvidia_nim/z-ai/glm4.7" + + +# Optional live smoke model overrides. Smoke runs for NVIDIA NIM. +FCC_SMOKE_MODEL_NVIDIA_NIM= + + +# Thinking output +# Per-Claude-model switches for provider reasoning requests and Claude thinking blocks. +# Blank per-model switches inherit ENABLE_MODEL_THINKING. +ENABLE_OPUS_THINKING= +ENABLE_SONNET_THINKING= +ENABLE_HAIKU_THINKING= +ENABLE_MODEL_THINKING=true + + +# Provider config +# Per-provider proxy support: http and socks5, example: "http://username:password@host:port" +NVIDIA_NIM_PROXY="" + +PROVIDER_RATE_LIMIT=1 +PROVIDER_RATE_WINDOW=3 +PROVIDER_MAX_CONCURRENCY=5 + + +# HTTP client timeouts (seconds) for provider API requests +HTTP_READ_TIMEOUT=300 +HTTP_WRITE_TIMEOUT=10 +HTTP_CONNECT_TIMEOUT=10 + + +# Optional server API key (Anthropic-style) +ANTHROPIC_AUTH_TOKEN="freecc" + + +# Messaging Platform: "telegram" | "discord" | "none" +MESSAGING_PLATFORM="discord" +MESSAGING_RATE_LIMIT=1 +MESSAGING_RATE_WINDOW=1 + + +# Voice Note Transcription +VOICE_NOTE_ENABLED=false +# WHISPER_DEVICE: "cpu" | "cuda" | "nvidia_nim" +# - "cpu"/"cuda": Hugging Face transformers Whisper (offline, free; install with: uv sync --extra voice_local) +# - "nvidia_nim": NVIDIA NIM Whisper via Riva gRPC (requires NVIDIA_NIM_API_KEY; install with: uv sync --extra voice) +# (Independent of MODEL=nvidia_nim/...: that selects the *chat* provider; this selects voice STT only.) 
+WHISPER_DEVICE="nvidia_nim" +# WHISPER_MODEL: +# - For cpu/cuda: Hugging Face ID or short name (tiny, base, small, medium, large-v2, large-v3, large-v3-turbo) +# - For nvidia_nim: NVIDIA NIM model (e.g., "nvidia/parakeet-ctc-1.1b-asr", "openai/whisper-large-v3") +# - For nvidia_nim, default to "openai/whisper-large-v3" for best performance +WHISPER_MODEL="openai/whisper-large-v3" +HF_TOKEN="" + + +# Telegram Config +TELEGRAM_BOT_TOKEN="" +ALLOWED_TELEGRAM_USER_ID="" + + +# Discord Config +DISCORD_BOT_TOKEN="" +ALLOWED_DISCORD_CHANNELS="" + + +# Agent Config +CLAUDE_WORKSPACE="./agent_workspace" +ALLOWED_DIR="" +CLAUDE_CLI_BIN="claude" +FAST_PREFIX_DETECTION=true +ENABLE_NETWORK_PROBE_MOCK=true +ENABLE_TITLE_GENERATION_SKIP=true +ENABLE_SUGGESTION_MODE_SKIP=true +ENABLE_FILEPATH_EXTRACTION_MOCK=true + + +# Local Anthropic web_search / web_fetch handling (performs outbound HTTP; on by default) +ENABLE_WEB_SERVER_TOOLS=true +WEB_FETCH_ALLOWED_SCHEMES=http,https +WEB_FETCH_ALLOW_PRIVATE_NETWORKS=false + + +# Verbose diagnostics (avoid logging raw prompts / SSE bodies in production) +DEBUG_PLATFORM_EDITS=false +DEBUG_SUBAGENT_STACK=false +# When true, also allows DEBUG-level httpx/httpcore/telegram log noise (not just payload logging). +LOG_RAW_API_PAYLOADS=false +LOG_RAW_SSE_EVENTS=false +# When true, log full exception text and tracebacks for unhandled errors (may leak request-derived data). +LOG_API_ERROR_TRACEBACKS=false +# When true, log message/transcription text previews in messaging adapters (may leak user content). +LOG_RAW_MESSAGING_CONTENT=false +# When true, log full Claude CLI stderr, non-JSON stdout lines, and parser error text. +LOG_RAW_CLI_DIAGNOSTICS=false +# When true, log full exception and CLI error message strings in messaging (may leak user content). 
+LOG_MESSAGING_ERROR_DETAILS=false diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..b1a8d0c73afc51ac27db797fb75708242a59bae1 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,26 @@ +FROM python:3.14-slim + +WORKDIR /app + +# Install uv +RUN pip install uv + +# Copy project files +COPY pyproject.toml uv.lock ./ +COPY api/ ./api/ +COPY cli/ ./cli/ +COPY config/ ./config/ +COPY core/ ./core/ +COPY messaging/ ./messaging/ +COPY providers/ ./providers/ +COPY server.py ./ +COPY .env.example ./ + +# Install dependencies +RUN uv sync --frozen --no-dev + +# Expose port (HF Spaces default) +EXPOSE 7860 + +# Run server +CMD ["uv", "run", "uvicorn", "server:app", "--host", "0.0.0.0", "--port", "7860"] \ No newline at end of file diff --git a/api/__init__.py b/api/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5f6a6e42b489cb2f82c2fbb9f831d4c02a8ad1a3 --- /dev/null +++ b/api/__init__.py @@ -0,0 +1,17 @@ +"""API layer for Claude Code Proxy.""" + +from .app import create_app +from .models import ( + MessagesRequest, + MessagesResponse, + TokenCountRequest, + TokenCountResponse, +) + +__all__ = [ + "MessagesRequest", + "MessagesResponse", + "TokenCountRequest", + "TokenCountResponse", + "create_app", +] diff --git a/api/__pycache__/__init__.cpython-314.pyc b/api/__pycache__/__init__.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..20e4ed826e3fadaf56416008be6d65689e75c0c9 Binary files /dev/null and b/api/__pycache__/__init__.cpython-314.pyc differ diff --git a/api/__pycache__/app.cpython-314.pyc b/api/__pycache__/app.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..19e2bffaa9cd1897e37e0022b97cfeae4f8daf87 Binary files /dev/null and b/api/__pycache__/app.cpython-314.pyc differ diff --git a/api/__pycache__/command_utils.cpython-314.pyc b/api/__pycache__/command_utils.cpython-314.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..1ba8623e3ed6968452a272cd642fce870b875f5c Binary files /dev/null and b/api/__pycache__/command_utils.cpython-314.pyc differ diff --git a/api/__pycache__/dependencies.cpython-314.pyc b/api/__pycache__/dependencies.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9cb388e8eeaba1d207395a9b6beb1f56a93c8d2e Binary files /dev/null and b/api/__pycache__/dependencies.cpython-314.pyc differ diff --git a/api/__pycache__/detection.cpython-314.pyc b/api/__pycache__/detection.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d5204cd8a6b046b6d94e31d3f22cbbc95955dfe7 Binary files /dev/null and b/api/__pycache__/detection.cpython-314.pyc differ diff --git a/api/__pycache__/gateway_model_ids.cpython-314.pyc b/api/__pycache__/gateway_model_ids.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4be55d02d961b2be9b2c668257f8dd5ccbef11f5 Binary files /dev/null and b/api/__pycache__/gateway_model_ids.cpython-314.pyc differ diff --git a/api/__pycache__/model_router.cpython-314.pyc b/api/__pycache__/model_router.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b459dc19c3854037a83fc3db6ed8fb03e7972aaa Binary files /dev/null and b/api/__pycache__/model_router.cpython-314.pyc differ diff --git a/api/__pycache__/optimization_handlers.cpython-314.pyc b/api/__pycache__/optimization_handlers.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3414318aca778e4f8f4384d6cb3223c1eaa35903 Binary files /dev/null and b/api/__pycache__/optimization_handlers.cpython-314.pyc differ diff --git a/api/__pycache__/routes.cpython-314.pyc b/api/__pycache__/routes.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f0e71d6f3bd1557d734eea17c1efc6f8e3103883 Binary files /dev/null and b/api/__pycache__/routes.cpython-314.pyc differ diff --git 
a/api/__pycache__/runtime.cpython-314.pyc b/api/__pycache__/runtime.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e2299acbc91263b97b02d41b52ca9673372b7f2c Binary files /dev/null and b/api/__pycache__/runtime.cpython-314.pyc differ diff --git a/api/__pycache__/services.cpython-314.pyc b/api/__pycache__/services.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3b68a5d59b4332d0f667908d51ed80cfe7c6b602 Binary files /dev/null and b/api/__pycache__/services.cpython-314.pyc differ diff --git a/api/__pycache__/validation_log.cpython-314.pyc b/api/__pycache__/validation_log.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5984cca231bb0ac818c980213d8d8f439613a956 Binary files /dev/null and b/api/__pycache__/validation_log.cpython-314.pyc differ diff --git a/api/app.py b/api/app.py new file mode 100644 index 0000000000000000000000000000000000000000..04feb0f9cc274b7cd76c7faff7c1584463f3e01a --- /dev/null +++ b/api/app.py @@ -0,0 +1,175 @@ +"""FastAPI application factory and configuration.""" + +import traceback +from contextlib import asynccontextmanager +from typing import Any + +from fastapi import FastAPI, Request +from fastapi.exception_handlers import request_validation_exception_handler +from fastapi.exceptions import RequestValidationError +from fastapi.responses import JSONResponse +from loguru import logger +from starlette.types import Receive, Scope, Send + +from config.logging_config import configure_logging +from config.settings import get_settings +from providers.exceptions import ProviderError + +from .routes import router +from .runtime import AppRuntime, startup_failure_message +from .validation_log import summarize_request_validation_body + + +@asynccontextmanager +async def lifespan(app: FastAPI): + """Application lifespan manager.""" + runtime = AppRuntime.for_app(app, settings=get_settings()) + await runtime.startup() + + yield + + await 
runtime.shutdown() + + +class GracefulLifespanApp: + """ASGI wrapper that reports startup failures without Starlette tracebacks.""" + + def __init__(self, app: FastAPI): + self.app = app + + def __getattr__(self, name: str) -> Any: + return getattr(self.app, name) + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if scope["type"] != "lifespan": + await self.app(scope, receive, send) + return + await self._lifespan(receive, send) + + async def _lifespan(self, receive: Receive, send: Send) -> None: + settings = get_settings() + runtime = AppRuntime.for_app(self.app, settings=settings) + startup_complete = False + while True: + message = await receive() + if message["type"] == "lifespan.startup": + try: + await runtime.startup() + except Exception as exc: + await send( + { + "type": "lifespan.startup.failed", + "message": startup_failure_message(settings, exc), + } + ) + return + startup_complete = True + await send({"type": "lifespan.startup.complete"}) + continue + + if message["type"] == "lifespan.shutdown": + if startup_complete: + try: + await runtime.shutdown() + except Exception as exc: + logger.error("Shutdown failed: exc_type={}", type(exc).__name__) + await send({"type": "lifespan.shutdown.failed", "message": ""}) + return + await send({"type": "lifespan.shutdown.complete"}) + return + + +def create_app(*, lifespan_enabled: bool = True) -> FastAPI: + """Create and configure the FastAPI application.""" + settings = get_settings() + configure_logging( + settings.log_file, verbose_third_party=settings.log_raw_api_payloads + ) + + app_kwargs: dict[str, Any] = { + "title": "Claude Code Proxy", + "version": "2.0.0", + } + if lifespan_enabled: + app_kwargs["lifespan"] = lifespan + app = FastAPI(**app_kwargs) + + # Register routes + app.include_router(router) + + # Exception handlers + @app.exception_handler(RequestValidationError) + async def validation_error_handler(request: Request, exc: RequestValidationError): + """Log request 
shape for 422 debugging without content values.""" + body: Any + try: + body = await request.json() + except Exception as e: + body = {"_json_error": type(e).__name__} + + message_summary, tool_names = summarize_request_validation_body(body) + + logger.debug( + "Request validation failed: path={} query={} error_locs={} error_types={} message_summary={} tool_names={}", + request.url.path, + str(request.url.query), + [list(error.get("loc", ())) for error in exc.errors()], + [str(error.get("type", "")) for error in exc.errors()], + message_summary, + tool_names, + ) + return await request_validation_exception_handler(request, exc) + + @app.exception_handler(ProviderError) + async def provider_error_handler(request: Request, exc: ProviderError): + """Handle provider-specific errors and return Anthropic format.""" + err_settings = get_settings() + if err_settings.log_api_error_tracebacks: + logger.error( + "Provider Error: error_type={} status_code={} message={}", + exc.error_type, + exc.status_code, + exc.message, + ) + else: + logger.error( + "Provider Error: error_type={} status_code={}", + exc.error_type, + exc.status_code, + ) + return JSONResponse( + status_code=exc.status_code, + content=exc.to_anthropic_format(), + ) + + @app.exception_handler(Exception) + async def general_error_handler(request: Request, exc: Exception): + """Handle general errors and return Anthropic format.""" + settings = get_settings() + if settings.log_api_error_tracebacks: + logger.error("General Error: {}", exc) + logger.error(traceback.format_exc()) + else: + logger.error( + "General Error: path={} method={} exc_type={}", + request.url.path, + request.method, + type(exc).__name__, + ) + return JSONResponse( + status_code=500, + content={ + "type": "error", + "error": { + "type": "api_error", + "message": "An unexpected error occurred.", + }, + }, + ) + + return app + + +def create_asgi_app() -> GracefulLifespanApp: + """Create the server ASGI app with graceful lifespan failure 
reporting.""" + return GracefulLifespanApp(create_app(lifespan_enabled=False)) diff --git a/api/command_utils.py b/api/command_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..951c16309a7ffae5f6ec52132b4d661153464252 --- /dev/null +++ b/api/command_utils.py @@ -0,0 +1,164 @@ +"""Command parsing utilities for API optimizations.""" + +import re +import shlex + +_ENV_ASSIGNMENT_RE = re.compile(r"^[A-Za-z_][A-Za-z0-9_]*=.*$") + + +def _is_env_assignment(part: str) -> bool: + """Return True when a token is a shell-style env assignment.""" + return bool(_ENV_ASSIGNMENT_RE.match(part)) + + +def _strip_env_assignments(parts: list[str]) -> list[str]: + """Return command parts after leading shell-style env assignments.""" + cmd_start = 0 + for i, part in enumerate(parts): + if _is_env_assignment(part): + cmd_start = i + 1 + else: + break + return parts[cmd_start:] + + +def extract_command_prefix(command: str) -> str: + """Extract the command prefix for fast prefix detection. + + Parses a shell command safely, handling environment variables and + command injection attempts. Returns the command prefix suitable + for quick identification. 
+ + Returns: + Command prefix (e.g., "git", "git commit", "npm install") + or "none" if no valid command found + """ + if "`" in command or "$(" in command: + return "command_injection_detected" + + try: + parts = shlex.split(command, posix=False) + if not parts: + return "none" + + env_prefix = [] + cmd_start = 0 + for i, part in enumerate(parts): + if _is_env_assignment(part): + env_prefix.append(part) + cmd_start = i + 1 + else: + break + + if cmd_start >= len(parts): + return "none" + + cmd_parts = parts[cmd_start:] + if not cmd_parts: + return "none" + + first_word = cmd_parts[0] + two_word_commands = { + "git", + "npm", + "docker", + "kubectl", + "cargo", + "go", + "pip", + "yarn", + } + + if first_word in two_word_commands and len(cmd_parts) > 1: + second_word = cmd_parts[1] + if not second_word.startswith("-"): + return f"{first_word} {second_word}" + return first_word + return first_word if not env_prefix else " ".join(env_prefix) + " " + first_word + + except ValueError: + parts = command.split() + if not parts: + return "none" + cmd_parts = _strip_env_assignments(parts) + return cmd_parts[0] if cmd_parts else "none" + + +def extract_filepaths_from_command(command: str, output: str) -> str: + """Extract file paths from a command locally without API call. + + Determines if the command reads file contents and extracts paths accordingly. + Commands like ls/dir/find just list files, so return empty. + Commands like cat/head/tail actually read contents, so extract the file path. 
+ + Returns: + Filepath extraction result in format + """ + listing_commands = { + "ls", + "dir", + "find", + "tree", + "pwd", + "cd", + "mkdir", + "rmdir", + "rm", + } + + reading_commands = {"cat", "head", "tail", "less", "more", "bat", "type"} + + try: + parts = shlex.split(command, posix=False) + if not parts: + return "\n" + + cmd_parts = _strip_env_assignments(parts) + if not cmd_parts: + return "\n" + + base_cmd = cmd_parts[0].split("/")[-1].split("\\")[-1].lower() + + if base_cmd in listing_commands: + return "\n" + + if base_cmd in reading_commands: + filepaths = [] + for part in cmd_parts[1:]: + if part.startswith("-"): + continue + filepaths.append(part) + + if filepaths: + paths_str = "\n".join(filepaths) + return f"\n{paths_str}\n" + return "\n" + + if base_cmd == "grep": + flags_with_args = {"-e", "-f", "-m", "-A", "-B", "-C"} + pattern_provided_via_flag = False + positional = [] + + skip_next = False + for part in cmd_parts[1:]: + if skip_next: + skip_next = False + continue + if part.startswith("-"): + if part in flags_with_args: + if part in {"-e", "-f"}: + pattern_provided_via_flag = True + skip_next = True + continue + positional.append(part) + + filepaths = positional if pattern_provided_via_flag else positional[1:] + if filepaths: + paths_str = "\n".join(filepaths) + return f"\n{paths_str}\n" + return "\n" + + return "\n" + + except ValueError: + return "\n" diff --git a/api/dependencies.py b/api/dependencies.py new file mode 100644 index 0000000000000000000000000000000000000000..fbc37b08aa79a0f22be92163b4a29b01bdc1a3af --- /dev/null +++ b/api/dependencies.py @@ -0,0 +1,144 @@ +"""Dependency injection for FastAPI.""" + +import secrets + +from fastapi import Depends, HTTPException, Request +from loguru import logger +from starlette.applications import Starlette + +from config.settings import Settings +from config.settings import get_settings as _get_settings +from core.anthropic import get_user_facing_error_message +from providers.base import 
BaseProvider +from providers.exceptions import ( + AuthenticationError, + ServiceUnavailableError, + UnknownProviderTypeError, +) +from providers.registry import PROVIDER_DESCRIPTORS, ProviderRegistry + +# Process-level cache: only for :func:`get_provider_for_type` / :func:`get_provider` +# when there is no ``Request``/``app`` (unit tests, scripts). HTTP handlers must pass +# ``app`` to :func:`resolve_provider` so the app-scoped registry is used. +_providers: dict[str, BaseProvider] = {} + + +def get_settings() -> Settings: + """Return cached :class:`~config.settings.Settings` (FastAPI-friendly alias).""" + return _get_settings() + + +def resolve_provider( + provider_type: str, + *, + app: Starlette | None, + settings: Settings, +) -> BaseProvider: + """Resolve a provider using the app-scoped registry when ``app`` is set. + + When ``app`` is not ``None``, the app-owned :attr:`app.state.provider_registry` + must exist (installed by :class:`~api.runtime.AppRuntime` during startup). + Callers that construct a bare ``FastAPI`` without lifespan must set + ``app.state.provider_registry`` explicitly. + + When ``app`` is ``None`` (no HTTP context), uses the process-level + :data:`_providers` cache only. + """ + if app is not None: + reg = getattr(app.state, "provider_registry", None) + if reg is None: + raise ServiceUnavailableError( + "Provider registry is not configured. Ensure AppRuntime startup ran " + "or assign app.state.provider_registry for test apps." 
+ ) + return _resolve_with_registry(reg, provider_type, settings) + return _resolve_with_registry(ProviderRegistry(_providers), provider_type, settings) + + +def _resolve_with_registry( + registry: ProviderRegistry, provider_type: str, settings: Settings +) -> BaseProvider: + should_log_init = not registry.is_cached(provider_type) + try: + provider = registry.get(provider_type, settings) + except AuthenticationError as e: + # Provider :class:`~providers.exceptions.AuthenticationError` messages are + # curated configuration hints (env var names, docs links), not upstream noise. + detail = str(e).strip() or get_user_facing_error_message(e) + raise HTTPException(status_code=503, detail=detail) from e + except UnknownProviderTypeError: + logger.error( + "Unknown provider_type: '{}'. Supported: {}", + provider_type, + ", ".join(f"'{key}'" for key in PROVIDER_DESCRIPTORS), + ) + raise + if should_log_init: + logger.info("Provider initialized: {}", provider_type) + return provider + + +def get_provider_for_type(provider_type: str) -> BaseProvider: + """Get or create a provider in the process-level cache (no ``app``/Request). + + HTTP route handlers should call :func:`resolve_provider` with the active + :attr:`request.app` (via :class:`~api.runtime.AppRuntime`) instead of this + process-wide cache. + """ + return resolve_provider(provider_type, app=None, settings=get_settings()) + + +def require_api_key( + request: Request, settings: Settings = Depends(get_settings) +) -> None: + """Require a server API key (Anthropic-style). + + Checks `x-api-key` header or `Authorization: Bearer ...` against + `Settings.anthropic_auth_token`. If `ANTHROPIC_AUTH_TOKEN` is empty, this is a no-op. 
+ """ + anthropic_auth_token = settings.anthropic_auth_token + if not anthropic_auth_token: + # No API key configured -> allow + return + + header = ( + request.headers.get("x-api-key") + or request.headers.get("authorization") + or request.headers.get("anthropic-auth-token") + ) + if not header: + raise HTTPException(status_code=401, detail="Missing API key") + + # Support both raw key in X-API-Key and Bearer token in Authorization + token = header + if header.lower().startswith("bearer "): + token = header.split(" ", 1)[1] + + # Strip anything after the first colon to handle tokens with appended model names + if token and ":" in token: + token = token.split(":", 1)[0] + + # Constant-time comparison to avoid leaking the configured token via + # response-time differences on a per-byte mismatch (CWE-208). + if not secrets.compare_digest( + token.encode("utf-8"), anthropic_auth_token.encode("utf-8") + ): + raise HTTPException(status_code=401, detail="Invalid API key") + + +def get_provider() -> BaseProvider: + """Get or create the default provider (``MODEL`` / ``provider_type``). + + Process-cache helper for scripts, unit tests, and non-FastAPI callers. HTTP + handlers must use :func:`resolve_provider` with :attr:`request.app` so the + app-scoped :class:`~providers.registry.ProviderRegistry` is used. + """ + return get_provider_for_type(get_settings().provider_type) + + +async def cleanup_provider(): + """Cleanup all provider resources.""" + global _providers + await ProviderRegistry(_providers).cleanup() + _providers = {} + logger.debug("Provider cleanup completed") diff --git a/api/detection.py b/api/detection.py new file mode 100644 index 0000000000000000000000000000000000000000..7977c1bb199cdb013aa6967f19ca4aad2304c24f --- /dev/null +++ b/api/detection.py @@ -0,0 +1,136 @@ +"""Request detection utilities for API optimizations. 
+ +Detects quota checks, title generation, prefix detection, suggestion mode, +and filepath extraction requests to enable fast-path responses. +""" + +from core.anthropic import extract_text_from_content + +from .models.anthropic import MessagesRequest + + +def is_quota_check_request(request_data: MessagesRequest) -> bool: + """Check if this is a quota probe request. + + Quota checks are typically simple requests with max_tokens=1 + and a single message containing the word "quota". + """ + if ( + request_data.max_tokens == 1 + and len(request_data.messages) == 1 + and request_data.messages[0].role == "user" + ): + text = extract_text_from_content(request_data.messages[0].content) + if "quota" in text.lower(): + return True + return False + + +def is_title_generation_request(request_data: MessagesRequest) -> bool: + """Check if this is a conversation title generation request. + + Title generation requests are detected by a system prompt containing + title extraction instructions, no tools, and a single user message. + + Matches Claude Code session title prompts (sentence-case title, JSON + \"title\" field, etc.). + """ + if not request_data.system or request_data.tools: + return False + system_text = extract_text_from_content(request_data.system).lower() + if "title" not in system_text: + return False + return "sentence-case title" in system_text or ( + "return json" in system_text + and "field" in system_text + and ("coding session" in system_text or "this session" in system_text) + ) + + +def is_prefix_detection_request(request_data: MessagesRequest) -> tuple[bool, str]: + """Check if this is a fast prefix detection request. + + Prefix detection requests contain a policy_spec block and + a Command: section for extracting shell command prefixes. 
+ + Returns: + Tuple of (is_prefix_request, command_string) + """ + if len(request_data.messages) != 1 or request_data.messages[0].role != "user": + return False, "" + + content = extract_text_from_content(request_data.messages[0].content) + + if "" in content and "Command:" in content: + try: + cmd_start = content.rfind("Command:") + len("Command:") + return True, content[cmd_start:].strip() + except TypeError: + return False, "" + + return False, "" + + +def is_suggestion_mode_request(request_data: MessagesRequest) -> bool: + """Check if this is a suggestion mode request. + + Suggestion mode requests contain "[SUGGESTION MODE:" in the user's message, + used for auto-suggesting what the user might type next. + """ + for msg in request_data.messages: + if msg.role == "user": + text = extract_text_from_content(msg.content) + if "[SUGGESTION MODE:" in text: + return True + return False + + +def is_filepath_extraction_request( + request_data: MessagesRequest, +) -> tuple[bool, str, str]: + """Check if this is a filepath extraction request. + + Filepath extraction requests have a single user message with + "Command:" and "Output:" sections, asking to extract file paths + from command output. 
+ + Returns: + Tuple of (is_filepath_request, command, output) + """ + if len(request_data.messages) != 1 or request_data.messages[0].role != "user": + return False, "", "" + if request_data.tools: + return False, "", "" + + content = extract_text_from_content(request_data.messages[0].content) + + if "Command:" not in content or "Output:" not in content: + return False, "", "" + + # Match if user content OR system block indicates filepath extraction + user_has_filepaths = ( + "filepaths" in content.lower() or "" in content.lower() + ) + system_text = ( + extract_text_from_content(request_data.system) if request_data.system else "" + ) + system_has_extract = ( + "extract any file paths" in system_text.lower() + or "file paths that this command" in system_text.lower() + ) + if not user_has_filepaths and not system_has_extract: + return False, "", "" + + cmd_start = content.find("Command:") + len("Command:") + output_marker = content.find("Output:", cmd_start) + if output_marker == -1: + return False, "", "" + + command = content[cmd_start:output_marker].strip() + output = content[output_marker + len("Output:") :].strip() + + for marker in ["<", "\n\n"]: + if marker in output: + output = output.split(marker)[0].strip() + + return True, command, output diff --git a/api/gateway_model_ids.py b/api/gateway_model_ids.py new file mode 100644 index 0000000000000000000000000000000000000000..bb0564a15ad3476f3bca0b17892890764c2fbd93 --- /dev/null +++ b/api/gateway_model_ids.py @@ -0,0 +1,54 @@ +"""Gateway-safe model id encoding for Claude Code model discovery.""" + +from __future__ import annotations + +from dataclasses import dataclass + +GATEWAY_MODEL_ID_PREFIX = "anthropic" + +# Claude Code currently treats any model id containing ``claude-3-`` as not +# supporting thinking. This intentionally uses that client-side capability +# heuristic while keeping the real provider/model ref reversible for routing. 
+NO_THINKING_GATEWAY_MODEL_ID_PREFIX = "claude-3-freecc-no-thinking" + + +@dataclass(frozen=True, slots=True) +class DecodedGatewayModelId: + provider_id: str + provider_model: str + force_thinking_enabled: bool | None = None + + +def gateway_model_id(provider_model_ref: str) -> str: + """Return the normal Claude Code-discoverable id for a provider/model ref.""" + return f"{GATEWAY_MODEL_ID_PREFIX}/{provider_model_ref}" + + +def no_thinking_gateway_model_id(provider_model_ref: str) -> str: + """Return a Claude Code-discoverable id that disables client thinking.""" + return f"{NO_THINKING_GATEWAY_MODEL_ID_PREFIX}/{provider_model_ref}" + + +def decode_gateway_model_id(model_name: str) -> DecodedGatewayModelId | None: + """Decode a model id advertised by this gateway, if it is one.""" + prefix, separator, remainder = model_name.partition("/") + if not separator: + return None + + force_thinking_enabled: bool | None + if prefix == GATEWAY_MODEL_ID_PREFIX: + force_thinking_enabled = None + elif prefix == NO_THINKING_GATEWAY_MODEL_ID_PREFIX: + force_thinking_enabled = False + else: + return None + + provider_id, provider_separator, provider_model = remainder.partition("/") + if not provider_separator or not provider_model: + return None + + return DecodedGatewayModelId( + provider_id=provider_id, + provider_model=provider_model, + force_thinking_enabled=force_thinking_enabled, + ) diff --git a/api/model_router.py b/api/model_router.py new file mode 100644 index 0000000000000000000000000000000000000000..98d64d3f28a82aa521a935956e22ef49da76fc33 --- /dev/null +++ b/api/model_router.py @@ -0,0 +1,261 @@ +"""Model routing for Claude-compatible requests.""" + +from __future__ import annotations + +from dataclasses import dataclass + +from loguru import logger + +from config.provider_ids import SUPPORTED_PROVIDER_IDS +from config.settings import Settings + +from .gateway_model_ids import decode_gateway_model_id +from .models.anthropic import MessagesRequest, TokenCountRequest 
+from providers.rate_limit import GlobalRateLimiter + + +@dataclass(frozen=True, slots=True) +class ResolvedModel: + original_model: str + provider_id: str + provider_model: str + provider_model_ref: str + thinking_enabled: bool + + +@dataclass(frozen=True, slots=True) +class RoutedMessagesRequest: + request: MessagesRequest + resolved: ResolvedModel + + +@dataclass(frozen=True, slots=True) +class RoutedTokenCountRequest: + request: TokenCountRequest + resolved: ResolvedModel + + +class ModelRouter: + """Resolve incoming Claude model names to configured provider/model pairs.""" + + def __init__(self, settings: Settings): + self._settings = settings + + def _is_auto(self, model_name: str) -> bool: + """Return whether the model name refers to the virtual 'auto' model.""" + name_lower = model_name.lower() + return name_lower == "auto" or name_lower == "anthropic/auto" + + def _normalize_candidate_ref(self, raw_ref: str) -> str | None: + """Normalize auto candidate refs to ``provider/model`` when possible.""" + candidate = (raw_ref or "").strip() + if not candidate: + return None + + provider_id, separator, remainder = candidate.partition("/") + if separator and provider_id in SUPPORTED_PROVIDER_IDS and remainder: + return f"{provider_id}/{remainder}" + + # Treat bare model ids and vendor/model ids as NVIDIA NIM models. + return f"nvidia_nim/{candidate}" + + def resolve(self, claude_model_name: str) -> ResolvedModel: + # Special virtual model 'auto' maps to the configured default MODEL and + # enables provider-side fallbacks. Resolve it to the configured model + # while preserving the original requested name. + if self._is_auto(claude_model_name): + # If the user configured an explicit AUTO_MODEL_ORDER, try each + # provider/model pair in order and pick the first provider that is + # plausibly configured. Fall back to the single configured MODEL. 
+ order_csv = (self._settings.auto_model_order or "").strip() + if order_csv: + for cand in [c.strip() for c in order_csv.split(",") if c.strip()]: + if "/" not in cand: + # assume vendor-prefixed entries; skip malformed + continue + provider_id = Settings.parse_provider_type(cand) + provider_model = Settings.parse_model_name(cand) + if self._settings.provider_is_configured(provider_id): + thinking_enabled = self._settings.resolve_thinking(claude_model_name) + return ResolvedModel( + original_model=claude_model_name, + provider_id=provider_id, + provider_model=provider_model, + provider_model_ref=cand, + thinking_enabled=thinking_enabled, + ) + # No explicit order matched or none configured — fall back to default MODEL + provider_model_ref = self._settings.model + provider_id = Settings.parse_provider_type(provider_model_ref) + provider_model = Settings.parse_model_name(provider_model_ref) + thinking_enabled = self._settings.resolve_thinking(claude_model_name) + return ResolvedModel( + original_model=claude_model_name, + provider_id=provider_id, + provider_model=provider_model, + provider_model_ref=provider_model_ref, + thinking_enabled=thinking_enabled, + ) + + ( + direct_provider_id, + direct_provider_model, + force_thinking_enabled, + ) = self._direct_provider_model(claude_model_name) + if direct_provider_id is not None and direct_provider_model is not None: + thinking_enabled = ( + force_thinking_enabled + if force_thinking_enabled is not None + else self._settings.resolve_thinking(direct_provider_model) + ) + logger.debug( + "MODEL DIRECT: '{}' -> provider='{}' model='{}' thinking={}", + claude_model_name, + direct_provider_id, + direct_provider_model, + thinking_enabled, + ) + return ResolvedModel( + original_model=claude_model_name, + provider_id=direct_provider_id, + provider_model=direct_provider_model, + provider_model_ref=claude_model_name, + thinking_enabled=thinking_enabled, + ) + + provider_model_ref = self._settings.resolve_model(claude_model_name) + 
thinking_enabled = self._settings.resolve_thinking(claude_model_name) + provider_id = Settings.parse_provider_type(provider_model_ref) + provider_model = Settings.parse_model_name(provider_model_ref) + if provider_model != claude_model_name: + logger.debug( + "MODEL MAPPING: '{}' -> '{}'", claude_model_name, provider_model + ) + return ResolvedModel( + original_model=claude_model_name, + provider_id=provider_id, + provider_model=provider_model, + provider_model_ref=provider_model_ref, + thinking_enabled=thinking_enabled, + ) + + def resolve_candidates(self, claude_model_name: str) -> list[ResolvedModel]: + """Resolve a model name to a prioritized list of candidates. + + Used by the 'auto' routing logic to implement provider-side failover. + """ + if not self._is_auto(claude_model_name): + return [self.resolve(claude_model_name)] + + healthy_candidates: list[ResolvedModel] = [] + blocked_candidates: list[ResolvedModel] = [] + seen: set[str] = set() + + + def add_candidate(ref: str | None, source: str) -> None: + normalized_ref = self._normalize_candidate_ref(ref or "") + if normalized_ref is None or normalized_ref in seen: + return + provider_id = Settings.parse_provider_type(normalized_ref) + provider_model = Settings.parse_model_name(normalized_ref) + if self._settings.provider_is_configured(provider_id): + seen.add(normalized_ref) + resolved = ResolvedModel( + original_model=claude_model_name, + provider_id=provider_id, + provider_model=provider_model, + provider_model_ref=normalized_ref, + thinking_enabled=self._settings.resolve_thinking(claude_model_name), + ) + + limiter = GlobalRateLimiter.get_scoped_instance(provider_id) + if limiter.is_blocked(): + logger.debug( + "Routing: candidate '{}' (from {}) is BLOCKED", + normalized_ref, + source, + ) + blocked_candidates.append(resolved) + else: + logger.debug( + "Routing: added candidate '{}' (from {})", + normalized_ref, + source, + ) + healthy_candidates.append(resolved) + else: + logger.debug( + "Routing: 
candidate '{}' (from {}) is NOT CONFIGURED", + normalized_ref, + source, + ) + + # 1. Preferred order (AUTO_MODEL_ORDER) + order_csv = (self._settings.auto_model_order or "").strip() + if order_csv: + for cand in [c.strip() for c in order_csv.split(",") if c.strip()]: + add_candidate(cand, "AUTO_MODEL_PRIORITY") + + # 2. Main MODEL + add_candidate(self._settings.model, "MODEL") + + # 3. NVIDIA Fallbacks + nim_csv = (self._settings.nvidia_nim_fallback_models or "").strip() + if nim_csv: + for cand in [c.strip() for c in nim_csv.split(",") if c.strip()]: + add_candidate(cand, "NVIDIA_NIM_FALLBACK_MODELS") + + # 4. Model-specific overrides + add_candidate(self._settings.model_opus, "MODEL_OPUS") + add_candidate(self._settings.model_sonnet, "MODEL_SONNET") + add_candidate(self._settings.model_haiku, "MODEL_HAIKU") + + all_candidates = healthy_candidates + blocked_candidates + logger.info( + "Routing: resolved '{}' to {} candidates: {}", + claude_model_name, + len(all_candidates), + ", ".join(c.provider_model_ref for c in all_candidates), + ) + return all_candidates + + def _direct_provider_model( + self, model_name: str + ) -> tuple[str | None, str | None, bool | None]: + decoded = decode_gateway_model_id(model_name) + if decoded is not None: + if decoded.provider_id not in SUPPORTED_PROVIDER_IDS: + return None, None, None + return ( + decoded.provider_id, + decoded.provider_model, + decoded.force_thinking_enabled, + ) + + provider_id, separator, provider_model = model_name.partition("/") + if not separator: + return None, None, None + if provider_id not in SUPPORTED_PROVIDER_IDS: + return None, None, None + if not provider_model: + return None, None, None + return provider_id, provider_model, None + + def resolve_messages_request( + self, request: MessagesRequest + ) -> RoutedMessagesRequest: + """Return an internal routed request context.""" + resolved = self.resolve(request.model) + routed = request.model_copy(deep=True) + routed.model = resolved.provider_model + 
return RoutedMessagesRequest(request=routed, resolved=resolved) + + def resolve_token_count_request( + self, request: TokenCountRequest + ) -> RoutedTokenCountRequest: + """Return an internal token-count request context.""" + resolved = self.resolve(request.model) + routed = request.model_copy( + update={"model": resolved.provider_model}, deep=True + ) + return RoutedTokenCountRequest(request=routed, resolved=resolved) diff --git a/api/models/__init__.py b/api/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f54a732c9e298f571b6a353461ff2dbb96cf80fd --- /dev/null +++ b/api/models/__init__.py @@ -0,0 +1,45 @@ +"""API models exports.""" + +from .anthropic import ( + ContentBlockImage, + ContentBlockRedactedThinking, + ContentBlockText, + ContentBlockThinking, + ContentBlockToolResult, + ContentBlockToolUse, + Message, + MessagesRequest, + Role, + SystemContent, + ThinkingConfig, + TokenCountRequest, + Tool, +) +from .responses import ( + MessagesResponse, + ModelResponse, + ModelsListResponse, + TokenCountResponse, + Usage, +) + +__all__ = [ + "ContentBlockImage", + "ContentBlockRedactedThinking", + "ContentBlockText", + "ContentBlockThinking", + "ContentBlockToolResult", + "ContentBlockToolUse", + "Message", + "MessagesRequest", + "MessagesResponse", + "ModelResponse", + "ModelsListResponse", + "Role", + "SystemContent", + "ThinkingConfig", + "TokenCountRequest", + "TokenCountResponse", + "Tool", + "Usage", +] diff --git a/api/models/__pycache__/__init__.cpython-314.pyc b/api/models/__pycache__/__init__.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..23acfbd898788ba04a4257018c51fc0321ec89cb Binary files /dev/null and b/api/models/__pycache__/__init__.cpython-314.pyc differ diff --git a/api/models/__pycache__/anthropic.cpython-314.pyc b/api/models/__pycache__/anthropic.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..854717d9d7f582e62968a49bf99c7047ae00cdea 
Binary files /dev/null and b/api/models/__pycache__/anthropic.cpython-314.pyc differ diff --git a/api/models/__pycache__/responses.cpython-314.pyc b/api/models/__pycache__/responses.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5dd4f58e3eb83e2bbf31449ced0dfadd9fe41b31 Binary files /dev/null and b/api/models/__pycache__/responses.cpython-314.pyc differ diff --git a/api/models/anthropic.py b/api/models/anthropic.py new file mode 100644 index 0000000000000000000000000000000000000000..5bda06056becf70081b0f3620f970ae6bd941fc8 --- /dev/null +++ b/api/models/anthropic.py @@ -0,0 +1,163 @@ +"""Pydantic models for Anthropic-compatible requests.""" + +from enum import StrEnum +from typing import Any, Literal + +from pydantic import BaseModel, ConfigDict, Field + + +# ============================================================================= +# Content Block Types +# ============================================================================= +class Role(StrEnum): + user = "user" + assistant = "assistant" + system = "system" + + +class _AnthropicBlockBase(BaseModel): + """Pass through provider fields (e.g. 
``cache_control``) for native transports.""" + + model_config = ConfigDict(extra="allow") + + +class ContentBlockText(_AnthropicBlockBase): + type: Literal["text"] + text: str + + +class ContentBlockImage(_AnthropicBlockBase): + type: Literal["image"] + source: dict[str, Any] + + +class ContentBlockToolUse(_AnthropicBlockBase): + type: Literal["tool_use"] + id: str + name: str + input: dict[str, Any] + + +class ContentBlockToolResult(_AnthropicBlockBase): + type: Literal["tool_result"] + tool_use_id: str + content: str | list[Any] | dict[str, Any] + + +class ContentBlockThinking(_AnthropicBlockBase): + type: Literal["thinking"] + thinking: str + signature: str | None = None + + +class ContentBlockRedactedThinking(_AnthropicBlockBase): + type: Literal["redacted_thinking"] + data: str + + +class ContentBlockServerToolUse(_AnthropicBlockBase): + """Anthropic server-side tool invocation (e.g. ``web_search``, ``web_fetch``).""" + + type: Literal["server_tool_use"] + id: str + name: str + input: dict[str, Any] + + +class ContentBlockWebSearchToolResult(_AnthropicBlockBase): + type: Literal["web_search_tool_result"] + tool_use_id: str + content: Any + + +class ContentBlockWebFetchToolResult(_AnthropicBlockBase): + type: Literal["web_fetch_tool_result"] + tool_use_id: str + content: Any + + +class SystemContent(_AnthropicBlockBase): + type: Literal["text"] + text: str + + +# ============================================================================= +# Message Types +# ============================================================================= +class Message(BaseModel): + role: Literal["user", "assistant"] + content: ( + str + | list[ + ContentBlockText + | ContentBlockImage + | ContentBlockToolUse + | ContentBlockToolResult + | ContentBlockThinking + | ContentBlockRedactedThinking + | ContentBlockServerToolUse + | ContentBlockWebSearchToolResult + | ContentBlockWebFetchToolResult + ] + ) + reasoning_content: str | None = None + + +class Tool(_AnthropicBlockBase): + 
name: str + # Anthropic server tools (e.g. web_search beta tools) include a ``type`` and + # may omit ``input_schema`` because the provider owns the schema. + type: str | None = None + description: str | None = None + input_schema: dict[str, Any] | None = None + + +class ThinkingConfig(BaseModel): + enabled: bool | None = True + type: str | None = None + budget_tokens: int | None = None + + +# ============================================================================= +# Request Models +# ============================================================================= +class MessagesRequest(BaseModel): + model_config = ConfigDict(extra="allow") + + model: str + # Internal routing / debug: accepted on parse but not serialized to providers. + original_model: str | None = Field(default=None, exclude=True) + resolved_provider_model: str | None = Field(default=None, exclude=True) + max_tokens: int | None = None + messages: list[Message] + system: str | list[SystemContent] | None = None + stop_sequences: list[str] | None = None + stream: bool | None = True + temperature: float | None = None + top_p: float | None = None + top_k: int | None = None + metadata: dict[str, Any] | None = None + tools: list[Tool] | None = None + tool_choice: dict[str, Any] | None = None + thinking: ThinkingConfig | None = None + # Native Anthropic / SDK client hints: ignored (not forwarded) for OpenAI Chat conversion. 
+ context_management: dict[str, Any] | None = None + output_config: dict[str, Any] | None = None + mcp_servers: list[dict[str, Any]] | None = None + extra_body: dict[str, Any] | None = None + + +class TokenCountRequest(BaseModel): + model_config = ConfigDict(extra="allow") + + model: str + original_model: str | None = Field(default=None, exclude=True) + resolved_provider_model: str | None = Field(default=None, exclude=True) + messages: list[Message] + system: str | list[SystemContent] | None = None + tools: list[Tool] | None = None + thinking: ThinkingConfig | None = None + tool_choice: dict[str, Any] | None = None + context_management: dict[str, Any] | None = None + output_config: dict[str, Any] | None = None + mcp_servers: list[dict[str, Any]] | None = None diff --git a/api/models/responses.py b/api/models/responses.py new file mode 100644 index 0000000000000000000000000000000000000000..8050516ddc3649c12f3fb023b262db0c6fa829c7 --- /dev/null +++ b/api/models/responses.py @@ -0,0 +1,56 @@ +"""Pydantic models for API responses.""" + +from typing import Any, Literal + +from pydantic import BaseModel + +from .anthropic import ( + ContentBlockRedactedThinking, + ContentBlockText, + ContentBlockThinking, + ContentBlockToolUse, +) + + +class TokenCountResponse(BaseModel): + input_tokens: int + + +class ModelResponse(BaseModel): + created_at: str + display_name: str + id: str + type: Literal["model"] = "model" + + +class ModelsListResponse(BaseModel): + data: list[ModelResponse] + first_id: str | None + has_more: bool + last_id: str | None + + +class Usage(BaseModel): + input_tokens: int + output_tokens: int + cache_creation_input_tokens: int = 0 + cache_read_input_tokens: int = 0 + + +class MessagesResponse(BaseModel): + id: str + model: str + role: Literal["assistant"] = "assistant" + content: list[ + ContentBlockText + | ContentBlockToolUse + | ContentBlockThinking + | ContentBlockRedactedThinking + | dict[str, Any] + ] + type: Literal["message"] = "message" + 
stop_reason: ( + Literal["end_turn", "max_tokens", "stop_sequence", "tool_use"] | None + ) = None + stop_sequence: str | None = None + usage: Usage diff --git a/api/optimization_handlers.py b/api/optimization_handlers.py new file mode 100644 index 0000000000000000000000000000000000000000..862febb29555c79670f6ef4f3a969f0795d4299e --- /dev/null +++ b/api/optimization_handlers.py @@ -0,0 +1,154 @@ +"""Optimization handlers for fast-path API responses. + +Each handler returns a MessagesResponse if the request matches and the +optimization is enabled, otherwise None. +""" + +import uuid + +from loguru import logger + +from config.settings import Settings + +from .command_utils import extract_command_prefix, extract_filepaths_from_command +from .detection import ( + is_filepath_extraction_request, + is_prefix_detection_request, + is_quota_check_request, + is_suggestion_mode_request, + is_title_generation_request, +) +from .models.anthropic import MessagesRequest +from .models.responses import MessagesResponse, Usage + + +def _text_response( + request_data: MessagesRequest, + text: str, + *, + input_tokens: int, + output_tokens: int, +) -> MessagesResponse: + return MessagesResponse( + id=f"msg_{uuid.uuid4()}", + model=request_data.model, + content=[{"type": "text", "text": text}], + stop_reason="end_turn", + usage=Usage(input_tokens=input_tokens, output_tokens=output_tokens), + ) + + +def try_prefix_detection( + request_data: MessagesRequest, settings: Settings +) -> MessagesResponse | None: + """Fast prefix detection - return command prefix without API call.""" + if not settings.fast_prefix_detection: + return None + + is_prefix_req, command = is_prefix_detection_request(request_data) + if not is_prefix_req: + return None + + logger.info("Optimization: Fast prefix detection request") + return _text_response( + request_data, + extract_command_prefix(command), + input_tokens=100, + output_tokens=5, + ) + + +def try_quota_mock( + request_data: MessagesRequest, settings: 
Settings +) -> MessagesResponse | None: + """Mock quota probe requests.""" + if not settings.enable_network_probe_mock: + return None + if not is_quota_check_request(request_data): + return None + + logger.info("Optimization: Intercepted and mocked quota probe") + return _text_response( + request_data, + "Quota check passed.", + input_tokens=10, + output_tokens=5, + ) + + +def try_title_skip( + request_data: MessagesRequest, settings: Settings +) -> MessagesResponse | None: + """Skip title generation requests.""" + if not settings.enable_title_generation_skip: + return None + if not is_title_generation_request(request_data): + return None + + logger.info("Optimization: Skipped title generation request") + return _text_response( + request_data, + "Conversation", + input_tokens=100, + output_tokens=5, + ) + + +def try_suggestion_skip( + request_data: MessagesRequest, settings: Settings +) -> MessagesResponse | None: + """Skip suggestion mode requests.""" + if not settings.enable_suggestion_mode_skip: + return None + if not is_suggestion_mode_request(request_data): + return None + + logger.info("Optimization: Skipped suggestion mode request") + return _text_response( + request_data, + "", + input_tokens=100, + output_tokens=1, + ) + + +def try_filepath_mock( + request_data: MessagesRequest, settings: Settings +) -> MessagesResponse | None: + """Mock filepath extraction requests.""" + if not settings.enable_filepath_extraction_mock: + return None + + is_fp, cmd, output = is_filepath_extraction_request(request_data) + if not is_fp: + return None + + filepaths = extract_filepaths_from_command(cmd, output) + logger.info("Optimization: Mocked filepath extraction") + return _text_response( + request_data, + filepaths, + input_tokens=100, + output_tokens=10, + ) + + +# Cheapest/most common optimizations first for faster short-circuit. 
+OPTIMIZATION_HANDLERS = [ + try_quota_mock, + try_prefix_detection, + try_title_skip, + try_suggestion_skip, + try_filepath_mock, +] + + +def try_optimizations( + request_data: MessagesRequest, settings: Settings +) -> MessagesResponse | None: + """Run optimization handlers in order. Returns first match or None.""" + for handler in OPTIMIZATION_HANDLERS: + result = handler(request_data, settings) + if result is not None: + return result + return None diff --git a/api/routes.py b/api/routes.py new file mode 100644 index 0000000000000000000000000000000000000000..cddd65abf5ab84237775712ec58bf2106b523ca8 --- /dev/null +++ b/api/routes.py @@ -0,0 +1,271 @@ +"""FastAPI route handlers.""" + +from fastapi import APIRouter, Depends, HTTPException, Request, Response +from loguru import logger + +from config.settings import Settings +from core.anthropic import get_token_count +from providers.registry import ProviderRegistry + +from . import dependencies +from .dependencies import get_settings, require_api_key +from .gateway_model_ids import gateway_model_id, no_thinking_gateway_model_id +from .models.anthropic import MessagesRequest, TokenCountRequest +from .models.responses import ModelResponse, ModelsListResponse +from .services import ClaudeProxyService +from providers.nvidia_nim import metrics as nvidia_nim_metrics + +router = APIRouter() + +DISCOVERED_MODEL_CREATED_AT = "1970-01-01T00:00:00Z" + + +# The proxy advertises a curated set of provider-backed models. Replace +# the previous hardcoded Claude model list with the requested NVIDIA- +# compatible models so clients only see those options. 
+REQUESTED_PROVIDER_MODELS = [ + "nvidia_nim/qwen/qwen3-coder-480b-a35b-instruct", + "nvidia_nim/mistralai/mistral-large-3-675b-instruct-2512", + "nvidia_nim/abacusai/dracarys-llama-3.1-70b-instruct", + "nvidia_nim/z-ai/glm4.7", + "nvidia_nim/stepfun-ai/step-3.5-flash", + "nvidia_nim/bytedance/seed-oss-36b-instruct", + "nvidia_nim/mistralai/mistral-nemotron", + "groq/openai/gpt-oss-120b", + "groq/openai/gpt-oss-20b", + "groq/llama-3.3-70b-versatile", + "groq/meta-llama/llama-4-scout-17b-16e-instruct", + "groq/qwen/qwen3-32b", + "cerebras/gpt-oss-120b", + "cerebras/qwen-3-235b-a22b-instruct-2507", + "cerebras/zai-glm-4.7", + "cerebras/llama3.1-8b", +] + + +def get_proxy_service( + request: Request, + settings: Settings = Depends(get_settings), +) -> ClaudeProxyService: + """Build the request service for route handlers.""" + return ClaudeProxyService( + settings, + provider_getter=lambda provider_type: dependencies.resolve_provider( + provider_type, app=request.app, settings=settings + ), + token_counter=get_token_count, + ) + + +def _probe_response(allow: str) -> Response: + """Return an empty success response for compatibility probes.""" + return Response(status_code=204, headers={"Allow": allow}) + + +def _discovered_model_response(model_id: str, *, display_name: str) -> ModelResponse: + return ModelResponse( + id=model_id, + display_name=display_name, + created_at=DISCOVERED_MODEL_CREATED_AT, + ) + + +def _append_unique_model( + models: list[ModelResponse], seen: set[str], model: ModelResponse +) -> None: + if model.id in seen: + return + seen.add(model.id) + models.append(model) + + +def _append_provider_model_variants( + models: list[ModelResponse], + seen: set[str], + provider_model_ref: str, + *, + supports_thinking: bool | None = None, +) -> None: + if supports_thinking is not False: + _append_unique_model( + models, + seen, + _discovered_model_response( + gateway_model_id(provider_model_ref), + display_name=provider_model_ref, + ), + ) + 
_append_unique_model( + models, + seen, + _discovered_model_response( + no_thinking_gateway_model_id(provider_model_ref), + display_name=f"{provider_model_ref} (no thinking)", + ), + ) + + +def _build_models_list_response( + settings: Settings, provider_registry: ProviderRegistry | None +) -> ModelsListResponse: + models: list[ModelResponse] = [] + seen: set[str] = set() + + # Advertise only the requested provider models (no Claude models, no registry auto-discovery). + # Each ref is added with both thinking and no-thinking variants. + for provider_ref in REQUESTED_PROVIDER_MODELS: + # If the ref already contains a provider prefix, use it as-is; + # otherwise assume it belongs to the NVIDIA NIM provider. + ref = provider_ref if "/" in provider_ref else f"nvidia_nim/{provider_ref}" + supports_thinking = None + if provider_registry is not None: + # model_id for registry lookups should be provider-prefixed + provider, model_id = ref.split("/", 1) if "/" in ref else ("nvidia_nim", ref) + supports_thinking = provider_registry.cached_model_supports_thinking(provider, model_id) + _append_provider_model_variants(models, seen, ref, supports_thinking=supports_thinking) + + # Add a virtual `auto` model that maps to the configured MODEL and enables + # automatic fallback behavior when used by clients. + _append_unique_model( + models, + seen, + ModelResponse( + id=gateway_model_id("auto"), + display_name="auto (use configured fallbacks)", + created_at=DISCOVERED_MODEL_CREATED_AT, + ), + ) + + # Filter out any residual Claude-branded models so the proxy advertises + # only the provider-backed models requested by the user. + filtered = [ + m + for m in models + if "claude" not in (m.id or "").lower() and "claude" not in (m.display_name or "").lower() + ] + # Ensure `auto` model remains available even if filtering removed others. 
+ if not any(m.id == gateway_model_id("auto") for m in filtered): + filtered.append( + ModelResponse( + id=gateway_model_id("auto"), + display_name="auto (use configured fallbacks)", + created_at=DISCOVERED_MODEL_CREATED_AT, + ) + ) + + return ModelsListResponse( + data=filtered, + first_id=filtered[0].id if filtered else None, + has_more=False, + last_id=filtered[-1].id if filtered else None, + ) + + + +# ============================================================================= +# Routes +# ============================================================================= +@router.post("/v1/messages") +async def create_message( + request_data: MessagesRequest, + service: ClaudeProxyService = Depends(get_proxy_service), + _auth=Depends(require_api_key), +): + """Create a message (always streaming).""" + return service.create_message(request_data) + + +@router.api_route("/v1/messages", methods=["HEAD", "OPTIONS"]) +async def probe_messages(_auth=Depends(require_api_key)): + """Respond to Claude compatibility probes for the messages endpoint.""" + return _probe_response("POST, HEAD, OPTIONS") + + +@router.post("/v1/messages/count_tokens") +async def count_tokens( + request_data: TokenCountRequest, + service: ClaudeProxyService = Depends(get_proxy_service), + _auth=Depends(require_api_key), +): + """Count tokens for a request.""" + return service.count_tokens(request_data) + + +@router.api_route("/v1/messages/count_tokens", methods=["HEAD", "OPTIONS"]) +async def probe_count_tokens(_auth=Depends(require_api_key)): + """Respond to Claude compatibility probes for the token count endpoint.""" + return _probe_response("POST, HEAD, OPTIONS") + + +@router.get("/") +async def root( + settings: Settings = Depends(get_settings), _auth=Depends(require_api_key) +): + """Root endpoint.""" + return { + "status": "ok", + "provider": settings.provider_type, + "model": settings.model, + } + + +@router.api_route("/", methods=["HEAD", "OPTIONS"]) +async def 
probe_root(_auth=Depends(require_api_key)): + """Respond to compatibility probes for the root endpoint.""" + return _probe_response("GET, HEAD, OPTIONS") + + +@router.get("/health") +async def health(): + """Health check endpoint.""" + return {"status": "healthy"} + + +@router.api_route("/health", methods=["HEAD", "OPTIONS"]) +async def probe_health(): + """Respond to compatibility probes for the health endpoint.""" + return _probe_response("GET, HEAD, OPTIONS") + + +@router.get("/v1/models", response_model=ModelsListResponse) +async def list_models( + request: Request, + settings: Settings = Depends(get_settings), + _auth=Depends(require_api_key), +): + """List the model ids this proxy advertises to Claude-compatible clients.""" + registry = getattr(request.app.state, "provider_registry", None) + provider_registry = registry if isinstance(registry, ProviderRegistry) else None + return _build_models_list_response(settings, provider_registry) + + +@router.post("/stop") +async def stop_cli(request: Request, _auth=Depends(require_api_key)): + """Stop all CLI sessions and pending tasks.""" + handler = getattr(request.app.state, "message_handler", None) + if not handler: + # Fallback if messaging not initialized + cli_manager = getattr(request.app.state, "cli_manager", None) + if cli_manager: + await cli_manager.stop_all() + logger.info("STOP_CLI: source=cli_manager cancelled_count=N/A") + return {"status": "stopped", "source": "cli_manager"} + raise HTTPException(status_code=503, detail="Messaging system not initialized") + + count = await handler.stop_all_tasks() + logger.info("STOP_CLI: source=handler cancelled_count={}", count) + return {"status": "stopped", "cancelled_count": count} + + +@router.get("/admin/fallbacks") +async def admin_fallbacks(_auth=Depends(require_api_key)): + """Admin endpoint exposing NVIDIA NIM fallback metrics. + + Protected by the same API key as other endpoints. 
+ """ + try: + data = nvidia_nim_metrics.snapshot() + except Exception as e: + logger.warning("ADMIN_FALLBACKS: failed to read metrics: {}", e) + raise HTTPException(status_code=500, detail="failed to read metrics") + return {"provider": "nvidia_nim", "fallbacks": data} diff --git a/api/runtime.py b/api/runtime.py new file mode 100644 index 0000000000000000000000000000000000000000..f21695eb39efef86a71a3947d50425ecaecb8733 --- /dev/null +++ b/api/runtime.py @@ -0,0 +1,338 @@ +"""Application runtime composition and lifecycle ownership.""" + +from __future__ import annotations + +import asyncio +import os +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Any + +from fastapi import FastAPI +from loguru import logger + +from config.settings import Settings, get_settings +from providers.exceptions import ServiceUnavailableError +from providers.registry import ProviderRegistry + +if TYPE_CHECKING: + from cli.manager import CLISessionManager + from messaging.handler import ClaudeMessageHandler + from messaging.platforms.base import MessagingPlatform + from messaging.session import SessionStore + +_SHUTDOWN_TIMEOUT_S = 5.0 + + +async def best_effort( + name: str, + awaitable: Any, + timeout_s: float = _SHUTDOWN_TIMEOUT_S, + *, + log_verbose_errors: bool = False, +) -> None: + """Run a shutdown step with timeout; never raise to callers.""" + try: + await asyncio.wait_for(awaitable, timeout=timeout_s) + except TimeoutError: + logger.warning("Shutdown step timed out: {} ({}s)", name, timeout_s) + except Exception as e: + if log_verbose_errors: + logger.warning( + "Shutdown step failed: {}: {}: {}", + name, + type(e).__name__, + e, + ) + else: + logger.warning( + "Shutdown step failed: {}: exc_type={}", + name, + type(e).__name__, + ) + + +def warn_if_process_auth_token(settings: Settings) -> None: + """Warn when server auth was implicitly inherited from the shell.""" + if settings.uses_process_anthropic_auth_token(): + logger.warning( + 
        "ANTHROPIC_AUTH_TOKEN is set in the process environment but not in "
        "a configured .env file. The proxy will require that token. Add "
        "ANTHROPIC_AUTH_TOKEN= to .env to disable proxy auth, or set the "
        "same token in .env to make server auth explicit."
    )


def log_startup_failure(settings: Settings, exc: Exception) -> None:
    """Log startup failures without traceback noise unless verbose diagnostics are enabled."""
    message = startup_failure_message(settings, exc)
    logger.error("Startup failed:\n{}", message)


def startup_failure_message(settings: Settings, exc: Exception) -> str:
    """Return a concise startup failure message for logs and ASGI lifespan failure.

    ServiceUnavailableError messages are surfaced verbatim; other exception
    text is only included when ``settings.log_api_error_tracebacks`` opts in,
    since exception strings may carry request-derived data.
    """
    if isinstance(exc, ServiceUnavailableError):
        # Blank provider messages fall back to a generic line.
        return exc.message.strip() or "Server startup failed."

    if settings.log_api_error_tracebacks:
        return f"{type(exc).__name__}: {exc}"

    # Redacted form: exception type only, no text.
    return f"Server startup failed: exc_type={type(exc).__name__}"


def _should_continue_after_model_validation_failure(exc: Exception) -> bool:
    """Return whether a model-validation failure should be downgraded to a warning.

    Provider discovery can fail transiently or due to local environment issues
    (for example, a missing runtime dependency in the provider's process path).
    We keep startup alive in those cases so the configured proxy can still serve
    requests and advertise the models that are already known from settings.
    """
    if not isinstance(exc, ServiceUnavailableError):
        return False

    # Only the discovery "query failure" marker is downgraded; anything else
    # stays fatal.
    message = (exc.message or str(exc)).lower()
    return "problem=query failure:" in message


@dataclass(slots=True)
class AppRuntime:
    """Own optional messaging, CLI, session, and provider runtime resources."""

    app: FastAPI
    settings: Settings
    # Created in startup(); private so cleanup ordering stays inside this class.
    _provider_registry: ProviderRegistry | None = field(default=None, init=False)
    messaging_platform: MessagingPlatform | None = None
    message_handler: ClaudeMessageHandler | None = None
    cli_manager: CLISessionManager | None = None

    @classmethod
    def for_app(
        cls,
        app: FastAPI,
        settings: Settings | None = None,
    ) -> AppRuntime:
        """Build a runtime bound to ``app``, defaulting to process settings."""
        return cls(app=app, settings=settings or get_settings())

    async def startup(self) -> None:
        """Start the provider registry and optional messaging, then publish app state.

        Any failure after registry creation triggers best-effort registry
        cleanup before re-raising so the ASGI lifespan sees the error.
        """
        logger.info("Starting Claude Code Proxy...")
        self._provider_registry = ProviderRegistry()
        self.app.state.provider_registry = self._provider_registry
        try:
            warn_if_process_auth_token(self.settings)
            try:
                # Use a reasonable timeout for startup validation to prevent blocking healthy checks.
                await asyncio.wait_for(
                    self._provider_registry.validate_configured_models(self.settings),
                    timeout=15.0,
                )
            except Exception as exc:
                # Validation is best-effort here: on failure the proxy still
                # starts and serves models lazily.
                logger.warning(
                    "Startup model validation skipped or timed out: continuing in lazy mode. "
                    "Reason: {}",
                    str(exc) or type(exc).__name__,
                )
            self._provider_registry.start_model_list_refresh(self.settings)
            await self._start_messaging_if_configured()
            self._publish_state()
        except Exception as exc:
            log_startup_failure(self.settings, exc)
            # Best-effort teardown so a failed startup does not leak provider
            # resources; the original exception still propagates.
            await best_effort(
                "provider_registry.cleanup",
                self._provider_registry.cleanup(),
                log_verbose_errors=self.settings.log_api_error_tracebacks,
            )
            raise

    async def shutdown(self) -> None:
        """Flush sessions, then stop messaging, CLI, providers, and the rate limiter."""
        verbose = self.settings.log_api_error_tracebacks
        if self.message_handler is not None:
            try:
                self.message_handler.session_store.flush_pending_save()
            except Exception as e:
                # Redact exception text unless verbose diagnostics are enabled.
                if verbose:
                    logger.warning("Session store flush on shutdown: {}", e)
                else:
                    logger.warning(
                        "Session store flush on shutdown: exc_type={}",
                        type(e).__name__,
                    )

        logger.info("Shutdown requested, cleaning up...")
        if self.messaging_platform:
            await best_effort(
                "messaging_platform.stop",
                self.messaging_platform.stop(),
                log_verbose_errors=verbose,
            )
        if self.cli_manager:
            await best_effort(
                "cli_manager.stop_all",
                self.cli_manager.stop_all(),
                log_verbose_errors=verbose,
            )
        if self._provider_registry is not None:
            await best_effort(
                "provider_registry.cleanup",
                self._provider_registry.cleanup(),
                log_verbose_errors=verbose,
            )
        await self._shutdown_limiter()
        logger.info("Server shut down cleanly")

    async def _start_messaging_if_configured(self) -> None:
        """Create and start the configured messaging platform, if any."""
        try:
            # Imported lazily so messaging extras stay optional at runtime.
            from messaging.platforms.factory import (
                MessagingPlatformOptions,
                create_messaging_platform,
            )

            self.messaging_platform = create_messaging_platform(
                self.settings.messaging_platform,
                MessagingPlatformOptions(
                    telegram_bot_token=self.settings.telegram_bot_token,
                    allowed_telegram_user_id=self.settings.allowed_telegram_user_id,
                    discord_bot_token=self.settings.discord_bot_token,
                    allowed_discord_channels=self.settings.allowed_discord_channels,
                    voice_note_enabled=self.settings.voice_note_enabled,
                    whisper_model=self.settings.whisper_model,
                    whisper_device=self.settings.whisper_device,
                    hf_token=self.settings.hf_token,
                    nvidia_nim_api_key=self.settings.nvidia_nim_api_key_qwen,
                    messaging_rate_limit=self.settings.messaging_rate_limit,
                    messaging_rate_window=self.settings.messaging_rate_window,
                    log_raw_messaging_content=self.settings.log_raw_messaging_content,
                    log_api_error_tracebacks=self.settings.log_api_error_tracebacks,
                ),
            )

            # Factory may return None for MESSAGING_PLATFORM="none".
            if self.messaging_platform:
                await self._start_message_handler()

        except ImportError as e:
            if self.settings.log_api_error_tracebacks:
                logger.warning("Messaging module import error: {}", e)
            else:
                logger.warning(
                    "Messaging module import error: exc_type={}",
                    type(e).__name__,
                )
        except Exception as e:
            if self.settings.log_api_error_tracebacks:
                logger.error("Failed to start messaging platform: {}", e)
                import traceback

                logger.error(traceback.format_exc())
            else:
                logger.error(
                    "Failed to start messaging platform: exc_type={}",
                    type(e).__name__,
                )

    async def _start_message_handler(self) -> None:
        """Wire the CLI manager, session store, and handler onto the running platform."""
        from cli.manager import CLISessionManager
        from messaging.handler import ClaudeMessageHandler
        from messaging.session import SessionStore

        # Workspace defaults to the current directory when ALLOWED_DIR is unset.
        workspace = (
            os.path.abspath(self.settings.allowed_dir)
            if self.settings.allowed_dir
            else os.getcwd()
        )
        os.makedirs(workspace, exist_ok=True)

        data_path = os.path.abspath(self.settings.claude_workspace)
        os.makedirs(data_path, exist_ok=True)

        api_url = f"http://{self.settings.host}:{self.settings.port}/v1"
        allowed_dirs = [workspace] if self.settings.allowed_dir else []
        plans_dir_abs = os.path.abspath(
            os.path.join(self.settings.claude_workspace, "plans")
        )
        # The CLI expects the plans directory relative to its workspace root.
        plans_directory = os.path.relpath(plans_dir_abs, workspace)
        self.cli_manager = CLISessionManager(
            workspace_path=workspace,
            api_url=api_url,
            allowed_dirs=allowed_dirs,
            plans_directory=plans_directory,
            claude_bin=self.settings.claude_cli_bin,
            log_raw_cli_diagnostics=self.settings.log_raw_cli_diagnostics,
            log_messaging_error_details=self.settings.log_messaging_error_details,
        )

        session_store = SessionStore(
            storage_path=os.path.join(data_path, "sessions.json"),
            message_log_cap=self.settings.max_message_log_entries_per_chat,
        )
        platform = self.messaging_platform
        assert platform is not None
        self.message_handler = ClaudeMessageHandler(
            platform=platform,
            cli_manager=self.cli_manager,
            session_store=session_store,
            debug_platform_edits=self.settings.debug_platform_edits,
            debug_subagent_stack=self.settings.debug_subagent_stack,
            log_raw_messaging_content=self.settings.log_raw_messaging_content,
            log_raw_cli_diagnostics=self.settings.log_raw_cli_diagnostics,
            log_messaging_error_details=self.settings.log_messaging_error_details,
        )
        self._restore_tree_state(session_store)

        platform.on_message(self.message_handler.handle_message)
        await platform.start()
        logger.info(f"{platform.name} platform started with message handler")

    def _restore_tree_state(self, session_store: SessionStore) -> None:
        """Rebuild the conversation tree queue from persisted session data."""
        saved_trees = session_store.get_all_trees()
        if not saved_trees:
            return
        if self.message_handler is None:
            return

        logger.info(f"Restoring {len(saved_trees)} conversation trees...")
        from messaging.trees.queue_manager import TreeQueueManager

        self.message_handler.replace_tree_queue(
            TreeQueueManager.from_dict(
                {
                    "trees": saved_trees,
                    "node_to_tree": session_store.get_node_mapping(),
                },
                queue_update_callback=self.message_handler.update_queue_positions,
                node_started_callback=self.message_handler.mark_node_processing,
            )
        )
        # Re-persist only if stale nodes were dropped during restore.
        if self.message_handler.tree_queue.cleanup_stale_nodes() > 0:
            tree_data = self.message_handler.tree_queue.to_dict()
            session_store.sync_from_tree_data(
                tree_data["trees"], tree_data["node_to_tree"]
            )

    def _publish_state(self) -> None:
        """Expose runtime components on ``app.state`` for request handlers."""
        self.app.state.messaging_platform = self.messaging_platform
        self.app.state.message_handler = self.message_handler
        self.app.state.cli_manager = self.cli_manager

    async def _shutdown_limiter(self) -> None:
        """Best-effort shutdown of the shared messaging rate limiter singleton."""
        verbose = self.settings.log_api_error_tracebacks
        try:
            from messaging.limiter import MessagingRateLimiter
        except Exception as e:
            if verbose:
                logger.debug(
                    "Rate limiter shutdown skipped (import failed): {}: {}",
                    type(e).__name__,
                    e,
                )
            else:
                logger.debug(
                    "Rate limiter shutdown skipped (import failed): exc_type={}",
                    type(e).__name__,
                )
            return

        await best_effort(
            "MessagingRateLimiter.shutdown_instance",
            MessagingRateLimiter.shutdown_instance(),
            timeout_s=2.0,
            log_verbose_errors=verbose,
        )
diff --git a/api/services.py b/api/services.py
new file mode 100644
index 0000000000000000000000000000000000000000..57cb1b3f578fc802c3c61a20af0ceec8498d6c9e
--- /dev/null
+++ b/api/services.py
@@ -0,0 +1,305 @@
"""Application services for the Claude-compatible API."""

from __future__ import annotations

import traceback
import uuid
from collections.abc import AsyncIterator, Callable
from typing import Any

from fastapi import HTTPException
from fastapi.responses import StreamingResponse
from loguru import logger

from config.settings import Settings
from core.anthropic import get_token_count, get_user_facing_error_message
from core.anthropic.sse import ANTHROPIC_SSE_RESPONSE_HEADERS, format_sse_event
from providers.base import BaseProvider
from providers.exceptions import (
    InvalidRequestError,
    OverloadedError,
    ProviderError,
    RateLimitError,
)

from .model_router import ModelRouter
from .models.anthropic import MessagesRequest, TokenCountRequest
from .models.responses import TokenCountResponse
from .optimization_handlers import try_optimizations
from .web_tools.egress import WebFetchEgressPolicy
from .web_tools.request import (
    is_web_server_tool_request,
    openai_chat_upstream_server_tool_error,
)

# NOTE(review): ``ResolvedModel`` is referenced in annotations in this module
# but never imported — presumably it lives in .model_router; confirm and import
# it (e.g. under TYPE_CHECKING) so type checkers can resolve the name. Runtime
# is unaffected because of ``from __future__ import annotations``.

# Signature of core.anthropic.get_token_count: (messages, system, tools) -> int.
TokenCounter = Callable[[list[Any], str | list[Any] | None, list[Any] | None], int]
+ProviderGetter = Callable[[str], BaseProvider] + +# Providers that use ``/chat/completions`` + Anthropic-to-OpenAI conversion (not native Messages). +_OPENAI_CHAT_UPSTREAM_IDS = frozenset({"nvidia_nim", "groq", "cerebras"}) + + +def anthropic_sse_streaming_response( + body: AsyncIterator[str], +) -> StreamingResponse: + """Return a :class:`StreamingResponse` for Anthropic-style SSE streams.""" + return StreamingResponse( + body, + media_type="text/event-stream", + headers=ANTHROPIC_SSE_RESPONSE_HEADERS, + ) + + +def _http_status_for_unexpected_service_exception(_exc: BaseException) -> int: + """HTTP status for uncaught non-provider failures (stable client contract).""" + return 500 + + +def _log_unexpected_service_exception( + settings: Settings, + exc: BaseException, + *, + context: str, + request_id: str | None = None, +) -> None: + """Log service-layer failures without echoing exception text unless opted in.""" + if settings.log_api_error_tracebacks: + if request_id is not None: + logger.error("{} request_id={}: {}", context, request_id, exc) + else: + logger.error("{}: {}", context, exc) + logger.error(traceback.format_exc()) + return + if request_id is not None: + logger.error( + "{} request_id={} exc_type={}", + context, + request_id, + type(exc).__name__, + ) + else: + logger.error("{} exc_type={}", context, type(exc).__name__) + + +def _require_non_empty_messages(messages: list[Any]) -> None: + if not messages: + raise InvalidRequestError("messages cannot be empty") + + +class ClaudeProxyService: + """Coordinate request optimization, model routing, token count, and providers.""" + + def __init__( + self, + settings: Settings, + provider_getter: ProviderGetter, + model_router: ModelRouter | None = None, + token_counter: TokenCounter = get_token_count, + ): + self._settings = settings + self._provider_getter = provider_getter + self._model_router = model_router or ModelRouter(settings) + self._token_counter = token_counter + + def create_message(self, 
request_data: MessagesRequest) -> object: + """Create a message response or streaming response with optional failover.""" + from .web_tools.streaming import stream_web_server_tool_response + try: + _require_non_empty_messages(request_data.messages) + + candidates = self._model_router.resolve_candidates(request_data.model) + if not candidates: + raise InvalidRequestError(f"No configured models available for '{request_data.model}'") + + # For 'auto' requests with multiple candidates, we wrap the stream in a failover loop. + if len(candidates) > 1: + return anthropic_sse_streaming_response( + self._stream_with_fallbacks(candidates, request_data) + ) + + # Standard path for single-model requests + return self._create_single_message(candidates[0], request_data) + + except ProviderError: + raise + except Exception as e: + _log_unexpected_service_exception( + self._settings, e, context="CREATE_MESSAGE_ERROR" + ) + raise HTTPException( + status_code=_http_status_for_unexpected_service_exception(e), + detail=get_user_facing_error_message(e), + ) from e + + def _create_single_message( + self, resolved: ResolvedModel, request_data: MessagesRequest + ) -> object: + """Create a single message response from a resolved model.""" + routed_request = request_data.model_copy(deep=True) + routed_request.model = resolved.provider_model + + if resolved.provider_id in _OPENAI_CHAT_UPSTREAM_IDS: + tool_err = openai_chat_upstream_server_tool_error( + routed_request, + web_tools_enabled=self._settings.enable_web_server_tools, + ) + if tool_err is not None: + raise InvalidRequestError(tool_err) + + if self._settings.enable_web_server_tools and is_web_server_tool_request( + routed_request + ): + input_tokens = self._token_counter( + routed_request.messages, routed_request.system, routed_request.tools + ) + logger.info("Optimization: Handling Anthropic web server tool") + egress = WebFetchEgressPolicy( + allow_private_network_targets=self._settings.web_fetch_allow_private_networks, + 
allowed_schemes=self._settings.web_fetch_allowed_scheme_set(), + ) + return anthropic_sse_streaming_response( + stream_web_server_tool_response( + routed_request, + input_tokens=input_tokens, + web_fetch_egress=egress, + verbose_client_errors=self._settings.log_api_error_tracebacks, + ), + ) + + optimized = try_optimizations(routed_request, self._settings) + if optimized is not None: + return optimized + + provider = self._provider_getter(resolved.provider_id) + provider.preflight_stream( + routed_request, + thinking_enabled=resolved.thinking_enabled, + ) + + request_id = f"req_{uuid.uuid4().hex[:12]}" + logger.info( + "API_REQUEST: request_id={} model={} messages={}", + request_id, + routed_request.model, + len(routed_request.messages), + ) + + input_tokens = self._token_counter( + routed_request.messages, routed_request.system, routed_request.tools + ) + return anthropic_sse_streaming_response( + provider.stream_response( + routed_request, + input_tokens=input_tokens, + request_id=request_id, + thinking_enabled=resolved.thinking_enabled, + ), + ) + + async def _stream_with_fallbacks( + self, candidates: list[ResolvedModel], request_data: MessagesRequest + ) -> AsyncIterator[str]: + """Iterate through candidates until one succeeds or all fail.""" + last_exc: Exception | None = None + + for i, resolved in enumerate(candidates): + try: + provider = self._provider_getter(resolved.provider_id) + routed_request = request_data.model_copy(deep=True) + routed_request.model = resolved.provider_model + + provider.preflight_stream( + routed_request, + thinking_enabled=resolved.thinking_enabled, + ) + + request_id = f"req_{uuid.uuid4().hex[:12]}" + logger.info( + "API_REQUEST (auto fallback {}/{}): request_id={} provider={} model={}", + i + 1, + len(candidates), + request_id, + resolved.provider_id, + resolved.provider_model, + ) + + input_tokens = self._token_counter( + routed_request.messages, routed_request.system, routed_request.tools + ) + + # Attempt to stream from this 
provider. + async for event in provider.stream_response( + routed_request, + input_tokens=input_tokens, + request_id=request_id, + thinking_enabled=resolved.thinking_enabled, + ): + yield event + # CRITICAL: If we have yielded even one event, we have committed to this provider. + # We must not fallback to another candidate mid-stream. + return # Success, exit the fallback loop. + + except (RateLimitError, OverloadedError) as e: + logger.warning( + "Provider '{}' is rate limited or overloaded ({}). Trying next candidate...", + resolved.provider_id, + e.status_code, + ) + last_exc = e + continue + except Exception as e: + logger.error( + "Provider '{}' failed with unexpected error: {}. Trying next candidate...", + resolved.provider_id, + e, + ) + last_exc = e + continue + + err_msg = str(last_exc) if last_exc else "No candidates succeeded" + yield format_sse_event( + "error", + { + "type": "error", + "error": { + "type": "api_error", + "message": f"All fallback candidates failed: {err_msg}", + }, + }, + ) + if last_exc: + raise last_exc + raise InvalidRequestError("No candidates succeeded") + + def count_tokens(self, request_data: TokenCountRequest) -> TokenCountResponse: + """Count tokens for a request after applying configured model routing.""" + request_id = f"req_{uuid.uuid4().hex[:12]}" + with logger.contextualize(request_id=request_id): + try: + _require_non_empty_messages(request_data.messages) + routed = self._model_router.resolve_token_count_request(request_data) + tokens = self._token_counter( + routed.request.messages, routed.request.system, routed.request.tools + ) + logger.info( + "COUNT_TOKENS: request_id={} model={} messages={} input_tokens={}", + request_id, + routed.request.model, + len(routed.request.messages), + tokens, + ) + return TokenCountResponse(input_tokens=tokens) + except ProviderError: + raise + except Exception as e: + _log_unexpected_service_exception( + self._settings, + e, + context="COUNT_TOKENS_ERROR", + request_id=request_id, + ) + 
raise HTTPException( + status_code=_http_status_for_unexpected_service_exception(e), + detail=get_user_facing_error_message(e), + ) from e diff --git a/api/validation_log.py b/api/validation_log.py new file mode 100644 index 0000000000000000000000000000000000000000..9ccdff0f32301e26c9eabf7c203b6a1d61011de1 --- /dev/null +++ b/api/validation_log.py @@ -0,0 +1,48 @@ +"""Safe metadata summaries for HTTP 422 validation logging (no raw text content).""" + +from __future__ import annotations + +from typing import Any + + +def summarize_request_validation_body( + body: Any, +) -> tuple[list[dict[str, Any]], list[str]]: + """Return message shape summary and tool name list for debug logs.""" + messages = body.get("messages") if isinstance(body, dict) else None + message_summary: list[dict[str, Any]] = [] + if isinstance(messages, list): + for msg in messages: + if not isinstance(msg, dict): + message_summary.append({"message_kind": type(msg).__name__}) + continue + content = msg.get("content") + item: dict[str, Any] = { + "role": msg.get("role"), + "content_kind": type(content).__name__, + } + if isinstance(content, list): + item["block_types"] = [ + block.get("type", "dict") + if isinstance(block, dict) + else type(block).__name__ + for block in content[:12] + ] + item["block_keys"] = [ + sorted(str(key) for key in block)[:12] + for block in content[:5] + if isinstance(block, dict) + ] + elif isinstance(content, str): + item["content_length"] = len(content) + message_summary.append(item) + + tool_names: list[str] = [] + if isinstance(body, dict) and isinstance(body.get("tools"), list): + tool_names = [ + str(tool.get("name", "")) + for tool in body["tools"] + if isinstance(tool, dict) + ] + + return message_summary, tool_names diff --git a/api/web_server_tools.py b/api/web_server_tools.py new file mode 100644 index 0000000000000000000000000000000000000000..cedaf95601f37ad9625606011dec866ddd33d584 --- /dev/null +++ b/api/web_server_tools.py @@ -0,0 +1,22 @@ 
+"""Compatibility re-exports for :mod:`api.web_tools` (web_search / web_fetch).""" + +from __future__ import annotations + +import httpx + +from api.web_tools.egress import ( + WebFetchEgressPolicy, + WebFetchEgressViolation, + enforce_web_fetch_egress, +) +from api.web_tools.request import is_web_server_tool_request +from api.web_tools.streaming import stream_web_server_tool_response + +__all__ = [ + "WebFetchEgressPolicy", + "WebFetchEgressViolation", + "enforce_web_fetch_egress", + "httpx", + "is_web_server_tool_request", + "stream_web_server_tool_response", +] diff --git a/api/web_tools/__init__.py b/api/web_tools/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e0fd14c44de6bc85362f42483c8fb5b532be18e7 --- /dev/null +++ b/api/web_tools/__init__.py @@ -0,0 +1,17 @@ +"""Submodules for Anthropic web server tool handling (search/fetch, egress, streaming).""" + +from .egress import ( + WebFetchEgressPolicy, + WebFetchEgressViolation, + enforce_web_fetch_egress, +) +from .request import is_web_server_tool_request +from .streaming import stream_web_server_tool_response + +__all__ = [ + "WebFetchEgressPolicy", + "WebFetchEgressViolation", + "enforce_web_fetch_egress", + "is_web_server_tool_request", + "stream_web_server_tool_response", +] diff --git a/api/web_tools/__pycache__/__init__.cpython-314.pyc b/api/web_tools/__pycache__/__init__.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..83f038107f3df9e631ea821279707e9157246b06 Binary files /dev/null and b/api/web_tools/__pycache__/__init__.cpython-314.pyc differ diff --git a/api/web_tools/__pycache__/constants.cpython-314.pyc b/api/web_tools/__pycache__/constants.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1310586bc33adde607053bfaa0305a25a84c94ab Binary files /dev/null and b/api/web_tools/__pycache__/constants.cpython-314.pyc differ diff --git a/api/web_tools/__pycache__/egress.cpython-314.pyc 
b/api/web_tools/__pycache__/egress.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..859ac45f3241641b17101be71bdccfc45622d012 Binary files /dev/null and b/api/web_tools/__pycache__/egress.cpython-314.pyc differ diff --git a/api/web_tools/__pycache__/parsers.cpython-314.pyc b/api/web_tools/__pycache__/parsers.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6c880c56181aff718e15f7d24629af82adc2ca99 Binary files /dev/null and b/api/web_tools/__pycache__/parsers.cpython-314.pyc differ diff --git a/api/web_tools/__pycache__/request.cpython-314.pyc b/api/web_tools/__pycache__/request.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..db610523a382a655a90a4eb2fd9d529537d60531 Binary files /dev/null and b/api/web_tools/__pycache__/request.cpython-314.pyc differ diff --git a/api/web_tools/__pycache__/streaming.cpython-314.pyc b/api/web_tools/__pycache__/streaming.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..292c1281d6fb13bd4f61c95d858db1b9a6a962ed Binary files /dev/null and b/api/web_tools/__pycache__/streaming.cpython-314.pyc differ diff --git a/api/web_tools/constants.py b/api/web_tools/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..e7b2c0170650f485ee1c83ec3a388dacd6e92a4c --- /dev/null +++ b/api/web_tools/constants.py @@ -0,0 +1,15 @@ +"""Limits and defaults for outbound web server tool HTTP.""" + +_REQUEST_TIMEOUT_S = 20.0 +_MAX_SEARCH_RESULTS = 10 +_MAX_FETCH_CHARS = 24_000 +# Hard cap on raw bytes read from HTTP responses before decode / HTML parse (memory bound). +_MAX_WEB_FETCH_RESPONSE_BYTES = 2 * 1024 * 1024 +# Drain at most this many bytes from redirect responses before following Location. 
+_REDIRECT_RESPONSE_BODY_CAP_BYTES = 65_536 +_MAX_WEB_FETCH_REDIRECTS = 10 +_WEB_FETCH_REDIRECT_STATUSES = frozenset({301, 302, 303, 307, 308}) + +_WEB_TOOL_HTTP_HEADERS = { + "User-Agent": "Mozilla/5.0 compatible; free-claude-code/2.0", +} diff --git a/api/web_tools/egress.py b/api/web_tools/egress.py new file mode 100644 index 0000000000000000000000000000000000000000..30b29d5b3c8cec2eb4ff14eb211f0d5c27993dba --- /dev/null +++ b/api/web_tools/egress.py @@ -0,0 +1,99 @@ +"""Egress policy for user-controlled web_fetch URLs (SSRF guard).""" + +from __future__ import annotations + +import ipaddress +import socket +from dataclasses import dataclass +from urllib.parse import urlparse + + +@dataclass(frozen=True, slots=True) +class WebFetchEgressPolicy: + """Egress rules for user-influenced web_fetch URLs.""" + + allow_private_network_targets: bool + allowed_schemes: frozenset[str] + + +class WebFetchEgressViolation(ValueError): + """Raised when a web_fetch URL is rejected by egress policy (SSRF guard).""" + + +def _port_for_url(parsed) -> int: + if parsed.port is not None: + return parsed.port + return 443 if (parsed.scheme or "").lower() == "https" else 80 + + +def _stream_getaddrinfo_or_raise(host: str, port: int) -> list[tuple]: + try: + return socket.getaddrinfo( + host, port, type=socket.SOCK_STREAM, proto=socket.IPPROTO_TCP + ) + except OSError as exc: + raise WebFetchEgressViolation( + f"Could not resolve host {host!r}: {exc}" + ) from exc + + +def get_validated_stream_addrinfos_for_egress( + url: str, policy: WebFetchEgressPolicy +) -> list[tuple]: + """Resolve and validate a URL for web_fetch, returning getaddrinfo rows for pinning. + + Each HTTP connect pins to only these `getaddrinfo` results so a malicious DNS + server cannot rebind to a disallowed address between resolution and the TCP + connect (used by :func:`api.web_tools.outbound._run_web_fetch`). 
+ """ + parsed = urlparse(url) + scheme = (parsed.scheme or "").lower() + if scheme not in policy.allowed_schemes: + raise WebFetchEgressViolation( + f"URL scheme {scheme!r} is not allowed for web_fetch" + ) + + host = parsed.hostname + if host is None or host == "": + raise WebFetchEgressViolation("web_fetch URL must include a host") + + port = _port_for_url(parsed) + + if policy.allow_private_network_targets: + return _stream_getaddrinfo_or_raise(host, port) + + host_lower = host.lower() + if host_lower == "localhost" or host_lower.endswith(".localhost"): + raise WebFetchEgressViolation("localhost targets are not allowed for web_fetch") + if host_lower.endswith(".local"): + raise WebFetchEgressViolation(".local hostnames are not allowed for web_fetch") + + try: + parsed_ip = ipaddress.ip_address(host) + except ValueError: + parsed_ip = None + + if parsed_ip is not None: + if not parsed_ip.is_global: + raise WebFetchEgressViolation( + f"Non-public IP host {host!r} is not allowed for web_fetch" + ) + return _stream_getaddrinfo_or_raise(host, port) + + infos = _stream_getaddrinfo_or_raise(host, port) + for *_, sockaddr in infos: + addr = sockaddr[0] + try: + resolved = ipaddress.ip_address(addr) + except ValueError: + continue + if not resolved.is_global: + raise WebFetchEgressViolation( + f"Host {host!r} resolves to a non-public address ({resolved})" + ) + return infos + + +def enforce_web_fetch_egress(url: str, policy: WebFetchEgressPolicy) -> None: + """Validate ``url`` (scheme, host, and resolved addresses) for web_fetch.""" + get_validated_stream_addrinfos_for_egress(url, policy) diff --git a/api/web_tools/outbound.py b/api/web_tools/outbound.py new file mode 100644 index 0000000000000000000000000000000000000000..4f93f9bf014311b44e0c254d7b178c5bf486fe79 --- /dev/null +++ b/api/web_tools/outbound.py @@ -0,0 +1,278 @@ +"""Outbound HTTP for web_search / web_fetch (client, body caps, logging).""" + +from __future__ import annotations + +import asyncio +import 
socket +from collections.abc import AsyncIterator +from urllib.parse import urljoin, urlparse + +import aiohttp +import httpx +from aiohttp import ClientSession, ClientTimeout, TCPConnector +from aiohttp.abc import AbstractResolver, ResolveResult +from loguru import logger + +from . import constants +from .constants import ( + _MAX_FETCH_CHARS, + _MAX_SEARCH_RESULTS, + _REDIRECT_RESPONSE_BODY_CAP_BYTES, + _REQUEST_TIMEOUT_S, + _WEB_FETCH_REDIRECT_STATUSES, + _WEB_TOOL_HTTP_HEADERS, +) +from .egress import ( + WebFetchEgressPolicy, + WebFetchEgressViolation, + get_validated_stream_addrinfos_for_egress, +) +from .parsers import HTMLTextParser, SearchResultParser + + +def _safe_public_host_for_logs(url: str) -> str: + host = urlparse(url).hostname or "" + return host[:253] + + +def _log_web_tool_failure( + tool_name: str, + error: BaseException, + *, + fetch_url: str | None = None, +) -> None: + exc_type = type(error).__name__ + if isinstance(error, WebFetchEgressViolation): + host = _safe_public_host_for_logs(fetch_url) if fetch_url else "" + logger.warning( + "web_tool_egress_rejected tool={} exc_type={} host={!r}", + tool_name, + exc_type, + host, + ) + return + if tool_name == "web_fetch" and fetch_url: + logger.warning( + "web_tool_failure tool={} exc_type={} host={!r}", + tool_name, + exc_type, + _safe_public_host_for_logs(fetch_url), + ) + else: + logger.warning("web_tool_failure tool={} exc_type={}", tool_name, exc_type) + + +def _web_tool_client_error_summary( + tool_name: str, + error: BaseException, + *, + verbose: bool, +) -> str: + if verbose: + return f"{tool_name} failed: {type(error).__name__}" + return "Web tool request failed." 
+ + +async def _iter_response_body_under_cap( + response: httpx.Response, max_bytes: int +) -> AsyncIterator[bytes]: + if max_bytes <= 0: + return + received = 0 + async for chunk in response.aiter_bytes(chunk_size=65_536): + if received >= max_bytes: + break + remaining = max_bytes - received + if len(chunk) <= remaining: + received += len(chunk) + yield chunk + if received >= max_bytes: + break + else: + yield chunk[:remaining] + break + + +async def _drain_response_body_capped(response: httpx.Response, max_bytes: int) -> None: + async for _ in _iter_response_body_under_cap(response, max_bytes): + pass + + +async def _read_response_body_capped(response: httpx.Response, max_bytes: int) -> bytes: + return b"".join( + [piece async for piece in _iter_response_body_under_cap(response, max_bytes)] + ) + + +_NUMERIC_RESOLVE_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV +_NAME_RESOLVE_FLAGS = socket.NI_NUMERICHOST | socket.NI_NUMERICSERV + + +def getaddrinfo_rows_to_resolve_results( + host: str, addrinfos: list[tuple] +) -> list[ResolveResult]: + """Map :func:`socket.getaddrinfo` rows to aiohttp :class:`ResolveResult` (ThreadedResolver logic).""" + out: list[ResolveResult] = [] + for family, _type, proto, _canon, sockaddr in addrinfos: + if family == socket.AF_INET6: + if len(sockaddr) < 3: + continue + if sockaddr[3]: + resolved_host, port = socket.getnameinfo(sockaddr, _NAME_RESOLVE_FLAGS) + else: + resolved_host, port = sockaddr[:2] + else: + assert family == socket.AF_INET, family + resolved_host, port = sockaddr[0], sockaddr[1] + resolved_host = str(resolved_host) + port = int(port) + out.append( + ResolveResult( + hostname=host, + host=resolved_host, + port=int(port), + family=family, + proto=proto, + flags=_NUMERIC_RESOLVE_FLAGS, + ) + ) + return out + + +class _PinnedEgressStaticResolver(AbstractResolver): + """Return only pre-validated :class:`ResolveResult` for the outbound request.""" + + def __init__(self, results: list[ResolveResult]) -> None: + 
self._results = results + + async def resolve( + self, host: str, port: int = 0, family: int = socket.AF_INET + ) -> list[ResolveResult]: + return self._results + + async def close(self) -> None: # pragma: no cover - aiohttp contract + return + + +async def _read_aiohttp_body_capped( + response: aiohttp.ClientResponse, max_bytes: int +) -> bytes: + received = 0 + parts: list[bytes] = [] + async for chunk in response.content.iter_chunked(65_536): + if received >= max_bytes: + break + remaining = max_bytes - received + if len(chunk) <= remaining: + received += len(chunk) + parts.append(chunk) + else: + parts.append(chunk[:remaining]) + break + return b"".join(parts) + + +async def _drain_aiohttp_body_capped( + response: aiohttp.ClientResponse, max_bytes: int +) -> None: + if max_bytes <= 0: + return + received = 0 + async for chunk in response.content.iter_chunked(65_536): + received += len(chunk) + if received >= max_bytes: + break + + +async def _run_web_search(query: str) -> list[dict[str, str]]: + async with ( + httpx.AsyncClient( + timeout=_REQUEST_TIMEOUT_S, + follow_redirects=True, + headers=_WEB_TOOL_HTTP_HEADERS, + ) as client, + client.stream( + "GET", + "https://lite.duckduckgo.com/lite/", + params={"q": query}, + ) as response, + ): + response.raise_for_status() + body_bytes = await _read_response_body_capped( + response, constants._MAX_WEB_FETCH_RESPONSE_BYTES + ) + text = body_bytes.decode("utf-8", errors="replace") + parser = SearchResultParser() + parser.feed(text) + return parser.results[:_MAX_SEARCH_RESULTS] + + +async def _run_web_fetch(url: str, egress: WebFetchEgressPolicy) -> dict[str, str]: + """Fetch URL with manual redirects; each hop is DNS-pinned to validated addresses.""" + current_url = url + redirect_hops = 0 + timeout = ClientTimeout(total=_REQUEST_TIMEOUT_S) + + while True: + addr_infos = await asyncio.to_thread( + get_validated_stream_addrinfos_for_egress, current_url, egress + ) + host = urlparse(current_url).hostname or "" + results 
= getaddrinfo_rows_to_resolve_results(host, addr_infos) + resolver = _PinnedEgressStaticResolver(results) + connector = TCPConnector( + resolver=resolver, + force_close=True, + ) + try: + async with ( + ClientSession( + timeout=timeout, + headers=_WEB_TOOL_HTTP_HEADERS, + connector=connector, + ) as session, + session.get(current_url, allow_redirects=False) as response, + ): + if response.status in _WEB_FETCH_REDIRECT_STATUSES: + await _drain_aiohttp_body_capped( + response, _REDIRECT_RESPONSE_BODY_CAP_BYTES + ) + if redirect_hops >= constants._MAX_WEB_FETCH_REDIRECTS: + raise WebFetchEgressViolation( + "web_fetch exceeded maximum redirects " + f"({constants._MAX_WEB_FETCH_REDIRECTS})" + ) + location = response.headers.get("location") + if not location or not location.strip(): + raise WebFetchEgressViolation( + "web_fetch redirect response missing Location header" + ) + current_url = urljoin(str(response.url), location.strip()) + redirect_hops += 1 + continue + response.raise_for_status() + content_type = response.headers.get("content-type", "text/plain") + final_url = str(response.url) + encoding = response.get_encoding() or "utf-8" + body_bytes = await _read_aiohttp_body_capped( + response, constants._MAX_WEB_FETCH_RESPONSE_BYTES + ) + finally: + await connector.close() + + break + + text = body_bytes.decode(encoding, errors="replace") + title = final_url + data = text + if "html" in content_type.lower(): + parser = HTMLTextParser() + parser.feed(text) + title = parser.title or final_url + data = "\n".join(parser.text_parts) + return { + "url": final_url, + "title": title, + "media_type": "text/plain", + "data": data[:_MAX_FETCH_CHARS], + } diff --git a/api/web_tools/parsers.py b/api/web_tools/parsers.py new file mode 100644 index 0000000000000000000000000000000000000000..198b41238b3fed324bbfaa6ce871f4da27b97af0 --- /dev/null +++ b/api/web_tools/parsers.py @@ -0,0 +1,104 @@ +"""HTML parsing for web_search / web_fetch.""" + +from __future__ import annotations + 
import html
import re
from html.parser import HTMLParser
from typing import Any
from urllib.parse import parse_qs, unquote, urlparse


class SearchResultParser(HTMLParser):
    """DuckDuckGo lite HTML: extract result links and titles."""

    def __init__(self) -> None:
        super().__init__()
        # Collected {"title", "url"} dicts, de-duplicated by url.
        self.results: list[dict[str, str]] = []
        # Target URL of the anchor currently being parsed (None when outside one).
        self._href: str | None = None
        self._title_parts: list[str] = []

    def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None:
        if tag != "a":
            return
        href = dict(attrs).get("href")
        # DDG lite wraps result targets in a redirect URL carrying a `uddg` param.
        if not href or "uddg=" not in href:
            return
        parsed = urlparse(href)
        query = parse_qs(parsed.query)
        uddg = query.get("uddg", [""])[0]
        if not uddg:
            return
        self._href = unquote(uddg)
        self._title_parts = []

    def handle_data(self, data: str) -> None:
        # Accumulate anchor text; joined/normalized on the closing tag.
        if self._href is not None:
            self._title_parts.append(data)

    def handle_endtag(self, tag: str) -> None:
        if tag != "a" or self._href is None:
            return
        title = " ".join("".join(self._title_parts).split())
        if title and not any(result["url"] == self._href for result in self.results):
            self.results.append({"title": html.unescape(title), "url": self._href})
        self._href = None
        self._title_parts = []


class HTMLTextParser(HTMLParser):
    """Strip scripts/styles and collect visible text + title for fetch previews."""

    def __init__(self) -> None:
        super().__init__()
        self.title = ""
        self.text_parts: list[str] = []
        self._in_title = False
        # Depth of nested non-visible containers (script/style/noscript).
        self._skip_depth = 0

    def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None:
        if tag in {"script", "style", "noscript"}:
            self._skip_depth += 1
        elif tag == "title":
            self._in_title = True

    def handle_endtag(self, tag: str) -> None:
        if tag in {"script", "style", "noscript"} and self._skip_depth:
            self._skip_depth -= 1
        elif tag == "title":
            self._in_title = False

    def handle_data(self, data: str) -> None:
        text = " ".join(data.split())
        if not text:
            return
        if self._in_title:
            self.title = f"{self.title} {text}".strip()
        elif not self._skip_depth:
            self.text_parts.append(text)


def content_text(content: Any) -> str:
    """Flatten an Anthropic message `content` value (str or block list) to text."""
    if isinstance(content, str):
        return content
    if isinstance(content, list):
        parts = []
        for item in content:
            # Blocks may be dicts (wire shape) or model objects with a .text attr.
            if isinstance(item, dict):
                parts.append(str(item.get("text", "")))
            else:
                parts.append(str(getattr(item, "text", "")))
        return "\n".join(part for part in parts if part)
    return str(content)


def extract_query(text: str) -> str:
    """Pull a `query: ...` value out of free text; fall back to the whole text."""
    match = re.search(r"query:\s*(.+)", text, flags=re.IGNORECASE | re.DOTALL)
    if match:
        return match.group(1).strip().strip("\"'")
    return text.strip()


def extract_url(text: str) -> str:
    """Return the first http(s) URL in the text (trailing punctuation stripped)."""
    match = re.search(r"https?://\S+", text)
    return match.group(0).rstrip(").,]") if match else text.strip()
diff --git a/api/web_tools/request.py b/api/web_tools/request.py
new file mode 100644
index 0000000000000000000000000000000000000000..062c0f790faaae39175b847fa55f43e925e04e95
--- /dev/null
+++ b/api/web_tools/request.py
@@ -0,0 +1,86 @@
"""Detect forced Anthropic web server tool requests."""

from __future__ import annotations

from api.models.anthropic import MessagesRequest, Tool


def request_text(request: MessagesRequest) -> str:
    """Join all user/assistant message content into one string for tool input parsing."""
    # Local import avoids a module-import cycle with .parsers.
    from .parsers import content_text

    return "\n".join(content_text(message.content) for message in request.messages)


def forced_tool_turn_text(request: MessagesRequest) -> str:
    """Text for parsing forced server-tool inputs: latest user turn only (avoids stale history)."""
    if not request.messages:
        return ""

    from .parsers import content_text

    for message in reversed(request.messages):
        if message.role == "user":
            return content_text(message.content)
    return ""


def forced_server_tool_name(request: MessagesRequest) -> str | None:
    """Return web_search or web_fetch only when tool_choice forces that server tool."""
    tc = request.tool_choice
    if not isinstance(tc, dict):
        return None
    if tc.get("type") != "tool":
        return None
    name = tc.get("name")
    if name in {"web_search", "web_fetch"}:
        return str(name)
    return None


def has_tool_named(request: MessagesRequest, name: str) -> bool:
    """True when the request's tool list contains a tool with this exact name."""
    return any(tool.name == name for tool in request.tools or [])


def is_web_server_tool_request(request: MessagesRequest) -> bool:
    """True when the client forces a web server tool via tool_choice (not merely listed)."""
    forced = forced_server_tool_name(request)
    if forced is None:
        return False
    # tool_choice must reference a tool that is actually declared in `tools`.
    return has_tool_named(request, forced)


def is_anthropic_server_tool_definition(tool: Tool) -> bool:
    """Whether ``tool`` refers to an Anthropic server tool (web_search / web_fetch family)."""
    name = (tool.name or "").strip()
    if name in ("web_search", "web_fetch"):
        return True
    # Also match versioned type strings such as "web_search_20250305".
    typ = tool.type
    if isinstance(typ, str):
        return typ.startswith("web_search") or typ.startswith("web_fetch")
    return False


def has_listed_anthropic_server_tools(request: MessagesRequest) -> bool:
    """True when tools include web_search / web_fetch-style entries (listed, forced or not)."""
    return any(is_anthropic_server_tool_definition(t) for t in (request.tools or []))


def openai_chat_upstream_server_tool_error(
    request: MessagesRequest, *, web_tools_enabled: bool
) -> str | None:
    """Return a user-facing error when OpenAI Chat upstream cannot satisfy server-tool semantics."""
    forced = forced_server_tool_name(request)
    if forced and not web_tools_enabled:
        return (
            f"tool_choice forces Anthropic server tool {forced!r}, but local web server tools are "
            "disabled (ENABLE_WEB_SERVER_TOOLS=false). Enable them to use this tool."
        )
    if not forced and has_listed_anthropic_server_tools(request):
        return (
            "OpenAI Chat upstreams (NVIDIA NIM) cannot use listed Anthropic server tools "
            "(web_search / web_fetch) without the local web server tool handler. "
            "Set ENABLE_WEB_SERVER_TOOLS=true and force the tool with "
            "tool_choice, or remove these tools from the request."
        )
    return None
diff --git a/api/web_tools/streaming.py b/api/web_tools/streaming.py
new file mode 100644
index 0000000000000000000000000000000000000000..7befab7811a75c686e911be2777099fb3a14373e
--- /dev/null
+++ b/api/web_tools/streaming.py
@@ -0,0 +1,206 @@
"""SSE streaming for local web_search / web_fetch server tool results."""

from __future__ import annotations

import uuid
from collections.abc import AsyncIterator
from datetime import UTC, datetime
from typing import Any

from api.models.anthropic import MessagesRequest
from core.anthropic.server_tool_sse import (
    SERVER_TOOL_USE,
    WEB_FETCH_TOOL_ERROR,
    WEB_FETCH_TOOL_RESULT,
    WEB_SEARCH_TOOL_RESULT,
    WEB_SEARCH_TOOL_RESULT_ERROR,
)
from core.anthropic.sse import format_sse_event

from .constants import _MAX_FETCH_CHARS
from .egress import WebFetchEgressPolicy
from .parsers import extract_query, extract_url
from .request import (
    forced_server_tool_name,
    forced_tool_turn_text,
    has_tool_named,
)


def _search_summary(query: str, results: list[dict[str, str]]) -> str:
    """Render search results as a numbered plain-text list for the model."""
    if not results:
        return f"No web search results found for: {query}"
    lines = [f"Search results for: {query}"]
    for index, result in enumerate(results, start=1):
        lines.append(f"{index}. {result['title']}\n{result['url']}")
    return "\n\n".join(lines)


async def stream_web_server_tool_response(
    request: MessagesRequest,
    input_tokens: int,
    *,
    web_fetch_egress: WebFetchEgressPolicy,
    verbose_client_errors: bool = False,
) -> AsyncIterator[str]:
    """Stream a minimal Anthropic-shaped turn for forced `web_search` / `web_fetch` (local fallback).

    When `ENABLE_WEB_SERVER_TOOLS` is on, this is a proxy-side execution path — not a full
    hosted Anthropic citation or encrypted-content pipeline.
    """
    # Local import avoids a cycle with the outbound HTTP module.
    from . import outbound

    tool_name = forced_server_tool_name(request)
    if tool_name is None or not has_tool_named(request, tool_name):
        return

    text = forced_tool_turn_text(request)
    message_id = f"msg_{uuid.uuid4()}"
    tool_id = f"srvtoolu_{uuid.uuid4().hex}"
    usage_key = (
        "web_search_requests" if tool_name == "web_search" else "web_fetch_requests"
    )
    tool_input = (
        {"query": extract_query(text)}
        if tool_name == "web_search"
        else {"url": extract_url(text)}
    )
    # Per-tool block/error type mappings, used on the failure path below.
    _result_block_for_tool = {
        "web_search": WEB_SEARCH_TOOL_RESULT,
        "web_fetch": WEB_FETCH_TOOL_RESULT,
    }
    _error_payload_type_for_tool = {
        "web_search": WEB_SEARCH_TOOL_RESULT_ERROR,
        "web_fetch": WEB_FETCH_TOOL_ERROR,
    }

    # Event order mirrors the Anthropic Messages streaming contract:
    # message_start -> block 0 (server_tool_use) -> block 1 (tool result) ->
    # block 2 (text summary) -> message_delta -> message_stop.
    yield format_sse_event(
        "message_start",
        {
            "type": "message_start",
            "message": {
                "id": message_id,
                "type": "message",
                "role": "assistant",
                "content": [],
                "model": request.model,
                "stop_reason": None,
                "stop_sequence": None,
                "usage": {"input_tokens": input_tokens, "output_tokens": 1},
            },
        },
    )
    yield format_sse_event(
        "content_block_start",
        {
            "type": "content_block_start",
            "index": 0,
            "content_block": {
                "type": SERVER_TOOL_USE,
                "id": tool_id,
                "name": tool_name,
                "input": tool_input,
            },
        },
    )
    yield format_sse_event(
        "content_block_stop", {"type": "content_block_stop", "index": 0}
    )

    try:
        if tool_name == "web_search":
            query = str(tool_input["query"])
            results = await outbound._run_web_search(query)
            result_content: Any = [
                {
                    "type": "web_search_result",
                    "title": result["title"],
                    "url": result["url"],
                }
                for result in results
            ]
            summary = _search_summary(query, results)
            result_block_type = WEB_SEARCH_TOOL_RESULT
        else:
            fetched = await outbound._run_web_fetch(
                str(tool_input["url"]), web_fetch_egress
            )
            result_content = {
                "type": "web_fetch_result",
                "url": fetched["url"],
                "content": {
                    "type": "document",
                    "source": {
                        "type": "text",
                        "media_type": fetched["media_type"],
                        "data": fetched["data"],
                    },
                    "title": fetched["title"],
                    "citations": {"enabled": True},
                },
                "retrieved_at": datetime.now(UTC).isoformat(),
            }
            summary = fetched["data"][:_MAX_FETCH_CHARS]
            result_block_type = WEB_FETCH_TOOL_RESULT
    except Exception as error:
        # Failure path: emit an error-shaped result block plus a (possibly
        # redacted) model-facing summary instead of aborting the stream.
        fetch_url = str(tool_input["url"]) if tool_name == "web_fetch" else None
        outbound._log_web_tool_failure(tool_name, error, fetch_url=fetch_url)
        result_block_type = _result_block_for_tool[tool_name]
        result_content = {
            "type": _error_payload_type_for_tool[tool_name],
            "error_code": "unavailable",
        }
        summary = outbound._web_tool_client_error_summary(
            tool_name, error, verbose=verbose_client_errors
        )

    # Rough output-token estimate (~4 chars/token heuristic), floored at 1.
    output_tokens = max(1, len(summary) // 4)

    yield format_sse_event(
        "content_block_start",
        {
            "type": "content_block_start",
            "index": 1,
            "content_block": {
                "type": result_block_type,
                "tool_use_id": tool_id,
                "content": result_content,
            },
        },
    )
    yield format_sse_event(
        "content_block_stop", {"type": "content_block_stop", "index": 1}
    )
    # Model-facing summary: stream as normal text deltas (CLI/transcript code reads `text_delta`,
    # not eager `text` on `content_block_start`).
    yield format_sse_event(
        "content_block_start",
        {
            "type": "content_block_start",
            "index": 2,
            "content_block": {"type": "text", "text": ""},
        },
    )
    yield format_sse_event(
        "content_block_delta",
        {
            "type": "content_block_delta",
            "index": 2,
            "delta": {"type": "text_delta", "text": summary},
        },
    )
    yield format_sse_event(
        "content_block_stop", {"type": "content_block_stop", "index": 2}
    )
    yield format_sse_event(
        "message_delta",
        {
            "type": "message_delta",
            "delta": {"stop_reason": "end_turn", "stop_sequence": None},
            "usage": {
                "input_tokens": input_tokens,
                "output_tokens": output_tokens,
                "server_tool_use": {usage_key: 1},
            },
        },
    )
    yield format_sse_event("message_stop", {"type": "message_stop"})
diff --git a/cli/__init__.py b/cli/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..2df9787dfba001bcca824a3d51c362931bde994c
--- /dev/null
+++ b/cli/__init__.py
@@ -0,0 +1,6 @@
"""CLI integration for Claude Code."""

from .manager import CLISessionManager
from .session import CLISession

__all__ = ["CLISession", "CLISessionManager"]
diff --git a/cli/entrypoints.py b/cli/entrypoints.py
new file mode 100644
index 0000000000000000000000000000000000000000..8eb18fa7dfbabf838732ba529506d681f6933a40
--- /dev/null
+++ b/cli/entrypoints.py
@@ -0,0 +1,60 @@
"""CLI entry points for the installed package."""

from __future__ import annotations

from pathlib import Path


def _load_env_template() -> str:
    """Load the canonical root env template from package resources or source.

    Prefers the copy packaged as ``cli/env.example``; falls back to the repo-root
    ``.env.example`` when running from a source checkout.

    Raises:
        FileNotFoundError: when neither location provides the template.
    """
    import importlib.resources

    packaged = importlib.resources.files("cli").joinpath("env.example")
    if packaged.is_file():
        return packaged.read_text("utf-8")

    source_template = Path(__file__).resolve().parents[1] / ".env.example"
    if source_template.is_file():
        return source_template.read_text(encoding="utf-8")

    raise FileNotFoundError("Could not find bundled or source .env.example template.")

+def serve() -> None: + """Start the FastAPI server (registered as `free-claude-code` script).""" + import uvicorn + + from cli.process_registry import kill_all_best_effort + from config.settings import get_settings + + settings = get_settings() + try: + uvicorn.run( + "api.app:create_asgi_app", + factory=True, + host=settings.host, + port=settings.port, + log_level="debug", + timeout_graceful_shutdown=5, + ) + finally: + kill_all_best_effort() + + +def init() -> None: + """Scaffold config at ~/.config/free-claude-code/.env (registered as `fcc-init`).""" + config_dir = Path.home() / ".config" / "free-claude-code" + env_file = config_dir / ".env" + + if env_file.exists(): + print(f"Config already exists at {env_file}") + print("Delete it first if you want to reset to defaults.") + return + + config_dir.mkdir(parents=True, exist_ok=True) + template = _load_env_template() + env_file.write_text(template, encoding="utf-8") + print(f"Config created at {env_file}") + print( + "Edit it to set your API keys and model preferences, then run: free-claude-code" + ) diff --git a/cli/manager.py b/cli/manager.py new file mode 100644 index 0000000000000000000000000000000000000000..cb27419f38457e7d7b6b6711afb4599c99da530e --- /dev/null +++ b/cli/manager.py @@ -0,0 +1,163 @@ +""" +CLI Session Manager for Multi-Instance Claude CLI Support + +Manages a pool of CLISession instances, each handling one conversation. +This enables true parallel processing where multiple conversations run +simultaneously in separate CLI processes. +""" + +import asyncio +import uuid + +from loguru import logger + +from .session import CLISession + + +class CLISessionManager: + """ + Manages multiple CLISession instances for parallel conversation processing. + + Each new conversation gets its own CLISession with its own subprocess. + Replies to existing conversations reuse the same CLISession instance. 
+ """ + + def __init__( + self, + workspace_path: str, + api_url: str, + allowed_dirs: list[str] | None = None, + plans_directory: str | None = None, + claude_bin: str = "claude", + *, + log_raw_cli_diagnostics: bool = False, + log_messaging_error_details: bool = False, + ): + """ + Initialize the session manager. + + Args: + workspace_path: Working directory for CLI processes + api_url: API URL for the proxy + allowed_dirs: Directories the CLI is allowed to access + plans_directory: Directory for Claude Code CLI plan files (passed via --settings) + """ + self.workspace = workspace_path + self.api_url = api_url + self.allowed_dirs = allowed_dirs or [] + self.plans_directory = plans_directory + self.claude_bin = claude_bin + self._log_raw_cli_diagnostics = log_raw_cli_diagnostics + self._log_messaging_error_details = log_messaging_error_details + + self._sessions: dict[str, CLISession] = {} + self._pending_sessions: dict[str, CLISession] = {} + self._temp_to_real: dict[str, str] = {} + self._real_to_temp: dict[str, str] = {} + self._lock = asyncio.Lock() + + logger.info("CLISessionManager initialized") + + async def get_or_create_session( + self, session_id: str | None = None + ) -> tuple[CLISession, str, bool]: + """ + Get an existing session or create a new one. 
+ + Returns: + Tuple of (CLISession instance, session_id, is_new_session) + """ + async with self._lock: + if session_id: + lookup_id = self._temp_to_real.get(session_id, session_id) + + if lookup_id in self._sessions: + return self._sessions[lookup_id], lookup_id, False + if lookup_id in self._pending_sessions: + return self._pending_sessions[lookup_id], lookup_id, False + + temp_id = session_id if session_id else f"pending_{uuid.uuid4().hex[:8]}" + + new_session = CLISession( + workspace_path=self.workspace, + api_url=self.api_url, + allowed_dirs=self.allowed_dirs, + plans_directory=self.plans_directory, + claude_bin=self.claude_bin, + log_raw_cli_diagnostics=self._log_raw_cli_diagnostics, + ) + self._pending_sessions[temp_id] = new_session + logger.info(f"Created new session: {temp_id}") + + return new_session, temp_id, True + + async def register_real_session_id( + self, temp_id: str, real_session_id: str + ) -> bool: + """Register the real session ID from CLI output.""" + async with self._lock: + if temp_id not in self._pending_sessions: + logger.warning(f"Temp session {temp_id} not found") + return False + + session = self._pending_sessions.pop(temp_id) + self._sessions[real_session_id] = session + self._temp_to_real[temp_id] = real_session_id + self._real_to_temp[real_session_id] = temp_id + + logger.info(f"Registered session: {temp_id} -> {real_session_id}") + return True + + async def remove_session(self, session_id: str) -> bool: + """Remove a session from the manager.""" + async with self._lock: + if session_id in self._pending_sessions: + session = self._pending_sessions.pop(session_id) + await session.stop() + return True + + if session_id in self._sessions: + session = self._sessions.pop(session_id) + await session.stop() + temp_id = self._real_to_temp.pop(session_id, None) + if temp_id is not None: + self._temp_to_real.pop(temp_id, None) + return True + + return False + + async def stop_all(self): + """Stop all sessions.""" + async with self._lock: + 
all_sessions = list(self._sessions.values()) + list( + self._pending_sessions.values() + ) + for session in all_sessions: + try: + await session.stop() + except Exception as e: + if self._log_messaging_error_details: + logger.error( + "Error stopping session: {}: {}", + type(e).__name__, + e, + ) + else: + logger.error( + "Error stopping session: exc_type={}", + type(e).__name__, + ) + + self._sessions.clear() + self._pending_sessions.clear() + self._temp_to_real.clear() + self._real_to_temp.clear() + logger.info("All sessions stopped") + + def get_stats(self) -> dict: + """Get session statistics.""" + return { + "active_sessions": len(self._sessions), + "pending_sessions": len(self._pending_sessions), + "busy_count": sum(1 for s in self._sessions.values() if s.is_busy), + } diff --git a/cli/process_registry.py b/cli/process_registry.py new file mode 100644 index 0000000000000000000000000000000000000000..1dadda0f323144ecc83e4643125af922cc63091a --- /dev/null +++ b/cli/process_registry.py @@ -0,0 +1,74 @@ +"""Track and clean up spawned CLI subprocesses. + +This is a safety net for cases where the server is interrupted (Ctrl+C) and the +FastAPI lifespan cleanup doesn't run to completion. We only track processes we +spawn so we don't accidentally kill unrelated system processes. 
+""" + +from __future__ import annotations + +import atexit +import os +import subprocess +import threading + +from loguru import logger + +_lock = threading.Lock() +_pids: set[int] = set() +_atexit_registered = False + + +def ensure_atexit_registered() -> None: + global _atexit_registered + with _lock: + if _atexit_registered: + return + atexit.register(kill_all_best_effort) + _atexit_registered = True + + +def register_pid(pid: int) -> None: + if not pid: + return + ensure_atexit_registered() + with _lock: + _pids.add(int(pid)) + + +def unregister_pid(pid: int) -> None: + if not pid: + return + with _lock: + _pids.discard(int(pid)) + + +def kill_all_best_effort() -> None: + """Kill any still-running registered pids (best-effort).""" + with _lock: + pids = list(_pids) + _pids.clear() + + if not pids: + return + + if os.name == "nt": + for pid in pids: + try: + # /T kills child processes, /F forces termination. + subprocess.run( + ["taskkill", "/PID", str(pid), "/T", "/F"], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + check=False, + ) + except Exception as e: + logger.debug("process_registry: taskkill failed pid=%s: %s", pid, e) + return + + # Best-effort fallback for non-Windows. + for pid in pids: + try: + os.kill(pid, 9) + except Exception as e: + logger.debug("process_registry: kill failed pid=%s: %s", pid, e) diff --git a/cli/session.py b/cli/session.py new file mode 100644 index 0000000000000000000000000000000000000000..d0079756e6550c4fa34f3b260b4ab84dc65d8aa6 --- /dev/null +++ b/cli/session.py @@ -0,0 +1,334 @@ +"""Claude Code CLI session management.""" + +import asyncio +import json +import os +from collections.abc import AsyncGenerator +from dataclasses import dataclass, field +from typing import Any + +from loguru import logger + +from .process_registry import register_pid, unregister_pid + +# Cap stderr capture so a runaway child cannot exhaust memory; pipe is still drained. 
_MAX_STDERR_CAPTURE_BYTES = 256 * 1024


@dataclass(frozen=True, slots=True)
class ClaudeCliConfig:
    """Configuration for a managed Claude CLI subprocess."""

    workspace_path: str
    api_url: str
    allowed_dirs: list[str] = field(default_factory=list)
    plans_directory: str | None = None
    claude_bin: str = "claude"


class CLISession:
    """Manages a single persistent Claude Code CLI subprocess."""

    def __init__(
        self,
        workspace_path: str,
        api_url: str,
        allowed_dirs: list[str] | None = None,
        plans_directory: str | None = None,
        claude_bin: str = "claude",
        *,
        log_raw_cli_diagnostics: bool = False,
    ):
        # Normalize paths once so downstream comparisons and --add-dir args are stable.
        self.config = ClaudeCliConfig(
            workspace_path=os.path.normpath(os.path.abspath(workspace_path)),
            api_url=api_url,
            allowed_dirs=[os.path.normpath(d) for d in (allowed_dirs or [])],
            plans_directory=plans_directory,
            claude_bin=claude_bin,
        )
        # Mirrored as flat attributes for convenient access by callers.
        self.workspace = self.config.workspace_path
        self.api_url = self.config.api_url
        self.allowed_dirs = self.config.allowed_dirs
        self.plans_directory = self.config.plans_directory
        self.claude_bin = self.config.claude_bin
        self._log_raw_cli_diagnostics = log_raw_cli_diagnostics
        self.process: asyncio.subprocess.Process | None = None
        self.current_session_id: str | None = None
        self._is_busy = False
        self._cli_lock = asyncio.Lock()

    @staticmethod
    async def _drain_stderr_bounded(
        process: asyncio.subprocess.Process,
        *,
        max_bytes: int = _MAX_STDERR_CAPTURE_BYTES,
    ) -> bytes:
        """Read stderr concurrently with stdout to avoid subprocess pipe deadlocks.

        Retains at most ``max_bytes`` for logging; any excess is discarded, but
        the pipe is read until EOF so a noisy child cannot fill the buffer and
        block forever.
        """
        if not process.stderr:
            return b""
        parts: list[bytes] = []
        received = 0
        while True:
            chunk = await process.stderr.read(65_536)
            if not chunk:
                break
            if received < max_bytes:
                take = min(len(chunk), max_bytes - received)
                if take:
                    parts.append(chunk[:take])
                    received += take
            # If already at cap, keep reading and discarding until EOF.
        return b"".join(parts)

    @property
    def is_busy(self) -> bool:
        """Check if a task is currently running."""
        return self._is_busy

    async def start_task(
        self, prompt: str, session_id: str | None = None, fork_session: bool = False
    ) -> AsyncGenerator[dict]:
        """
        Start a new task or continue an existing session.

        Args:
            prompt: The user's message/prompt
            session_id: Optional session ID to resume
            fork_session: When resuming, branch into a new session via --fork-session

        Yields:
            Event dictionaries from the CLI (parsed stream-json lines, plus
            synthetic "session_info", "raw", "error" and "exit" events).
        """
        async with self._cli_lock:
            self._is_busy = True
            env = os.environ.copy()

            # The proxy ignores the key, but the CLI requires one to be set.
            if "ANTHROPIC_API_KEY" not in env:
                env["ANTHROPIC_API_KEY"] = "sk-placeholder-key-for-proxy"

            env["ANTHROPIC_API_URL"] = self.api_url
            # BASE_URL must not carry the /v1 suffix; strip it when present.
            if self.api_url.endswith("/v1"):
                env["ANTHROPIC_BASE_URL"] = self.api_url[:-3]
            else:
                env["ANTHROPIC_BASE_URL"] = self.api_url

            env["TERM"] = "dumb"
            env["PYTHONIOENCODING"] = "utf-8"

            # Build command
            if session_id and not session_id.startswith("pending_"):
                cmd = [
                    self.claude_bin,
                    "--resume",
                    session_id,
                ]
                if fork_session:
                    cmd.append("--fork-session")
                cmd += [
                    "-p",
                    prompt,
                    "--output-format",
                    "stream-json",
                    "--dangerously-skip-permissions",
                    "--verbose",
                ]
                logger.info(f"Resuming Claude session {session_id}")
            else:
                cmd = [
                    self.claude_bin,
                    "-p",
                    prompt,
                    "--output-format",
                    "stream-json",
                    "--dangerously-skip-permissions",
                    "--verbose",
                ]
                logger.info("Starting new Claude session")

            if self.allowed_dirs:
                for d in self.allowed_dirs:
                    cmd.extend(["--add-dir", d])

            if self.plans_directory is not None:
                settings_json = json.dumps({"plansDirectory": self.plans_directory})
                cmd.extend(["--settings", settings_json])

            try:
                self.process = await asyncio.create_subprocess_exec(
                    *cmd,
                    stdout=asyncio.subprocess.PIPE,
                    stderr=asyncio.subprocess.PIPE,
                    cwd=self.workspace,
                    env=env,
                )
                if self.process and self.process.pid:
                    # Track the child so an interrupted server can still reap it.
                    register_pid(self.process.pid)

                if not self.process or not self.process.stdout:
                    yield {"type": "exit", "code": 1}
                    return

                session_id_extracted = False
                buffer = bytearray()
                stderr_task: asyncio.Task[bytes] | None = None
                if self.process.stderr:
                    # Drain stderr in parallel so a full stderr pipe can't
                    # deadlock the stdout read loop below.
                    stderr_task = asyncio.create_task(
                        self._drain_stderr_bounded(self.process)
                    )

                try:
                    while True:
                        chunk = await self.process.stdout.read(65536)
                        if not chunk:
                            # EOF: flush any trailing partial line before exiting.
                            if buffer:
                                line_str = buffer.decode(
                                    "utf-8", errors="replace"
                                ).strip()
                                if line_str:
                                    async for event in self._handle_line_gen(
                                        line_str, session_id_extracted
                                    ):
                                        if event.get("type") == "session_info":
                                            session_id_extracted = True
                                        yield event
                            break

                        buffer.extend(chunk)

                        # Emit every complete newline-terminated line in the buffer.
                        while True:
                            newline_pos = buffer.find(b"\n")
                            if newline_pos == -1:
                                break

                            line = buffer[:newline_pos]
                            buffer = buffer[newline_pos + 1 :]

                            line_str = line.decode("utf-8", errors="replace").strip()
                            if line_str:
                                async for event in self._handle_line_gen(
                                    line_str, session_id_extracted
                                ):
                                    if event.get("type") == "session_info":
                                        session_id_extracted = True
                                    yield event
                except asyncio.CancelledError:
                    # Cancelling the handler task should not leave a Claude CLI
                    # subprocess running in the background.
                    await asyncio.shield(self.stop())
                    raise
                finally:
                    stderr_bytes = b""
                    if stderr_task is not None:
                        stderr_bytes = await stderr_task

                    stderr_text = None
                    if stderr_bytes:
                        stderr_text = stderr_bytes.decode("utf-8", errors="replace").strip()
                    if stderr_text:
                        if self._log_raw_cli_diagnostics:
                            logger.error("Claude CLI stderr: {}", stderr_text)
                        else:
                            # Privacy mode: log sizes only, not content.
                            logger.error(
                                "Claude CLI stderr: bytes={} text_chars={}",
                                len(stderr_bytes),
                                len(stderr_text),
                            )
                        logger.info("CLI_SESSION: Yielding error event from stderr")
                        yield {"type": "error", "error": {"message": stderr_text}}

                    return_code = await self.process.wait()
                    logger.info(
                        f"Claude CLI exited with code {return_code}, stderr_present={bool(stderr_text)}"
                    )
                    if return_code != 0 and not stderr_text:
                        logger.warning(
                            f"CLI_SESSION: Process exited with code {return_code} but no stderr captured"
                        )
                    yield {
                        "type": "exit",
                        "code": return_code,
                        "stderr": stderr_text,
                    }
            finally:
                self._is_busy = False
                if self.process and self.process.pid:
                    unregister_pid(self.process.pid)

    async def _handle_line_gen(
        self, line_str: str, session_id_extracted: bool
    ) -> AsyncGenerator[dict]:
        """Process a single line and yield events."""
        try:
            event = json.loads(line_str)
            # Only the first discovered session id is surfaced as session_info.
            if not session_id_extracted:
                extracted_id = self._extract_session_id(event)
                if extracted_id:
                    self.current_session_id = extracted_id
                    logger.info(f"Extracted session ID: {extracted_id}")
                    yield {"type": "session_info", "session_id": extracted_id}

            yield event
        except json.JSONDecodeError:
            # Non-JSON output (banners, progress noise) is passed through as "raw".
            if self._log_raw_cli_diagnostics:
                logger.debug("Non-JSON output: {}", line_str)
            else:
                logger.debug("Non-JSON CLI line: char_len={}", len(line_str))
            yield {"type": "raw", "content": line_str}

    def _extract_session_id(self, event: Any) -> str | None:
        """Extract session ID from CLI event (checks several known shapes)."""
        if not isinstance(event, dict):
            return None

        if "session_id" in event:
            return event["session_id"]
        if "sessionId" in event:
            return event["sessionId"]

        # Some event types nest the id one level down.
        for key in ["init", "system", "result", "metadata"]:
            if key in event and isinstance(event[key], dict):
                nested = event[key]
                if "session_id" in nested:
                    return nested["session_id"]
                if "sessionId" in nested:
                    return nested["sessionId"]

        if "conversation" in event and isinstance(event["conversation"], dict):
            conv = event["conversation"]
            if "id" in conv:
                return conv["id"]

        return None

    async def stop(self):
        """Stop the CLI process (terminate, then kill after a 5s grace period)."""
        if self.process and self.process.returncode is None:
            try:
                logger.info(f"Stopping Claude CLI process {self.process.pid}")
                self.process.terminate()
                try:
                    await asyncio.wait_for(self.process.wait(), timeout=5.0)
                except TimeoutError:
                    self.process.kill()
                    await self.process.wait()
                if self.process and self.process.pid:
                    unregister_pid(self.process.pid)
                return True
            except Exception as e:
                if self._log_raw_cli_diagnostics:
                    logger.error(
                        "Error stopping process: {}: {}",
                        type(e).__name__,
                        e,
                    )
                else:
                    logger.error(
                        "Error stopping process: exc_type={}",
                        type(e).__name__,
                    )
                return False
        return False
diff --git a/config/__init__.py b/config/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..87b0151f3a49b1bb8d4ac1a0c2c88b22e330000e
--- /dev/null
+++ b/config/__init__.py
@@ -0,0 +1,5 @@
"""Configuration management."""

from .settings import Settings, get_settings

__all__ = ["Settings", "get_settings"]
diff --git a/config/__pycache__/__init__.cpython-312.pyc b/config/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0732d497bdc6eb394345eb4d8985fcad57092de0
Binary files /dev/null and b/config/__pycache__/__init__.cpython-312.pyc differ
diff --git a/config/__pycache__/__init__.cpython-314.pyc b/config/__pycache__/__init__.cpython-314.pyc
new file mode 100644
index 
0000000000000000000000000000000000000000..2165bcbb71c0dc3c3d168a7c57c4df2f54800598 Binary files /dev/null and b/config/__pycache__/__init__.cpython-314.pyc differ diff --git a/config/__pycache__/constants.cpython-312.pyc b/config/__pycache__/constants.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..432b0ddcd71d7c69e8fcd4638bb3bd651dfcc942 Binary files /dev/null and b/config/__pycache__/constants.cpython-312.pyc differ diff --git a/config/__pycache__/constants.cpython-314.pyc b/config/__pycache__/constants.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..14f1baebde863da1b4a22b0133ab0b0ddf0e5cd7 Binary files /dev/null and b/config/__pycache__/constants.cpython-314.pyc differ diff --git a/config/__pycache__/logging_config.cpython-314.pyc b/config/__pycache__/logging_config.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..79557f51d57b23be37875a37be7db3d9ebc39289 Binary files /dev/null and b/config/__pycache__/logging_config.cpython-314.pyc differ diff --git a/config/__pycache__/nim.cpython-312.pyc b/config/__pycache__/nim.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6ed8d20de8f76f379e306e75a188831a5394f531 Binary files /dev/null and b/config/__pycache__/nim.cpython-312.pyc differ diff --git a/config/__pycache__/nim.cpython-314.pyc b/config/__pycache__/nim.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8677041ebf211c4f07bacaade49448229b5cdb10 Binary files /dev/null and b/config/__pycache__/nim.cpython-314.pyc differ diff --git a/config/__pycache__/provider_catalog.cpython-312.pyc b/config/__pycache__/provider_catalog.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1b1a70268b117cf0d5903d6a313ecea4fdcb0fad Binary files /dev/null and b/config/__pycache__/provider_catalog.cpython-312.pyc differ diff --git 
a/config/__pycache__/provider_catalog.cpython-314.pyc b/config/__pycache__/provider_catalog.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c48e7e512f8eaf09cc41e4c6016525307b1a0bf7 Binary files /dev/null and b/config/__pycache__/provider_catalog.cpython-314.pyc differ diff --git a/config/__pycache__/provider_ids.cpython-312.pyc b/config/__pycache__/provider_ids.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..282018c86e88220456bdc456d2beb40a9c3026d4 Binary files /dev/null and b/config/__pycache__/provider_ids.cpython-312.pyc differ diff --git a/config/__pycache__/provider_ids.cpython-314.pyc b/config/__pycache__/provider_ids.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..44d5a5ab94d505fdea327971008dfdac84c95eea Binary files /dev/null and b/config/__pycache__/provider_ids.cpython-314.pyc differ diff --git a/config/__pycache__/settings.cpython-312.pyc b/config/__pycache__/settings.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a97026058190c69bde2e7b49ba00cafed450094e Binary files /dev/null and b/config/__pycache__/settings.cpython-312.pyc differ diff --git a/config/__pycache__/settings.cpython-314.pyc b/config/__pycache__/settings.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bd1610df6545cc2e0ad2a02889b475cbd60ae37b Binary files /dev/null and b/config/__pycache__/settings.cpython-314.pyc differ diff --git a/config/constants.py b/config/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..987027ae80d3c48284df75d7c0aac97c41f2b5f5 --- /dev/null +++ b/config/constants.py @@ -0,0 +1,10 @@ +"""Shared defaults used by config models and provider adapters.""" + +# HTTP client connect timeout (seconds). Keep aligned with README.md and .env.example. +HTTP_CONNECT_TIMEOUT_DEFAULT = 10.0 + +# Anthropic Messages API default when the client omits max_tokens. 
+ANTHROPIC_DEFAULT_MAX_OUTPUT_TOKENS = 81920 + +# Max bytes read from a non-200 native messages response when verbose error logging is on. +NATIVE_MESSAGES_ERROR_BODY_LOG_CAP_BYTES = 4096 diff --git a/config/logging_config.py b/config/logging_config.py new file mode 100644 index 0000000000000000000000000000000000000000..6d6e94c15b60e0db9a302ca748512e61915150b4 --- /dev/null +++ b/config/logging_config.py @@ -0,0 +1,129 @@ +"""Loguru-based structured logging configuration. + +All logs are written to server.log as JSON lines for full traceability. +Stdlib logging is intercepted and funneled to loguru. +Context vars (request_id, node_id, chat_id) from contextualize() are +included at top level for easy grep/filter. +""" + +import json +import logging +import re +from pathlib import Path + +from loguru import logger + +_configured = False + +# Context keys we promote to top-level JSON for traceability +_CONTEXT_KEYS = ("request_id", "node_id", "chat_id") + +_TELEGRAM_BOT_RE = re.compile( + r"(https?://api\.telegram\.org/)bot([0-9]+:[A-Za-z0-9_-]+)(/?)", + re.IGNORECASE, +) +# Authorization: Bearer (HTTP client / proxy debug lines) +_AUTH_BEARER_RE = re.compile( + r"(\bAuthorization\s*:\s*Bearer\s+)([^\s'\"]+)", + re.IGNORECASE, +) + + +def _redact_sensitive_substrings(message: str) -> str: + """Remove obvious API tokens and secrets before JSON log line emission.""" + text = _TELEGRAM_BOT_RE.sub(r"\1bot\3", message) + return _AUTH_BEARER_RE.sub(r"\1", text) + + +def _serialize_with_context(record) -> str: + """Format record as JSON with context vars at top level. + Returns a format template; we inject _json into record for output. 
+ """ + extra = record.get("extra", {}) + out = { + "time": str(record["time"]), + "level": record["level"].name, + "message": _redact_sensitive_substrings(str(record["message"])), + "module": record["name"], + "function": record["function"], + "line": record["line"], + } + for key in _CONTEXT_KEYS: + if key in extra and extra[key] is not None: + out[key] = extra[key] + record["_json"] = json.dumps(out, default=str) + return "{_json}\n" + + +class InterceptHandler(logging.Handler): + """Redirect stdlib logging to loguru.""" + + def emit(self, record: logging.LogRecord) -> None: + try: + level = logger.level(record.levelname).name + except ValueError: + level = record.levelno + + frame, depth = logging.currentframe(), 2 + while frame is not None and frame.f_code.co_filename == logging.__file__: + frame = frame.f_back + depth += 1 + + logger.opt(depth=depth, exception=record.exc_info).log( + level, record.getMessage() + ) + + +def configure_logging( + log_file: str, *, force: bool = False, verbose_third_party: bool = False +) -> None: + """Configure loguru with JSON output to log_file and intercept stdlib logging. + + Idempotent: skips if already configured (e.g. hot reload). + Use force=True to reconfigure (e.g. in tests with a different log path). + + When ``verbose_third_party`` is false, noisy HTTP and Telegram loggers are capped + at WARNING unless explicitly configured otherwise. + """ + global _configured + if _configured and not force: + return + _configured = True + + # Remove default loguru handler (writes to stderr) + logger.remove() + + # Truncate log file on fresh start for clean debugging if possible + try: + Path(log_file).write_text("") + except PermissionError: + # File might be open by another process (e.g. 
redirection) + pass + + # Add file sink: JSON lines, DEBUG level, context vars at top level + logger.add( + log_file, + level="DEBUG", + format=_serialize_with_context, + encoding="utf-8", + mode="a", + rotation="50 MB", + ) + + # Intercept stdlib logging: route all root logger output to loguru + intercept = InterceptHandler() + logging.root.handlers = [intercept] + logging.root.setLevel(logging.DEBUG) + + third_party = ( + "httpx", + "httpcore", + "httpcore.http11", + "httpcore.connection", + "telegram", + "telegram.ext", + ) + for name in third_party: + logging.getLogger(name).setLevel( + logging.WARNING if not verbose_third_party else logging.NOTSET + ) diff --git a/config/nim.py b/config/nim.py new file mode 100644 index 0000000000000000000000000000000000000000..5bf7761c10dc87d44d17fd9ca427fa35627eae03 --- /dev/null +++ b/config/nim.py @@ -0,0 +1,118 @@ +"""NVIDIA NIM settings (fixed values, no env config).""" + +from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator + +from config.constants import ANTHROPIC_DEFAULT_MAX_OUTPUT_TOKENS + + +class NimSettings(BaseModel): + """Fixed NVIDIA NIM settings (not configurable via env).""" + + temperature: float = Field( + 1.0, ge=0.0, le=2.0, description="Sampling temperature, must be >=0 and <=2." + ) + top_p: float = Field( + 1.0, ge=0.0, le=1.0, description="Nucleus sampling probability. [0,1]" + ) + top_k: int = -1 + max_tokens: int = Field( + ANTHROPIC_DEFAULT_MAX_OUTPUT_TOKENS, + ge=1, + description="Maximum number of tokens in output.", + ) + presence_penalty: float = Field(0.0, ge=-2.0, le=2.0) + frequency_penalty: float = Field(0.0, ge=-2.0, le=2.0) + min_p: float = Field( + 0.0, ge=0.0, le=1.0, description="Minimum probability threshold [0,1]." + ) + repetition_penalty: float = Field( + 1.0, ge=0.0, description="Penalty for repeated tokens. Must be >=0." 
+ ) + seed: int | None = None + stop: str | None = None + parallel_tool_calls: bool = True + ignore_eos: bool = False + min_tokens: int = Field(0, ge=0, description="Minimum tokens in the response.") + chat_template: str | None = None + request_id: str | None = None + + model_config = ConfigDict(extra="forbid") + + @field_validator("top_k", mode="before") + @classmethod + def validate_top_k(cls, v, info: ValidationInfo): + if v is None or v == "": + return -1 + int_v = int(v) + if int_v < -1: + raise ValueError(f"{info.field_name} must be -1 or >= 0") + return int_v + + @field_validator( + "temperature", + "top_p", + "min_p", + "presence_penalty", + "frequency_penalty", + "repetition_penalty", + mode="before", + ) + @classmethod + def validate_float_fields(cls, v, info: ValidationInfo): + field_defaults = { + "temperature": 1.0, + "top_p": 1.0, + "min_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0, + "repetition_penalty": 1.0, + } + if v is None or v == "": + key = info.field_name or "temperature" + return field_defaults.get(key, 1.0) + try: + val = float(v) + except (TypeError, ValueError) as err: + raise ValueError( + f"{info.field_name} must be a float. Got {type(v).__name__}." + ) from err + return val + + @field_validator("max_tokens", "min_tokens", mode="before") + @classmethod + def validate_int_fields(cls, v, info: ValidationInfo): + field_defaults = { + "max_tokens": ANTHROPIC_DEFAULT_MAX_OUTPUT_TOKENS, + "min_tokens": 0, + } + if v is None or v == "": + key = info.field_name or "max_tokens" + return field_defaults.get(key, ANTHROPIC_DEFAULT_MAX_OUTPUT_TOKENS) + try: + val = int(v) + except (TypeError, ValueError) as err: + raise ValueError( + f"{info.field_name} must be an int. Got {type(v).__name__}." 
+ ) from err + return val + + @field_validator("seed", mode="before") + @classmethod + def parse_optional_int(cls, v, info: ValidationInfo): + if v == "" or v is None: + return None + try: + return int(v) + except (TypeError, ValueError) as err: + raise ValueError( + f"{info.field_name} must be an int or empty/None." + ) from err + + @field_validator("stop", "chat_template", "request_id", mode="before") + @classmethod + def parse_optional_str(cls, v, info: ValidationInfo): + if v == "": + return None + if v is not None and not isinstance(v, str): + return str(v) + return v diff --git a/config/provider_catalog.py b/config/provider_catalog.py new file mode 100644 index 0000000000000000000000000000000000000000..699df0390d054415354ae772db80a3bdd2718e5f --- /dev/null +++ b/config/provider_catalog.py @@ -0,0 +1,71 @@ +"""Neutral provider catalog: IDs, credentials, defaults, proxy and capability metadata. + +Adapter factories live in :mod:`providers.registry`; this module stays free of +provider implementation imports (see contract tests). +""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import Literal + +TransportType = Literal["openai_chat", "anthropic_messages"] + +# Default upstream base URLs (also re-exported via :mod:`providers.defaults`) +NVIDIA_NIM_DEFAULT_BASE = "https://integrate.api.nvidia.com/v1" +ZEN_DEFAULT_BASE = "https://opencode.ai/zen" + + +@dataclass(frozen=True, slots=True) +class ProviderDescriptor: + """Metadata for building :class:`~providers.base.ProviderConfig` and factory wiring.""" + + provider_id: str + transport_type: TransportType + capabilities: tuple[str, ...] 
+ credential_env: str | None = None + credential_url: str | None = None + credential_attr: str | None = None + static_credential: str | None = None + default_base_url: str | None = None + base_url_attr: str | None = None + proxy_attr: str | None = None + + +PROVIDER_CATALOG: dict[str, ProviderDescriptor] = { + "nvidia_nim": ProviderDescriptor( + provider_id="nvidia_nim", + transport_type="openai_chat", + credential_env="NVIDIA_NIM_API_KEY_QWEN", + credential_url="https://build.nvidia.com/settings/api-keys", + credential_attr="nvidia_nim_api_key_qwen", + default_base_url=NVIDIA_NIM_DEFAULT_BASE, + proxy_attr="nvidia_nim_proxy", + capabilities=("chat", "streaming", "tools", "thinking", "rate_limit"), + ), + "groq": ProviderDescriptor( + provider_id="groq", + transport_type="openai_chat", + credential_env="GROQ_API_KEY", + credential_url="https://console.groq.com/keys", + credential_attr="groq_api_key", + default_base_url="https://api.groq.com/openai/v1", + capabilities=("chat", "streaming", "tools", "rate_limit"), + ), + "zen": ProviderDescriptor( + provider_id="zen", + transport_type="openai_chat", + credential_env="ZEN_API_KEY", + credential_url="https://opencode.ai/settings", + credential_attr="zen_api_key", + default_base_url=ZEN_DEFAULT_BASE, + base_url_attr="zen_base_url", + capabilities=("chat", "streaming", "tools", "thinking"), + ), +} + +# Order matches docs; must match PROVIDER_CATALOG keys. +SUPPORTED_PROVIDER_IDS: tuple[str, ...] 
= ("nvidia_nim", "zen") + +if len(set(SUPPORTED_PROVIDER_IDS)) != len(SUPPORTED_PROVIDER_IDS): + raise AssertionError("Duplicate provider ids in PROVIDER_CATALOG key order") diff --git a/config/provider_ids.py b/config/provider_ids.py new file mode 100644 index 0000000000000000000000000000000000000000..fd08ab7aba292215a7929a1c80fbe9a8440ef724 --- /dev/null +++ b/config/provider_ids.py @@ -0,0 +1,7 @@ +"""Canonical provider id tuple (re-exported from the provider catalog).""" + +from __future__ import annotations + +from .provider_catalog import SUPPORTED_PROVIDER_IDS + +__all__ = ("SUPPORTED_PROVIDER_IDS",) diff --git a/config/settings.py b/config/settings.py new file mode 100644 index 0000000000000000000000000000000000000000..1df2f2a4285f86c18826776dc6ec407d161f5093 --- /dev/null +++ b/config/settings.py @@ -0,0 +1,584 @@ +"""Centralized configuration using Pydantic Settings.""" + +import os +from collections.abc import Mapping +from dataclasses import dataclass +from functools import lru_cache +from pathlib import Path +from typing import Any + +from dotenv import dotenv_values +from pydantic import Field, field_validator, model_validator +from pydantic_settings import BaseSettings, SettingsConfigDict + +from .constants import HTTP_CONNECT_TIMEOUT_DEFAULT +from .nim import NimSettings +from .provider_ids import SUPPORTED_PROVIDER_IDS + + +@dataclass(frozen=True, slots=True) +class ConfiguredChatModelRef: + """A unique configured chat model reference and the env keys that set it.""" + + model_ref: str + provider_id: str + model_id: str + sources: tuple[str, ...] 
+ + +def _env_files() -> tuple[Path, ...]: + """Return env file paths in priority order (later overrides earlier).""" + files: list[Path] = [ + Path.home() / ".config" / "free-claude-code" / ".env", + Path(".env"), + ] + if explicit := os.environ.get("FCC_ENV_FILE"): + files.append(Path(explicit)) + return tuple(files) + + +def _configured_env_files(model_config: Mapping[str, Any]) -> tuple[Path, ...]: + """Return the currently configured env files for Settings.""" + configured = model_config.get("env_file") + if configured is None: + return () + if isinstance(configured, (str, Path)): + return (Path(configured),) + return tuple(Path(item) for item in configured) + + +def _env_file_contains_key(path: Path, key: str) -> bool: + """Check whether a dotenv-style file defines the given key.""" + return _env_file_value(path, key) is not None + + +def _env_file_value(path: Path, key: str) -> str | None: + """Return a dotenv value when the file explicitly defines the key.""" + if not path.is_file(): + return None + + try: + values = dotenv_values(path) + except OSError: + return None + + if key not in values: + return None + value = values[key] + return "" if value is None else value + + +def _env_file_override(model_config: Mapping[str, Any], key: str) -> str | None: + """Return the last configured dotenv value that explicitly defines a key.""" + configured_value: str | None = None + for env_file in _configured_env_files(model_config): + value = _env_file_value(env_file, key) + if value is not None: + configured_value = value + return configured_value + + +def _removed_env_var_message(model_config: Mapping[str, Any]) -> str | None: + """Return a migration error for removed env vars, if present.""" + removed_keys = ("NIM_ENABLE_THINKING", "ENABLE_THINKING") + replacement = ( + "ENABLE_MODEL_THINKING, ENABLE_OPUS_THINKING, " + "ENABLE_SONNET_THINKING, or ENABLE_HAIKU_THINKING" + ) + + for removed_key in removed_keys: + if removed_key in os.environ: + return ( + 
f"{removed_key} has been removed in this release. " + f"Rename it to {replacement}." + ) + + for env_file in _configured_env_files(model_config): + if _env_file_contains_key(env_file, removed_key): + return ( + f"{removed_key} has been removed in this release. " + f"Rename it to {replacement}. Found in {env_file}." + ) + + return None + + +class Settings(BaseSettings): + """Application settings loaded from environment variables.""" + + # ==================== Messaging Platform Selection ==================== + # Valid: "telegram" | "discord" | "none" + messaging_platform: str = Field( + default="discord", validation_alias="MESSAGING_PLATFORM" + ) + messaging_rate_limit: int = Field( + default=1, validation_alias="MESSAGING_RATE_LIMIT" + ) + messaging_rate_window: float = Field( + default=1.0, validation_alias="MESSAGING_RATE_WINDOW" + ) + + # ==================== NVIDIA NIM Config ==================== + nvidia_nim_api_key_qwen: str = Field( + default="", validation_alias="NVIDIA_NIM_API_KEY_QWEN" + ) + nvidia_nim_api_key_glm: str = Field( + default="", validation_alias="NVIDIA_NIM_API_KEY_GLM" + ) + nvidia_nim_api_key_stepfun: str = Field( + default="", validation_alias="NVIDIA_NIM_API_KEY_STEPFUN" + ) + nvidia_nim_api_key_seed_oss: str = Field( + default="", validation_alias="NVIDIA_NIM_API_KEY_SEED_OSS" + ) + nvidia_nim_api_key_dracarys: str = Field( + default="", validation_alias="NVIDIA_NIM_API_KEY_DRACARYS" + ) + nvidia_nim_api_key_nemotron: str = Field( + default="", validation_alias="NVIDIA_NIM_API_KEY_NEMOTRON" + ) + nvidia_nim_api_key_mistral_large: str = Field( + default="", validation_alias="NVIDIA_NIM_API_KEY_MISTRAL_LARGE" + ) + # ==================== Zen/OpenCode Config ==================== + zen_api_key: str = Field(default="", validation_alias="ZEN_API_KEY") + zen_base_url: str = Field( + default="https://opencode.ai/zen", validation_alias="ZEN_BASE_URL" + ) + # Comma-separated list of fallback models (provider-prefixed or bare model ids) + # 
AUTO_MODEL_PRIORITY="nvidia_nim/qwen/qwen3-coder-480b-a35b-instruct,nvidia_nim/z-ai/glm4.7,nvidia_nim/stepfun-ai/step-3.5-flash,nvidia_nim/bytedance/seed-oss-36b-instruct" + nvidia_nim_fallback_models: str = Field( + default="", validation_alias="NVIDIA_NIM_FALLBACK_MODELS" + ) + + # ==================== Model ==================== + # All Claude model requests are mapped to this single model (fallback) + # Format: provider_type/model/name + model: str = Field(default="nvidia_nim/z-ai/glm4.7", validation_alias="MODEL") + + # Per-model overrides (optional, falls back to MODEL) + model_opus: str | None = Field(default=None, validation_alias="MODEL_OPUS") + model_sonnet: str | None = Field(default=None, validation_alias="MODEL_SONNET") + model_haiku: str | None = Field(default=None, validation_alias="MODEL_HAIKU") + + # Optional CSV list of preferred provider/model refs used by the virtual + # `auto` model. Format: provider/model/name entries separated by commas. + # Example: "nvidia_nim/z-ai/glm4.7,nvidia_nim/stepfun-ai/step-3.5-flash" + auto_model_order: str = Field(default="", validation_alias="AUTO_MODEL_PRIORITY") + + # ==================== Per-Provider Proxy ==================== + nvidia_nim_proxy: str = Field(default="", validation_alias="NVIDIA_NIM_PROXY") + + # ==================== Provider Rate Limiting ==================== + provider_rate_limit: int = Field(default=40, validation_alias="PROVIDER_RATE_LIMIT") + provider_rate_window: int = Field( + default=60, validation_alias="PROVIDER_RATE_WINDOW" + ) + provider_max_concurrency: int = Field( + default=5, validation_alias="PROVIDER_MAX_CONCURRENCY" + ) + enable_model_thinking: bool = Field( + default=True, validation_alias="ENABLE_MODEL_THINKING" + ) + enable_opus_thinking: bool | None = Field( + default=None, validation_alias="ENABLE_OPUS_THINKING" + ) + enable_sonnet_thinking: bool | None = Field( + default=None, validation_alias="ENABLE_SONNET_THINKING" + ) + enable_haiku_thinking: bool | None = Field( + 
default=None, validation_alias="ENABLE_HAIKU_THINKING" + ) + + # ==================== HTTP Client Timeouts ==================== + http_read_timeout: float = Field( + default=120.0, validation_alias="HTTP_READ_TIMEOUT" + ) + http_write_timeout: float = Field( + default=10.0, validation_alias="HTTP_WRITE_TIMEOUT" + ) + http_connect_timeout: float = Field( + default=HTTP_CONNECT_TIMEOUT_DEFAULT, + validation_alias="HTTP_CONNECT_TIMEOUT", + ) + + # ==================== Fast Prefix Detection ==================== + fast_prefix_detection: bool = True + + # ==================== Optimizations ==================== + enable_network_probe_mock: bool = True + enable_title_generation_skip: bool = True + enable_suggestion_mode_skip: bool = True + enable_filepath_extraction_mock: bool = True + + # ==================== Local web server tools (web_search / web_fetch) ==================== + # Off by default: these tools perform outbound HTTP from the proxy (SSRF risk). + enable_web_server_tools: bool = Field( + default=False, validation_alias="ENABLE_WEB_SERVER_TOOLS" + ) + # Comma-separated URL schemes allowed for web_fetch (default: http,https). + web_fetch_allowed_schemes: str = Field( + default="http,https", validation_alias="WEB_FETCH_ALLOWED_SCHEMES" + ) + # When true, skip private/loopback/link-local IP blocking for web_fetch (lab only). + web_fetch_allow_private_networks: bool = Field( + default=False, validation_alias="WEB_FETCH_ALLOW_PRIVATE_NETWORKS" + ) + + # ==================== Debug / diagnostic logging (avoid sensitive content) ==================== + # When false (default), API and SSE helpers log only metadata (counts, lengths, ids). + log_raw_api_payloads: bool = Field( + default=False, validation_alias="LOG_RAW_API_PAYLOADS" + ) + log_raw_sse_events: bool = Field( + default=False, validation_alias="LOG_RAW_SSE_EVENTS" + ) + # When false (default), unhandled exceptions log only type + route metadata (no message/traceback). 
+ log_api_error_tracebacks: bool = Field( + default=False, validation_alias="LOG_API_ERROR_TRACEBACKS" + ) + # When false (default), messaging logs omit text/transcription previews (metadata only). + log_raw_messaging_content: bool = Field( + default=False, validation_alias="LOG_RAW_MESSAGING_CONTENT" + ) + # When true, log full Claude CLI stderr, non-JSON lines, and parser error text. + log_raw_cli_diagnostics: bool = Field( + default=False, validation_alias="LOG_RAW_CLI_DIAGNOSTICS" + ) + # When true, log exception text / CLI error strings in messaging (may leak user content). + log_messaging_error_details: bool = Field( + default=False, validation_alias="LOG_MESSAGING_ERROR_DETAILS" + ) + debug_platform_edits: bool = Field( + default=False, validation_alias="DEBUG_PLATFORM_EDITS" + ) + debug_subagent_stack: bool = Field( + default=False, validation_alias="DEBUG_SUBAGENT_STACK" + ) + + # ==================== NIM Settings ==================== + nim: NimSettings = Field(default_factory=NimSettings) + + # ==================== Voice Note Transcription ==================== + voice_note_enabled: bool = Field( + default=True, validation_alias="VOICE_NOTE_ENABLED" + ) + # Device: "cpu" | "cuda" | "nvidia_nim" + # - "cpu"/"cuda": local Whisper (requires voice_local extra: uv sync --extra voice_local) + # - "nvidia_nim": NVIDIA NIM Whisper API (requires voice extra: uv sync --extra voice) + whisper_device: str = Field(default="cpu", validation_alias="WHISPER_DEVICE") + # Whisper model ID or short name (for local Whisper) or NVIDIA NIM model (for nvidia_nim) + # Local Whisper: "tiny", "base", "small", "medium", "large-v2", "large-v3", "large-v3-turbo" + # NVIDIA NIM: "nvidia/parakeet-ctc-1.1b-asr", "openai/whisper-large-v3", etc. 
+ whisper_model: str = Field(default="base", validation_alias="WHISPER_MODEL") + # Hugging Face token for faster model downloads (optional, for local Whisper) + hf_token: str = Field(default="", validation_alias="HF_TOKEN") + + # ==================== Bot Wrapper Config ==================== + telegram_bot_token: str | None = None + allowed_telegram_user_id: str | None = None + discord_bot_token: str | None = Field( + default=None, validation_alias="DISCORD_BOT_TOKEN" + ) + allowed_discord_channels: str | None = Field( + default=None, validation_alias="ALLOWED_DISCORD_CHANNELS" + ) + claude_workspace: str = "./agent_workspace" + allowed_dir: str = "" + claude_cli_bin: str = Field(default="claude", validation_alias="CLAUDE_CLI_BIN") + max_message_log_entries_per_chat: int | None = Field( + default=None, validation_alias="MAX_MESSAGE_LOG_ENTRIES_PER_CHAT" + ) + + # ==================== Server ==================== + host: str = "0.0.0.0" + port: int = 8082 + log_file: str = "server.log" + # Optional server API key to protect endpoints (Anthropic-style) + # Set via env `ANTHROPIC_AUTH_TOKEN`. When empty, no auth is required. + anthropic_auth_token: str = Field( + default="", validation_alias="ANTHROPIC_AUTH_TOKEN" + ) + + # When true, only advertise models explicitly configured via MODEL / MODEL_OPUS / MODEL_SONNET / MODEL_HAIKU + # Useful if your provider exposes hundreds of models and you want the picker to show only your selected ones. 
+ advertise_only_configured_models: bool = Field( + default=False, validation_alias="ADVERTISE_ONLY_CONFIGURED_MODELS" + ) + + @model_validator(mode="before") + @classmethod + def reject_removed_env_vars(cls, data: Any) -> Any: + """Fail fast when removed environment variables are still configured.""" + if message := _removed_env_var_message(cls.model_config): + raise ValueError(message) + return data + + # Handle empty strings for optional string fields + @field_validator( + "telegram_bot_token", + "allowed_telegram_user_id", + "discord_bot_token", + "allowed_discord_channels", + "model_opus", + "model_sonnet", + "model_haiku", + "enable_opus_thinking", + "enable_sonnet_thinking", + "enable_haiku_thinking", + mode="before", + ) + @classmethod + def parse_optional_str(cls, v: Any) -> Any: + if v == "": + return None + return v + + @field_validator("max_message_log_entries_per_chat", mode="before") + @classmethod + def parse_optional_log_cap(cls, v: Any) -> Any: + if v == "" or v is None: + return None + return v + + @field_validator("whisper_device") + @classmethod + def validate_whisper_device(cls, v: str) -> str: + if v not in ("cpu", "cuda", "nvidia_nim"): + raise ValueError( + f"whisper_device must be 'cpu', 'cuda', or 'nvidia_nim', got {v!r}" + ) + return v + + @field_validator("messaging_platform") + @classmethod + def validate_messaging_platform(cls, v: str) -> str: + if v not in ("telegram", "discord", "none"): + raise ValueError( + f"messaging_platform must be 'telegram', 'discord', or 'none', got {v!r}" + ) + return v + + @field_validator("messaging_rate_limit") + @classmethod + def validate_messaging_rate_limit(cls, v: int) -> int: + if v <= 0: + raise ValueError("messaging_rate_limit must be > 0") + return v + + @field_validator("messaging_rate_window") + @classmethod + def validate_messaging_rate_window(cls, v: float) -> float: + if v <= 0: + raise ValueError("messaging_rate_window must be > 0") + return float(v) + + 
@field_validator("web_fetch_allowed_schemes") + @classmethod + def validate_web_fetch_allowed_schemes(cls, v: str) -> str: + schemes = [part.strip().lower() for part in v.split(",") if part.strip()] + if not schemes: + raise ValueError("web_fetch_allowed_schemes must list at least one scheme") + for scheme in schemes: + if not scheme.isascii() or not scheme.isalpha(): + raise ValueError( + f"Invalid URL scheme in web_fetch_allowed_schemes: {scheme!r}" + ) + return ",".join(schemes) + + + @field_validator("model", "model_opus", "model_sonnet", "model_haiku") + @classmethod + def validate_model_format(cls, v: str | None) -> str | None: + if v is None: + return None + if "/" not in v: + raise ValueError( + f"Model must be prefixed with provider type. " + f"Valid providers: {', '.join(SUPPORTED_PROVIDER_IDS)}. " + f"Format: provider_type/model/name" + ) + provider = v.split("/", 1)[0] + if provider not in SUPPORTED_PROVIDER_IDS: + supported = ", ".join(f"'{p}'" for p in SUPPORTED_PROVIDER_IDS) + raise ValueError(f"Invalid provider: '{provider}'. Supported: {supported}") + return v + + @model_validator(mode="after") + def check_nvidia_nim_api_key(self) -> "Settings": + if ( + self.voice_note_enabled + and self.whisper_device == "nvidia_nim" + and not self.nvidia_nim_api_key_qwen.strip() + ): + raise ValueError( + "NVIDIA_NIM_API_KEY_QWEN is required when WHISPER_DEVICE is 'nvidia_nim'. " + "Set it in your .env file." 
+ ) + return self + + @model_validator(mode="after") + def prefer_dotenv_anthropic_auth_token(self) -> "Settings": + """Let explicit .env auth config override stale shell/client tokens.""" + dotenv_value = _env_file_override(self.model_config, "ANTHROPIC_AUTH_TOKEN") + if dotenv_value is not None: + self.anthropic_auth_token = dotenv_value + return self + + def uses_process_anthropic_auth_token(self) -> bool: + """Return whether proxy auth came from process env, not dotenv config.""" + if _env_file_override(self.model_config, "ANTHROPIC_AUTH_TOKEN") is not None: + return False + return bool(os.environ.get("ANTHROPIC_AUTH_TOKEN")) + + @property + def provider_type(self) -> str: + """Extract provider type from the default model string.""" + return Settings.parse_provider_type(self.model) + + @property + def model_name(self) -> str: + """Extract the actual model name from the default model string.""" + return Settings.parse_model_name(self.model) + + def resolve_model(self, claude_model_name: str) -> str: + """Resolve a Claude model name to the configured provider/model string. + + Classifies the incoming Claude model (opus/sonnet/haiku) and + returns the model-specific override if configured, otherwise the fallback MODEL. 
+ """ + name_lower = claude_model_name.lower() + if "opus" in name_lower and self.model_opus is not None: + return self.model_opus + if "haiku" in name_lower and self.model_haiku is not None: + return self.model_haiku + if "sonnet" in name_lower and self.model_sonnet is not None: + return self.model_sonnet + return self.model + + def configured_chat_model_refs(self) -> tuple[ConfiguredChatModelRef, ...]: + """Return unique configured chat provider/model refs with source env keys.""" + model_refs = [m.strip() for m in (self.model or "").split(",") if m.strip()] + candidates = [("MODEL", m) for m in model_refs] + candidates.extend([ + ("MODEL_OPUS", self.model_opus), + ("MODEL_SONNET", self.model_sonnet), + ("MODEL_HAIKU", self.model_haiku), + ]) + sources_by_ref: dict[str, list[str]] = {} + for source, model_ref in candidates: + if model_ref is None: + continue + sources_by_ref.setdefault(model_ref, []).append(source) + + return tuple( + ConfiguredChatModelRef( + model_ref=model_ref, + provider_id=Settings.parse_provider_type(model_ref), + model_id=Settings.parse_model_name(model_ref), + sources=tuple(sources), + ) + for model_ref, sources in sources_by_ref.items() + ) + + def resolve_thinking(self, claude_model_name: str) -> bool: + """Resolve whether thinking is enabled for an incoming Claude model name.""" + name_lower = claude_model_name.lower() + if "opus" in name_lower and self.enable_opus_thinking is not None: + return self.enable_opus_thinking + if "haiku" in name_lower and self.enable_haiku_thinking is not None: + return self.enable_haiku_thinking + if "sonnet" in name_lower and self.enable_sonnet_thinking is not None: + return self.enable_sonnet_thinking + return self.enable_model_thinking + + def web_fetch_allowed_scheme_set(self) -> frozenset[str]: + """Return normalized schemes allowed for web_fetch.""" + return frozenset( + part.strip().lower() + for part in self.web_fetch_allowed_schemes.split(",") + if part.strip() + ) + + @staticmethod + def 
parse_provider_type(model_string: str) -> str: + """Extract provider type from any 'provider/model' string.""" + return model_string.split("/", 1)[0] + + @staticmethod + def parse_model_name(model_string: str) -> str: + """Extract model name from any 'provider/model' string.""" + return model_string.split("/", 1)[1] + + def provider_is_configured(self, provider_id: str) -> bool: + """Return whether a given provider appears configured in settings. + + This is a heuristic check used by the `auto` resolver to prefer providers + that have credentials or base URLs present. + """ + if provider_id == "nvidia_nim": + return bool( + self.nvidia_nim_api_key_qwen.strip() + or self.nvidia_nim_api_key_glm.strip() + or self.nvidia_nim_api_key_stepfun.strip() + or self.nvidia_nim_api_key_seed_oss.strip() + or self.nvidia_nim_api_key_dracarys.strip() + or self.nvidia_nim_api_key_nemotron.strip() + or self.nvidia_nim_api_key_mistral_large.strip() + ) + if provider_id == "zen": + return bool(self.zen_api_key.strip()) + # conservative default: assume not configured + return False + + def nvidia_nim_api_key_for_model(self, model_name: str) -> str: + """Return the NVIDIA API key that should be used for a specific model id.""" + model_name = model_name.strip().lower() + if model_name.startswith("z-ai/glm"): + return self.nvidia_nim_api_key_glm.strip() or self.nvidia_nim_api_key_qwen.strip() + if model_name.startswith("stepfun-ai/step-"): + return ( + self.nvidia_nim_api_key_stepfun.strip() + or self.nvidia_nim_api_key_qwen.strip() + ) + if model_name.startswith("bytedance/seed-oss"): + return ( + self.nvidia_nim_api_key_seed_oss.strip() + or self.nvidia_nim_api_key_qwen.strip() + ) + if model_name.startswith("abacusai/dracarys"): + return ( + self.nvidia_nim_api_key_dracarys.strip() + or self.nvidia_nim_api_key_qwen.strip() + ) + if model_name.startswith("mistralai/mistral-nemotron"): + return ( + self.nvidia_nim_api_key_nemotron.strip() + or self.nvidia_nim_api_key_qwen.strip() + ) + 
if model_name.startswith("mistralai/mistral-large"): + return ( + self.nvidia_nim_api_key_mistral_large.strip() + or self.nvidia_nim_api_key_qwen.strip() + ) + return self.nvidia_nim_api_key_qwen.strip() + + def resolve_api_key_for_model(self, provider_id: str, model_id: str) -> str: + """Return the API key for a given provider and model.""" + if provider_id == "nvidia_nim": + return self.nvidia_nim_api_key_for_model(model_id) + if provider_id == "zen": + return self.zen_api_key.strip() + return "" + + model_config = SettingsConfigDict( + env_file=_env_files(), + env_file_encoding="utf-8", + extra="ignore", + ) + + +@lru_cache +def get_settings() -> Settings: + """Get cached settings instance.""" + return Settings() diff --git a/core/__init__.py b/core/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..716557d1e0be136f9b8ca088a153d439f6c2f419 --- /dev/null +++ b/core/__init__.py @@ -0,0 +1 @@ +"""Neutral shared application core.""" diff --git a/core/__pycache__/__init__.cpython-314.pyc b/core/__pycache__/__init__.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0107022962f25c84912d9a8287597b3ed77ac7c3 Binary files /dev/null and b/core/__pycache__/__init__.cpython-314.pyc differ diff --git a/core/__pycache__/rate_limit.cpython-314.pyc b/core/__pycache__/rate_limit.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d264ebcda56a9271acc85afa2b2f996535d35e44 Binary files /dev/null and b/core/__pycache__/rate_limit.cpython-314.pyc differ diff --git a/core/anthropic/__init__.py b/core/anthropic/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c4bf7af289fcb5ff60f08a9bf077d91b46d2148f --- /dev/null +++ b/core/anthropic/__init__.py @@ -0,0 +1,46 @@ +"""Anthropic protocol helpers shared across API, providers, and integrations.""" + +from .content import extract_text_from_content, get_block_attr, get_block_type +from .conversion import ( + 
AnthropicToOpenAIConverter, + OpenAIConversionError, + ReasoningReplayMode, + build_base_request_body, +) +from .errors import ( + append_request_id, + format_user_error_preview, + get_user_facing_error_message, +) +from .native_messages_request import sanitize_native_messages_thinking_policy +from .provider_stream_error import iter_provider_stream_error_sse_events +from .sse import ContentBlockManager, SSEBuilder, format_sse_event, map_stop_reason +from .thinking import ContentChunk, ContentType, ThinkTagParser +from .tokens import get_token_count +from .tools import HeuristicToolParser +from .utils import set_if_not_none + +__all__ = [ + "AnthropicToOpenAIConverter", + "ContentBlockManager", + "ContentChunk", + "ContentType", + "HeuristicToolParser", + "OpenAIConversionError", + "ReasoningReplayMode", + "SSEBuilder", + "ThinkTagParser", + "append_request_id", + "build_base_request_body", + "extract_text_from_content", + "format_sse_event", + "format_user_error_preview", + "get_block_attr", + "get_block_type", + "get_token_count", + "get_user_facing_error_message", + "iter_provider_stream_error_sse_events", + "map_stop_reason", + "sanitize_native_messages_thinking_policy", + "set_if_not_none", +] diff --git a/core/anthropic/__pycache__/__init__.cpython-314.pyc b/core/anthropic/__pycache__/__init__.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5e721def60fedd1b8f4359d9c6eda63bf507bce2 Binary files /dev/null and b/core/anthropic/__pycache__/__init__.cpython-314.pyc differ diff --git a/core/anthropic/__pycache__/content.cpython-314.pyc b/core/anthropic/__pycache__/content.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5d62f4036db93e810084f0a94ff99eef9844bef4 Binary files /dev/null and b/core/anthropic/__pycache__/content.cpython-314.pyc differ diff --git a/core/anthropic/__pycache__/conversion.cpython-314.pyc b/core/anthropic/__pycache__/conversion.cpython-314.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..4c610cde83ffc66916fcfc687ce00dbd23474519 Binary files /dev/null and b/core/anthropic/__pycache__/conversion.cpython-314.pyc differ diff --git a/core/anthropic/__pycache__/errors.cpython-314.pyc b/core/anthropic/__pycache__/errors.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..73ec8e743599e4576b653383b9d835eb959e9e0d Binary files /dev/null and b/core/anthropic/__pycache__/errors.cpython-314.pyc differ diff --git a/core/anthropic/__pycache__/native_messages_request.cpython-314.pyc b/core/anthropic/__pycache__/native_messages_request.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c07f4e2c79431a33c1a56faa3b6cab5625ee704e Binary files /dev/null and b/core/anthropic/__pycache__/native_messages_request.cpython-314.pyc differ diff --git a/core/anthropic/__pycache__/provider_stream_error.cpython-314.pyc b/core/anthropic/__pycache__/provider_stream_error.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2e341036876a52e1a001fee74a10a49c69c1c742 Binary files /dev/null and b/core/anthropic/__pycache__/provider_stream_error.cpython-314.pyc differ diff --git a/core/anthropic/__pycache__/server_tool_sse.cpython-314.pyc b/core/anthropic/__pycache__/server_tool_sse.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4f79d1b3c42b8e826337cb23f924eca9fad272d7 Binary files /dev/null and b/core/anthropic/__pycache__/server_tool_sse.cpython-314.pyc differ diff --git a/core/anthropic/__pycache__/sse.cpython-314.pyc b/core/anthropic/__pycache__/sse.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4d3a913557a16fefc2c97784aec48f10c2ffaa9b Binary files /dev/null and b/core/anthropic/__pycache__/sse.cpython-314.pyc differ diff --git a/core/anthropic/__pycache__/thinking.cpython-314.pyc b/core/anthropic/__pycache__/thinking.cpython-314.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..7822b3f090b0e4e896473dbecc18491beed78d43 Binary files /dev/null and b/core/anthropic/__pycache__/thinking.cpython-314.pyc differ diff --git a/core/anthropic/__pycache__/tokens.cpython-314.pyc b/core/anthropic/__pycache__/tokens.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bb3471caba93683a2559323b9f24746a2400f860 Binary files /dev/null and b/core/anthropic/__pycache__/tokens.cpython-314.pyc differ diff --git a/core/anthropic/__pycache__/tools.cpython-314.pyc b/core/anthropic/__pycache__/tools.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3546bfdbaa6dcbba044e90a56c03e8fb610882cf Binary files /dev/null and b/core/anthropic/__pycache__/tools.cpython-314.pyc differ diff --git a/core/anthropic/__pycache__/utils.cpython-314.pyc b/core/anthropic/__pycache__/utils.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cf5add15eca3e8eeb59e9277bb779c859e208052 Binary files /dev/null and b/core/anthropic/__pycache__/utils.cpython-314.pyc differ diff --git a/core/anthropic/content.py b/core/anthropic/content.py new file mode 100644 index 0000000000000000000000000000000000000000..e16aaaf2e370433ed6b2b26049856efd1528146f --- /dev/null +++ b/core/anthropic/content.py @@ -0,0 +1,31 @@ +"""Content block helpers for Anthropic-compatible payloads.""" + +from typing import Any + + +def get_block_attr(block: Any, attr: str, default: Any = None) -> Any: + """Get an attribute from a Pydantic model, lightweight object, or dict.""" + if hasattr(block, attr): + return getattr(block, attr) + if isinstance(block, dict): + return block.get(attr, default) + return default + + +def get_block_type(block: Any) -> str | None: + """Return a content block type when present.""" + return get_block_attr(block, "type") + + +def extract_text_from_content(content: Any) -> str: + """Extract concatenated text from message content.""" + if isinstance(content, 
str): + return content + if isinstance(content, list): + parts: list[str] = [] + for block in content: + text = get_block_attr(block, "text", "") + if isinstance(text, str) and text: + parts.append(text) + return "".join(parts) + return "" diff --git a/core/anthropic/conversion.py b/core/anthropic/conversion.py new file mode 100644 index 0000000000000000000000000000000000000000..84ba6140575930277f2fc2e4a74a6380f019035a --- /dev/null +++ b/core/anthropic/conversion.py @@ -0,0 +1,592 @@ +"""Message and tool format converters.""" + +import json +from dataclasses import dataclass, field +from enum import StrEnum +from typing import Any + +from pydantic import BaseModel + +from .content import get_block_attr, get_block_type +from .utils import set_if_not_none + + +class OpenAIConversionError(Exception): + """Raised when Anthropic content cannot be converted to OpenAI chat without data loss.""" + + +class ReasoningReplayMode(StrEnum): + """How assistant reasoning history is replayed to OpenAI-compatible providers.""" + + DISABLED = "disabled" + THINK_TAGS = "think_tags" + REASONING_CONTENT = "reasoning_content" + + +def _openai_reject_native_only_top_level_fields(request_data: Any) -> None: + """OpenAI chat providers may only convert known top-level request fields. + + First-class model fields (e.g. ``context_management``) are not forwarded to + the OpenAI API but are allowed so clients do not hit spurious 400s. + Unknown extra keys (``__pydantic_extra__``) are still rejected. + """ + if not isinstance(request_data, BaseModel): + return + extra = getattr(request_data, "__pydantic_extra__", None) + if not extra: + return + raise OpenAIConversionError( + "OpenAI chat conversion does not support these top-level request fields: " + f"{sorted(str(k) for k in extra)}. Use a native Anthropic transport provider." 
+ ) + + +def _tool_name(tool: Any) -> str: + return str(getattr(tool, "name", "") or "") + + +def _tool_input_schema(tool: Any) -> dict[str, Any]: + schema = getattr(tool, "input_schema", None) + if isinstance(schema, dict): + return schema + return {"type": "object", "properties": {}} + + +def _serialize_tool_result_content(tool_content: Any) -> str: + """Serialize tool_result content for OpenAI ``role: tool`` messages (stable JSON for structured values).""" + if tool_content is None: + return "" + if isinstance(tool_content, str): + return tool_content + if isinstance(tool_content, dict): + return json.dumps(tool_content, ensure_ascii=False) + if isinstance(tool_content, list): + parts: list[str] = [] + for item in tool_content: + if isinstance(item, dict) and item.get("type") == "text": + parts.append(str(item.get("text", ""))) + elif isinstance(item, dict): + parts.append(json.dumps(item, ensure_ascii=False)) + else: + parts.append(str(item)) + return "\n".join(parts) + return str(tool_content) + + +def _clean_reasoning_content(value: Any) -> str | None: + if not isinstance(value, str): + return None + return value if value else None + + +def _think_tag_content(reasoning: str) -> str: + return f"\n{reasoning}\n" + + +@dataclass +class _PendingAfterTools: + """Assistant content that appears after ``tool_use`` in an Anthropic message. + + OpenAI ``chat.completions`` cannot place assistant text after ``tool_calls`` in the + same message, so it is deferred until the corresponding ``role: tool`` results have + been replayed in order. + """ + + # Tool use IDs still missing a ``role: tool`` result before post-tool text may be replayed. + remaining_tool_ids: set[str] = field(default_factory=set) + deferred_blocks: list[Any] = field(default_factory=list) + top_level_reasoning: str | None = None + reasoning_replay: ReasoningReplayMode = ReasoningReplayMode.THINK_TAGS + # True after deferred assistant text has been added to the OpenAI transcript. 
+ deferred_emitted: bool = False + + def needs_deferred(self) -> bool: + return bool(self.deferred_blocks) and not self.deferred_emitted + + +def _index_first_tool_use(blocks: list[Any]) -> int | None: + for i, block in enumerate(blocks): + if get_block_type(block) == "tool_use": + return i + return None + + +def _iter_tool_uses_in_order(blocks: list[Any]) -> list[dict[str, Any]]: + tool_calls: list[dict[str, Any]] = [] + for block in blocks: + if get_block_type(block) == "tool_use": + tool_input = get_block_attr(block, "input", {}) + tool_calls.append( + { + "id": get_block_attr(block, "id"), + "type": "function", + "function": { + "name": get_block_attr(block, "name"), + "arguments": json.dumps(tool_input) + if isinstance(tool_input, dict) + else str(tool_input), + }, + } + ) + return tool_calls + + +def _deferred_post_tool_blocks( + content: list[Any], *, first_tool_index: int +) -> list[Any]: + return [ + b + for i, b in enumerate(content) + if i > first_tool_index and get_block_type(b) != "tool_use" + ] + + +def _assert_no_forbidden_assistant_block(block: Any) -> None: + block_type = get_block_type(block) + if block_type == "image": + raise OpenAIConversionError( + "Assistant image blocks are not supported for OpenAI chat conversion." + ) + if block_type in ( + "server_tool_use", + "web_search_tool_result", + "web_fetch_tool_result", + ): + raise OpenAIConversionError( + "OpenAI chat conversion does not support Anthropic server tool blocks " + f"({block_type!r} in an assistant message). Use a native Anthropic transport provider." 
+ ) + + +class AnthropicToOpenAIConverter: + """Convert Anthropic message format to OpenAI-compatible format.""" + + @staticmethod + def convert_messages( + messages: list[Any], + *, + reasoning_replay: ReasoningReplayMode = ReasoningReplayMode.THINK_TAGS, + ) -> list[dict[str, Any]]: + result: list[dict[str, Any]] = [] + pending: _PendingAfterTools | None = None + + for msg in messages: + role = msg.role + content = msg.content + reasoning_content = _clean_reasoning_content( + getattr(msg, "reasoning_content", None) + ) + + if role == "assistant" and isinstance(content, list): + if pending is not None and pending.needs_deferred(): + # Orphan: expected tool result; emit deferred to avoid a stuck session. + result.extend( + AnthropicToOpenAIConverter._deferred_post_tool_to_messages( + pending, + ) + ) + pending.deferred_emitted = True + pending = None + + if (first_i := _index_first_tool_use(content)) is not None: + for block in content: + if get_block_type(block) == "tool_use": + continue + _assert_no_forbidden_assistant_block(block) + out, new_pending = ( + AnthropicToOpenAIConverter._convert_assistant_message_with_split( + content, + first_tool_index=first_i, + reasoning_content=reasoning_content, + reasoning_replay=reasoning_replay, + ) + ) + result.extend(out) + if new_pending is not None: + pending = new_pending + else: + for block in content: + _assert_no_forbidden_assistant_block(block) + result.extend( + AnthropicToOpenAIConverter._convert_assistant_message( + content, + reasoning_content=reasoning_content, + reasoning_replay=reasoning_replay, + ) + ) + elif isinstance(content, str): + if role == "user" and pending is not None and pending.needs_deferred(): + result.extend( + AnthropicToOpenAIConverter._deferred_post_tool_to_messages( + pending + ) + ) + pending.deferred_emitted = True + pending = None + converted = {"role": role, "content": content} + if role == "assistant" and reasoning_content: + if reasoning_replay == 
ReasoningReplayMode.REASONING_CONTENT: + converted["reasoning_content"] = reasoning_content + elif reasoning_replay == ReasoningReplayMode.THINK_TAGS: + content_parts = [_think_tag_content(reasoning_content)] + if content: + content_parts.append(content) + converted["content"] = "\n\n".join(content_parts) + result.append(converted) + elif isinstance(content, list): + if role == "user": + if pending is not None and pending.needs_deferred(): + if not pending.remaining_tool_ids: + result.extend( + AnthropicToOpenAIConverter._deferred_post_tool_to_messages( + pending + ) + ) + pending.deferred_emitted = True + pending = None + result.extend( + AnthropicToOpenAIConverter._convert_user_message( + content + ) + ) + else: + pieces = AnthropicToOpenAIConverter._convert_user_message_with_injection( + content, pending + ) + result.extend(pieces["messages"]) + if pieces["cleared_pending"]: + pending = None + else: + result.extend( + AnthropicToOpenAIConverter._convert_user_message(content) + ) + else: + if role == "user" and pending is not None and pending.needs_deferred(): + result.extend( + AnthropicToOpenAIConverter._deferred_post_tool_to_messages( + pending + ) + ) + pending.deferred_emitted = True + pending = None + result.append({"role": role, "content": str(content)}) + + if pending is not None and pending.needs_deferred(): + result.extend( + AnthropicToOpenAIConverter._deferred_post_tool_to_messages(pending) + ) + + return result + + @staticmethod + def _convert_assistant_message_with_split( + content: list[Any], + *, + first_tool_index: int, + reasoning_content: str | None, + reasoning_replay: ReasoningReplayMode, + ) -> tuple[list[dict[str, Any]], _PendingAfterTools | None]: + pre = content[:first_tool_index] + tool_calls = _iter_tool_uses_in_order(content) + if not tool_calls: + return ( + AnthropicToOpenAIConverter._convert_assistant_message( + content, + reasoning_content=reasoning_content, + reasoning_replay=reasoning_replay, + ), + None, + ) + deferred_blocks = 
_deferred_post_tool_blocks( + content, first_tool_index=first_tool_index + ) + + pre_msg: dict[str, Any] + if not pre: + pre_msg = { + "role": "assistant", + "content": "", + } + if reasoning_replay == ReasoningReplayMode.REASONING_CONTENT: + replay = reasoning_content + if replay: + pre_msg["reasoning_content"] = replay + else: + pre_msg = AnthropicToOpenAIConverter._convert_assistant_message( + pre, + reasoning_content=reasoning_content, + reasoning_replay=reasoning_replay, + )[0] + pre_msg["tool_calls"] = tool_calls + if tool_calls and pre_msg.get("content") == " ": + pre_msg["content"] = "" + pnd: _PendingAfterTools | None = None + if deferred_blocks: + res_ids: set[str] = set() + for tc in tool_calls: + tid = tc.get("id") + if tid is not None and str(tid).strip() != "": + res_ids.add(str(tid)) + pnd = _PendingAfterTools( + remaining_tool_ids=res_ids, + deferred_blocks=deferred_blocks, + top_level_reasoning=reasoning_content, + reasoning_replay=reasoning_replay, + ) + return [pre_msg], pnd + + @staticmethod + def _convert_assistant_message( + content: list[Any], + *, + reasoning_content: str | None = None, + reasoning_replay: ReasoningReplayMode = ReasoningReplayMode.THINK_TAGS, + ) -> list[dict[str, Any]]: + content_parts: list[str] = [] + thinking_parts: list[str] = [] + tool_calls: list[dict[str, Any]] = [] + for block in content: + block_type = get_block_type(block) + if block_type == "text": + content_parts.append(get_block_attr(block, "text", "")) + elif block_type == "thinking": + if reasoning_replay == ReasoningReplayMode.DISABLED: + continue + thinking = get_block_attr(block, "thinking", "") + if reasoning_replay == ReasoningReplayMode.THINK_TAGS: + content_parts.append(_think_tag_content(thinking)) + elif reasoning_content is None: + thinking_parts.append(thinking) + elif block_type == "redacted_thinking": + # Opaque provider continuation data; do not materialize as model-visible text + # or reasoning_content for OpenAI chat upstreams. 
+ continue + elif block_type == "tool_use": + tool_input = get_block_attr(block, "input", {}) + tool_calls.append( + { + "id": get_block_attr(block, "id"), + "type": "function", + "function": { + "name": get_block_attr(block, "name"), + "arguments": json.dumps(tool_input) + if isinstance(tool_input, dict) + else str(tool_input), + }, + } + ) + else: + _assert_no_forbidden_assistant_block(block) + + content_str = "\n\n".join(content_parts) + if not content_str and not tool_calls: + content_str = " " + + msg: dict[str, Any] = { + "role": "assistant", + "content": content_str, + } + if tool_calls: + msg["tool_calls"] = tool_calls + if reasoning_replay == ReasoningReplayMode.REASONING_CONTENT: + replay_reasoning = reasoning_content or "\n".join(thinking_parts) + if replay_reasoning: + msg["reasoning_content"] = replay_reasoning + + return [msg] + + @staticmethod + def _deferred_post_tool_to_messages( + pending: _PendingAfterTools, + ) -> list[dict[str, Any]]: + if not pending.deferred_blocks: + return [] + return AnthropicToOpenAIConverter._convert_assistant_message( + pending.deferred_blocks, + reasoning_content=pending.top_level_reasoning, + reasoning_replay=pending.reasoning_replay, + ) + + @staticmethod + def _convert_user_message_with_injection( + content: list[Any], pending: _PendingAfterTools + ) -> dict[str, Any]: + """Convert user list blocks, emitting deferred assistant after all tool results.""" + if not pending.needs_deferred() or not pending.remaining_tool_ids: + return { + "messages": AnthropicToOpenAIConverter._convert_user_message(content), + "cleared_pending": False, + } + + result: list[dict[str, Any]] = [] + text_parts: list[str] = [] + cleared = False + + def flush_text() -> None: + if text_parts: + result.append({"role": "user", "content": "\n".join(text_parts)}) + text_parts.clear() + + for block in content: + block_type = get_block_type(block) + if block_type == "text": + text_parts.append(get_block_attr(block, "text", "")) + elif block_type == 
"image": + raise OpenAIConversionError( + "User message image blocks are not supported for OpenAI chat " + "conversion; use a vision-capable native Anthropic provider or " + "extend the converter." + ) + elif block_type == "tool_result": + flush_text() + tool_content = get_block_attr(block, "content", "") + serialized = _serialize_tool_result_content(tool_content) + tuid = get_block_attr(block, "tool_use_id") + tuid_s = str(tuid) if tuid is not None else "" + result.append( + { + "role": "tool", + "tool_call_id": tuid, + "content": serialized if serialized else "", + } + ) + if tuid_s in pending.remaining_tool_ids: + pending.remaining_tool_ids.discard(tuid_s) + if not pending.remaining_tool_ids: + result.extend( + AnthropicToOpenAIConverter._deferred_post_tool_to_messages( + pending + ) + ) + pending.deferred_emitted = True + cleared = True + else: + pass + + flush_text() + return {"messages": result, "cleared_pending": cleared} + + @staticmethod + def _convert_user_message(content: list[Any]) -> list[dict[str, Any]]: + result: list[dict[str, Any]] = [] + text_parts: list[str] = [] + + def flush_text() -> None: + if text_parts: + result.append({"role": "user", "content": "\n".join(text_parts)}) + text_parts.clear() + + for block in content: + block_type = get_block_type(block) + + if block_type == "text": + text_parts.append(get_block_attr(block, "text", "")) + elif block_type == "image": + raise OpenAIConversionError( + "User message image blocks are not supported for OpenAI chat " + "conversion; use a vision-capable native Anthropic provider or " + "extend the converter." 
+ ) + elif block_type == "tool_result": + flush_text() + tool_content = get_block_attr(block, "content", "") + serialized = _serialize_tool_result_content(tool_content) + result.append( + { + "role": "tool", + "tool_call_id": get_block_attr(block, "tool_use_id"), + "content": serialized if serialized else "", + } + ) + + flush_text() + return result + + @staticmethod + def convert_tools(tools: list[Any]) -> list[dict[str, Any]]: + return [ + { + "type": "function", + "function": { + "name": tool.name, + "description": tool.description or "", + "parameters": _tool_input_schema(tool), + }, + } + for tool in tools + ] + + @staticmethod + def convert_tool_choice(tool_choice: Any) -> Any: + if not isinstance(tool_choice, dict): + return tool_choice + + choice_type = tool_choice.get("type") + if choice_type == "tool": + name = tool_choice.get("name") + if name: + return {"type": "function", "function": {"name": name}} + if choice_type == "any": + return "required" + if choice_type in {"auto", "none", "required"}: + return choice_type + if choice_type == "function" and isinstance(tool_choice.get("function"), dict): + return tool_choice + + return tool_choice + + @staticmethod + def convert_system_prompt(system: Any) -> dict[str, str] | None: + if isinstance(system, str): + return {"role": "system", "content": system} + if isinstance(system, list): + text_parts = [ + get_block_attr(block, "text", "") + for block in system + if get_block_type(block) == "text" + ] + if text_parts: + return {"role": "system", "content": "\n\n".join(text_parts).strip()} + return None + + +def build_base_request_body( + request_data: Any, + *, + default_max_tokens: int | None = None, + reasoning_replay: ReasoningReplayMode = ReasoningReplayMode.THINK_TAGS, +) -> dict[str, Any]: + """Build the common parts of an OpenAI-format request body.""" + _openai_reject_native_only_top_level_fields(request_data) + messages = AnthropicToOpenAIConverter.convert_messages( + request_data.messages, + 
reasoning_replay=reasoning_replay, + ) + + system = getattr(request_data, "system", None) + if system: + system_msg = AnthropicToOpenAIConverter.convert_system_prompt(system) + if system_msg: + messages.insert(0, system_msg) + + body: dict[str, Any] = {"model": request_data.model, "messages": messages} + + max_tokens = getattr(request_data, "max_tokens", None) + set_if_not_none(body, "max_tokens", max_tokens or default_max_tokens) + set_if_not_none(body, "temperature", getattr(request_data, "temperature", None)) + set_if_not_none(body, "top_p", getattr(request_data, "top_p", None)) + + stop_sequences = getattr(request_data, "stop_sequences", None) + if stop_sequences: + body["stop"] = stop_sequences + + tools = getattr(request_data, "tools", None) + if tools: + body["tools"] = AnthropicToOpenAIConverter.convert_tools(tools) + tool_choice = getattr(request_data, "tool_choice", None) + if tool_choice: + body["tool_choice"] = AnthropicToOpenAIConverter.convert_tool_choice( + tool_choice + ) + + return body diff --git a/core/anthropic/emitted_sse_tracker.py b/core/anthropic/emitted_sse_tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..079dd511bc0276a62bb152a02943ff50bc46a64d --- /dev/null +++ b/core/anthropic/emitted_sse_tracker.py @@ -0,0 +1,97 @@ +"""Track content-block state for native Anthropic SSE strings we emit to clients.""" + +from __future__ import annotations + +import uuid +from collections.abc import Iterator +from contextlib import suppress +from typing import Any + +from core.anthropic.sse import SSEBuilder, format_sse_event +from core.anthropic.stream_contracts import SSEEvent, event_index, parse_sse_lines + + +class EmittedNativeSseTracker: + """Parse emitted SSE frames so mid-stream errors can close blocks and pick a fresh index.""" + + def __init__(self) -> None: + self._buf = "" + self._open_stack: list[int] = [] + self._max_index = -1 + self.message_id: str | None = None + self.model: str = "" + + def feed(self, chunk: 
str) -> None: + """Record SSE frames completed by ``chunk`` (handles splitting across reads).""" + self._buf += chunk + while True: + sep = self._buf.find("\n\n") + if sep < 0: + break + frame = self._buf[:sep] + self._buf = self._buf[sep + 2 :] + if not frame.strip(): + continue + for event in parse_sse_lines(frame.splitlines()): + self._observe(event) + + def _observe(self, event: SSEEvent) -> None: + if event.event == "message_start": + message = event.data.get("message") + if isinstance(message, dict): + mid = message.get("id") + if isinstance(mid, str) and mid: + self.message_id = mid + model = message.get("model") + if isinstance(model, str) and model: + self.model = model + return + + if event.event == "content_block_start": + idx = event_index(event) + self._max_index = max(self._max_index, idx) + self._open_stack.append(idx) + return + + if event.event == "content_block_stop": + idx = event_index(event) + if self._open_stack and self._open_stack[-1] == idx: + self._open_stack.pop() + else: + with suppress(ValueError): + self._open_stack.remove(idx) + + def next_content_index(self) -> int: + """Next unused content block index based on emitted starts.""" + return self._max_index + 1 + + def iter_close_unclosed_blocks(self) -> Iterator[str]: + """Yield ``content_block_stop`` events for blocks that were started but not stopped.""" + while self._open_stack: + idx = self._open_stack.pop() + yield format_sse_event( + "content_block_stop", + {"type": "content_block_stop", "index": idx}, + ) + + def iter_midstream_error_tail( + self, + error_message: str, + *, + request: Any, + input_tokens: int, + log_raw_sse_events: bool, + ) -> Iterator[str]: + """Close dangling blocks, emit a text error block at a fresh index, then message tail.""" + mid = self.message_id or f"msg_{uuid.uuid4()}" + model = self.model or (getattr(request, "model", "") or "") + sse = SSEBuilder( + mid, + model, + input_tokens, + log_raw_events=log_raw_sse_events, + ) + sse.blocks.next_index = 
self.next_content_index() + yield from sse.emit_error(error_message) + yield sse.message_delta("end_turn", 1) + yield sse.message_stop() diff --git a/core/anthropic/errors.py b/core/anthropic/errors.py new file mode 100644 index 0000000000000000000000000000000000000000..afe093d6247e3c08a248b58d6a1b89945454237f --- /dev/null +++ b/core/anthropic/errors.py @@ -0,0 +1,70 @@ +"""User-facing error formatting shared by API, providers, and integrations.""" + +import httpx +import openai + + +def get_user_facing_error_message( + e: Exception, + *, + read_timeout_s: float | None = None, +) -> str: + """Return a readable, non-empty error message for users. + + Known transport and OpenAI SDK exception types are mapped to stable wording + before falling back to ``str(e)``, so empty or noisy SDK messages do not skip + the mapped path. + """ + if isinstance(e, httpx.ReadTimeout): + if read_timeout_s is not None: + return f"Provider request timed out after {read_timeout_s:g}s." + return "Provider request timed out." + if isinstance(e, httpx.ConnectTimeout): + return "Could not connect to provider." + if isinstance(e, TimeoutError): + if read_timeout_s is not None: + return f"Provider request timed out after {read_timeout_s:g}s." + return "Request timed out." + + if isinstance(e, openai.RateLimitError): + return "Provider rate limit reached. Please retry shortly." + if isinstance(e, openai.AuthenticationError): + return "Provider authentication failed. Check API key." + if isinstance(e, openai.BadRequestError): + return "Invalid request sent to provider." + + name = type(e).__name__ + status_code = getattr(e, "status_code", None) + if name == "RateLimitError": + return "Provider rate limit reached. Please retry shortly." + if name == "AuthenticationError": + return "Provider authentication failed. Check API key." + if name == "InvalidRequestError": + return "Invalid request sent to provider." + if name == "OverloadedError": + return "Provider is currently overloaded. 
Please retry." + if name == "APIError": + if status_code in (502, 503, 504): + return "Provider is temporarily unavailable. Please retry." + return "Provider API request failed." + if name.endswith("ProviderError") or name == "ProviderError": + return "Provider request failed." + + message = str(e).strip() + if message: + return message + + return "Provider request failed unexpectedly." + + +def format_user_error_preview(exc: Exception, *, max_len: int = 200) -> str: + """Truncate a user-facing error string for short chat replies.""" + return get_user_facing_error_message(exc)[:max_len] + + +def append_request_id(message: str, request_id: str | None) -> str: + """Append request_id suffix when available.""" + base = message.strip() or "Provider request failed unexpectedly." + if request_id: + return f"{base} (request_id={request_id})" + return base diff --git a/core/anthropic/native_messages_request.py b/core/anthropic/native_messages_request.py new file mode 100644 index 0000000000000000000000000000000000000000..cab77ff84c230948ffdbc24c7809e1836efef799 --- /dev/null +++ b/core/anthropic/native_messages_request.py @@ -0,0 +1,265 @@ +"""Native Anthropic Messages request body construction (JSON-ready dicts). + +Provider adapters supply policy via parameters (defaults, OpenRouter post-steps). +""" + +from __future__ import annotations + +from collections.abc import Sequence +from typing import Any + +from pydantic import BaseModel + +_REQUEST_FIELDS = ( + "model", + "messages", + "system", + "max_tokens", + "stop_sequences", + "stream", + "temperature", + "top_p", + "top_k", + "metadata", + "tools", + "tool_choice", + "thinking", + "context_management", + "output_config", + "mcp_servers", + "extra_body", +) + +# Keys that would override routed canonical request fields if merged from ``extra_body``. 
_OPENROUTER_EXTRA_BODY_FORBIDDEN_KEYS = frozenset(
    {
        "model",
        "messages",
        "system",
        "tools",
        "tool_choice",
        "stream",
        "max_tokens",
        "temperature",
        "top_p",
        "top_k",
        "metadata",
        "stop_sequences",
        "context_management",
        "output_config",
        "mcp_servers",
    }
)


class OpenRouterExtraBodyError(ValueError):
    """``extra_body`` contained reserved keys that would override canonical fields."""


def validate_openrouter_extra_body(extra: Any) -> None:
    """Reject ``extra_body`` keys that must not override routed request fields.

    Raises:
        OpenRouterExtraBodyError: when ``extra`` is a non-empty dict containing
            any reserved canonical request key.
    """
    if not isinstance(extra, dict) or not extra:
        return
    bad = _OPENROUTER_EXTRA_BODY_FORBIDDEN_KEYS & extra.keys()
    if bad:
        raise OpenRouterExtraBodyError(
            f"extra_body must not override canonical request fields: {sorted(bad)}"
        )


# Fields consumed by this module's policy steps, never forwarded verbatim.
_INTERNAL_FIELDS = {
    "thinking",
    "extra_body",
}


def _serialize_value(value: Any) -> Any:
    """Convert Pydantic models and lightweight objects into JSON-ready values.

    ``None`` members of dicts and object attributes are dropped; strings and
    bytes are kept scalar even though they are ``Sequence`` types.
    """
    if isinstance(value, BaseModel):
        return value.model_dump(exclude_none=True)
    if isinstance(value, dict):
        return {
            key: _serialize_value(item)
            for key, item in value.items()
            if item is not None
        }
    if isinstance(value, Sequence) and not isinstance(value, str | bytes | bytearray):
        return [_serialize_value(item) for item in value]
    if value is None or isinstance(value, str | int | float | bool):
        return value
    if hasattr(value, "__dict__"):
        # Lightweight objects: serialize public, non-None attributes only.
        return {
            key: _serialize_value(item)
            for key, item in vars(value).items()
            if not key.startswith("_") and item is not None
        }
    return value


def _dump_request_fields(request_data: Any) -> dict[str, Any]:
    """Extract the public request fields (OpenRouter-style explicit field list)."""
    if isinstance(request_data, BaseModel):
        return request_data.model_dump(exclude_none=True)

    dumped: dict[str, Any] = {}
    for field in _REQUEST_FIELDS:
        value = getattr(request_data, field, None)
        if value is not None:
            dumped[field] = _serialize_value(value)
    return dumped


def dump_raw_messages_request(request_data: Any) -> dict[str, Any]:
    """Public JSON-ready dict of Anthropic public request fields (for native adapters)."""
    return _dump_request_fields(request_data)


def sanitize_native_messages_thinking_policy(
    messages: Any, *, thinking_enabled: bool
) -> Any:
    """Filter assistant message thinking blocks for upstream native Anthropic JSON.

    When ``thinking_enabled`` is false, remove ``thinking`` and ``redacted_thinking``
    history so disabled policy is not undermined by prior turns.

    When true, keep ``redacted_thinking`` and signed ``thinking``; remove only
    unsigned plain ``thinking`` blocks (not replayable).

    Non-list inputs are returned unchanged.
    """
    if not isinstance(messages, list):
        return messages

    sanitized_messages: list[Any] = []
    for message in messages:
        if not isinstance(message, dict):
            sanitized_messages.append(message)
            continue

        # Only assistant turns can carry thinking blocks.
        if message.get("role") != "assistant":
            sanitized_messages.append(message)
            continue

        content = message.get("content")
        if not isinstance(content, list):
            sanitized_messages.append(message)
            continue

        if not thinking_enabled:
            sanitized_content = [
                block
                for block in content
                if not (
                    isinstance(block, dict)
                    and block.get("type") in ("thinking", "redacted_thinking")
                )
            ]
        else:
            # Unsigned thinking blocks cannot be replayed upstream; drop them.
            sanitized_content = [
                block
                for block in content
                if not (
                    isinstance(block, dict)
                    and block.get("type") == "thinking"
                    and not isinstance(block.get("signature"), str)
                )
            ]

        sanitized_message = dict(message)
        # Anthropic rejects empty content lists; fall back to an empty string.
        sanitized_message["content"] = sanitized_content or ""
        sanitized_messages.append(sanitized_message)

    return sanitized_messages


def _normalize_system_prompt_for_openrouter(system: Any) -> Any:
    """Flatten Claude SDK system blocks for OpenRouter's native endpoint.

    Returns the input unchanged when it is not a list or contains no text blocks.
    """
    if not isinstance(system, list):
        return system

    text_parts: list[str] = []
    for block in system:
        if not isinstance(block, dict):
            continue
        if block.get("type") == "text" and isinstance(block.get("text"), str):
            text_parts.append(block["text"])
    return "\n\n".join(text_parts).strip() if text_parts else system


def _apply_openrouter_reasoning_policy(body: dict[str, Any], thinking_cfg: Any) -> None:
    """Map Anthropic thinking controls onto OpenRouter reasoning controls.

    Mutates ``body`` in place; caller-supplied ``reasoning`` settings win via
    ``setdefault``.
    """
    reasoning = body.setdefault("reasoning", {"enabled": True})
    if not isinstance(reasoning, dict):
        # Caller supplied a non-dict reasoning payload; leave it alone.
        return
    reasoning.setdefault("enabled", True)
    if not isinstance(thinking_cfg, dict):
        return
    budget_tokens = thinking_cfg.get("budget_tokens")
    if isinstance(budget_tokens, int):
        reasoning.setdefault("max_tokens", budget_tokens)


def build_base_native_anthropic_request_body(
    request: Any,
    *,
    default_max_tokens: int,
    thinking_enabled: bool,
) -> dict[str, Any]:
    """Serialize a Pydantic messages request to a generic native Anthropic body.

    Args:
        request: Pydantic request model (must support ``model_dump``).
        default_max_tokens: applied when the request carries no ``max_tokens``.
        thinking_enabled: gates both the ``thinking`` payload and history blocks.
    """
    body = request.model_dump(exclude_none=True)

    # ``extra_body`` is a local policy channel, never forwarded natively.
    body.pop("extra_body", None)

    if "thinking" in body:
        thinking_cfg = body.pop("thinking")
        if thinking_enabled and isinstance(thinking_cfg, dict):
            thinking_payload: dict[str, Any] = {"type": "enabled"}
            budget_tokens = thinking_cfg.get("budget_tokens")
            if isinstance(budget_tokens, int):
                thinking_payload["budget_tokens"] = budget_tokens
            body["thinking"] = thinking_payload

    if "max_tokens" not in body:
        body["max_tokens"] = default_max_tokens

    if "messages" in body:
        body["messages"] = sanitize_native_messages_thinking_policy(
            body["messages"],
            thinking_enabled=thinking_enabled,
        )

    return body


def build_openrouter_native_request_body(
    request_data: Any,
    *,
    thinking_enabled: bool,
    default_max_tokens: int,
) -> dict[str, Any]:
    """Build an Anthropic-format request body for OpenRouter (policy hooks built-in).

    Raises:
        OpenRouterExtraBodyError: when ``extra_body`` tries to override canonical
            request fields.
    """
    dumped_request = _dump_request_fields(request_data)
    request_extra = dumped_request.pop("extra_body", None)
    thinking_cfg = dumped_request.get("thinking")
    body: dict[str, Any] = {
        key: value
        for key, value in dumped_request.items()
        if key not in _INTERNAL_FIELDS
    }

    if isinstance(request_extra, dict):
        validate_openrouter_extra_body(request_extra)
        body.update(request_extra)

    # Bugfix: only sanitize when messages are present. The previous
    # unconditional assignment re-inserted a spurious ``"messages": None`` key
    # into the upstream body whenever the request carried no messages.
    messages = body.get("messages")
    if messages is not None:
        body["messages"] = sanitize_native_messages_thinking_policy(
            messages,
            thinking_enabled=thinking_enabled,
        )
    if "system" in body:
        body["system"] = _normalize_system_prompt_for_openrouter(body["system"])
    body["stream"] = True
    if body.get("max_tokens") is None:
        body["max_tokens"] = default_max_tokens

    if thinking_enabled:
        _apply_openrouter_reasoning_policy(body, thinking_cfg)

    return body
+""" + +from __future__ import annotations + +import copy +import json +from dataclasses import dataclass, field +from typing import Any + +__all__ = [ + "NativeSseBlockPolicyState", + "format_native_sse_event", + "is_terminal_openrouter_done_event", + "parse_native_sse_event", + "transform_native_sse_block_event", +] + + +@dataclass +class _UpstreamBlockState: + """Per-upstream content block: segment index and liveness in the model stream.""" + + block_type: str + down_index: int + open: bool + last_start_block: dict[str, Any] | None = None + + +@dataclass +class NativeSseBlockPolicyState: + """Track per-upstream content blocks and remapped Anthropic ``index`` field.""" + + next_index: int = 0 + by_upstream: dict[int, _UpstreamBlockState] = field(default_factory=dict) + dropped_indexes: set[int] = field(default_factory=set) + pending_suppressed_stops: set[int] = field(default_factory=set) + message_stopped: bool = False + + +def format_native_sse_event(event_name: str | None, data_text: str) -> str: + """Format an SSE event from its event name and data payload.""" + lines: list[str] = [] + if event_name: + lines.append(f"event: {event_name}") + lines.extend(f"data: {line}" for line in data_text.splitlines()) + return "\n".join(lines) + "\n\n" + + +def parse_native_sse_event(event: str) -> tuple[str | None, str]: + """Extract the event name and raw data payload from an SSE event.""" + event_name = None + data_lines: list[str] = [] + for line in event.strip().splitlines(): + if line.startswith("event:"): + event_name = line[6:].strip() + elif line.startswith("data:"): + data_lines.append(line[5:].lstrip()) + return event_name, "\n".join(data_lines) + + +def is_terminal_openrouter_done_event(event_name: str | None, data_text: str) -> bool: + """Return whether an event is OpenAI-style terminal noise (``[DONE]``).""" + return (event_name is None or event_name in {"data", "done"}) and ( + data_text.strip().upper() == "[DONE]" + ) + + +def 
_delta_type_to_block_kind(delta_type: Any) -> str | None: + """Map a content_block_delta type to a content block kind (text/thinking/tool_use).""" + if not isinstance(delta_type, str): + return None + if delta_type in {"thinking_delta", "signature_delta"}: + return "thinking" + if delta_type == "text_delta": + return "text" + if delta_type == "input_json_delta": + return "tool_use" + return None + + +def _synthetic_start_content_block( + block_kind: str, + *, + upstream_index: int, + stored_tool_block: dict[str, Any] | None = None, +) -> dict[str, Any]: + """Build a `content_block` for a `content_block_start` with empty streaming fields.""" + if block_kind == "tool_use": + if ( + isinstance(stored_tool_block, dict) + and stored_tool_block.get("type") == "tool_use" + ): + tool_id = stored_tool_block.get("id") + name = stored_tool_block.get("name") + inp = stored_tool_block.get("input") + return { + "type": "tool_use", + "id": tool_id + if isinstance(tool_id, str) and tool_id + else f"toolu_or_{upstream_index}", + "name": name if isinstance(name, str) else "", + "input": inp if isinstance(inp, dict) else {}, + } + return { + "type": "tool_use", + "id": f"toolu_or_{upstream_index}", + "name": "", + "input": {}, + } + if block_kind == "thinking": + return {"type": "thinking", "thinking": ""} + if block_kind == "text": + return {"type": "text", "text": ""} + return {"type": "text", "text": ""} + + +def _should_drop_block_type(block_type: Any, *, thinking_enabled: bool) -> bool: + if not isinstance(block_type, str): + return False + if block_type.startswith("redacted_thinking"): + return not thinking_enabled + return not thinking_enabled and "thinking" in block_type + + +def _synthetic_close_other_open_blocks( + state: NativeSseBlockPolicyState, current_upstream: int +) -> str: + """Close every open block except `current_upstream` and track duplicate upstream stops.""" + out: list[str] = [] + for upstream, seg in list(state.by_upstream.items()): + if upstream == 
current_upstream or not seg.open: + continue + out.append( + format_native_sse_event( + "content_block_stop", + json.dumps( + { + "type": "content_block_stop", + "index": seg.down_index, + } + ), + ) + ) + seg.open = False + state.pending_suppressed_stops.add(upstream) + return "".join(out) + + +def _allocate_new_segment( + state: NativeSseBlockPolicyState, + upstream_index: int, + block_type: str, + *, + last_start_block: dict[str, Any] | None = None, +) -> int: + """Assign a new downstream `index` for a segment and record upstream state.""" + new_idx = state.next_index + state.next_index += 1 + state.by_upstream[upstream_index] = _UpstreamBlockState( + block_type=block_type, + down_index=new_idx, + open=True, + last_start_block=last_start_block, + ) + return new_idx + + +def transform_native_sse_block_event( + event: str, + state: NativeSseBlockPolicyState, + *, + thinking_enabled: bool, +) -> str | None: + """Normalize native Anthropic SSE events and enforce local thinking policy.""" + event_name, data_text = parse_native_sse_event(event) + if not event_name or not data_text: + return event + + try: + payload = json.loads(data_text) + except json.JSONDecodeError: + return event + + if event_name == "content_block_start": + block = payload.get("content_block") + if not isinstance(block, dict): + return event + block_type = block.get("type") + upstream_index = payload.get("index") + if not isinstance(upstream_index, int): + return event + if _should_drop_block_type(block_type, thinking_enabled=thinking_enabled): + state.dropped_indexes.add(upstream_index) + return None + + if not isinstance(block_type, str): + return event + prefix = _synthetic_close_other_open_blocks(state, upstream_index) + stored = copy.deepcopy(block) + new_idx = _allocate_new_segment( + state, + upstream_index, + block_type=block_type, + last_start_block=stored, + ) + payload["index"] = new_idx + return prefix + format_native_sse_event(event_name, json.dumps(payload)) + + if event_name == 
"content_block_delta": + delta = payload.get("delta") + if not isinstance(delta, dict): + return event + delta_type = delta.get("type") + upstream_index = payload.get("index") + if not isinstance(upstream_index, int): + return event + if upstream_index in state.dropped_indexes: + return None + if _should_drop_block_type(delta_type, thinking_enabled=thinking_enabled): + return None + + block_kind = _delta_type_to_block_kind(delta_type) + if block_kind is None: + return event + + seg = state.by_upstream.get(upstream_index) + if seg and seg.open: + payload["index"] = seg.down_index + return format_native_sse_event(event_name, json.dumps(payload)) + + if seg is not None and not seg.open: + # More deltas for an upstream block after a synthetic (or other) close: + # reopen with a new downstream `index` and emit a synthetic `content_block_start` first. + state.pending_suppressed_stops.discard(upstream_index) + carry = seg.last_start_block + new_idx = _allocate_new_segment( + state, + upstream_index, + block_type=block_kind, + last_start_block=carry, + ) + stored_tool = ( + carry + if isinstance(carry, dict) and carry.get("type") == "tool_use" + else None + ) + start_payload = { + "type": "content_block_start", + "index": new_idx, + "content_block": _synthetic_start_content_block( + block_kind, + upstream_index=upstream_index, + stored_tool_block=stored_tool, + ), + } + prefix = format_native_sse_event( + "content_block_start", json.dumps(start_payload) + ) + payload["index"] = new_idx + return prefix + format_native_sse_event(event_name, json.dumps(payload)) + + # Delta with no prior `content_block_start` in this stream + if block_kind in ("text", "tool_use"): + synthetic_block = _synthetic_start_content_block( + block_kind, + upstream_index=upstream_index, + ) + new_idx = _allocate_new_segment( + state, + upstream_index, + block_type=block_kind, + last_start_block=copy.deepcopy(synthetic_block), + ) + start_payload = { + "type": "content_block_start", + "index": new_idx, 
+ "content_block": synthetic_block, + } + prefix = format_native_sse_event( + "content_block_start", json.dumps(start_payload) + ) + payload["index"] = new_idx + return prefix + format_native_sse_event(event_name, json.dumps(payload)) + # thinking: pass through raw (unusual upstream shape) + return event + + if event_name == "content_block_stop": + upstream_index = payload.get("index") + if not isinstance(upstream_index, int): + return event + if upstream_index in state.dropped_indexes: + return None + if upstream_index in state.pending_suppressed_stops: + state.pending_suppressed_stops.discard(upstream_index) + return None + + seg = state.by_upstream.get(upstream_index) + if seg is not None and seg.open: + payload["index"] = seg.down_index + seg.open = False + return format_native_sse_event(event_name, json.dumps(payload)) + if seg is not None: + # Spurious or duplicate `content_block_stop` for a closed block. + return None + if not thinking_enabled: + return None + return event + + return event diff --git a/core/anthropic/provider_stream_error.py b/core/anthropic/provider_stream_error.py new file mode 100644 index 0000000000000000000000000000000000000000..5534c5016794c0e13911ddd9c78b4b2cf683b909 --- /dev/null +++ b/core/anthropic/provider_stream_error.py @@ -0,0 +1,34 @@ +"""Canonical Anthropic-style SSE sequence for provider-side streaming errors.""" + +from __future__ import annotations + +import uuid +from collections.abc import Iterator +from typing import Any + +from core.anthropic.sse import SSEBuilder + + +def iter_provider_stream_error_sse_events( + *, + request: Any, + input_tokens: int, + error_message: str, + sent_any_event: bool, + log_raw_sse_events: bool, + message_id: str | None = None, +) -> Iterator[str]: + """Yield message_start (if needed), a text block with the error, then message_delta/stop.""" + mid = message_id or f"msg_{uuid.uuid4()}" + model = getattr(request, "model", "") or "" + sse = SSEBuilder( + mid, + model, + input_tokens, + 
log_raw_events=log_raw_sse_events, + ) + if not sent_any_event: + yield sse.message_start() + yield from sse.emit_error(error_message) + yield sse.message_delta("end_turn", 1) + yield sse.message_stop() diff --git a/core/anthropic/server_tool_sse.py b/core/anthropic/server_tool_sse.py new file mode 100644 index 0000000000000000000000000000000000000000..1c8727d16df102fc17ebb188dc32f345b64e12fd --- /dev/null +++ b/core/anthropic/server_tool_sse.py @@ -0,0 +1,14 @@ +"""SSE content_block ``type`` values for Anthropic web server tools (local handlers). + +Shared by :mod:`api.web_tools` and stream contract tests to avoid drift. +""" + +from __future__ import annotations + +from typing import Final + +SERVER_TOOL_USE: Final = "server_tool_use" +WEB_SEARCH_TOOL_RESULT: Final = "web_search_tool_result" +WEB_FETCH_TOOL_RESULT: Final = "web_fetch_tool_result" +WEB_SEARCH_TOOL_RESULT_ERROR: Final = "web_search_tool_result_error" +WEB_FETCH_TOOL_ERROR: Final = "web_fetch_tool_error" diff --git a/core/anthropic/sse.py b/core/anthropic/sse.py new file mode 100644 index 0000000000000000000000000000000000000000..1a99f6c424929cb3fd499e3c074af092e4fd0ade --- /dev/null +++ b/core/anthropic/sse.py @@ -0,0 +1,416 @@ +"""SSE event builder for Anthropic-format streaming responses.""" + +import hashlib +import json +from collections.abc import Iterator +from dataclasses import dataclass, field + +from loguru import logger + +try: + import tiktoken + + ENCODER = tiktoken.get_encoding("cl100k_base") +except Exception: + ENCODER = None + + +# Standard headers for Anthropic-style ``text/event-stream`` responses from this proxy. 
ANTHROPIC_SSE_RESPONSE_HEADERS: dict[str, str] = {
    "X-Accel-Buffering": "no",
    "Cache-Control": "no-cache",
    "Connection": "keep-alive",
}

# OpenAI ``finish_reason`` -> Anthropic ``stop_reason``.
STOP_REASON_MAP = {
    "stop": "end_turn",
    "length": "max_tokens",
    "tool_calls": "tool_use",
    "content_filter": "end_turn",
}


def map_stop_reason(openai_reason: str | None) -> str:
    """Map OpenAI finish_reason to Anthropic stop_reason."""
    return (
        STOP_REASON_MAP.get(openai_reason, "end_turn") if openai_reason else "end_turn"
    )


def _safe_usage_int(value: object) -> int:
    """Coerce streamed usage counters to int; non-integers become 0."""
    return value if isinstance(value, int) else 0


def format_sse_event(event_type: str, data: dict) -> str:
    """Format one Anthropic-style SSE event (no logging)."""
    return f"event: {event_type}\ndata: {json.dumps(data)}\n\n"


@dataclass
class ToolCallState:
    """State for a single streaming tool call."""

    # Downstream Anthropic content block index (-1 until the block is started).
    block_index: int
    tool_id: str
    name: str
    # Raw partial_json fragments streamed so far (for token estimation).
    contents: list[str] = field(default_factory=list)
    started: bool = False
    # Buffered argument JSON for Claude Code Task calls, held until parseable.
    task_arg_buffer: str = ""
    task_args_emitted: bool = False
    pre_start_args: str = ""


@dataclass
class ContentBlockManager:
    """Manage content block indices and state."""

    next_index: int = 0
    thinking_index: int = -1
    text_index: int = -1
    thinking_started: bool = False
    text_started: bool = False
    # OpenAI tool_call index -> per-tool streaming state.
    tool_states: dict[int, ToolCallState] = field(default_factory=dict)

    def allocate_index(self) -> int:
        # Downstream block indices are monotonically increasing, never reused.
        idx = self.next_index
        self.next_index += 1
        return idx

    def ensure_tool_state(self, index: int) -> ToolCallState:
        """Create tool stream state for ``index`` when the first tool delta arrives."""
        if index not in self.tool_states:
            self.tool_states[index] = ToolCallState(block_index=-1, tool_id="", name="")
        return self.tool_states[index]

    def set_stream_tool_id(self, index: int, tool_id: str | None) -> None:
        """Record OpenAI tool call id before ``content_block_start`` (split-stream providers)."""
        if not tool_id:
            return
        state = self.ensure_tool_state(index)
        state.tool_id = str(tool_id)

    def register_tool_name(self, index: int, name: str) -> None:
        """Record tool name fragments as they arrive from chunked OpenAI streams.

        Names may be split across deltas; later chunks can extend (``ab`` + ``c``)
        or repeat prefixes, so we merge conservatively.
        """
        if index not in self.tool_states:
            self.tool_states[index] = ToolCallState(
                block_index=-1, tool_id="", name=name
            )
            return
        state = self.tool_states[index]
        prev = state.name
        if not prev or name.startswith(prev):
            # New fragment restates or extends the full name so far.
            state.name = name
        elif not prev.startswith(name):
            # Pure continuation fragment: append.
            state.name = prev + name

    def buffer_task_args(self, index: int, args: str) -> dict | None:
        # Accumulate Task tool args until they form valid JSON; returns the
        # parsed (and normalized) dict exactly once, else None.
        state = self.tool_states.get(index)
        if state is None or state.task_args_emitted:
            return None

        state.task_arg_buffer += args
        try:
            args_json = json.loads(state.task_arg_buffer)
        except Exception:
            # Incomplete JSON so far; keep buffering.
            return None

        _normalize_task_run_in_background(args_json)

        state.task_args_emitted = True
        state.task_arg_buffer = ""
        return args_json

    def has_emitted_tool_block(self) -> bool:
        """True when native OpenAI tool streaming has started a ``tool_use`` block."""
        return any(s.started for s in self.tool_states.values())

    def flush_task_arg_buffers(self) -> list[tuple[int, str]]:
        # Drain any still-buffered Task args at stream end; invalid JSON becomes
        # "{}" and is logged by digest only (no raw payload leakage).
        results: list[tuple[int, str]] = []
        for tool_index, state in list(self.tool_states.items()):
            if not state.task_arg_buffer or state.task_args_emitted:
                continue

            out = "{}"
            try:
                args_json = json.loads(state.task_arg_buffer)
                _normalize_task_run_in_background(args_json)
                out = json.dumps(args_json)
            except (json.JSONDecodeError, TypeError, ValueError) as e:
                digest = hashlib.sha256(
                    state.task_arg_buffer.encode("utf-8", errors="replace")
                ).hexdigest()[:16]
                logger.warning(
                    "Task args invalid JSON (id={} len={} buffer_sha256_prefix={}): {}",
                    state.tool_id or "unknown",
                    len(state.task_arg_buffer),
                    digest,
                    e,
                )

            state.task_args_emitted = True
            state.task_arg_buffer = ""
            results.append((tool_index, out))
        return results


def _normalize_task_run_in_background(args_json: dict) -> None:
    """Force Claude Code Task subagents to run in foreground (single shared rule)."""
    if args_json.get("run_in_background") is not False:
        args_json["run_in_background"] = False


class SSEBuilder:
    """Builder for Anthropic SSE streaming events."""

    def __init__(
        self,
        message_id: str,
        model: str,
        input_tokens: int = 0,
        *,
        log_raw_events: bool = False,
    ):
        self.message_id = message_id
        self.model = model
        self.input_tokens = input_tokens
        self._log_raw_events = log_raw_events
        self.blocks = ContentBlockManager()
        # Accumulated for token estimation, joined lazily via properties.
        self._accumulated_text_parts: list[str] = []
        self._accumulated_reasoning_parts: list[str] = []

    def _format_event(self, event_type: str, data: dict) -> str:
        event_str = format_sse_event(event_type, data)
        if self._log_raw_events:
            logger.debug("SSE_EVENT: {} - {}", event_type, event_str.strip())
        else:
            # Default: log only type and size, never payload content.
            logger.debug(
                "SSE_EVENT: event_type={} serialized_bytes={}",
                event_type,
                len(event_str.encode("utf-8")),
            )
        return event_str

    def message_start(self) -> str:
        safe_input = _safe_usage_int(self.input_tokens)
        usage = {"input_tokens": safe_input, "output_tokens": 1}
        return self._format_event(
            "message_start",
            {
                "type": "message_start",
                "message": {
                    "id": self.message_id,
                    "type": "message",
                    "role": "assistant",
                    "content": [],
                    "model": self.model,
                    "stop_reason": None,
                    "stop_sequence": None,
                    "usage": usage,
                },
            },
        )

    def message_delta(self, stop_reason: str, output_tokens: int | None) -> str:
        safe_in = _safe_usage_int(self.input_tokens)
        safe_out = output_tokens if isinstance(output_tokens, int) else 0
        return self._format_event(
            "message_delta",
            {
                "type": "message_delta",
                "delta": {"stop_reason": stop_reason, "stop_sequence": None},
                "usage": {
                    "input_tokens": safe_in,
                    "output_tokens": safe_out,
                },
            },
        )

    def message_stop(self) -> str:
        return self._format_event("message_stop", {"type": "message_stop"})

    def content_block_start(self, index: int, block_type: str, **kwargs) -> str:
        content_block: dict = {"type": block_type}
        if block_type == "thinking":
            content_block["thinking"] = kwargs.get("thinking", "")
        elif block_type == "text":
            content_block["text"] = kwargs.get("text", "")
        elif block_type == "tool_use":
            content_block["id"] = kwargs.get("id", "")
            content_block["name"] = kwargs.get("name", "")
            content_block["input"] = kwargs.get("input", {})

        return self._format_event(
            "content_block_start",
            {
                "type": "content_block_start",
                "index": index,
                "content_block": content_block,
            },
        )

    def content_block_delta(self, index: int, delta_type: str, content: str) -> str:
        delta: dict = {"type": delta_type}
        if delta_type == "thinking_delta":
            delta["thinking"] = content
        elif delta_type == "text_delta":
            delta["text"] = content
        elif delta_type == "input_json_delta":
            delta["partial_json"] = content

        return self._format_event(
            "content_block_delta",
            {
                "type": "content_block_delta",
                "index": index,
                "delta": delta,
            },
        )

    def content_block_stop(self, index: int) -> str:
        return self._format_event(
            "content_block_stop",
            {
                "type": "content_block_stop",
                "index": index,
            },
        )

    def start_thinking_block(self) -> str:
        self.blocks.thinking_index = self.blocks.allocate_index()
        self.blocks.thinking_started = True
        return self.content_block_start(self.blocks.thinking_index, "thinking")

    def emit_thinking_delta(self, content: str) -> str:
        self._accumulated_reasoning_parts.append(content)
        return self.content_block_delta(
            self.blocks.thinking_index, "thinking_delta", content
        )

    def stop_thinking_block(self) -> str:
        self.blocks.thinking_started = False
        return self.content_block_stop(self.blocks.thinking_index)

    def start_text_block(self) -> str:
        self.blocks.text_index = self.blocks.allocate_index()
        self.blocks.text_started = True
        return self.content_block_start(self.blocks.text_index, "text")

    def emit_text_delta(self, content: str) -> str:
        self._accumulated_text_parts.append(content)
        return self.content_block_delta(self.blocks.text_index, "text_delta", content)

    def stop_text_block(self) -> str:
        self.blocks.text_started = False
        return self.content_block_stop(self.blocks.text_index)

    def start_tool_block(self, tool_index: int, tool_id: str, name: str) -> str:
        block_idx = self.blocks.allocate_index()
        if tool_index in self.blocks.tool_states:
            # Pre-seeded state (id/name fragments arrived before the start).
            state = self.blocks.tool_states[tool_index]
            state.block_index = block_idx
            state.tool_id = tool_id
            state.started = True
        else:
            self.blocks.tool_states[tool_index] = ToolCallState(
                block_index=block_idx,
                tool_id=tool_id,
                name=name,
                started=True,
            )
        return self.content_block_start(block_idx, "tool_use", id=tool_id, name=name)

    def emit_tool_delta(self, tool_index: int, partial_json: str) -> str:
        state = self.blocks.tool_states[tool_index]
        state.contents.append(partial_json)
        return self.content_block_delta(
            state.block_index, "input_json_delta", partial_json
        )

    def stop_tool_block(self, tool_index: int) -> str:
        block_idx = self.blocks.tool_states[tool_index].block_index
        return self.content_block_stop(block_idx)

    def ensure_thinking_block(self) -> Iterator[str]:
        # Anthropic blocks never interleave: close text before opening thinking.
        if self.blocks.text_started:
            yield self.stop_text_block()
        if not self.blocks.thinking_started:
            yield self.start_thinking_block()

    def ensure_text_block(self) -> Iterator[str]:
        if self.blocks.thinking_started:
            yield self.stop_thinking_block()
        if not self.blocks.text_started:
            yield self.start_text_block()

    def close_content_blocks(self) -> Iterator[str]:
        if self.blocks.thinking_started:
            yield self.stop_thinking_block()
        if self.blocks.text_started:
            yield self.stop_text_block()

    def close_all_blocks(self) -> Iterator[str]:
        yield from self.close_content_blocks()
        for tool_index, state in list(self.blocks.tool_states.items()):
            if state.started:
                yield self.stop_tool_block(tool_index)

    def emit_error(self, error_message: str) -> Iterator[str]:
        # Errors surface as a normal text block so clients render them inline.
        error_index = self.blocks.allocate_index()
        yield self.content_block_start(error_index, "text")
        yield self.content_block_delta(error_index, "text_delta", error_message)
        yield self.content_block_stop(error_index)

    def emit_top_level_error(self, error_message: str) -> str:
        """Emit a top-level ``event: error`` (not assistant text) for transport failures."""
        return self._format_event(
            "error",
            {
                "type": "error",
                "error": {
                    "type": "api_error",
                    "message": error_message,
                },
            },
        )

    @property
    def accumulated_text(self) -> str:
        return "".join(self._accumulated_text_parts)

    @property
    def accumulated_reasoning(self) -> str:
        return "".join(self._accumulated_reasoning_parts)

    def estimate_output_tokens(self) -> int:
        # With tiktoken: count real tokens plus small flat overheads per tool
        # (+15) and per block (+4) for structural JSON. Without it: rough
        # chars/4 heuristic and a flat 50 tokens per started tool call.
        accumulated_text = self.accumulated_text
        accumulated_reasoning = self.accumulated_reasoning
        if ENCODER:
            text_tokens = len(ENCODER.encode(accumulated_text))
            reasoning_tokens = len(ENCODER.encode(accumulated_reasoning))
            tool_tokens = 0
            started_tool_count = 0
            for state in self.blocks.tool_states.values():
                tool_tokens += len(ENCODER.encode(state.name))
                tool_tokens += len(ENCODER.encode("".join(state.contents)))
                tool_tokens += 15
                if state.started:
                    started_tool_count += 1

            block_count = (
                (1 if accumulated_reasoning else 0)
                + (1 if accumulated_text else 0)
                + started_tool_count
            )
            return text_tokens + reasoning_tokens + tool_tokens + (block_count * 4)

        text_tokens = len(accumulated_text) // 4
        reasoning_tokens = len(accumulated_reasoning) // 4
        tool_tokens = (
            sum(1 for state in self.blocks.tool_states.values() if state.started) * 50
        )
        return text_tokens + reasoning_tokens + tool_tokens
b/core/anthropic/stream_contracts.py new file mode 100644 index 0000000000000000000000000000000000000000..ba2b4d81e107e9e166bb51d9dc78c6c6a7afb1bc --- /dev/null +++ b/core/anthropic/stream_contracts.py @@ -0,0 +1,205 @@ +"""Neutral SSE parsing and Anthropic stream shape assertions. + +Used by default CI contract tests and by opt-in live smoke scenarios. +""" + +from __future__ import annotations + +import json +from collections.abc import Iterable +from dataclasses import dataclass +from typing import Any + +from .server_tool_sse import ( + SERVER_TOOL_USE, + WEB_FETCH_TOOL_RESULT, + WEB_SEARCH_TOOL_RESULT, +) + +# Content blocks that only use content_block_start/stop (no deltas), including +# Anthropic server tools and eager text emitted in a single start event. +_NO_DELTA_BLOCK_KINDS = frozenset( + { + SERVER_TOOL_USE, + WEB_SEARCH_TOOL_RESULT, + WEB_FETCH_TOOL_RESULT, + "text_eager", + "redacted_thinking", + } +) + +_ALLOWED_BLOCK_START_TYPES = frozenset( + { + "text", + "thinking", + "tool_use", + "redacted_thinking", + SERVER_TOOL_USE, + WEB_SEARCH_TOOL_RESULT, + WEB_FETCH_TOOL_RESULT, + } +) + + +@dataclass(frozen=True, slots=True) +class SSEEvent: + event: str + data: dict[str, Any] + raw: str + + +def parse_sse_lines(lines: Iterable[str]) -> list[SSEEvent]: + events: list[SSEEvent] = [] + current_event = "" + data_parts: list[str] = [] + raw_parts: list[str] = [] + + for line in lines: + stripped = line.rstrip("\r\n") + if stripped == "": + _append_event(events, current_event, data_parts, raw_parts) + current_event = "" + data_parts = [] + raw_parts = [] + continue + raw_parts.append(stripped) + if stripped.startswith("event:"): + current_event = stripped.split(":", 1)[1].strip() + elif stripped.startswith("data:"): + data_parts.append(stripped.split(":", 1)[1].strip()) + + _append_event(events, current_event, data_parts, raw_parts) + return events + + +def parse_sse_text(text: str) -> list[SSEEvent]: + return parse_sse_lines(text.splitlines()) + + +def 
def assert_anthropic_stream_contract(
    events: list[SSEEvent], *, allow_error: bool = False
) -> None:
    """Check minimal Anthropic-style SSE invariants: start/stop, block nesting.

    Does *not* assert strict event ordering (e.g. :class:`message_delta` vs
    content blocks) beyond presence of a final ``message_stop``; stricter
    ordering can be tested in product or transport-specific suites.

    Args:
        events: Parsed SSE events (see :func:`parse_sse_lines`).
        allow_error: When False, any ``error`` event fails the assertion.

    Raises:
        AssertionError: On any violated invariant.
    """
    assert events, "stream produced no SSE events"
    event_names = [event.event for event in events]
    assert "message_start" in event_names, event_names
    assert event_names[-1] == "message_stop", event_names

    # index -> stored block kind for blocks that are currently open.
    open_blocks: dict[int, str] = {}
    # Every index ever started; an index must never be reused after its stop.
    seen_blocks: set[int] = set()
    for event in events:
        if event.event == "error" and not allow_error:
            raise AssertionError(f"unexpected SSE error event: {event.data}")

        if event.event == "content_block_start":
            index = event_index(event)
            block = event.data.get("content_block", {})
            assert isinstance(block, dict), event.data
            block_type = str(block.get("type", ""))
            assert block_type in _ALLOWED_BLOCK_START_TYPES, event.data
            assert index not in open_blocks, f"block {index} started twice"
            assert index not in seen_blocks, f"block {index} reused after stop"
            # A text block that starts with non-empty text is "eager": its
            # full content arrived in the start event, so it must not receive
            # deltas (enforced via _NO_DELTA_BLOCK_KINDS below).
            if block_type == "text" and str(block.get("text", "")).strip():
                storage = "text_eager"
            else:
                storage = block_type
            open_blocks[index] = storage
            seen_blocks.add(index)
            continue

        if event.event == "content_block_delta":
            index = event_index(event)
            assert index in open_blocks, f"delta for unopened block {index}"
            kind = open_blocks[index]
            assert kind not in _NO_DELTA_BLOCK_KINDS, (
                f"unexpected delta for start/stop-only block {kind} at index {index}"
            )
            delta = event.data.get("delta", {})
            assert isinstance(delta, dict), event.data
            delta_type = str(delta.get("type", ""))
            # Thinking blocks may carry either thinking text or a signature.
            if kind == "thinking":
                assert delta_type in (
                    "thinking_delta",
                    "signature_delta",
                ), f"block {index} is {kind}, got {delta_type}"
                continue
            # All remaining delta-capable kinds have exactly one delta type.
            expected = {
                "text": "text_delta",
                "tool_use": "input_json_delta",
            }[kind]
            assert delta_type == expected, f"block {index} is {kind}, got {delta_type}"
            continue

        if event.event == "content_block_stop":
            index = event_index(event)
            assert index in open_blocks, f"stop for unopened block {index}"
            open_blocks.pop(index)

    assert not open_blocks, f"unclosed blocks: {open_blocks}"
    assert seen_blocks, "stream did not emit any content blocks"
== "tool_use": + return True + return False + + +def event_index(event: SSEEvent) -> int: + """Return the content block ``index`` field from an SSE payload (strict).""" + value = event.data.get("index") + assert isinstance(value, int), event.data + return value diff --git a/core/anthropic/thinking.py b/core/anthropic/thinking.py new file mode 100644 index 0000000000000000000000000000000000000000..fa793d1bfe44dfb39d775c01b62fcb2778d96c6b --- /dev/null +++ b/core/anthropic/thinking.py @@ -0,0 +1,140 @@ +"""Streaming parser for provider-emitted thinking tags.""" + +from collections.abc import Iterator +from dataclasses import dataclass +from enum import Enum + + +class ContentType(Enum): + """Type of content chunk.""" + + TEXT = "text" + THINKING = "thinking" + + +@dataclass +class ContentChunk: + """A chunk of parsed content.""" + + type: ContentType + content: str + + +class ThinkTagParser: + """ + Streaming parser for ``...`` tags. + + Handles partial tags at chunk boundaries by buffering. 
+ """ + + OPEN_TAG = "" + CLOSE_TAG = "" + + def __init__(self): + self._buffer: str = "" + self._in_think_tag: bool = False + + @property + def in_think_mode(self) -> bool: + """Whether currently inside a think tag.""" + return self._in_think_tag + + def feed(self, content: str) -> Iterator[ContentChunk]: + """Feed content and yield parsed chunks.""" + self._buffer += content + + while self._buffer: + prev_len = len(self._buffer) + if not self._in_think_tag: + chunk = self._parse_outside_think() + else: + chunk = self._parse_inside_think() + + if chunk: + yield chunk + elif len(self._buffer) == prev_len: + break + + def _parse_outside_think(self) -> ContentChunk | None: + """Parse content outside think tags.""" + think_start = self._buffer.find(self.OPEN_TAG) + orphan_close = self._buffer.find(self.CLOSE_TAG) + + if orphan_close != -1 and (think_start == -1 or orphan_close < think_start): + pre_orphan = self._buffer[:orphan_close] + self._buffer = self._buffer[orphan_close + len(self.CLOSE_TAG) :] + if pre_orphan: + return ContentChunk(ContentType.TEXT, pre_orphan) + return None + + if think_start == -1: + last_bracket = self._buffer.rfind("<") + if last_bracket != -1: + potential_tag = self._buffer[last_bracket:] + tag_len = len(potential_tag) + if ( + tag_len < len(self.OPEN_TAG) + and self.OPEN_TAG.startswith(potential_tag) + ) or ( + tag_len < len(self.CLOSE_TAG) + and self.CLOSE_TAG.startswith(potential_tag) + ): + emit = self._buffer[:last_bracket] + self._buffer = self._buffer[last_bracket:] + if emit: + return ContentChunk(ContentType.TEXT, emit) + return None + + emit = self._buffer + self._buffer = "" + if emit: + return ContentChunk(ContentType.TEXT, emit) + return None + + pre_think = self._buffer[:think_start] + self._buffer = self._buffer[think_start + len(self.OPEN_TAG) :] + self._in_think_tag = True + if pre_think: + return ContentChunk(ContentType.TEXT, pre_think) + return None + + def _parse_inside_think(self) -> ContentChunk | None: + """Parse 
content inside think tags.""" + think_end = self._buffer.find(self.CLOSE_TAG) + + if think_end == -1: + last_bracket = self._buffer.rfind("<") + if last_bracket != -1 and len(self._buffer) - last_bracket < len( + self.CLOSE_TAG + ): + potential_tag = self._buffer[last_bracket:] + if self.CLOSE_TAG.startswith(potential_tag): + emit = self._buffer[:last_bracket] + self._buffer = self._buffer[last_bracket:] + if emit: + return ContentChunk(ContentType.THINKING, emit) + return None + + emit = self._buffer + self._buffer = "" + if emit: + return ContentChunk(ContentType.THINKING, emit) + return None + + thinking_content = self._buffer[:think_end] + self._buffer = self._buffer[think_end + len(self.CLOSE_TAG) :] + self._in_think_tag = False + if thinking_content: + return ContentChunk(ContentType.THINKING, thinking_content) + return None + + def flush(self) -> ContentChunk | None: + """Flush any remaining buffered content.""" + if self._buffer: + chunk_type = ( + ContentType.THINKING if self._in_think_tag else ContentType.TEXT + ) + content = self._buffer + self._buffer = "" + return ContentChunk(chunk_type, content) + return None diff --git a/core/anthropic/tokens.py b/core/anthropic/tokens.py new file mode 100644 index 0000000000000000000000000000000000000000..081d28b5f9100a64aaa0e260c9b5313b9f89be18 --- /dev/null +++ b/core/anthropic/tokens.py @@ -0,0 +1,113 @@ +"""Token estimation for Anthropic-compatible requests.""" + +import json + +import tiktoken +from loguru import logger + +from .content import get_block_attr + +ENCODER = tiktoken.get_encoding("cl100k_base") + + +def get_token_count( + messages: list, + system: str | list | None = None, + tools: list | None = None, +) -> int: + """Estimate token count for a request.""" + total_tokens = 0 + + if system: + if isinstance(system, str): + total_tokens += len(ENCODER.encode(system)) + elif isinstance(system, list): + for block in system: + text = get_block_attr(block, "text", "") + if text: + total_tokens += 
def get_token_count(
    messages: list,
    system: str | list | None = None,
    tools: list | None = None,
) -> int:
    """Estimate token count for a request.

    Args:
        messages: Anthropic-style message objects with ``content`` that is a
            string or a list of typed blocks.
        system: Optional system prompt (string or list of text blocks).
        tools: Optional tool definitions with ``name``, ``description`` and
            ``input_schema`` attributes.

    Returns:
        Estimated token total (always >= 1).
    """
    total_tokens = 0

    if system:
        if isinstance(system, str):
            total_tokens += len(ENCODER.encode(system))
        elif isinstance(system, list):
            for block in system:
                text = get_block_attr(block, "text", "")
                if text:
                    total_tokens += len(ENCODER.encode(str(text)))
        # Flat overhead for the system prompt framing.
        total_tokens += 4

    for msg in messages:
        if isinstance(msg.content, str):
            total_tokens += len(ENCODER.encode(msg.content))
        elif isinstance(msg.content, list):
            for block in msg.content:
                b_type = get_block_attr(block, "type") or None

                if b_type == "text":
                    text = get_block_attr(block, "text", "")
                    total_tokens += len(ENCODER.encode(str(text)))
                elif b_type == "thinking":
                    thinking = get_block_attr(block, "thinking", "")
                    total_tokens += len(ENCODER.encode(str(thinking)))
                elif b_type == "tool_use":
                    name = get_block_attr(block, "name", "")
                    inp = get_block_attr(block, "input", {})
                    block_id = get_block_attr(block, "id", "")
                    total_tokens += len(ENCODER.encode(str(name)))
                    total_tokens += len(ENCODER.encode(json.dumps(inp)))
                    total_tokens += len(ENCODER.encode(str(block_id)))
                    total_tokens += 15
                elif b_type == "image":
                    # Base64 payload length is a rough proxy for image tokens;
                    # 765 is the flat fallback when no data is available.
                    source = get_block_attr(block, "source")
                    if isinstance(source, dict):
                        data = source.get("data") or source.get("base64") or ""
                        if data:
                            total_tokens += max(85, len(data) // 3000)
                        else:
                            total_tokens += 765
                    else:
                        total_tokens += 765
                elif b_type == "tool_result":
                    content = get_block_attr(block, "content", "")
                    tool_use_id = get_block_attr(block, "tool_use_id", "")
                    if isinstance(content, str):
                        total_tokens += len(ENCODER.encode(content))
                    else:
                        total_tokens += len(ENCODER.encode(json.dumps(content)))
                    total_tokens += len(ENCODER.encode(str(tool_use_id)))
                    total_tokens += 8
                elif b_type in (
                    "server_tool_use",
                    "web_search_tool_result",
                    "web_fetch_tool_result",
                ):
                    # Serialize the whole block; pydantic models expose
                    # model_dump(), plain dicts are dumped directly.
                    if hasattr(block, "model_dump"):
                        blob: object = block.model_dump()
                    else:
                        blob = block
                    try:
                        total_tokens += len(
                            ENCODER.encode(
                                json.dumps(blob, default=str, ensure_ascii=False)
                            )
                        )
                    except (TypeError, ValueError, OverflowError) as e:
                        logger.debug(
                            "Block encode fallback b_type={} err={}", b_type, e
                        )
                        total_tokens += len(ENCODER.encode(str(blob)))
                    total_tokens += 12
                else:
                    # Fixed: loguru formats with {} placeholders, not %-style;
                    # the previous "%r" message would have been logged verbatim.
                    logger.debug(
                        "Unexpected block type {!r}, falling back to json/str encoding",
                        b_type,
                    )
                    try:
                        total_tokens += len(ENCODER.encode(json.dumps(block)))
                    # Fixed: `except TypeError, ValueError:` was Python-2
                    # syntax (a SyntaxError on Python 3).
                    except (TypeError, ValueError):
                        total_tokens += len(ENCODER.encode(str(block)))

    if tools:
        for tool in tools:
            tool_str = (
                tool.name + (tool.description or "") + json.dumps(tool.input_schema)
            )
            total_tokens += len(ENCODER.encode(tool_str))

    # Per-message and per-tool framing overhead.
    total_tokens += len(messages) * 4
    if tools:
        total_tokens += len(tools) * 5

    return max(1, total_tokens)
+ """ + + _FUNC_START_PATTERN = re.compile(r"●\s*]+)>") + _PARAM_PATTERN = re.compile( + r"]+)>(.*?)(?:|$)", re.DOTALL + ) + _WEB_TOOL_JSON_PATTERN = re.compile( + r"(?is)\b(?:use\s+)?(?PWebFetch|WebSearch)\b.*?(?P\{.*?\})" + ) + + def __init__(self): + self._state = ParserState.TEXT + self._buffer = "" + self._current_tool_id = None + self._current_function_name = None + self._current_parameters = {} + + def _extract_web_tool_json_calls(self) -> tuple[str, list[dict[str, Any]]]: + detected_tools: list[dict[str, Any]] = [] + + for match in self._WEB_TOOL_JSON_PATTERN.finditer(self._buffer): + try: + tool_input = json.loads(match.group("json")) + except json.JSONDecodeError: + continue + if not isinstance(tool_input, dict): + continue + + tool_name = match.group("tool") + if tool_name == "WebFetch" and "url" not in tool_input: + continue + if tool_name == "WebSearch" and "query" not in tool_input: + continue + + detected_tools.append( + { + "type": "tool_use", + "id": f"toolu_heuristic_{uuid.uuid4().hex[:8]}", + "name": tool_name, + "input": tool_input, + } + ) + logger.debug( + "Heuristic bypass: Detected JSON-style tool call '{}'", + tool_name, + ) + + if not detected_tools: + return self._buffer, [] + + return "", detected_tools + + def _strip_control_tokens(self, text: str) -> str: + return _CONTROL_TOKEN_RE.sub("", text) + + def _split_incomplete_control_token_tail(self) -> str: + start = self._buffer.rfind(_CONTROL_TOKEN_START) + if start == -1: + return "" + end = self._buffer.find(_CONTROL_TOKEN_END, start) + if end != -1: + return "" + + prefix = self._buffer[:start] + self._buffer = self._buffer[start:] + return prefix + + def feed(self, text: str) -> tuple[str, list[dict[str, Any]]]: + """Feed text and return safe text plus detected tool calls.""" + self._buffer += text + self._buffer = self._strip_control_tokens(self._buffer) + self._buffer, detected_tools = self._extract_web_tool_json_calls() + filtered_output_parts: list[str] = [] + + while True: + if 
self._state == ParserState.TEXT: + if "●" in self._buffer: + idx = self._buffer.find("●") + filtered_output_parts.append(self._buffer[:idx]) + self._buffer = self._buffer[idx:] + self._state = ParserState.MATCHING_FUNCTION + else: + safe_prefix = self._split_incomplete_control_token_tail() + if safe_prefix: + filtered_output_parts.append(safe_prefix) + break + + filtered_output_parts.append(self._buffer) + self._buffer = "" + break + + if self._state == ParserState.MATCHING_FUNCTION: + match = self._FUNC_START_PATTERN.search(self._buffer) + if match: + self._current_function_name = match.group(1).strip() + self._current_tool_id = f"toolu_heuristic_{uuid.uuid4().hex[:8]}" + self._current_parameters = {} + self._buffer = self._buffer[match.end() :] + self._state = ParserState.PARSING_PARAMETERS + logger.debug( + "Heuristic bypass: Detected start of tool call '{}'", + self._current_function_name, + ) + elif len(self._buffer) > 100: + filtered_output_parts.append(self._buffer[0]) + self._buffer = self._buffer[1:] + self._state = ParserState.TEXT + else: + break + + if self._state == ParserState.PARSING_PARAMETERS: + finished_tool_call = False + + while True: + param_match = self._PARAM_PATTERN.search(self._buffer) + if param_match and "" in param_match.group(0): + pre_match_text = self._buffer[: param_match.start()] + if pre_match_text: + filtered_output_parts.append(pre_match_text) + + key = param_match.group(1).strip() + val = param_match.group(2).strip() + self._current_parameters[key] = val + self._buffer = self._buffer[param_match.end() :] + else: + break + + if "●" in self._buffer: + idx = self._buffer.find("●") + if idx > 0: + filtered_output_parts.append(self._buffer[:idx]) + self._buffer = self._buffer[idx:] + finished_tool_call = True + elif len(self._buffer) > 0 and not self._buffer.strip().startswith("<"): + if " list[dict[str, Any]]: + """Flush any remaining tool call in the buffer.""" + self._buffer = self._strip_control_tokens(self._buffer) + 
def set_if_not_none(body: dict[str, Any], key: str, value: Any) -> None:
    """Assign ``value`` to ``body[key]``, skipping the write when it is None."""
    if value is None:
        return
    body[key] = value
+ """ + + def __init__(self, rate_limit: int, rate_window: float) -> None: + if rate_limit <= 0: + raise ValueError("rate_limit must be > 0") + if rate_window <= 0: + raise ValueError("rate_window must be > 0") + + self._rate_limit = int(rate_limit) + self._rate_window = float(rate_window) + self._times: deque[float] = deque() + self._lock = asyncio.Lock() + + async def acquire(self) -> None: + while True: + wait_time = 0.0 + async with self._lock: + now = time.monotonic() + cutoff = now - self._rate_window + + while self._times and self._times[0] <= cutoff: + self._times.popleft() + + if len(self._times) < self._rate_limit: + self._times.append(now) + return + + oldest = self._times[0] + wait_time = max(0.0, (oldest + self._rate_window) - now) + + if wait_time > 0: + await asyncio.sleep(wait_time) + else: + await asyncio.sleep(0) + + async def __aenter__(self) -> StrictSlidingWindowLimiter: + await self.acquire() + return self + + async def __aexit__(self, exc_type, exc, tb) -> bool: + return False diff --git a/messaging/__init__.py b/messaging/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4aaa736ef6a66c1e0c7114d51ea630dc77754408 --- /dev/null +++ b/messaging/__init__.py @@ -0,0 +1,23 @@ +"""Platform-agnostic messaging layer.""" + +from .event_parser import parse_cli_event +from .handler import ClaudeMessageHandler +from .models import IncomingMessage +from .platforms.base import CLISession, MessagingPlatform, SessionManagerInterface +from .session import SessionStore +from .trees.data import MessageNode, MessageState, MessageTree +from .trees.queue_manager import TreeQueueManager + +__all__ = [ + "CLISession", + "ClaudeMessageHandler", + "IncomingMessage", + "MessageNode", + "MessageState", + "MessageTree", + "MessagingPlatform", + "SessionManagerInterface", + "SessionStore", + "TreeQueueManager", + "parse_cli_event", +] diff --git a/messaging/__pycache__/__init__.cpython-314.pyc b/messaging/__pycache__/__init__.cpython-314.pyc 
new file mode 100644 index 0000000000000000000000000000000000000000..b0d9f02d3f87ecad686fade1eda578c88ed9eabc Binary files /dev/null and b/messaging/__pycache__/__init__.cpython-314.pyc differ diff --git a/messaging/__pycache__/cli_event_constants.cpython-314.pyc b/messaging/__pycache__/cli_event_constants.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5af10c1ca2aa3953e0201a9d7d234dc2b84625bb Binary files /dev/null and b/messaging/__pycache__/cli_event_constants.cpython-314.pyc differ diff --git a/messaging/__pycache__/command_dispatcher.cpython-314.pyc b/messaging/__pycache__/command_dispatcher.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..59df396e9082f9e912d9a519c5816328055834dd Binary files /dev/null and b/messaging/__pycache__/command_dispatcher.cpython-314.pyc differ diff --git a/messaging/__pycache__/commands.cpython-314.pyc b/messaging/__pycache__/commands.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a11981bc6f9057df0fed791f7075909768b0fc5a Binary files /dev/null and b/messaging/__pycache__/commands.cpython-314.pyc differ diff --git a/messaging/__pycache__/event_parser.cpython-314.pyc b/messaging/__pycache__/event_parser.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..84c85485dd3fc6d990447a74ccd72da7d8d2bd69 Binary files /dev/null and b/messaging/__pycache__/event_parser.cpython-314.pyc differ diff --git a/messaging/__pycache__/handler.cpython-314.pyc b/messaging/__pycache__/handler.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..46df4e829bbdb0e90c2859f18afe99fbfff32319 Binary files /dev/null and b/messaging/__pycache__/handler.cpython-314.pyc differ diff --git a/messaging/__pycache__/limiter.cpython-314.pyc b/messaging/__pycache__/limiter.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8c3a249d99971aaa9fb8ad69ff0ead0acc00d8d2 Binary 
files /dev/null and b/messaging/__pycache__/limiter.cpython-314.pyc differ diff --git a/messaging/__pycache__/models.cpython-314.pyc b/messaging/__pycache__/models.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..708bd5e786a6f664106398e5bf45f369e2ad4cfa Binary files /dev/null and b/messaging/__pycache__/models.cpython-314.pyc differ diff --git a/messaging/__pycache__/node_event_pipeline.cpython-314.pyc b/messaging/__pycache__/node_event_pipeline.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1ac3d632da3fb9c1bf324e973b59e35a3f105eb0 Binary files /dev/null and b/messaging/__pycache__/node_event_pipeline.cpython-314.pyc differ diff --git a/messaging/__pycache__/safe_diagnostics.cpython-314.pyc b/messaging/__pycache__/safe_diagnostics.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9602f140a20fcb9d602e456412fd20808ad829bf Binary files /dev/null and b/messaging/__pycache__/safe_diagnostics.cpython-314.pyc differ diff --git a/messaging/__pycache__/session.cpython-314.pyc b/messaging/__pycache__/session.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6b3f632ed4b55f0c2bccd863fec119e8e3d79fc1 Binary files /dev/null and b/messaging/__pycache__/session.cpython-314.pyc differ diff --git a/messaging/__pycache__/transcript.cpython-314.pyc b/messaging/__pycache__/transcript.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c906880516fcad3bdba0f410d1a94cfb13d7c345 Binary files /dev/null and b/messaging/__pycache__/transcript.cpython-314.pyc differ diff --git a/messaging/__pycache__/ui_updates.cpython-314.pyc b/messaging/__pycache__/ui_updates.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..45257be926c9775b859ea6c71111bf5c332335b8 Binary files /dev/null and b/messaging/__pycache__/ui_updates.cpython-314.pyc differ diff --git a/messaging/cli_event_constants.py 
b/messaging/cli_event_constants.py new file mode 100644 index 0000000000000000000000000000000000000000..3d531ec5c417b17ceb211352fff192b9b623fca7 --- /dev/null +++ b/messaging/cli_event_constants.py @@ -0,0 +1,67 @@ +"""CLI event types and status-line mapping for transcript / UI updates.""" + +from collections.abc import Callable +from typing import Any + +# Status message prefixes used to filter our own messages (ignore echo) +STATUS_MESSAGE_PREFIXES = ( + "⏳", + "💭", + "🔧", + "✅", + "❌", + "🚀", + "🤖", + "📋", + "📊", + "🔄", +) + +# Event types that update the transcript (frozenset for O(1) membership) +TRANSCRIPT_EVENT_TYPES = frozenset( + { + "thinking_start", + "thinking_delta", + "thinking_chunk", + "thinking_stop", + "text_start", + "text_delta", + "text_chunk", + "text_stop", + "tool_use_start", + "tool_use_delta", + "tool_use_stop", + "tool_use", + "tool_result", + "block_stop", + "error", + } +) + +# Event type -> (emoji, label) for status updates (O(1) lookup) +_EVENT_STATUS_MAP: dict[str, tuple[str, str]] = { + "thinking_start": ("🧠", "Claude is thinking..."), + "thinking_delta": ("🧠", "Claude is thinking..."), + "thinking_chunk": ("🧠", "Claude is thinking..."), + "text_start": ("🧠", "Claude is working..."), + "text_delta": ("🧠", "Claude is working..."), + "text_chunk": ("🧠", "Claude is working..."), + "tool_result": ("⏳", "Executing tools..."), +} + + +def get_status_for_event( + ptype: str, + parsed: dict[str, Any], + format_status_fn: Callable[..., str], +) -> str | None: + """Return status string for event type, or None if no status update needed.""" + entry = _EVENT_STATUS_MAP.get(ptype) + if entry is not None: + emoji, label = entry + return format_status_fn(emoji, label) + if ptype in ("tool_use_start", "tool_use_delta", "tool_use"): + if parsed.get("name") == "Task": + return format_status_fn("🤖", "Subagent working...") + return format_status_fn("⏳", "Executing tools...") + return None diff --git a/messaging/command_dispatcher.py 
def parse_command_base(text: str | None) -> str:
    """Return the leading slash command with any ``@botname`` suffix removed.

    Empty, None, or whitespace-only input yields the empty string.
    """
    tokens = (text or "").split()
    if not tokens:
        return ""
    # Telegram-style mentions look like "/cmd@botname"; keep only "/cmd".
    return tokens[0].split("@", 1)[0]
+""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from loguru import logger + +if TYPE_CHECKING: + from messaging.handler import ClaudeMessageHandler + from messaging.models import IncomingMessage + + +async def handle_stop_command( + handler: ClaudeMessageHandler, incoming: IncomingMessage +) -> None: + """Handle /stop command from messaging platform.""" + # Reply-scoped stop: reply "/stop" to stop only that task. + if incoming.is_reply() and incoming.reply_to_message_id: + reply_id = incoming.reply_to_message_id + tree = handler.tree_queue.get_tree_for_node(reply_id) + node_id = handler.tree_queue.resolve_parent_node_id(reply_id) if tree else None + + if not node_id: + msg_id = await handler.platform.queue_send_message( + incoming.chat_id, + handler.format_status( + "⏹", "Stopped.", "Nothing to stop for that message." + ), + fire_and_forget=False, + message_thread_id=incoming.message_thread_id, + ) + handler.record_outgoing_message( + incoming.platform, incoming.chat_id, msg_id, "command" + ) + return + + count = await handler.stop_task(node_id) + noun = "request" if count == 1 else "requests" + msg_id = await handler.platform.queue_send_message( + incoming.chat_id, + handler.format_status("⏹", "Stopped.", f"Cancelled {count} {noun}."), + fire_and_forget=False, + message_thread_id=incoming.message_thread_id, + ) + handler.record_outgoing_message( + incoming.platform, incoming.chat_id, msg_id, "command" + ) + return + + # Global stop: legacy behavior (stop everything) + count = await handler.stop_all_tasks() + msg_id = await handler.platform.queue_send_message( + incoming.chat_id, + handler.format_status( + "⏹", "Stopped.", f"Cancelled {count} pending or active requests." 
async def _delete_message_ids(
    handler: ClaudeMessageHandler, chat_id: str, msg_ids: set[str]
) -> None:
    """Best-effort delete messages by ID. Sorts numeric IDs descending."""
    if not msg_ids:
        return

    # Split IDs into numeric (sorted newest-first) and everything else.
    numeric: list[tuple[int, str]] = []
    non_numeric: list[str] = []
    for mid in msg_ids:
        try:
            numeric.append((int(str(mid)), mid))
        except Exception:
            non_numeric.append(mid)
    numeric.sort(reverse=True)

    ordered = [mid for _, mid in numeric]
    ordered.extend(non_numeric)

    batch_size = 100
    try:
        # Delete in batches; a single failure aborts the remaining batches
        # (best-effort semantics, logged at debug level only).
        for start in range(0, len(ordered), batch_size):
            await handler.platform.queue_delete_messages(
                chat_id, ordered[start : start + batch_size], fire_and_forget=False
            )
    except Exception as e:
        logger.debug(f"Batch delete failed: {type(e).__name__}: {e}")
+ """ + tree = handler.tree_queue.get_tree_for_node(branch_root_id) + if not tree: + return + + # 1) Cancel branch tasks (no stop_all) + cancelled = await handler.tree_queue.cancel_branch(branch_root_id) + handler.update_cancelled_nodes_ui(cancelled) + + # 2) Collect message IDs from branch nodes only + msg_ids: set[str] = set() + branch_ids = tree.get_descendants(branch_root_id) + for nid in branch_ids: + node = tree.get_node(nid) + if node: + if node.incoming.message_id: + msg_ids.add(str(node.incoming.message_id)) + if node.status_message_id: + msg_ids.add(str(node.status_message_id)) + if incoming.message_id: + msg_ids.add(str(incoming.message_id)) + + # 3) Delete messages (best-effort) + await _delete_message_ids(handler, incoming.chat_id, msg_ids) + + # 4) Remove branch from tree + removed, root_id, removed_entire_tree = await handler.tree_queue.remove_branch( + branch_root_id + ) + + # 5) Update session store + try: + handler.session_store.remove_node_mappings([n.node_id for n in removed]) + if removed_entire_tree: + handler.session_store.remove_tree(root_id) + else: + updated_tree = handler.tree_queue.get_tree(root_id) + if updated_tree: + handler.session_store.save_tree(root_id, updated_tree.to_dict()) + except Exception as e: + logger.warning(f"Failed to update session store after branch clear: {e}") + + +async def handle_clear_command( + handler: ClaudeMessageHandler, incoming: IncomingMessage +) -> None: + """ + Handle /clear command. + + Reply-scoped: reply to a message to clear that branch (node + descendants). + Standalone: global clear (stop all, delete all chat messages, reset store). 
+ """ + from messaging.trees import TreeQueueManager + + if incoming.is_reply() and incoming.reply_to_message_id: + reply_id = incoming.reply_to_message_id + tree = handler.tree_queue.get_tree_for_node(reply_id) + branch_root_id = ( + handler.tree_queue.resolve_parent_node_id(reply_id) if tree else None + ) + if not branch_root_id: + cancel_fn = getattr(handler.platform, "cancel_pending_voice", None) + if cancel_fn is not None: + cancelled = await cancel_fn(incoming.chat_id, reply_id) + if cancelled is not None: + voice_msg_id, status_msg_id = cancelled + msg_ids_to_del: set[str] = {voice_msg_id, status_msg_id} + if incoming.message_id is not None: + msg_ids_to_del.add(str(incoming.message_id)) + await _delete_message_ids(handler, incoming.chat_id, msg_ids_to_del) + msg_id = await handler.platform.queue_send_message( + incoming.chat_id, + handler.format_status("🗑", "Cleared.", "Voice note cancelled."), + fire_and_forget=False, + message_thread_id=incoming.message_thread_id, + ) + handler.record_outgoing_message( + incoming.platform, incoming.chat_id, msg_id, "command" + ) + return + msg_id = await handler.platform.queue_send_message( + incoming.chat_id, + handler.format_status( + "🗑", "Cleared.", "Nothing to clear for that message." + ), + fire_and_forget=False, + message_thread_id=incoming.message_thread_id, + ) + handler.record_outgoing_message( + incoming.platform, incoming.chat_id, msg_id, "command" + ) + return + await _handle_clear_branch(handler, incoming, branch_root_id) + return + + # Global clear + # 1) Stop tasks first (ensures no more work is running). + await handler.stop_all_tasks() + + # 2) Clear chat: best-effort delete messages we can identify. + msg_ids: set[str] = set() + + # Add any recorded message IDs for this chat (commands, command replies, etc). 
+ try: + for mid in handler.session_store.get_message_ids_for_chat( + incoming.platform, incoming.chat_id + ): + if mid is not None: + msg_ids.add(str(mid)) + except Exception as e: + logger.debug(f"Failed to read message log for /clear: {e}") + + try: + msg_ids.update( + handler.tree_queue.get_message_ids_for_chat( + incoming.platform, incoming.chat_id + ) + ) + except Exception as e: + logger.warning(f"Failed to gather messages for /clear: {e}") + + # Also delete the command message itself. + if incoming.message_id is not None: + msg_ids.add(str(incoming.message_id)) + + await _delete_message_ids(handler, incoming.chat_id, msg_ids) + + # 3) Clear persistent state and reset in-memory queue/tree state. + try: + handler.session_store.clear_all() + except Exception as e: + logger.warning(f"Failed to clear session store: {e}") + + handler.replace_tree_queue( + TreeQueueManager( + queue_update_callback=handler.update_queue_positions, + node_started_callback=handler.mark_node_processing, + ) + ) diff --git a/messaging/event_parser.py b/messaging/event_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..87eb82a86dda7fd2608ffd0dc682ac93148abea7 --- /dev/null +++ b/messaging/event_parser.py @@ -0,0 +1,181 @@ +"""CLI event parser for Claude Code CLI output. + +This parser emits an ordered stream of low-level events suitable for building a +Claude Code-like transcript in messaging UIs. +""" + +from typing import Any + +from loguru import logger + + +def parse_cli_event(event: Any, *, log_raw_cli: bool = False) -> list[dict]: + """ + Parse a CLI event and return a structured result. + + Args: + event: Raw event dictionary from CLI + log_raw_cli: When True, log full error text from the CLI. Default is + metadata-only (lengths / exit codes) to avoid leaking user content. + + Returns: + List of parsed event dicts. Empty list if not recognized. 
+ """ + if not isinstance(event, dict): + return [] + + etype = event.get("type") + results: list[dict[str, Any]] = [] + + # Some CLI/proxy layers emit "system" events that are not user-visible and + # carry no transcript content. Ignore them explicitly to avoid noisy logs. + if etype == "system": + return [] + + # 1. Handle full messages (assistant/user or result) + msg_obj = None + if etype == "assistant" or etype == "user": + msg_obj = event.get("message") + elif etype == "result": + res = event.get("result") + if isinstance(res, dict): + msg_obj = res.get("message") + # Some variants put content directly on the result. + if not msg_obj and isinstance(res.get("content"), list): + msg_obj = {"content": res.get("content")} + if not msg_obj: + msg_obj = event.get("message") + # Some variants put content directly on the event. + if not msg_obj and isinstance(event.get("content"), list): + msg_obj = {"content": event.get("content")} + + if msg_obj and isinstance(msg_obj, dict): + content = msg_obj.get("content", []) + if isinstance(content, list): + # Preserve order exactly as content blocks appear. + for c in content: + if not isinstance(c, dict): + continue + ctype = c.get("type") + if ctype == "text": + results.append({"type": "text_chunk", "text": c.get("text", "")}) + elif ctype == "thinking": + results.append( + {"type": "thinking_chunk", "text": c.get("thinking", "")} + ) + elif ctype == "tool_use": + results.append( + { + "type": "tool_use", + "id": str(c.get("id", "") or "").strip(), + "name": c.get("name", ""), + "input": c.get("input"), + } + ) + elif ctype == "tool_result": + results.append( + { + "type": "tool_result", + "tool_use_id": str(c.get("tool_use_id", "") or "").strip(), + "content": c.get("content"), + "is_error": bool(c.get("is_error", False)), + } + ) + + if results: + return results + + # 2. 
Handle streaming deltas + if etype == "content_block_delta": + delta = event.get("delta", {}) + if isinstance(delta, dict): + if delta.get("type") == "text_delta": + return [ + { + "type": "text_delta", + "index": event.get("index", -1), + "text": delta.get("text", ""), + } + ] + if delta.get("type") == "thinking_delta": + return [ + { + "type": "thinking_delta", + "index": event.get("index", -1), + "text": delta.get("thinking", ""), + } + ] + if delta.get("type") == "input_json_delta": + return [ + { + "type": "tool_use_delta", + "index": event.get("index", -1), + "partial_json": delta.get("partial_json", ""), + } + ] + + # 3. Handle tool usage start + if etype == "content_block_start": + block = event.get("content_block", {}) + if isinstance(block, dict): + btype = block.get("type") + if btype == "thinking": + return [{"type": "thinking_start", "index": event.get("index", -1)}] + if btype == "text": + return [{"type": "text_start", "index": event.get("index", -1)}] + if btype == "tool_use": + return [ + { + "type": "tool_use_start", + "index": event.get("index", -1), + "id": str(block.get("id", "") or "").strip(), + "name": block.get("name", ""), + "input": block.get("input"), + } + ] + + # 3.5 Handle block stop (to close open streaming segments) + if etype == "content_block_stop": + return [{"type": "block_stop", "index": event.get("index", -1)}] + + # 4. 
Handle errors and exit + if etype == "error": + err = event.get("error") + msg = err.get("message") if isinstance(err, dict) else str(err) + if log_raw_cli: + logger.info("CLI_PARSER: Parsed error event: {}", msg) + else: + mlen = len(msg) if isinstance(msg, str) else 0 + logger.info("CLI_PARSER: Parsed error event: message_chars={}", mlen) + return [{"type": "error", "message": msg}] + elif etype == "exit": + code = event.get("code", 0) + stderr = event.get("stderr") + if code == 0: + logger.debug(f"CLI_PARSER: Successful exit (code={code})") + return [{"type": "complete", "status": "success"}] + else: + # Non-zero exit is an error + error_msg = stderr if stderr else f"Process exited with code {code}" + if log_raw_cli: + logger.warning( + "CLI_PARSER: Error exit (code={}): {}", + code, + error_msg, + ) + else: + em = error_msg if isinstance(error_msg, str) else str(error_msg) + logger.warning( + "CLI_PARSER: Error exit (code={}): message_chars={}", + code, + len(em), + ) + return [ + {"type": "error", "message": error_msg}, + {"type": "complete", "status": "failed"}, + ] + + # Log unrecognized events for debugging + if etype: + logger.debug(f"CLI_PARSER: Unrecognized event type: {etype}") + return [] diff --git a/messaging/handler.py b/messaging/handler.py new file mode 100644 index 0000000000000000000000000000000000000000..0a5e4e01928bca3f3dc446260bb1d2e25ee15a00 --- /dev/null +++ b/messaging/handler.py @@ -0,0 +1,618 @@ +""" +Claude Message Handler + +Platform-agnostic Claude interaction logic. +Handles the core workflow of processing user messages via Claude CLI. +Uses tree-based queuing for message ordering. 
+""" + +import asyncio + +from loguru import logger + +from core.anthropic import format_user_error_preview, get_user_facing_error_message + +from .cli_event_constants import STATUS_MESSAGE_PREFIXES +from .command_dispatcher import ( + dispatch_command, + message_kind_for_command, + parse_command_base, +) +from .event_parser import parse_cli_event +from .models import IncomingMessage +from .node_event_pipeline import handle_session_info_event, process_parsed_cli_event +from .platforms.base import MessagingPlatform, SessionManagerInterface +from .rendering.profiles import build_rendering_profile +from .safe_diagnostics import format_exception_for_log +from .session import SessionStore +from .transcript import RenderCtx, TranscriptBuffer +from .trees.queue_manager import ( + MessageNode, + MessageState, + MessageTree, + TreeQueueManager, +) +from .ui_updates import ThrottledTranscriptEditor + + +class ClaudeMessageHandler: + """ + Platform-agnostic handler for Claude interactions. + + Uses a tree-based message queue where: + - New messages create a tree root + - Replies become children of the message being replied to + - Each node has state: PENDING, IN_PROGRESS, COMPLETED, ERROR + - Per-tree queue ensures ordered processing + """ + + def __init__( + self, + platform: MessagingPlatform, + cli_manager: SessionManagerInterface, + session_store: SessionStore, + *, + debug_platform_edits: bool = False, + debug_subagent_stack: bool = False, + log_raw_messaging_content: bool = False, + log_raw_cli_diagnostics: bool = False, + log_messaging_error_details: bool = False, + ): + self.platform = platform + self.cli_manager = cli_manager + self.session_store = session_store + self._debug_platform_edits = debug_platform_edits + self._debug_subagent_stack = debug_subagent_stack + self._log_raw_messaging_content = log_raw_messaging_content + self._log_raw_cli_diagnostics = log_raw_cli_diagnostics + self._log_messaging_error_details = log_messaging_error_details + self._tree_queue = 
TreeQueueManager( + queue_update_callback=self.update_queue_positions, + node_started_callback=self.mark_node_processing, + ) + self._rendering_profile = build_rendering_profile(platform.name) + + def format_status(self, emoji: str, label: str, suffix: str | None = None) -> str: + return self._rendering_profile.format_status(emoji, label, suffix) + + def _parse_mode(self) -> str | None: + return self._rendering_profile.parse_mode + + def get_render_ctx(self) -> RenderCtx: + return self._rendering_profile.render_ctx + + def _get_limit_chars(self) -> int: + return self._rendering_profile.limit_chars + + @property + def tree_queue(self) -> TreeQueueManager: + """Accessor for the current tree queue manager.""" + return self._tree_queue + + def replace_tree_queue(self, tree_queue: TreeQueueManager) -> None: + """Replace tree queue manager via explicit API.""" + self._tree_queue = tree_queue + self._tree_queue.set_queue_update_callback(self.update_queue_positions) + self._tree_queue.set_node_started_callback(self.mark_node_processing) + + async def handle_message(self, incoming: IncomingMessage) -> None: + """ + Main entry point for handling an incoming message. + + Determines if this is a new conversation or reply, + creates/extends the message tree, and queues for processing. + """ + raw = incoming.text or "" + if self._log_raw_messaging_content: + text_preview = raw[:80] + if len(raw) > 80: + text_preview += "..." 
+ logger.info( + "HANDLER_ENTRY: chat_id={} message_id={} reply_to={} text_preview={!r}", + incoming.chat_id, + incoming.message_id, + incoming.reply_to_message_id, + text_preview, + ) + else: + logger.info( + "HANDLER_ENTRY: chat_id={} message_id={} reply_to={} text_len={}", + incoming.chat_id, + incoming.message_id, + incoming.reply_to_message_id, + len(raw), + ) + + with logger.contextualize( + chat_id=incoming.chat_id, node_id=incoming.message_id + ): + await self._handle_message_impl(incoming) + + async def _handle_message_impl(self, incoming: IncomingMessage) -> None: + """Implementation of handle_message with context bound.""" + cmd_base = parse_command_base(incoming.text) + + # Record incoming message ID for best-effort UI clearing (/clear), even if + # we later ignore this message (status/command/etc). + try: + if incoming.message_id is not None: + self.session_store.record_message_id( + incoming.platform, + incoming.chat_id, + str(incoming.message_id), + direction="in", + kind=message_kind_for_command(cmd_base), + ) + except Exception as e: + logger.debug( + "Failed to record incoming message_id: {}", + format_exception_for_log( + e, log_full_message=self._log_messaging_error_details + ), + ) + + if await dispatch_command(self, incoming, cmd_base): + return + + # Filter out status messages (our own messages) + text = incoming.text or "" + if any(text.startswith(p) for p in STATUS_MESSAGE_PREFIXES): + return + + # Check if this is a reply to an existing node in a tree + parent_node_id = None + tree = None + + if incoming.is_reply() and incoming.reply_to_message_id: + # Look up if the replied-to message is in any tree (could be a node or status message) + reply_id = incoming.reply_to_message_id + tree = self.tree_queue.get_tree_for_node(reply_id) + if tree: + # Resolve to actual node ID (handles status message replies) + parent_node_id = self.tree_queue.resolve_parent_node_id(reply_id) + if parent_node_id: + logger.info(f"Found tree for reply, parent node: 
{parent_node_id}") + else: + logger.warning( + f"Reply to {incoming.reply_to_message_id} found tree but no valid parent node" + ) + tree = None # Treat as new conversation + + # Generate node ID + node_id = incoming.message_id + + # Use pre-sent status (e.g. voice note) or send new + status_text = self._get_initial_status(tree, parent_node_id) + if incoming.status_message_id: + status_msg_id = incoming.status_message_id + await self.platform.queue_edit_message( + incoming.chat_id, + status_msg_id, + status_text, + parse_mode=self._parse_mode(), + fire_and_forget=False, + ) + else: + status_msg_id = await self.platform.queue_send_message( + incoming.chat_id, + status_text, + reply_to=incoming.message_id, + fire_and_forget=False, + message_thread_id=incoming.message_thread_id, + ) + self.record_outgoing_message( + incoming.platform, incoming.chat_id, status_msg_id, "status" + ) + + # Create or extend tree + if parent_node_id and tree and status_msg_id: + # Reply to existing node - add as child + tree, _node = await self.tree_queue.add_to_tree( + parent_node_id=parent_node_id, + node_id=node_id, + incoming=incoming, + status_message_id=status_msg_id, + ) + # Register status message as a node too for reply chains + self.tree_queue.register_node(status_msg_id, tree.root_id) + self.session_store.register_node(status_msg_id, tree.root_id) + self.session_store.register_node(node_id, tree.root_id) + elif status_msg_id: + # New conversation - create new tree + tree = await self.tree_queue.create_tree( + node_id=node_id, + incoming=incoming, + status_message_id=status_msg_id, + ) + # Register status message + self.tree_queue.register_node(status_msg_id, tree.root_id) + self.session_store.register_node(node_id, tree.root_id) + self.session_store.register_node(status_msg_id, tree.root_id) + + # Persist tree + if tree: + self.session_store.save_tree(tree.root_id, tree.to_dict()) + + # Enqueue for processing + was_queued = await self.tree_queue.enqueue( + node_id=node_id, + 
processor=self._process_node, + ) + + if was_queued and status_msg_id: + # Update status to show queue position + queue_size = self.tree_queue.get_queue_size(node_id) + await self.platform.queue_edit_message( + incoming.chat_id, + status_msg_id, + self.format_status( + "📋", "Queued", f"(position {queue_size}) - waiting..." + ), + parse_mode=self._parse_mode(), + ) + + async def update_queue_positions(self, tree: MessageTree) -> None: + """Refresh queued status messages after a dequeue.""" + try: + queued_ids = await tree.get_queue_snapshot() + except Exception as e: + logger.warning( + "Failed to read queue snapshot: {}", + format_exception_for_log( + e, log_full_message=self._log_messaging_error_details + ), + ) + return + + if not queued_ids: + return + + position = 0 + for node_id in queued_ids: + node = tree.get_node(node_id) + if not node or node.state != MessageState.PENDING: + continue + position += 1 + self.platform.fire_and_forget( + self.platform.queue_edit_message( + node.incoming.chat_id, + node.status_message_id, + self.format_status( + "📋", "Queued", f"(position {position}) - waiting..." 
+ ), + parse_mode=self._parse_mode(), + ) + ) + + async def mark_node_processing(self, tree: MessageTree, node_id: str) -> None: + """Update the dequeued node's status to processing immediately.""" + node = tree.get_node(node_id) + if not node or node.state == MessageState.ERROR: + return + self.platform.fire_and_forget( + self.platform.queue_edit_message( + node.incoming.chat_id, + node.status_message_id, + self.format_status("🔄", "Processing..."), + parse_mode=self._parse_mode(), + ) + ) + + def _create_transcript_and_render_ctx( + self, + ) -> tuple[TranscriptBuffer, RenderCtx]: + """Create transcript buffer and render context for node processing.""" + transcript = TranscriptBuffer( + show_tool_results=False, + debug_subagent_stack=self._debug_subagent_stack, + ) + return transcript, self.get_render_ctx() + + async def _process_node( + self, + node_id: str, + node: MessageNode, + ) -> None: + """Core task processor - handles a single Claude CLI interaction.""" + incoming = node.incoming + status_msg_id = node.status_message_id + chat_id = incoming.chat_id + + with logger.contextualize(node_id=node_id, chat_id=chat_id): + await self._process_node_impl(node_id, node, chat_id, status_msg_id) + + async def _process_node_impl( + self, + node_id: str, + node: MessageNode, + chat_id: str, + status_msg_id: str, + ) -> None: + """Internal implementation of _process_node with context bound.""" + incoming = node.incoming + + tree = self.tree_queue.get_tree_for_node(node_id) + if tree: + await tree.update_state(node_id, MessageState.IN_PROGRESS) + + transcript, render_ctx = self._create_transcript_and_render_ctx() + + had_transcript_events = False + captured_session_id = None + temp_session_id = None + last_status: str | None = None + + parent_session_id = None + if tree and node.parent_id: + parent_session_id = tree.get_parent_session_id(node_id) + if parent_session_id: + logger.info(f"Will fork from parent session: {parent_session_id}") + + editor = 
ThrottledTranscriptEditor( + platform=self.platform, + parse_mode=self._parse_mode(), + get_limit_chars=self._get_limit_chars, + transcript=transcript, + render_ctx=render_ctx, + node_id=node_id, + chat_id=chat_id, + status_msg_id=status_msg_id, + debug_platform_edits=self._debug_platform_edits, + log_messaging_error_details=self._log_messaging_error_details, + ) + + async def update_ui(status: str | None = None, force: bool = False) -> None: + await editor.update(status, force=force) + + try: + try: + ( + cli_session, + session_or_temp_id, + is_new, + ) = await self.cli_manager.get_or_create_session( + session_id=parent_session_id + ) + if is_new: + temp_session_id = session_or_temp_id + else: + captured_session_id = session_or_temp_id + except RuntimeError as e: + error_message = get_user_facing_error_message(e) + transcript.apply({"type": "error", "message": error_message}) + await update_ui( + self.format_status("⏳", "Session limit reached"), + force=True, + ) + if tree: + await tree.update_state( + node_id, + MessageState.ERROR, + error_message=error_message, + ) + return + + logger.info(f"HANDLER: Starting CLI task processing for node {node_id}") + event_count = 0 + async for event_data in cli_session.start_task( + incoming.text, + session_id=parent_session_id, + fork_session=bool(parent_session_id), + ): + if not isinstance(event_data, dict): + logger.warning( + f"HANDLER: Non-dict event received: {type(event_data)}" + ) + continue + event_count += 1 + if event_count % 10 == 0: + logger.debug(f"HANDLER: Processed {event_count} events so far") + + ( + captured_session_id, + temp_session_id, + ) = await handle_session_info_event( + event_data, + tree, + node_id, + captured_session_id, + temp_session_id, + cli_manager=self.cli_manager, + session_store=self.session_store, + ) + if event_data.get("type") == "session_info": + continue + + parsed_list = parse_cli_event( + event_data, log_raw_cli=self._log_raw_cli_diagnostics + ) + logger.debug(f"HANDLER: Parsed 
{len(parsed_list)} events from CLI") + + for parsed in parsed_list: + ( + last_status, + had_transcript_events, + ) = await process_parsed_cli_event( + parsed, + transcript, + update_ui, + last_status, + had_transcript_events, + tree, + node_id, + captured_session_id, + session_store=self.session_store, + format_status=self.format_status, + propagate_error_to_children=self._propagate_error_to_children, + log_messaging_error_details=self._log_messaging_error_details, + ) + + except asyncio.CancelledError: + logger.warning(f"HANDLER: Task cancelled for node {node_id}") + cancel_reason = None + if isinstance(node.context, dict): + cancel_reason = node.context.get("cancel_reason") + + if cancel_reason == "stop": + await update_ui(self.format_status("⏹", "Stopped."), force=True) + else: + transcript.apply({"type": "error", "message": "Task was cancelled"}) + await update_ui(self.format_status("❌", "Cancelled"), force=True) + + # Do not propagate cancellation to children; a reply-scoped "/stop" + # should only stop the targeted task. + if tree: + await tree.update_state( + node_id, MessageState.ERROR, error_message="Cancelled by user" + ) + except Exception as e: + logger.error( + "HANDLER: Task failed with exception: {}", + format_exception_for_log( + e, log_full_message=self._log_messaging_error_details + ), + ) + error_msg = format_user_error_preview(e) + transcript.apply({"type": "error", "message": error_msg}) + await update_ui(self.format_status("💥", "Task Failed"), force=True) + if tree: + await self._propagate_error_to_children( + node_id, error_msg, "Parent task failed" + ) + finally: + logger.info(f"HANDLER: _process_node completed for node {node_id}") + # Free the session-manager slot. Session IDs are persisted in the tree and + # can be resumed later by ID; we don't need to keep a CLISession instance + # around after this node completes. 
+ try: + if captured_session_id: + await self.cli_manager.remove_session(captured_session_id) + elif temp_session_id: + await self.cli_manager.remove_session(temp_session_id) + except Exception as e: + logger.debug( + "Failed to remove session for node {}: {}", + node_id, + format_exception_for_log( + e, log_full_message=self._log_messaging_error_details + ), + ) + + async def _propagate_error_to_children( + self, + node_id: str, + error_msg: str, + child_status_text: str, + ) -> None: + """Mark node as error and propagate to pending children with UI updates.""" + affected = await self.tree_queue.mark_node_error( + node_id, error_msg, propagate_to_children=True + ) + # Update status messages for all affected children (skip first = current node) + for child in affected[1:]: + self.platform.fire_and_forget( + self.platform.queue_edit_message( + child.incoming.chat_id, + child.status_message_id, + self.format_status("❌", "Cancelled:", child_status_text), + parse_mode=self._parse_mode(), + ) + ) + + def _get_initial_status( + self, + tree: object | None, + parent_node_id: str | None, + ) -> str: + """Get initial status message text.""" + if tree and parent_node_id: + # Reply to existing tree + if self.tree_queue.is_node_tree_busy(parent_node_id): + queue_size = self.tree_queue.get_queue_size(parent_node_id) + 1 + return self.format_status( + "📋", "Queued", f"(position {queue_size}) - waiting..." + ) + return self.format_status("🔄", "Continuing conversation...") + + # New conversation + return self.format_status("⏳", "Launching new Claude CLI instance...") + + async def stop_all_tasks(self) -> int: + """ + Stop all pending and in-progress tasks. + + Order of operations: + 1. Cancel tree queue tasks (uses internal locking) + 2. Stop CLI sessions + 3. Update UI for all affected nodes + """ + # 1. 
Cancel tree queue tasks using the public async method + logger.info("Cancelling tree queue tasks...") + cancelled_nodes = await self.tree_queue.cancel_all() + logger.info(f"Cancelled {len(cancelled_nodes)} nodes") + + # 2. Stop CLI sessions - this kills subprocesses and ensures everything is dead + logger.info("Stopping all CLI sessions...") + await self.cli_manager.stop_all() + + # 3. Update UI and persist state for all cancelled nodes + self.update_cancelled_nodes_ui(cancelled_nodes) + + return len(cancelled_nodes) + + async def stop_task(self, node_id: str) -> int: + """ + Stop a single queued or in-progress task node. + + Used when the user replies "/stop" to a specific status/user message. + """ + tree = self.tree_queue.get_tree_for_node(node_id) + if tree: + node = tree.get_node(node_id) + if node and node.state not in (MessageState.COMPLETED, MessageState.ERROR): + # Used by _process_node cancellation path to render "Stopped." + node.set_context({"cancel_reason": "stop"}) + + cancelled_nodes = await self.tree_queue.cancel_node(node_id) + self.update_cancelled_nodes_ui(cancelled_nodes) + return len(cancelled_nodes) + + def record_outgoing_message( + self, + platform: str, + chat_id: str, + msg_id: str | None, + kind: str, + ) -> None: + """Record outgoing message ID for /clear. 
Best-effort, never raises.""" + if not msg_id: + return + try: + self.session_store.record_message_id( + platform, chat_id, str(msg_id), direction="out", kind=kind + ) + except Exception as e: + logger.debug( + "Failed to record message_id: {}", + format_exception_for_log( + e, log_full_message=self._log_messaging_error_details + ), + ) + + def update_cancelled_nodes_ui(self, nodes: list[MessageNode]) -> None: + """Update status messages and persist tree state for cancelled nodes.""" + trees_to_save: dict[str, MessageTree] = {} + for node in nodes: + self.platform.fire_and_forget( + self.platform.queue_edit_message( + node.incoming.chat_id, + node.status_message_id, + self.format_status("⏹", "Stopped."), + parse_mode=self._parse_mode(), + ) + ) + tree = self.tree_queue.get_tree_for_node(node.node_id) + if tree: + trees_to_save[tree.root_id] = tree + for root_id, tree in trees_to_save.items(): + self.session_store.save_tree(root_id, tree.to_dict()) diff --git a/messaging/limiter.py b/messaging/limiter.py new file mode 100644 index 0000000000000000000000000000000000000000..648d4a8c891cf7531596e48c7fad9d23a989cd3e --- /dev/null +++ b/messaging/limiter.py @@ -0,0 +1,300 @@ +""" +Global Rate Limiter for Messaging Platforms. + +Centralizes outgoing message requests and ensures compliance with rate limits +using a strict sliding window algorithm and a task queue. +""" + +import asyncio +from collections import deque +from collections.abc import Awaitable, Callable +from typing import Any + +from loguru import logger + +from config.settings import get_settings +from core.rate_limit import StrictSlidingWindowLimiter as SlidingWindowLimiter + +from .safe_diagnostics import format_exception_for_log + + +class MessagingRateLimiter: + """ + A thread-safe global rate limiter for messaging. + + Uses a custom queue with task compaction (deduplication) to ensure + only the latest version of a message update is processed. 
+ """ + + _instance: MessagingRateLimiter | None = None + _lock = asyncio.Lock() + + def __new__(cls, *args, **kwargs): + return super().__new__(cls) + + @classmethod + async def get_instance( + cls, + *, + rate_limit: int = 1, + rate_window: float = 1.0, + ) -> MessagingRateLimiter: + """Get the singleton instance of the limiter. + + ``rate_limit`` and ``rate_window`` apply only when the singleton is first + created. Call :meth:`shutdown_instance` before changing parameters. + """ + async with cls._lock: + if cls._instance is None: + cls._instance = cls(rate_limit=rate_limit, rate_window=rate_window) + # Start the background worker (tracked for graceful shutdown). + cls._instance._start_worker() + return cls._instance + + def __init__(self, *, rate_limit: int, rate_window: float) -> None: + # Prevent double initialization in singleton + if hasattr(self, "_initialized"): + return + + self.limiter = SlidingWindowLimiter(rate_limit, rate_window) + # Custom queue state - using deque for O(1) popleft + self._queue_list: deque[str] = deque() # Deque of dedup_keys in order + self._queue_map: dict[ + str, tuple[Callable[[], Awaitable[Any]], list[asyncio.Future]] + ] = {} + self._condition = asyncio.Condition() + self._shutdown = asyncio.Event() + self._worker_task: asyncio.Task | None = None + + self._initialized = True + self._paused_until = 0 + + logger.info( + f"MessagingRateLimiter initialized ({rate_limit} req / {rate_window}s with Task Compaction)" + ) + + def _start_worker(self) -> None: + """Ensure the worker task exists.""" + if self._worker_task and not self._worker_task.done(): + return + # Named task helps debugging shutdown hangs. 
+ self._worker_task = asyncio.create_task( + self._worker(), name="msg-limiter-worker" + ) + + async def _worker(self): + """Background worker that processes queued messaging tasks.""" + logger.info("MessagingRateLimiter worker started") + while not self._shutdown.is_set(): + try: + # Get a task from the queue + async with self._condition: + while not self._queue_list and not self._shutdown.is_set(): + await self._condition.wait() + + if self._shutdown.is_set(): + break + + dedup_key = self._queue_list.popleft() + func, futures = self._queue_map.pop(dedup_key) + + # Check for manual pause (FloodWait) + now = asyncio.get_event_loop().time() + if self._paused_until > now: + wait_time = self._paused_until - now + logger.warning( + f"Limiter worker paused, waiting {wait_time:.1f}s more..." + ) + await asyncio.sleep(wait_time) + + # Wait for rate limit capacity + async with self.limiter: + try: + result = await func() + for f in futures: + if not f.done(): + f.set_result(result) + except Exception as e: + # Report error to all futures and log it + for f in futures: + if not f.done(): + f.set_exception(e) + + error_msg = str(e).lower() + if "flood" in error_msg or "wait" in error_msg: + seconds = 30 + try: + if hasattr(e, "seconds"): + seconds = e.seconds + elif "after " in error_msg: + # Try to parse "retry after X" + parts = error_msg.split("after ") + if len(parts) > 1: + seconds = int(parts[1].split()[0]) + except Exception: + pass + + logger.error( + f"FloodWait detected! 
Pausing worker for {seconds}s" + ) + wait_secs = ( + float(seconds) + if isinstance(seconds, (int, float, str)) + else 30.0 + ) + self._paused_until = ( + asyncio.get_event_loop().time() + wait_secs + ) + else: + d = get_settings().log_messaging_error_details + logger.error( + "Error in limiter worker for key {}: {}", + dedup_key, + format_exception_for_log(e, log_full_message=d), + ) + except asyncio.CancelledError: + break + except Exception as e: + d = get_settings().log_messaging_error_details + if d: + logger.error( + "MessagingRateLimiter worker critical error: {}", + e, + exc_info=True, + ) + else: + logger.error( + "MessagingRateLimiter worker critical error: exc_type={}", + type(e).__name__, + ) + await asyncio.sleep(1) + + async def shutdown(self, timeout: float = 2.0) -> None: + """Stop the background worker so process shutdown doesn't hang.""" + self._shutdown.set() + try: + async with self._condition: + self._condition.notify_all() + except Exception: + # Best-effort: condition may be bound to a closing loop. 
+ pass + + task = self._worker_task + if not task or task.done(): + self._worker_task = None + return + + task.cancel() + try: + await asyncio.wait_for(task, timeout=timeout) + except TimeoutError: + logger.warning("MessagingRateLimiter worker did not stop before timeout") + except asyncio.CancelledError: + pass + except Exception as e: + d = get_settings().log_messaging_error_details + logger.debug( + "MessagingRateLimiter worker shutdown error: {}", + format_exception_for_log(e, log_full_message=d), + ) + finally: + self._worker_task = None + + @classmethod + async def shutdown_instance(cls, timeout: float = 2.0) -> None: + """Shutdown and clear the singleton instance (safe to call multiple times).""" + inst = cls._instance + if not inst: + return + try: + await inst.shutdown(timeout=timeout) + finally: + cls._instance = None + + async def _enqueue_internal(self, func, future, dedup_key, front=False): + await self._enqueue_internal_multi(func, [future], dedup_key, front) + + async def _enqueue_internal_multi(self, func, futures, dedup_key, front=False): + async with self._condition: + if dedup_key in self._queue_map: + # Compaction: Update existing task with new func, append new futures + _old_func, old_futures = self._queue_map[dedup_key] + old_futures.extend(futures) + self._queue_map[dedup_key] = (func, old_futures) + logger.debug( + f"Compacted task for key: {dedup_key} (now {len(old_futures)} futures)" + ) + else: + self._queue_map[dedup_key] = (func, futures) + if front: + self._queue_list.appendleft(dedup_key) + else: + self._queue_list.append(dedup_key) + self._condition.notify_all() + + async def enqueue( + self, func: Callable[[], Awaitable[Any]], dedup_key: str | None = None + ) -> Any: + """ + Enqueue a messaging task and return its future result. + If dedup_key is provided, subsequent tasks with the same key will replace this one. 
+ """ + if dedup_key is None: + # Unique key to avoid deduplication + dedup_key = f"task_{id(func)}_{asyncio.get_event_loop().time()}" + + future = asyncio.get_event_loop().create_future() + await self._enqueue_internal(func, future, dedup_key) + return await future + + def fire_and_forget( + self, func: Callable[[], Awaitable[Any]], dedup_key: str | None = None + ): + """Enqueue a task without waiting for the result.""" + if dedup_key is None: + dedup_key = f"task_{id(func)}_{asyncio.get_event_loop().time()}" + + future = asyncio.get_event_loop().create_future() + + async def _wrapped(): + max_retries = 2 + for attempt in range(max_retries + 1): + try: + return await self.enqueue(func, dedup_key) + except Exception as e: + error_msg = str(e).lower() + # Only retry transient connectivity issues that might have slipped through + # or occurred between platform checks. + if attempt < max_retries and any( + x in error_msg for x in ["connect", "timeout", "broken"] + ): + wait = 2**attempt + d = get_settings().log_messaging_error_details + if d: + logger.warning( + "Limiter fire_and_forget transient error (attempt {}): {}. Retrying in {}s...", + attempt + 1, + e, + wait, + ) + else: + logger.warning( + "Limiter fire_and_forget transient error (attempt {}): exc_type={}. 
Retrying in {}s...", + attempt + 1, + type(e).__name__, + wait, + ) + await asyncio.sleep(wait) + continue + + d = get_settings().log_messaging_error_details + logger.error( + "Final error in fire_and_forget for key {}: {}", + dedup_key, + format_exception_for_log(e, log_full_message=d), + ) + if not future.done(): + future.set_exception(e) + break + + _ = asyncio.create_task(_wrapped()) diff --git a/messaging/models.py b/messaging/models.py new file mode 100644 index 0000000000000000000000000000000000000000..b63f4f7def24c116b87c7709c8ca043ccb5c4227 --- /dev/null +++ b/messaging/models.py @@ -0,0 +1,36 @@ +"""Platform-agnostic message models.""" + +from dataclasses import dataclass, field +from datetime import UTC, datetime +from typing import Any + + +@dataclass +class IncomingMessage: + """ + Platform-agnostic incoming message. + + Adapters convert platform-specific events to this format. + """ + + text: str + chat_id: str + user_id: str + message_id: str + platform: str # "telegram", "discord", "slack", etc. + + # Optional fields + reply_to_message_id: str | None = None + # Forum topic ID (Telegram); required when replying in forum supergroups + message_thread_id: str | None = None + username: str | None = None + # Pre-sent status message ID (e.g. 
"Transcribing voice note..."); handler edits in place + status_message_id: str | None = None + timestamp: datetime = field(default_factory=lambda: datetime.now(UTC)) + + # Platform-specific raw event for edge cases + raw_event: Any = None + + def is_reply(self) -> bool: + """Check if this message is a reply to another message.""" + return self.reply_to_message_id is not None diff --git a/messaging/node_event_pipeline.py b/messaging/node_event_pipeline.py new file mode 100644 index 0000000000000000000000000000000000000000..6f233e25cfb4169237c1fb57b4dfc438c0a8f9c3 --- /dev/null +++ b/messaging/node_event_pipeline.py @@ -0,0 +1,103 @@ +"""CLI event handling for a single queued node (transcript + session + errors).""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from typing import Any + +from loguru import logger + +from .cli_event_constants import TRANSCRIPT_EVENT_TYPES, get_status_for_event +from .platforms.base import SessionManagerInterface +from .safe_diagnostics import text_len_hint +from .session import SessionStore +from .transcript import TranscriptBuffer +from .trees.queue_manager import MessageState, MessageTree + + +async def handle_session_info_event( + event_data: dict[str, Any], + tree: MessageTree | None, + node_id: str, + captured_session_id: str | None, + temp_session_id: str | None, + *, + cli_manager: SessionManagerInterface, + session_store: SessionStore, +) -> tuple[str | None, str | None]: + """Handle session_info event; return updated (captured_session_id, temp_session_id).""" + if event_data.get("type") != "session_info": + return captured_session_id, temp_session_id + + real_session_id = event_data.get("session_id") + if not real_session_id or not temp_session_id: + return captured_session_id, temp_session_id + + await cli_manager.register_real_session_id(temp_session_id, real_session_id) + if tree and real_session_id: + await tree.update_state( + node_id, + MessageState.IN_PROGRESS, + 
session_id=real_session_id, + ) + session_store.save_tree(tree.root_id, tree.to_dict()) + + return real_session_id, None + + +async def process_parsed_cli_event( + parsed: dict[str, Any], + transcript: TranscriptBuffer, + update_ui: Callable[..., Awaitable[None]], + last_status: str | None, + had_transcript_events: bool, + tree: MessageTree | None, + node_id: str, + captured_session_id: str | None, + *, + session_store: SessionStore, + format_status: Callable[..., str], + propagate_error_to_children: Callable[[str, str, str], Awaitable[None]], + log_messaging_error_details: bool = False, +) -> tuple[str | None, bool]: + """Process a single parsed CLI event. Returns (last_status, had_transcript_events).""" + ptype = parsed.get("type") or "" + + if ptype in TRANSCRIPT_EVENT_TYPES: + transcript.apply(parsed) + had_transcript_events = True + + status = get_status_for_event(ptype, parsed, format_status) + if status is not None: + await update_ui(status) + last_status = status + elif ptype == "block_stop": + await update_ui(last_status, force=True) + elif ptype == "complete": + if not had_transcript_events: + transcript.apply({"type": "text_chunk", "text": "Done."}) + logger.info("HANDLER: Task complete, updating UI") + await update_ui(format_status("✅", "Complete"), force=True) + if tree and captured_session_id: + await tree.update_state( + node_id, + MessageState.COMPLETED, + session_id=captured_session_id, + ) + session_store.save_tree(tree.root_id, tree.to_dict()) + elif ptype == "error": + error_msg = parsed.get("message", "Unknown error") + if log_messaging_error_details: + logger.error("HANDLER: Error event received: {}", error_msg) + else: + em = error_msg if isinstance(error_msg, str) else str(error_msg) + logger.error( + "HANDLER: Error event received: message_chars={}", + text_len_hint(em), + ) + logger.info("HANDLER: Updating UI with error status") + await update_ui(format_status("❌", "Error"), force=True) + if tree: + await 
propagate_error_to_children(node_id, error_msg, "Parent task failed") + + return last_status, had_transcript_events diff --git a/messaging/platforms/__init__.py b/messaging/platforms/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..88261a7a38273485f3d20ea0da66053cf6d7e9bf --- /dev/null +++ b/messaging/platforms/__init__.py @@ -0,0 +1,11 @@ +"""Messaging platform adapters (Telegram, Discord, etc.).""" + +from .base import CLISession, MessagingPlatform, SessionManagerInterface +from .factory import create_messaging_platform + +__all__ = [ + "CLISession", + "MessagingPlatform", + "SessionManagerInterface", + "create_messaging_platform", +] diff --git a/messaging/platforms/__pycache__/__init__.cpython-314.pyc b/messaging/platforms/__pycache__/__init__.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..769e6370752c11dbd3c1649717ebeecc8e6394e1 Binary files /dev/null and b/messaging/platforms/__pycache__/__init__.cpython-314.pyc differ diff --git a/messaging/platforms/__pycache__/base.cpython-314.pyc b/messaging/platforms/__pycache__/base.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..09e43e18c467de48c1c6c970536c94cf6cef1664 Binary files /dev/null and b/messaging/platforms/__pycache__/base.cpython-314.pyc differ diff --git a/messaging/platforms/__pycache__/factory.cpython-314.pyc b/messaging/platforms/__pycache__/factory.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4286ac5ef0b06e4bd9f5173af4e1f9c72f66e0f3 Binary files /dev/null and b/messaging/platforms/__pycache__/factory.cpython-314.pyc differ diff --git a/messaging/platforms/base.py b/messaging/platforms/base.py new file mode 100644 index 0000000000000000000000000000000000000000..60beffa65b0343ce2e65417baad8b49865d46734 --- /dev/null +++ b/messaging/platforms/base.py @@ -0,0 +1,230 @@ +"""Abstract base class for messaging platforms.""" + +from abc import ABC, abstractmethod 
+from collections.abc import AsyncGenerator, Awaitable, Callable +from typing import ( + Any, + Protocol, + runtime_checkable, +) + +from ..models import IncomingMessage + + +@runtime_checkable +class CLISession(Protocol): + """Protocol for CLI session - avoid circular import from cli package.""" + + def start_task( + self, prompt: str, session_id: str | None = None, fork_session: bool = False + ) -> AsyncGenerator[dict, Any]: ... + + @property + def is_busy(self) -> bool: ... + + +@runtime_checkable +class SessionManagerInterface(Protocol): + """ + Protocol for session managers to avoid tight coupling with cli package. + + Implementations: CLISessionManager + """ + + async def get_or_create_session( + self, session_id: str | None = None + ) -> tuple[CLISession, str, bool]: + """ + Get an existing session or create a new one. + + Returns: Tuple of (session, session_id, is_new_session) + """ + ... + + async def register_real_session_id( + self, temp_id: str, real_session_id: str + ) -> bool: + """Register the real session ID from CLI output.""" + ... + + async def stop_all(self) -> None: + """Stop all sessions.""" + ... + + async def remove_session(self, session_id: str) -> bool: + """Remove a session from the manager.""" + ... + + def get_stats(self) -> dict: + """Get session statistics.""" + ... + + +class MessagingPlatform(ABC): + """ + Base class for all messaging platform adapters. + + Implement this to add support for Telegram, Discord, Slack, etc. + """ + + name: str = "base" + + @abstractmethod + async def start(self) -> None: + """Initialize and connect to the messaging platform.""" + pass + + @abstractmethod + async def stop(self) -> None: + """Disconnect and cleanup resources.""" + pass + + @abstractmethod + async def send_message( + self, + chat_id: str, + text: str, + reply_to: str | None = None, + parse_mode: str | None = None, + message_thread_id: str | None = None, + ) -> str: + """ + Send a message to a chat. 
+ + Args: + chat_id: The chat/channel ID to send to + text: Message content + reply_to: Optional message ID to reply to + parse_mode: Optional formatting mode ("markdown", "html") + message_thread_id: Optional thread or topic id for threaded channels + (e.g. forum topics); unused on platforms that do not support it. + + Returns: + The message ID of the sent message + """ + pass + + @abstractmethod + async def edit_message( + self, + chat_id: str, + message_id: str, + text: str, + parse_mode: str | None = None, + ) -> None: + """ + Edit an existing message. + + Args: + chat_id: The chat/channel ID + message_id: The message ID to edit + text: New message content + parse_mode: Optional formatting mode + """ + pass + + @abstractmethod + async def delete_message( + self, + chat_id: str, + message_id: str, + ) -> None: + """ + Delete a message from a chat. + + Args: + chat_id: The chat/channel ID + message_id: The message ID to delete + """ + pass + + @abstractmethod + async def queue_send_message( + self, + chat_id: str, + text: str, + reply_to: str | None = None, + parse_mode: str | None = None, + fire_and_forget: bool = True, + message_thread_id: str | None = None, + ) -> str | None: + """ + Enqueue a message to be sent. + + If fire_and_forget is True, returns None immediately. + Otherwise, waits for the rate limiter and returns message ID. + """ + pass + + @abstractmethod + async def queue_edit_message( + self, + chat_id: str, + message_id: str, + text: str, + parse_mode: str | None = None, + fire_and_forget: bool = True, + ) -> None: + """ + Enqueue a message edit. + + If fire_and_forget is True, returns immediately. + Otherwise, waits for the rate limiter. + """ + pass + + @abstractmethod + async def queue_delete_message( + self, + chat_id: str, + message_id: str, + fire_and_forget: bool = True, + ) -> None: + """ + Enqueue a message deletion. + + If fire_and_forget is True, returns immediately. + Otherwise, waits for the rate limiter. 
+ """ + pass + + async def queue_delete_messages( + self, + chat_id: str, + message_ids: list[str], + *, + fire_and_forget: bool = True, + ) -> None: + """Delete many messages; default loops :meth:`queue_delete_message`. + + Adapters with native bulk delete should override. + """ + for mid in message_ids: + await self.queue_delete_message( + chat_id, mid, fire_and_forget=fire_and_forget + ) + + @abstractmethod + def on_message( + self, + handler: Callable[[IncomingMessage], Awaitable[None]], + ) -> None: + """ + Register a message handler callback. + + The handler will be called for each incoming message. + + Args: + handler: Async function that processes incoming messages + """ + pass + + @abstractmethod + def fire_and_forget(self, task: Awaitable[Any]) -> None: + """Execute a coroutine without awaiting it.""" + pass + + @property + def is_connected(self) -> bool: + """Check if the platform is connected.""" + return False diff --git a/messaging/platforms/discord.py b/messaging/platforms/discord.py new file mode 100644 index 0000000000000000000000000000000000000000..53aa7dc7d7e7c673d10124cf305b6a47d401f0aa --- /dev/null +++ b/messaging/platforms/discord.py @@ -0,0 +1,592 @@ +""" +Discord Platform Adapter + +Implements MessagingPlatform for Discord using discord.py. 
+""" + +import asyncio +import contextlib +import tempfile +from collections.abc import Awaitable, Callable +from pathlib import Path +from typing import Any, cast + +from loguru import logger + +from core.anthropic import format_user_error_preview + +from ..models import IncomingMessage +from ..rendering.discord_markdown import format_status_discord +from ..voice import PendingVoiceRegistry, VoiceTranscriptionService +from .base import MessagingPlatform + +AUDIO_EXTENSIONS = (".ogg", ".mp4", ".mp3", ".wav", ".m4a") + +_discord_module: Any = None +try: + import discord as _discord_import + + _discord_module = _discord_import + DISCORD_AVAILABLE = True +except ImportError: + DISCORD_AVAILABLE = False + +DISCORD_MESSAGE_LIMIT = 2000 + + +def _get_discord() -> Any: + """Return the discord module. Raises if not available.""" + if not DISCORD_AVAILABLE or _discord_module is None: + raise ImportError( + "discord.py is required. Install with: pip install discord.py" + ) + return _discord_module + + +def _parse_allowed_channels(raw: str | None) -> set[str]: + """Parse comma-separated channel IDs into a set of strings.""" + if not raw or not raw.strip(): + return set() + return {s.strip() for s in raw.split(",") if s.strip()} + + +if DISCORD_AVAILABLE and _discord_module is not None: + _discord = _discord_module + + class _DiscordClient(_discord.Client): + """Internal Discord client that forwards events to DiscordPlatform.""" + + def __init__( + self, + platform: DiscordPlatform, + intents: _discord.Intents, + ) -> None: + super().__init__(intents=intents) + self._platform = platform + + async def on_ready(self) -> None: + """Called when the bot is ready.""" + self._platform._connected = True + logger.info("Discord platform connected") + + async def on_message(self, message: Any) -> None: + """Handle incoming Discord messages.""" + await self._platform._handle_client_message(message) +else: + _DiscordClient = None + + +class DiscordPlatform(MessagingPlatform): + """ + 
Discord messaging platform adapter. + + Uses discord.py for Discord access. + Requires a Bot Token from Discord Developer Portal and message_content intent. + """ + + name = "discord" + + def __init__( + self, + bot_token: str | None = None, + allowed_channel_ids: str | None = None, + *, + voice_note_enabled: bool = True, + whisper_model: str = "base", + whisper_device: str = "cpu", + hf_token: str = "", + nvidia_nim_api_key: str = "", + messaging_rate_limit: int = 1, + messaging_rate_window: float = 1.0, + log_raw_messaging_content: bool = False, + log_api_error_tracebacks: bool = False, + ): + if not DISCORD_AVAILABLE: + raise ImportError( + "discord.py is required. Install with: pip install discord.py" + ) + + self.bot_token = bot_token + self.allowed_channel_ids = _parse_allowed_channels(allowed_channel_ids) + + if not self.bot_token: + logger.warning("DISCORD_BOT_TOKEN not set") + + discord = _get_discord() + intents = discord.Intents.default() + intents.message_content = True + + assert _DiscordClient is not None + self._client = _DiscordClient(self, intents) + self._message_handler: Callable[[IncomingMessage], Awaitable[None]] | None = ( + None + ) + self._connected = False + self._limiter: Any | None = None + self._start_task: asyncio.Task | None = None + self._pending_voice = PendingVoiceRegistry() + self._voice_transcription = VoiceTranscriptionService( + hf_token=hf_token, + nvidia_nim_api_key=nvidia_nim_api_key, + ) + self._voice_note_enabled = voice_note_enabled + self._whisper_model = whisper_model + self._whisper_device = whisper_device + self._messaging_rate_limit = messaging_rate_limit + self._messaging_rate_window = messaging_rate_window + self._log_raw_messaging_content = log_raw_messaging_content + self._log_api_error_tracebacks = log_api_error_tracebacks + + async def _handle_client_message(self, message: Any) -> None: + """Adapter entry point used by the internal discord client.""" + await self._on_discord_message(message) + + async def 
_register_pending_voice( + self, chat_id: str, voice_msg_id: str, status_msg_id: str + ) -> None: + """Register a voice note as pending transcription.""" + await self._pending_voice.register(chat_id, voice_msg_id, status_msg_id) + + async def cancel_pending_voice( + self, chat_id: str, reply_id: str + ) -> tuple[str, str] | None: + """Cancel a pending voice transcription. Returns (voice_msg_id, status_msg_id) if found.""" + return await self._pending_voice.cancel(chat_id, reply_id) + + async def _is_voice_still_pending(self, chat_id: str, voice_msg_id: str) -> bool: + """Check if a voice note is still pending (not cancelled).""" + return await self._pending_voice.is_pending(chat_id, voice_msg_id) + + def _get_audio_attachment(self, message: Any) -> Any | None: + """Return first audio attachment, or None.""" + for att in message.attachments: + ct = (att.content_type or "").lower() + fn = (att.filename or "").lower() + if ct.startswith("audio/") or any( + fn.endswith(ext) for ext in AUDIO_EXTENSIONS + ): + return att + return None + + async def _handle_voice_note( + self, message: Any, attachment: Any, channel_id: str + ) -> bool: + """Handle voice/audio attachment. 
Returns True if handled.""" + if not self._voice_note_enabled: + await message.reply("Voice notes are disabled.") + return True + + if not self._message_handler: + return False + + status_msg_id = await self.queue_send_message( + channel_id, + format_status_discord("Transcribing voice note..."), + reply_to=str(message.id), + fire_and_forget=False, + ) + + user_id = str(message.author.id) + message_id = str(message.id) + await self._register_pending_voice(channel_id, message_id, str(status_msg_id)) + reply_to = ( + str(message.reference.message_id) + if message.reference and message.reference.message_id + else None + ) + + ext = ".ogg" + fn = (attachment.filename or "").lower() + for e in AUDIO_EXTENSIONS: + if fn.endswith(e): + ext = e + break + ct = attachment.content_type or "audio/ogg" + if "mp4" in ct or "m4a" in fn: + ext = ".m4a" if "m4a" in fn else ".mp4" + elif "mp3" in ct or fn.endswith(".mp3"): + ext = ".mp3" + + with tempfile.NamedTemporaryFile(suffix=ext, delete=False) as tmp: + tmp_path = Path(tmp.name) + + try: + await attachment.save(str(tmp_path)) + + transcribed = await self._voice_transcription.transcribe( + tmp_path, + ct, + whisper_model=self._whisper_model, + whisper_device=self._whisper_device, + ) + + if not await self._is_voice_still_pending(channel_id, message_id): + await self.queue_delete_message(channel_id, str(status_msg_id)) + return True + + await self._pending_voice.complete( + channel_id, message_id, str(status_msg_id) + ) + + incoming = IncomingMessage( + text=transcribed, + chat_id=channel_id, + user_id=user_id, + message_id=message_id, + platform="discord", + reply_to_message_id=reply_to, + username=message.author.display_name, + raw_event=message, + status_message_id=status_msg_id, + ) + + if self._log_raw_messaging_content: + logger.info( + "DISCORD_VOICE: chat_id={} message_id={} transcribed={!r}", + channel_id, + message_id, + ( + transcribed[:80] + "..." 
+ if len(transcribed) > 80 + else transcribed + ), + ) + else: + logger.info( + "DISCORD_VOICE: chat_id={} message_id={} transcribed_len={}", + channel_id, + message_id, + len(transcribed), + ) + + await self._message_handler(incoming) + return True + except ValueError as e: + await message.reply(format_user_error_preview(e)) + return True + except ImportError as e: + await message.reply(format_user_error_preview(e)) + return True + except Exception as e: + if self._log_api_error_tracebacks: + logger.error("Voice transcription failed: {}", e) + else: + logger.error( + "Voice transcription failed: exc_type={}", type(e).__name__ + ) + await message.reply( + "Could not transcribe voice note. Please try again or send text." + ) + return True + finally: + with contextlib.suppress(OSError): + tmp_path.unlink(missing_ok=True) + + async def _on_discord_message(self, message: Any) -> None: + """Handle incoming Discord messages.""" + if message.author.bot: + return + + channel_id = str(message.channel.id) + + if not self.allowed_channel_ids or channel_id not in self.allowed_channel_ids: + return + + # Handle voice/audio attachments when message has no text content + if not message.content: + audio_att = self._get_audio_attachment(message) + if audio_att: + await self._handle_voice_note(message, audio_att, channel_id) + return + return + + user_id = str(message.author.id) + message_id = str(message.id) + reply_to = ( + str(message.reference.message_id) + if message.reference and message.reference.message_id + else None + ) + + raw_content = message.content or "" + if self._log_raw_messaging_content: + text_preview = raw_content[:80] + if len(raw_content) > 80: + text_preview += "..." 
+ logger.info( + "DISCORD_MSG: chat_id={} message_id={} reply_to={} text_preview={!r}", + channel_id, + message_id, + reply_to, + text_preview, + ) + else: + logger.info( + "DISCORD_MSG: chat_id={} message_id={} reply_to={} text_len={}", + channel_id, + message_id, + reply_to, + len(raw_content), + ) + + if not self._message_handler: + return + + incoming = IncomingMessage( + text=message.content, + chat_id=channel_id, + user_id=user_id, + message_id=message_id, + platform="discord", + reply_to_message_id=reply_to, + username=message.author.display_name, + raw_event=message, + ) + + try: + await self._message_handler(incoming) + except Exception as e: + if self._log_api_error_tracebacks: + logger.error("Error handling message: {}", e) + else: + logger.error("Error handling message: exc_type={}", type(e).__name__) + with contextlib.suppress(Exception): + await self.send_message( + channel_id, + format_status_discord("Error:", format_user_error_preview(e)), + reply_to=message_id, + ) + + def _truncate(self, text: str, limit: int = DISCORD_MESSAGE_LIMIT) -> str: + """Truncate text to Discord's message limit.""" + if len(text) <= limit: + return text + return text[: limit - 3] + "..." 
+
+    async def start(self) -> None:
+        """Initialize and connect to Discord."""
+        if not self.bot_token:
+            raise ValueError("DISCORD_BOT_TOKEN is required")
+
+        from ..limiter import MessagingRateLimiter
+
+        self._limiter = await MessagingRateLimiter.get_instance(
+            rate_limit=self._messaging_rate_limit,
+            rate_window=self._messaging_rate_window,
+        )
+
+        self._start_task = asyncio.create_task(
+            self._client.start(self.bot_token),
+            name="discord-client-start",
+        )
+
+        max_wait = 30
+        waited = 0
+        while not self._connected and waited < max_wait:
+            await asyncio.sleep(0.5)
+            waited += 0.5
+
+        if not self._connected:
+            raise RuntimeError("Discord client failed to connect within timeout")
+
+        logger.info("Discord platform started")
+
+    async def stop(self) -> None:
+        """Stop the bot."""
+        if self._client.is_closed():
+            self._connected = False
+            return
+
+        await self._client.close()
+        if self._start_task and not self._start_task.done():
+            try:
+                await asyncio.wait_for(self._start_task, timeout=5.0)
+            except (TimeoutError, asyncio.CancelledError):
+                self._start_task.cancel()
+                with contextlib.suppress(asyncio.CancelledError):
+                    await self._start_task
+
+        self._connected = False
+        logger.info("Discord platform stopped")
+
+    async def send_message(
+        self,
+        chat_id: str,
+        text: str,
+        reply_to: str | None = None,
+        parse_mode: str | None = None,
+        message_thread_id: str | None = None,
+    ) -> str:
+        """Send a message to a channel."""
+        channel = self._client.get_channel(int(chat_id))
+        if not channel or not hasattr(channel, "send"):
+            raise RuntimeError(f"Channel {chat_id} not found")
+
+        text = self._truncate(text)
+        channel = cast(Any, channel)
+
+        discord = _get_discord()
+        if reply_to:
+            ref = discord.MessageReference(
+                message_id=int(reply_to),
+                channel_id=int(chat_id),
+            )
+            msg = await channel.send(content=text, reference=ref)
+        else:
+            msg = await channel.send(content=text)
+
+        return str(msg.id)
+
+    async def edit_message(
+        self,
+        chat_id: str,
+        message_id: 
str,
+        text: str,
+        parse_mode: str | None = None,
+    ) -> None:
+        """Edit an existing message."""
+        channel = self._client.get_channel(int(chat_id))
+        if not channel or not hasattr(channel, "fetch_message"):
+            raise RuntimeError(f"Channel {chat_id} not found")
+
+        discord = _get_discord()
+        channel = cast(Any, channel)
+        try:
+            msg = await channel.fetch_message(int(message_id))
+        except discord.NotFound:
+            return
+
+        text = self._truncate(text)
+        await msg.edit(content=text)
+
+    async def delete_message(
+        self,
+        chat_id: str,
+        message_id: str,
+    ) -> None:
+        """Delete a message from a channel."""
+        channel = self._client.get_channel(int(chat_id))
+        if not channel or not hasattr(channel, "fetch_message"):
+            return
+
+        discord = _get_discord()
+        channel = cast(Any, channel)
+        try:
+            msg = await channel.fetch_message(int(message_id))
+            await msg.delete()
+        except (discord.NotFound, discord.Forbidden):
+            pass
+
+    async def delete_messages(self, chat_id: str, message_ids: list[str]) -> None:
+        """Delete multiple messages (best-effort)."""
+        for mid in message_ids:
+            await self.delete_message(chat_id, mid)
+
+    async def queue_send_message(
+        self,
+        chat_id: str,
+        text: str,
+        reply_to: str | None = None,
+        parse_mode: str | None = None,
+        fire_and_forget: bool = True,
+        message_thread_id: str | None = None,
+    ) -> str | None:
+        """Enqueue a message to be sent."""
+        if not self._limiter:
+            return await self.send_message(
+                chat_id, text, reply_to, parse_mode, message_thread_id
+            )
+
+        async def _send():
+            return await self.send_message(
+                chat_id, text, reply_to, parse_mode, message_thread_id
+            )
+
+        if fire_and_forget:
+            self._limiter.fire_and_forget(_send)
+            return None
+        return await self._limiter.enqueue(_send)
+
+    async def queue_edit_message(
+        self,
+        chat_id: str,
+        message_id: str,
+        text: str,
+        parse_mode: str | None = None,
+        fire_and_forget: bool = True,
+    ) -> None:
+        """Enqueue a message edit."""
+        if not self._limiter:
+            await self.edit_message(chat_id, 
message_id, text, parse_mode) + return + + async def _edit(): + await self.edit_message(chat_id, message_id, text, parse_mode) + + dedup_key = f"edit:{chat_id}:{message_id}" + if fire_and_forget: + self._limiter.fire_and_forget(_edit, dedup_key=dedup_key) + else: + await self._limiter.enqueue(_edit, dedup_key=dedup_key) + + async def queue_delete_message( + self, + chat_id: str, + message_id: str, + fire_and_forget: bool = True, + ) -> None: + """Enqueue a message delete.""" + if not self._limiter: + await self.delete_message(chat_id, message_id) + return + + async def _delete(): + await self.delete_message(chat_id, message_id) + + dedup_key = f"del:{chat_id}:{message_id}" + if fire_and_forget: + self._limiter.fire_and_forget(_delete, dedup_key=dedup_key) + else: + await self._limiter.enqueue(_delete, dedup_key=dedup_key) + + async def queue_delete_messages( + self, + chat_id: str, + message_ids: list[str], + fire_and_forget: bool = True, + ) -> None: + """Enqueue a bulk delete.""" + if not message_ids: + return + + if not self._limiter: + await self.delete_messages(chat_id, message_ids) + return + + async def _bulk(): + await self.delete_messages(chat_id, message_ids) + + dedup_key = f"del_bulk:{chat_id}:{hash(tuple(message_ids))}" + if fire_and_forget: + self._limiter.fire_and_forget(_bulk, dedup_key=dedup_key) + else: + await self._limiter.enqueue(_bulk, dedup_key=dedup_key) + + def fire_and_forget(self, task: Awaitable[Any]) -> None: + """Execute a coroutine without awaiting it.""" + if asyncio.iscoroutine(task): + _ = asyncio.create_task(task) + else: + _ = asyncio.ensure_future(task) + + def on_message( + self, + handler: Callable[[IncomingMessage], Awaitable[None]], + ) -> None: + """Register a message handler callback.""" + self._message_handler = handler + + @property + def is_connected(self) -> bool: + """Check if connected.""" + return self._connected diff --git a/messaging/platforms/factory.py b/messaging/platforms/factory.py new file mode 100644 index 
0000000000000000000000000000000000000000..772a31daeb3ea30abbbe5510240f45d1ff8c19ef --- /dev/null +++ b/messaging/platforms/factory.py @@ -0,0 +1,103 @@ +"""Messaging platform factory. + +Creates the appropriate messaging platform adapter based on configuration. +To add a new platform (e.g. Discord, Slack): +1. Create a new class implementing MessagingPlatform in messaging/platforms/ +2. Add a case to create_messaging_platform() below +""" + +from __future__ import annotations + +from dataclasses import dataclass + +from loguru import logger + +from .base import MessagingPlatform + + +@dataclass(frozen=True, slots=True) +class MessagingPlatformOptions: + """Typed wiring from :class:`~api.runtime.AppRuntime` into platform adapters.""" + + telegram_bot_token: str | None = None + allowed_telegram_user_id: str | None = None + discord_bot_token: str | None = None + allowed_discord_channels: str | None = None + voice_note_enabled: bool = True + whisper_model: str = "base" + whisper_device: str = "cpu" + hf_token: str = "" + nvidia_nim_api_key: str = "" + messaging_rate_limit: int = 1 + messaging_rate_window: float = 1.0 + log_raw_messaging_content: bool = False + log_api_error_tracebacks: bool = False + + +def create_messaging_platform( + platform_type: str, + options: MessagingPlatformOptions | None = None, +) -> MessagingPlatform | None: + """Create a messaging platform instance based on type. + + Args: + platform_type: Platform identifier (``telegram``, ``discord``, ``none``). + options: Token, allowlist, and voice / transcription settings. + + Returns: + Configured :class:`MessagingPlatform` instance, or None if not configured. 
+ """ + opts = options or MessagingPlatformOptions() + if platform_type == "none": + logger.info("Messaging platform disabled by configuration") + return None + + if platform_type == "telegram": + bot_token = opts.telegram_bot_token + if not bot_token: + logger.info("No Telegram bot token configured, skipping platform setup") + return None + + from .telegram import TelegramPlatform + + return TelegramPlatform( + bot_token=bot_token, + allowed_user_id=opts.allowed_telegram_user_id, + voice_note_enabled=opts.voice_note_enabled, + whisper_model=opts.whisper_model, + whisper_device=opts.whisper_device, + hf_token=opts.hf_token, + nvidia_nim_api_key=opts.nvidia_nim_api_key, + messaging_rate_limit=opts.messaging_rate_limit, + messaging_rate_window=opts.messaging_rate_window, + log_raw_messaging_content=opts.log_raw_messaging_content, + log_api_error_tracebacks=opts.log_api_error_tracebacks, + ) + + if platform_type == "discord": + bot_token = opts.discord_bot_token + if not bot_token: + logger.info("No Discord bot token configured, skipping platform setup") + return None + + from .discord import DiscordPlatform + + return DiscordPlatform( + bot_token=bot_token, + allowed_channel_ids=opts.allowed_discord_channels, + voice_note_enabled=opts.voice_note_enabled, + whisper_model=opts.whisper_model, + whisper_device=opts.whisper_device, + hf_token=opts.hf_token, + nvidia_nim_api_key=opts.nvidia_nim_api_key, + messaging_rate_limit=opts.messaging_rate_limit, + messaging_rate_window=opts.messaging_rate_window, + log_raw_messaging_content=opts.log_raw_messaging_content, + log_api_error_tracebacks=opts.log_api_error_tracebacks, + ) + + logger.warning( + f"Unknown messaging platform: '{platform_type}'. 
" + "Supported: 'none', 'telegram', 'discord'" + ) + return None diff --git a/messaging/platforms/telegram.py b/messaging/platforms/telegram.py new file mode 100644 index 0000000000000000000000000000000000000000..a8f34dffb9ab5661f213ae5442c295b61b73282a --- /dev/null +++ b/messaging/platforms/telegram.py @@ -0,0 +1,700 @@ +""" +Telegram Platform Adapter + +Implements MessagingPlatform for Telegram using python-telegram-bot. +""" + +import asyncio +import contextlib +import os +import tempfile +from pathlib import Path + +# Opt-in to future behavior for python-telegram-bot (retry_after as timedelta) +# This must be set BEFORE importing telegram.error +os.environ["PTB_TIMEDELTA"] = "1" + +from collections.abc import Awaitable, Callable +from typing import TYPE_CHECKING, Any + +from loguru import logger + +from core.anthropic import format_user_error_preview + +if TYPE_CHECKING: + from telegram import Update + from telegram.ext import ContextTypes + +from ..models import IncomingMessage +from ..rendering.telegram_markdown import escape_md_v2, format_status +from ..voice import PendingVoiceRegistry, VoiceTranscriptionService +from .base import MessagingPlatform + +# Optional import - python-telegram-bot may not be installed +try: + from telegram import Update + from telegram.error import NetworkError, RetryAfter, TelegramError + from telegram.ext import ( + Application, + CommandHandler, + ContextTypes, + MessageHandler, + filters, + ) + from telegram.request import HTTPXRequest + + TELEGRAM_AVAILABLE = True +except ImportError: + TELEGRAM_AVAILABLE = False + + +class TelegramPlatform(MessagingPlatform): + """ + Telegram messaging platform adapter. + + Uses python-telegram-bot (BoT API) for Telegram access. + Requires a Bot Token from @BotFather. 
+ """ + + name = "telegram" + + def __init__( + self, + bot_token: str | None = None, + allowed_user_id: str | None = None, + *, + voice_note_enabled: bool = True, + whisper_model: str = "base", + whisper_device: str = "cpu", + hf_token: str = "", + nvidia_nim_api_key: str = "", + messaging_rate_limit: int = 1, + messaging_rate_window: float = 1.0, + log_raw_messaging_content: bool = False, + log_api_error_tracebacks: bool = False, + ): + if not TELEGRAM_AVAILABLE: + raise ImportError( + "python-telegram-bot is required. Install with: pip install python-telegram-bot" + ) + + self.bot_token = bot_token + self.allowed_user_id = allowed_user_id + + if not self.bot_token: + # We don't raise here to allow instantiation for testing/conditional logic, + # but start() will fail. + logger.warning("TELEGRAM_BOT_TOKEN not set") + + self._application: Application | None = None + self._message_handler: Callable[[IncomingMessage], Awaitable[None]] | None = ( + None + ) + self._connected = False + self._limiter: Any | None = None # Will be MessagingRateLimiter + # Pending voice transcriptions: (chat_id, msg_id) -> (voice_msg_id, status_msg_id) + self._pending_voice = PendingVoiceRegistry() + self._voice_transcription = VoiceTranscriptionService( + hf_token=hf_token, + nvidia_nim_api_key=nvidia_nim_api_key, + ) + self._voice_note_enabled = voice_note_enabled + self._whisper_model = whisper_model + self._whisper_device = whisper_device + self._messaging_rate_limit = messaging_rate_limit + self._messaging_rate_window = messaging_rate_window + self._log_raw_messaging_content = log_raw_messaging_content + self._log_api_error_tracebacks = log_api_error_tracebacks + + async def _register_pending_voice( + self, chat_id: str, voice_msg_id: str, status_msg_id: str + ) -> None: + """Register a voice note as pending transcription (for /clear reply during transcription).""" + await self._pending_voice.register(chat_id, voice_msg_id, status_msg_id) + + async def cancel_pending_voice( + self, 
chat_id: str, reply_id: str + ) -> tuple[str, str] | None: + """Cancel a pending voice transcription. Returns (voice_msg_id, status_msg_id) if found.""" + return await self._pending_voice.cancel(chat_id, reply_id) + + async def _is_voice_still_pending(self, chat_id: str, voice_msg_id: str) -> bool: + """Check if a voice note is still pending (not cancelled).""" + return await self._pending_voice.is_pending(chat_id, voice_msg_id) + + async def start(self) -> None: + """Initialize and connect to Telegram.""" + if not self.bot_token: + raise ValueError("TELEGRAM_BOT_TOKEN is required") + + # Configure request with longer timeouts + request = HTTPXRequest( + connection_pool_size=8, connect_timeout=30.0, read_timeout=30.0 + ) + + # Build Application + builder = Application.builder().token(self.bot_token).request(request) + self._application = builder.build() + + # Register Internal Handlers + # We catch ALL text messages and commands to forward them + self._application.add_handler( + MessageHandler(filters.TEXT & (~filters.COMMAND), self._on_telegram_message) + ) + self._application.add_handler(CommandHandler("start", self._on_start_command)) + # Catch-all for other commands if needed, or let them fall through + self._application.add_handler( + MessageHandler(filters.COMMAND, self._on_telegram_message) + ) + # Voice note handler + self._application.add_handler( + MessageHandler(filters.VOICE, self._on_telegram_voice) + ) + + # Initialize internal components with retry logic + max_retries = 3 + for attempt in range(max_retries): + try: + await self._application.initialize() + await self._application.start() + + # Start polling (non-blocking way for integration) + if self._application.updater: + await self._application.updater.start_polling( + drop_pending_updates=False + ) + + self._connected = True + break + except (NetworkError, Exception) as e: + if attempt < max_retries - 1: + wait_time = 2 * (attempt + 1) + logger.warning( + f"Connection failed (attempt {attempt + 
1}/{max_retries}): {e}. Retrying in {wait_time}s..." + ) + await asyncio.sleep(wait_time) + else: + logger.error(f"Failed to connect after {max_retries} attempts") + raise + + # Initialize rate limiter + from ..limiter import MessagingRateLimiter + + self._limiter = await MessagingRateLimiter.get_instance( + rate_limit=self._messaging_rate_limit, + rate_window=self._messaging_rate_window, + ) + + # Send startup notification + try: + target = self.allowed_user_id + if target: + startup_text = ( + f"🚀 *{escape_md_v2('Claude Code Proxy is online!')}* " + f"{escape_md_v2('(Bot API)')}" + ) + await self.send_message( + target, + startup_text, + ) + except Exception as e: + if self._log_api_error_tracebacks: + logger.warning("Could not send startup message: {}", e) + else: + logger.warning( + "Could not send startup message: exc_type={}", + type(e).__name__, + ) + + logger.info("Telegram platform started (Bot API)") + + async def stop(self) -> None: + """Stop the bot.""" + if self._application and self._application.updater: + await self._application.updater.stop() + await self._application.stop() + await self._application.shutdown() + + self._connected = False + logger.info("Telegram platform stopped") + + async def _with_retry( + self, func: Callable[..., Awaitable[Any]], *args, **kwargs + ) -> Any: + """Helper to execute a function with exponential backoff on network errors.""" + max_retries = 3 + for attempt in range(max_retries): + try: + return await func(*args, **kwargs) + except (TimeoutError, NetworkError) as e: + if "Message is not modified" in str(e): + return None + if attempt < max_retries - 1: + wait_time = 2**attempt # 1s, 2s, 4s + logger.warning( + f"Telegram API network error (attempt {attempt + 1}/{max_retries}): {e}. Retrying in {wait_time}s..." 
+ ) + await asyncio.sleep(wait_time) + else: + logger.error( + f"Telegram API failed after {max_retries} attempts: {e}" + ) + raise + except RetryAfter as e: + # Telegram explicitly tells us to wait (PTB_TIMEDELTA: retry_after is timedelta) + from datetime import timedelta + + retry_after = e.retry_after + if isinstance(retry_after, timedelta): + wait_secs = retry_after.total_seconds() + else: + wait_secs = float(retry_after) + + logger.warning(f"Rate limited by Telegram, waiting {wait_secs}s...") + await asyncio.sleep(wait_secs) + # We don't increment attempt here, as this is a specific instruction + return await func(*args, **kwargs) + except TelegramError as e: + # Non-network Telegram errors + err_lower = str(e).lower() + if "message is not modified" in err_lower: + return None + # Best-effort no-op cases (common during chat cleanup / /clear). + if any( + x in err_lower + for x in [ + "message to edit not found", + "message to delete not found", + "message can't be deleted", + "message can't be edited", + "not enough rights to delete", + ] + ): + return None + if "Can't parse entities" in str(e) and kwargs.get("parse_mode"): + logger.warning("Markdown failed, retrying without parse_mode") + kwargs["parse_mode"] = None + return await func(*args, **kwargs) + raise + + async def send_message( + self, + chat_id: str, + text: str, + reply_to: str | None = None, + parse_mode: str | None = "MarkdownV2", + message_thread_id: str | None = None, + ) -> str: + """Send a message to a chat.""" + app = self._application + if not app or not app.bot: + raise RuntimeError("Telegram application or bot not initialized") + + async def _do_send(parse_mode=parse_mode): + bot = app.bot + kwargs: dict[str, Any] = { + "chat_id": chat_id, + "text": text, + "reply_to_message_id": int(reply_to) if reply_to else None, + "parse_mode": parse_mode, + } + if message_thread_id is not None: + kwargs["message_thread_id"] = int(message_thread_id) + msg = await bot.send_message(**kwargs) + return 
str(msg.message_id) + + return await self._with_retry(_do_send, parse_mode=parse_mode) + + async def edit_message( + self, + chat_id: str, + message_id: str, + text: str, + parse_mode: str | None = "MarkdownV2", + ) -> None: + """Edit an existing message.""" + app = self._application + if not app or not app.bot: + raise RuntimeError("Telegram application or bot not initialized") + + async def _do_edit(parse_mode=parse_mode): + bot = app.bot + await bot.edit_message_text( + chat_id=chat_id, + message_id=int(message_id), + text=text, + parse_mode=parse_mode, + ) + + await self._with_retry(_do_edit, parse_mode=parse_mode) + + async def delete_message( + self, + chat_id: str, + message_id: str, + ) -> None: + """Delete a message from a chat.""" + app = self._application + if not app or not app.bot: + raise RuntimeError("Telegram application or bot not initialized") + + async def _do_delete(): + bot = app.bot + await bot.delete_message(chat_id=chat_id, message_id=int(message_id)) + + await self._with_retry(_do_delete) + + async def delete_messages(self, chat_id: str, message_ids: list[str]) -> None: + """Delete multiple messages (best-effort).""" + if not message_ids: + return + app = self._application + if not app or not app.bot: + raise RuntimeError("Telegram application or bot not initialized") + + # PTB supports bulk deletion via delete_messages; fall back to per-message. + bot = app.bot + if hasattr(bot, "delete_messages"): + + async def _do_bulk(): + mids = [] + for mid in message_ids: + try: + mids.append(int(mid)) + except Exception: + continue + if not mids: + return None + # delete_messages accepts a sequence of ints (up to 100). 
+ await bot.delete_messages(chat_id=chat_id, message_ids=mids) + + await self._with_retry(_do_bulk) + return + + for mid in message_ids: + await self.delete_message(chat_id, mid) + + async def queue_send_message( + self, + chat_id: str, + text: str, + reply_to: str | None = None, + parse_mode: str | None = "MarkdownV2", + fire_and_forget: bool = True, + message_thread_id: str | None = None, + ) -> str | None: + """Enqueue a message to be sent (using limiter).""" + # Note: Bot API handles limits better, but we still use our limiter for nice queuing + if not self._limiter: + return await self.send_message( + chat_id, text, reply_to, parse_mode, message_thread_id + ) + + async def _send(): + return await self.send_message( + chat_id, text, reply_to, parse_mode, message_thread_id + ) + + if fire_and_forget: + self._limiter.fire_and_forget(_send) + return None + else: + return await self._limiter.enqueue(_send) + + async def queue_edit_message( + self, + chat_id: str, + message_id: str, + text: str, + parse_mode: str | None = "MarkdownV2", + fire_and_forget: bool = True, + ) -> None: + """Enqueue a message edit.""" + if not self._limiter: + return await self.edit_message(chat_id, message_id, text, parse_mode) + + async def _edit(): + return await self.edit_message(chat_id, message_id, text, parse_mode) + + dedup_key = f"edit:{chat_id}:{message_id}" + if fire_and_forget: + self._limiter.fire_and_forget(_edit, dedup_key=dedup_key) + else: + await self._limiter.enqueue(_edit, dedup_key=dedup_key) + + async def queue_delete_message( + self, + chat_id: str, + message_id: str, + fire_and_forget: bool = True, + ) -> None: + """Enqueue a message delete.""" + if not self._limiter: + return await self.delete_message(chat_id, message_id) + + async def _delete(): + return await self.delete_message(chat_id, message_id) + + dedup_key = f"del:{chat_id}:{message_id}" + if fire_and_forget: + self._limiter.fire_and_forget(_delete, dedup_key=dedup_key) + else: + await 
self._limiter.enqueue(_delete, dedup_key=dedup_key) + + async def queue_delete_messages( + self, + chat_id: str, + message_ids: list[str], + fire_and_forget: bool = True, + ) -> None: + """Enqueue a bulk delete (if supported) or a sequence of deletes.""" + if not message_ids: + return + + if not self._limiter: + return await self.delete_messages(chat_id, message_ids) + + async def _bulk(): + return await self.delete_messages(chat_id, message_ids) + + # Dedup by the chunk content; okay to be coarse here. + dedup_key = f"del_bulk:{chat_id}:{hash(tuple(message_ids))}" + if fire_and_forget: + self._limiter.fire_and_forget(_bulk, dedup_key=dedup_key) + else: + await self._limiter.enqueue(_bulk, dedup_key=dedup_key) + + def fire_and_forget(self, task: Awaitable[Any]) -> None: + """Execute a coroutine without awaiting it.""" + if asyncio.iscoroutine(task): + _ = asyncio.create_task(task) + else: + _ = asyncio.ensure_future(task) + + def on_message( + self, + handler: Callable[[IncomingMessage], Awaitable[None]], + ) -> None: + """Register a message handler callback.""" + self._message_handler = handler + + @property + def is_connected(self) -> bool: + """Check if connected.""" + return self._connected + + async def _on_start_command( + self, update: Update, context: ContextTypes.DEFAULT_TYPE + ) -> None: + """Handle /start command.""" + if update.message: + await update.message.reply_text("👋 Hello! 
I am the Claude Code Proxy Bot.") + # We can also treat this as a message if we want it to trigger something + await self._on_telegram_message(update, context) + + async def _on_telegram_message( + self, update: Update, context: ContextTypes.DEFAULT_TYPE + ) -> None: + """Handle incoming updates.""" + if ( + not update.message + or not update.message.text + or not update.effective_user + or not update.effective_chat + ): + return + + user_id = str(update.effective_user.id) + chat_id = str(update.effective_chat.id) + + # Security check + if self.allowed_user_id and user_id != str(self.allowed_user_id).strip(): + logger.warning(f"Unauthorized access attempt from {user_id}") + return + + message_id = str(update.message.message_id) + reply_to = ( + str(update.message.reply_to_message.message_id) + if update.message.reply_to_message + else None + ) + thread_id = ( + str(update.message.message_thread_id) + if getattr(update.message, "message_thread_id", None) is not None + else None + ) + raw_text = update.message.text or "" + if self._log_raw_messaging_content: + text_preview = raw_text[:80] + if len(raw_text) > 80: + text_preview += "..." 
+ logger.info( + "TELEGRAM_MSG: chat_id={} message_id={} reply_to={} text_preview={!r}", + chat_id, + message_id, + reply_to, + text_preview, + ) + else: + logger.info( + "TELEGRAM_MSG: chat_id={} message_id={} reply_to={} text_len={}", + chat_id, + message_id, + reply_to, + len(raw_text), + ) + + if not self._message_handler: + return + + incoming = IncomingMessage( + text=update.message.text, + chat_id=chat_id, + user_id=user_id, + message_id=message_id, + platform="telegram", + reply_to_message_id=reply_to, + message_thread_id=thread_id, + raw_event=update, + ) + + try: + await self._message_handler(incoming) + except Exception as e: + if self._log_api_error_tracebacks: + logger.error("Error handling message: {}", e) + else: + logger.error("Error handling message: exc_type={}", type(e).__name__) + with contextlib.suppress(Exception): + await self.send_message( + chat_id, + f"❌ *{escape_md_v2('Error:')}* {escape_md_v2(format_user_error_preview(e))}", + reply_to=incoming.message_id, + message_thread_id=thread_id, + parse_mode="MarkdownV2", + ) + + async def _on_telegram_voice( + self, update: Update, context: ContextTypes.DEFAULT_TYPE + ) -> None: + """Handle incoming voice messages.""" + if ( + not update.message + or not update.message.voice + or not update.effective_user + or not update.effective_chat + ): + return + + if not self._voice_note_enabled: + await update.message.reply_text("Voice notes are disabled.") + return + + user_id = str(update.effective_user.id) + chat_id = str(update.effective_chat.id) + + if self.allowed_user_id and user_id != str(self.allowed_user_id).strip(): + logger.warning(f"Unauthorized voice access attempt from {user_id}") + return + + if not self._message_handler: + return + + thread_id = ( + str(update.message.message_thread_id) + if getattr(update.message, "message_thread_id", None) is not None + else None + ) + status_msg_id = await self.queue_send_message( + chat_id, + format_status("⏳", "Transcribing voice note..."), + 
reply_to=str(update.message.message_id), + parse_mode="MarkdownV2", + fire_and_forget=False, + message_thread_id=thread_id, + ) + + message_id = str(update.message.message_id) + await self._register_pending_voice(chat_id, message_id, str(status_msg_id)) + reply_to = ( + str(update.message.reply_to_message.message_id) + if update.message.reply_to_message + else None + ) + + voice = update.message.voice + suffix = ".ogg" + if voice.mime_type and "mpeg" in voice.mime_type: + suffix = ".mp3" + elif voice.mime_type and "mp4" in voice.mime_type: + suffix = ".mp4" + + with tempfile.NamedTemporaryFile(suffix=suffix, delete=False) as tmp: + tmp_path = Path(tmp.name) + + try: + tg_file = await context.bot.get_file(voice.file_id) + await tg_file.download_to_drive(custom_path=str(tmp_path)) + + transcribed = await self._voice_transcription.transcribe( + tmp_path, + voice.mime_type or "audio/ogg", + whisper_model=self._whisper_model, + whisper_device=self._whisper_device, + ) + + if not await self._is_voice_still_pending(chat_id, message_id): + await self.queue_delete_message(chat_id, str(status_msg_id)) + return + + await self._pending_voice.complete(chat_id, message_id, str(status_msg_id)) + + incoming = IncomingMessage( + text=transcribed, + chat_id=chat_id, + user_id=user_id, + message_id=message_id, + platform="telegram", + reply_to_message_id=reply_to, + message_thread_id=thread_id, + raw_event=update, + status_message_id=status_msg_id, + ) + + if self._log_raw_messaging_content: + logger.info( + "TELEGRAM_VOICE: chat_id={} message_id={} transcribed={!r}", + chat_id, + message_id, + ( + transcribed[:80] + "..." 
+ if len(transcribed) > 80 + else transcribed + ), + ) + else: + logger.info( + "TELEGRAM_VOICE: chat_id={} message_id={} transcribed_len={}", + chat_id, + message_id, + len(transcribed), + ) + + await self._message_handler(incoming) + except ValueError as e: + await update.message.reply_text(format_user_error_preview(e)) + except ImportError as e: + await update.message.reply_text(format_user_error_preview(e)) + except Exception as e: + if self._log_api_error_tracebacks: + logger.error("Voice transcription failed: {}", e) + else: + logger.error( + "Voice transcription failed: exc_type={}", type(e).__name__ + ) + await update.message.reply_text( + "Could not transcribe voice note. Please try again or send text." + ) + finally: + with contextlib.suppress(OSError): + tmp_path.unlink(missing_ok=True) diff --git a/messaging/rendering/__init__.py b/messaging/rendering/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0c3f2c012e78600e0f2770ca188eb868c845939c --- /dev/null +++ b/messaging/rendering/__init__.py @@ -0,0 +1,3 @@ +"""Markdown rendering utilities for messaging platforms.""" + +__all__: list[str] = [] diff --git a/messaging/rendering/__pycache__/__init__.cpython-314.pyc b/messaging/rendering/__pycache__/__init__.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..609932c757fa5543f9416da7d08e635bd90ea096 Binary files /dev/null and b/messaging/rendering/__pycache__/__init__.cpython-314.pyc differ diff --git a/messaging/rendering/__pycache__/discord_markdown.cpython-314.pyc b/messaging/rendering/__pycache__/discord_markdown.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1ab379a8829d6bf28e36071e8a45a76213264832 Binary files /dev/null and b/messaging/rendering/__pycache__/discord_markdown.cpython-314.pyc differ diff --git a/messaging/rendering/__pycache__/markdown_tables.cpython-314.pyc b/messaging/rendering/__pycache__/markdown_tables.cpython-314.pyc new file mode 
100644 index 0000000000000000000000000000000000000000..519ba8b2867771ab7de490bb605630129dd1bab2 Binary files /dev/null and b/messaging/rendering/__pycache__/markdown_tables.cpython-314.pyc differ diff --git a/messaging/rendering/__pycache__/profiles.cpython-314.pyc b/messaging/rendering/__pycache__/profiles.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a5771bf387467ef605c929365012a1f0d0c34c53 Binary files /dev/null and b/messaging/rendering/__pycache__/profiles.cpython-314.pyc differ diff --git a/messaging/rendering/__pycache__/telegram_markdown.cpython-314.pyc b/messaging/rendering/__pycache__/telegram_markdown.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..be603e5797aa8611250fa240cd5d7171fd003a4d Binary files /dev/null and b/messaging/rendering/__pycache__/telegram_markdown.cpython-314.pyc differ diff --git a/messaging/rendering/discord_markdown.py b/messaging/rendering/discord_markdown.py new file mode 100644 index 0000000000000000000000000000000000000000..ccaf9f4610dcf80e5a2ccd75e2e8bb067a01266f --- /dev/null +++ b/messaging/rendering/discord_markdown.py @@ -0,0 +1,318 @@ +"""Discord markdown utilities. + +Discord uses standard markdown: **bold**, *italic*, `code`, ```code block```. +Used by the message handler and Discord platform adapter. 
+""" + +from markdown_it import MarkdownIt + +from .markdown_tables import normalize_gfm_tables + +# Discord escapes: \ * _ ` ~ | > +DISCORD_SPECIAL = set("\\*_`~|>") + +_MD = MarkdownIt("commonmark", {"html": False, "breaks": False}) +_MD.enable("strikethrough") +_MD.enable("table") + + +def escape_discord(text: str) -> str: + """Escape text for Discord markdown (bold, italic, etc.).""" + return "".join(f"\\{ch}" if ch in DISCORD_SPECIAL else ch for ch in text) + + +def escape_discord_code(text: str) -> str: + """Escape text for Discord code spans/blocks.""" + return text.replace("\\", "\\\\").replace("`", "\\`") + + +def discord_bold(text: str) -> str: + """Format text as bold in Discord (uses **).""" + return f"**{escape_discord(text)}**" + + +def discord_code_inline(text: str) -> str: + """Format text as inline code in Discord.""" + return f"`{escape_discord_code(text)}`" + + +def format_status_discord(label: str, suffix: str | None = None) -> str: + """Format a status message for Discord (label in bold, optional suffix).""" + base = discord_bold(label) + if suffix: + return f"{base} {escape_discord(suffix)}" + return base + + +def format_status(emoji: str, label: str, suffix: str | None = None) -> str: + """Format a status message with emoji for Discord (matches Telegram API).""" + base = f"{emoji} {discord_bold(label)}" + if suffix: + return f"{base} {escape_discord(suffix)}" + return base + + +def render_markdown_to_discord(text: str) -> str: + """Render common Markdown into Discord-compatible format.""" + if not text: + return "" + + text = normalize_gfm_tables(text) + tokens = _MD.parse(text) + + def render_inline_table_plain(children) -> str: + out: list[str] = [] + for tok in children: + if tok.type == "text" or tok.type == "code_inline": + out.append(tok.content) + elif tok.type in {"softbreak", "hardbreak"}: + out.append(" ") + elif tok.type == "image" and tok.content: + out.append(tok.content) + return "".join(out) + + def render_inline(children) -> 
str: + out: list[str] = [] + i = 0 + while i < len(children): + tok = children[i] + t = tok.type + if t == "text": + out.append(escape_discord(tok.content)) + elif t in {"softbreak", "hardbreak"}: + out.append("\n") + elif t == "em_open" or t == "em_close": + out.append("*") + elif t == "strong_open" or t == "strong_close": + out.append("**") + elif t == "s_open" or t == "s_close": + out.append("~~") + elif t == "code_inline": + out.append(f"`{escape_discord_code(tok.content)}`") + elif t == "link_open": + href = "" + if tok.attrs: + if isinstance(tok.attrs, dict): + href = tok.attrs.get("href", "") + else: + for key, val in tok.attrs: + if key == "href": + href = val + break + inner_tokens = [] + i += 1 + while i < len(children) and children[i].type != "link_close": + inner_tokens.append(children[i]) + i += 1 + link_text = "" + for child in inner_tokens: + if child.type == "text" or child.type == "code_inline": + link_text += child.content + out.append(f"[{escape_discord(link_text)}]({href})") + elif t == "image": + href = "" + alt = tok.content or "" + if tok.attrs: + if isinstance(tok.attrs, dict): + href = tok.attrs.get("src", "") + else: + for key, val in tok.attrs: + if key == "src": + href = val + break + if alt: + out.append(f"{escape_discord(alt)} ({href})") + else: + out.append(href) + else: + out.append(escape_discord(tok.content or "")) + i += 1 + return "".join(out) + + out: list[str] = [] + list_stack: list[dict] = [] + pending_prefix: str | None = None + blockquote_level = 0 + in_heading = False + + def apply_blockquote(val: str) -> str: + if blockquote_level <= 0: + return val + prefix = "> " * blockquote_level + return prefix + val.replace("\n", "\n" + prefix) + + i = 0 + while i < len(tokens): + tok = tokens[i] + t = tok.type + if t == "paragraph_open": + pass + elif t == "paragraph_close": + out.append("\n") + elif t == "heading_open": + in_heading = True + elif t == "heading_close": + in_heading = False + out.append("\n") + elif t == 
"bullet_list_open": + list_stack.append({"type": "bullet", "index": 1}) + elif t == "bullet_list_close": + if list_stack: + list_stack.pop() + out.append("\n") + elif t == "ordered_list_open": + start = 1 + if tok.attrs: + if isinstance(tok.attrs, dict): + val = tok.attrs.get("start") + if val is not None: + try: + start = int(val) + except TypeError, ValueError: + start = 1 + else: + for key, val in tok.attrs: + if key == "start": + try: + start = int(val) + except TypeError, ValueError: + start = 1 + break + list_stack.append({"type": "ordered", "index": start}) + elif t == "ordered_list_close": + if list_stack: + list_stack.pop() + out.append("\n") + elif t == "list_item_open": + if list_stack: + top = list_stack[-1] + if top["type"] == "bullet": + pending_prefix = "- " + else: + pending_prefix = f"{top['index']}. " + top["index"] += 1 + elif t == "list_item_close": + out.append("\n") + elif t == "blockquote_open": + blockquote_level += 1 + elif t == "blockquote_close": + blockquote_level = max(0, blockquote_level - 1) + out.append("\n") + elif t == "table_open": + if pending_prefix: + out.append(apply_blockquote(pending_prefix.rstrip())) + out.append("\n") + pending_prefix = None + + rows: list[list[str]] = [] + row_is_header: list[bool] = [] + + j = i + 1 + in_thead = False + in_row = False + current_row: list[str] = [] + current_row_header = False + + in_cell = False + cell_parts: list[str] = [] + + while j < len(tokens): + tt = tokens[j].type + if tt == "thead_open": + in_thead = True + elif tt == "thead_close": + in_thead = False + elif tt == "tr_open": + in_row = True + current_row = [] + current_row_header = in_thead + elif tt in {"th_open", "td_open"}: + in_cell = True + cell_parts = [] + elif tt == "inline" and in_cell: + cell_parts.append( + render_inline_table_plain(tokens[j].children or []) + ) + elif tt in {"th_close", "td_close"} and in_cell: + cell = " ".join(cell_parts).strip() + current_row.append(cell) + in_cell = False + cell_parts = [] + elif 
tt == "tr_close" and in_row: + rows.append(current_row) + row_is_header.append(bool(current_row_header)) + in_row = False + elif tt == "table_close": + break + j += 1 + + if rows: + col_count = max((len(r) for r in rows), default=0) + norm_rows: list[list[str]] = [] + for r in rows: + if len(r) < col_count: + r = r + [""] * (col_count - len(r)) + norm_rows.append(r) + + widths: list[int] = [] + for c in range(col_count): + w = max((len(r[c]) for r in norm_rows), default=0) + widths.append(max(w, 3)) + + def fmt_row( + r: list[str], _w: list[int] = widths, _c: int = col_count + ) -> str: + cells = [r[c].ljust(_w[c]) for c in range(_c)] + return "| " + " | ".join(cells) + " |" + + def fmt_sep(_w: list[int] = widths, _c: int = col_count) -> str: + cells = ["-" * _w[c] for c in range(_c)] + return "| " + " | ".join(cells) + " |" + + last_header_idx = -1 + for idx, is_h in enumerate(row_is_header): + if is_h: + last_header_idx = idx + + lines: list[str] = [] + for idx, r in enumerate(norm_rows): + lines.append(fmt_row(r)) + if idx == last_header_idx: + lines.append(fmt_sep()) + + table_text = "\n".join(lines).rstrip() + out.append(f"```\n{escape_discord_code(table_text)}\n```") + out.append("\n") + + i = j + 1 + continue + elif t in {"code_block", "fence"}: + code = escape_discord_code(tok.content.rstrip("\n")) + out.append(f"```\n{code}\n```") + out.append("\n") + elif t == "inline": + rendered = render_inline(tok.children or []) + if in_heading: + rendered = f"**{render_inline(tok.children or [])}**" + if pending_prefix: + rendered = pending_prefix + rendered + pending_prefix = None + rendered = apply_blockquote(rendered) + out.append(rendered) + else: + if tok.content: + out.append(escape_discord(tok.content)) + i += 1 + + return "".join(out).rstrip() + + +__all__ = [ + "discord_bold", + "discord_code_inline", + "escape_discord", + "escape_discord_code", + "format_status", + "format_status_discord", + "render_markdown_to_discord", +] diff --git 
a/messaging/rendering/markdown_tables.py b/messaging/rendering/markdown_tables.py new file mode 100644 index 0000000000000000000000000000000000000000..e2dd8947fda680c55091550acf09776cae95661d --- /dev/null +++ b/messaging/rendering/markdown_tables.py @@ -0,0 +1,49 @@ +"""Shared Markdown table pre-normalization for platform renderers.""" + +from __future__ import annotations + +import re + +_TABLE_SEP_RE = re.compile(r"^\s*\|?\s*:?-{3,}:?\s*(\|\s*:?-{3,}:?\s*)+\|?\s*$") +_FENCE_RE = re.compile(r"^\s*```") + + +def _is_gfm_table_header_line(line: str) -> bool: + """Return whether a line looks like a GFM table header.""" + if "|" not in line: + return False + if _TABLE_SEP_RE.match(line): + return False + parts = [part.strip() for part in line.strip().strip("|").split("|")] + return len([part for part in parts if part]) >= 2 + + +def normalize_gfm_tables(text: str) -> str: + """Insert blank lines before detected tables outside fenced code blocks.""" + lines = text.splitlines() + if len(lines) < 2: + return text + + out_lines: list[str] = [] + in_fence = False + + for idx, line in enumerate(lines): + if _FENCE_RE.match(line): + in_fence = not in_fence + out_lines.append(line) + continue + + if ( + not in_fence + and idx + 1 < len(lines) + and _is_gfm_table_header_line(line) + and _TABLE_SEP_RE.match(lines[idx + 1]) + and out_lines + and out_lines[-1].strip() != "" + ): + indent_match = re.match(r"^(\s*)", line) + out_lines.append(indent_match.group(1) if indent_match else "") + + out_lines.append(line) + + return "\n".join(out_lines) diff --git a/messaging/rendering/profiles.py b/messaging/rendering/profiles.py new file mode 100644 index 0000000000000000000000000000000000000000..53b9ef106ae3018374cd840c71e0a1bf5b24afe7 --- /dev/null +++ b/messaging/rendering/profiles.py @@ -0,0 +1,55 @@ +"""Platform rendering profiles for messaging transcripts and status text.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import 
dataclass + +from messaging.rendering.discord_markdown import ( + discord_bold, + discord_code_inline, + escape_discord, + escape_discord_code, + render_markdown_to_discord, +) +from messaging.rendering.discord_markdown import ( + format_status as format_status_discord, +) +from messaging.rendering.telegram_markdown import ( + escape_md_v2, + escape_md_v2_code, + mdv2_bold, + mdv2_code_inline, + render_markdown_to_mdv2, +) +from messaging.rendering.telegram_markdown import ( + format_status as format_status_telegram, +) +from messaging.transcript import RenderCtx + + +@dataclass(frozen=True, slots=True) +class RenderingProfile: + format_status: Callable[[str, str, str | None], str] + parse_mode: str | None + render_ctx: RenderCtx + limit_chars: int + + +def build_rendering_profile(platform_name: str) -> RenderingProfile: + """Return rendering rules for a messaging platform.""" + is_discord = platform_name == "discord" + return RenderingProfile( + format_status=format_status_discord if is_discord else format_status_telegram, + parse_mode=None if is_discord else "MarkdownV2", + render_ctx=RenderCtx( + bold=discord_bold if is_discord else mdv2_bold, + code_inline=discord_code_inline if is_discord else mdv2_code_inline, + escape_code=escape_discord_code if is_discord else escape_md_v2_code, + escape_text=escape_discord if is_discord else escape_md_v2, + render_markdown=render_markdown_to_discord + if is_discord + else render_markdown_to_mdv2, + ), + limit_chars=1900 if is_discord else 3900, + ) diff --git a/messaging/rendering/telegram_markdown.py b/messaging/rendering/telegram_markdown.py new file mode 100644 index 0000000000000000000000000000000000000000..b0c24c6a6874585c3a135015be97b32926f84ce3 --- /dev/null +++ b/messaging/rendering/telegram_markdown.py @@ -0,0 +1,327 @@ +"""Telegram MarkdownV2 utilities. + +Renders common Markdown into Telegram MarkdownV2 format. +Used by the message handler and Telegram platform adapter. 
+""" + +from markdown_it import MarkdownIt + +from .markdown_tables import normalize_gfm_tables + +MDV2_SPECIAL_CHARS = set("\\_*[]()~`>#+-=|{}.!") +MDV2_LINK_ESCAPE = set("\\)") + +_MD = MarkdownIt("commonmark", {"html": False, "breaks": False}) +_MD.enable("strikethrough") +_MD.enable("table") + + +def escape_md_v2(text: str) -> str: + """Escape text for Telegram MarkdownV2.""" + return "".join(f"\\{ch}" if ch in MDV2_SPECIAL_CHARS else ch for ch in text) + + +def escape_md_v2_code(text: str) -> str: + """Escape text for Telegram MarkdownV2 code spans/blocks.""" + return text.replace("\\", "\\\\").replace("`", "\\`") + + +def escape_md_v2_link_url(text: str) -> str: + """Escape URL for Telegram MarkdownV2 link destination.""" + return "".join(f"\\{ch}" if ch in MDV2_LINK_ESCAPE else ch for ch in text) + + +def mdv2_bold(text: str) -> str: + """Format text as bold in MarkdownV2.""" + return f"*{escape_md_v2(text)}*" + + +def mdv2_code_inline(text: str) -> str: + """Format text as inline code in MarkdownV2.""" + return f"`{escape_md_v2_code(text)}`" + + +def format_status(emoji: str, label: str, suffix: str | None = None) -> str: + """Format a status message with emoji and optional suffix.""" + base = f"{emoji} {mdv2_bold(label)}" + if suffix: + return f"{base} {escape_md_v2(suffix)}" + return base + + +def render_markdown_to_mdv2(text: str) -> str: + """Render common Markdown into Telegram MarkdownV2.""" + if not text: + return "" + + text = normalize_gfm_tables(text) + tokens = _MD.parse(text) + + def render_inline_table_plain(children) -> str: + out: list[str] = [] + for tok in children: + if tok.type == "text" or tok.type == "code_inline": + out.append(tok.content) + elif tok.type in {"softbreak", "hardbreak"}: + out.append(" ") + elif tok.type == "image" and tok.content: + out.append(tok.content) + return "".join(out) + + def render_inline_plain(children) -> str: + out: list[str] = [] + for tok in children: + if tok.type == "text" or tok.type == "code_inline": 
+ out.append(escape_md_v2(tok.content)) + elif tok.type in {"softbreak", "hardbreak"}: + out.append("\n") + return "".join(out) + + def render_inline(children) -> str: + out: list[str] = [] + i = 0 + while i < len(children): + tok = children[i] + t = tok.type + if t == "text": + out.append(escape_md_v2(tok.content)) + elif t in {"softbreak", "hardbreak"}: + out.append("\n") + elif t == "em_open" or t == "em_close": + out.append("_") + elif t == "strong_open" or t == "strong_close": + out.append("*") + elif t == "s_open" or t == "s_close": + out.append("~") + elif t == "code_inline": + out.append(f"`{escape_md_v2_code(tok.content)}`") + elif t == "link_open": + href = "" + if tok.attrs: + if isinstance(tok.attrs, dict): + href = tok.attrs.get("href", "") + else: + for key, val in tok.attrs: + if key == "href": + href = val + break + inner_tokens = [] + i += 1 + while i < len(children) and children[i].type != "link_close": + inner_tokens.append(children[i]) + i += 1 + link_text = "" + for child in inner_tokens: + if child.type == "text" or child.type == "code_inline": + link_text += child.content + out.append( + f"[{escape_md_v2(link_text)}]({escape_md_v2_link_url(href)})" + ) + elif t == "image": + href = "" + alt = tok.content or "" + if tok.attrs: + if isinstance(tok.attrs, dict): + href = tok.attrs.get("src", "") + else: + for key, val in tok.attrs: + if key == "src": + href = val + break + if alt: + out.append(f"{escape_md_v2(alt)} ({escape_md_v2_link_url(href)})") + else: + out.append(escape_md_v2_link_url(href)) + else: + out.append(escape_md_v2(tok.content or "")) + i += 1 + return "".join(out) + + out: list[str] = [] + list_stack: list[dict] = [] + pending_prefix: str | None = None + blockquote_level = 0 + in_heading = False + + def apply_blockquote(val: str) -> str: + if blockquote_level <= 0: + return val + prefix = "> " * blockquote_level + return prefix + val.replace("\n", "\n" + prefix) + + i = 0 + while i < len(tokens): + tok = tokens[i] + t = tok.type 
+ if t == "paragraph_open": + pass + elif t == "paragraph_close": + out.append("\n") + elif t == "heading_open": + in_heading = True + elif t == "heading_close": + in_heading = False + out.append("\n") + elif t == "bullet_list_open": + list_stack.append({"type": "bullet", "index": 1}) + elif t == "bullet_list_close": + if list_stack: + list_stack.pop() + out.append("\n") + elif t == "ordered_list_open": + start = 1 + if tok.attrs: + if isinstance(tok.attrs, dict): + val = tok.attrs.get("start") + if val is not None: + try: + start = int(val) + except (TypeError, ValueError): + start = 1 + else: + for key, val in tok.attrs: + if key == "start": + try: + start = int(val) + except (TypeError, ValueError): + start = 1 + break + list_stack.append({"type": "ordered", "index": start}) + elif t == "ordered_list_close": + if list_stack: + list_stack.pop() + out.append("\n") + elif t == "list_item_open": + if list_stack: + top = list_stack[-1] + if top["type"] == "bullet": + pending_prefix = "\\- " + else: + pending_prefix = f"{top['index']}\\."
+ top["index"] += 1 + pending_prefix += " " + elif t == "list_item_close": + out.append("\n") + elif t == "blockquote_open": + blockquote_level += 1 + elif t == "blockquote_close": + blockquote_level = max(0, blockquote_level - 1) + out.append("\n") + elif t == "table_open": + if pending_prefix: + out.append(apply_blockquote(pending_prefix.rstrip())) + out.append("\n") + pending_prefix = None + + rows: list[list[str]] = [] + row_is_header: list[bool] = [] + + j = i + 1 + in_thead = False + in_row = False + current_row: list[str] = [] + current_row_header = False + + in_cell = False + cell_parts: list[str] = [] + + while j < len(tokens): + tt = tokens[j].type + if tt == "thead_open": + in_thead = True + elif tt == "thead_close": + in_thead = False + elif tt == "tr_open": + in_row = True + current_row = [] + current_row_header = in_thead + elif tt in {"th_open", "td_open"}: + in_cell = True + cell_parts = [] + elif tt == "inline" and in_cell: + cell_parts.append( + render_inline_table_plain(tokens[j].children or []) + ) + elif tt in {"th_close", "td_close"} and in_cell: + cell = " ".join(cell_parts).strip() + current_row.append(cell) + in_cell = False + cell_parts = [] + elif tt == "tr_close" and in_row: + rows.append(current_row) + row_is_header.append(bool(current_row_header)) + in_row = False + elif tt == "table_close": + break + j += 1 + + if rows: + col_count = max((len(r) for r in rows), default=0) + norm_rows: list[list[str]] = [] + for r in rows: + if len(r) < col_count: + r = r + [""] * (col_count - len(r)) + norm_rows.append(r) + + widths: list[int] = [] + for c in range(col_count): + w = max((len(r[c]) for r in norm_rows), default=0) + widths.append(max(w, 3)) + + def fmt_row( + r: list[str], _w: list[int] = widths, _c: int = col_count + ) -> str: + cells = [r[c].ljust(_w[c]) for c in range(_c)] + return "| " + " | ".join(cells) + " |" + + def fmt_sep(_w: list[int] = widths, _c: int = col_count) -> str: + cells = ["-" * _w[c] for c in range(_c)] + return 
"| " + " | ".join(cells) + " |" + + last_header_idx = -1 + for idx, is_h in enumerate(row_is_header): + if is_h: + last_header_idx = idx + + lines: list[str] = [] + for idx, r in enumerate(norm_rows): + lines.append(fmt_row(r)) + if idx == last_header_idx: + lines.append(fmt_sep()) + + table_text = "\n".join(lines).rstrip() + out.append(f"```\n{escape_md_v2_code(table_text)}\n```") + out.append("\n") + + i = j + 1 + continue + elif t in {"code_block", "fence"}: + code = escape_md_v2_code(tok.content.rstrip("\n")) + out.append(f"```\n{code}\n```") + out.append("\n") + elif t == "inline": + rendered = render_inline(tok.children or []) + if in_heading: + rendered = f"*{render_inline_plain(tok.children or [])}*" + if pending_prefix: + rendered = pending_prefix + rendered + pending_prefix = None + rendered = apply_blockquote(rendered) + out.append(rendered) + else: + if tok.content: + out.append(escape_md_v2(tok.content)) + i += 1 + + return "".join(out).rstrip() + + +__all__ = [ + "escape_md_v2", + "escape_md_v2_code", + "escape_md_v2_link_url", + "format_status", + "mdv2_bold", + "mdv2_code_inline", + "render_markdown_to_mdv2", +] diff --git a/messaging/safe_diagnostics.py b/messaging/safe_diagnostics.py new file mode 100644 index 0000000000000000000000000000000000000000..add30c536c677c6a66de14ae0345cfc6bfcbdd50 --- /dev/null +++ b/messaging/safe_diagnostics.py @@ -0,0 +1,17 @@ +"""Helpers for redacting user-derived content from log lines.""" + +from __future__ import annotations + + +def format_exception_for_log(exc: BaseException, *, log_full_message: bool) -> str: + """Return exception type and optionally ``str(exc)`` for operator diagnostics.""" + if log_full_message: + return f"{type(exc).__name__}: {exc}" + return type(exc).__name__ + + +def text_len_hint(text: str | None) -> int: + """Length of text for metadata-only logging (0 when missing).""" + if not text: + return 0 + return len(text) diff --git a/messaging/session.py b/messaging/session.py new file mode 
100644 index 0000000000000000000000000000000000000000..bbb0cdc192bfb489baa9195249f2f8e63a057d83 --- /dev/null +++ b/messaging/session.py @@ -0,0 +1,305 @@ +""" +Session Store for Messaging Platforms + +Provides persistent storage for mapping platform messages to Claude CLI session IDs +and message trees for conversation continuation. +""" + +import contextlib +import json +import os +import tempfile +import threading +from datetime import UTC, datetime +from typing import Any + +from loguru import logger + + +class SessionStore: + """ + Persistent storage for message ↔ Claude session mappings and message trees. + + Uses a JSON file for storage with thread-safe operations. + Platform-agnostic: works with any messaging platform. + """ + + def __init__( + self, + storage_path: str = "sessions.json", + *, + message_log_cap: int | None = None, + ): + self.storage_path = storage_path + self._lock = threading.Lock() + self._trees: dict[str, dict] = {} # root_id -> tree data + self._node_to_tree: dict[str, str] = {} # node_id -> root_id + # Per-chat message ID log used to support best-effort UI clearing (/clear). 
+ # Key: "{platform}:{chat_id}" -> list of records + self._message_log: dict[str, list[dict[str, Any]]] = {} + self._message_log_ids: dict[str, set[str]] = {} + self._dirty = False + self._save_timer: threading.Timer | None = None + self._save_debounce_secs = 0.5 + self._message_log_cap: int | None = message_log_cap + self._load() + + def _make_chat_key(self, platform: str, chat_id: str) -> str: + return f"{platform}:{chat_id}" + + def _load(self) -> None: + """Load sessions and trees from disk.""" + if not os.path.exists(self.storage_path): + return + + try: + with open(self.storage_path, encoding="utf-8") as f: + data = json.load(f) + + # Load trees + self._trees = data.get("trees", {}) + self._node_to_tree = data.get("node_to_tree", {}) + + # Load message log (optional/backward compatible) + raw_log = data.get("message_log", {}) or {} + if isinstance(raw_log, dict): + self._message_log = {} + self._message_log_ids = {} + for chat_key, items in raw_log.items(): + if not isinstance(chat_key, str) or not isinstance(items, list): + continue + cleaned: list[dict[str, Any]] = [] + seen: set[str] = set() + for it in items: + if not isinstance(it, dict): + continue + mid = it.get("message_id") + if mid is None: + continue + mid_s = str(mid) + if mid_s in seen: + continue + seen.add(mid_s) + cleaned.append( + { + "message_id": mid_s, + "ts": str(it.get("ts") or ""), + "direction": str(it.get("direction") or ""), + "kind": str(it.get("kind") or ""), + } + ) + self._message_log[chat_key] = cleaned + self._message_log_ids[chat_key] = seen + + logger.info( + f"Loaded {len(self._trees)} trees and " + f"{sum(len(v) for v in self._message_log.values())} msg_ids from {self.storage_path}" + ) + except Exception as e: + logger.error(f"Failed to load sessions: {e}") + + def _snapshot(self) -> dict: + """Snapshot current state for serialization. 
Caller must hold self._lock.""" + return { + "trees": dict(self._trees), + "node_to_tree": dict(self._node_to_tree), + "message_log": {k: list(v) for k, v in self._message_log.items()}, + } + + def _write_data(self, data: dict) -> None: + """Atomically write data dict to disk. Must be called WITHOUT holding self._lock.""" + abs_target = os.path.abspath(self.storage_path) + dir_name = os.path.dirname(abs_target) or "." + fd, tmp_path = tempfile.mkstemp( + dir=dir_name, prefix=".sessions.", suffix=".tmp.json" + ) + try: + with os.fdopen(fd, "w", encoding="utf-8") as f: + json.dump(data, f, indent=2) + f.flush() + os.fsync(f.fileno()) + os.replace(tmp_path, abs_target) + except BaseException: + with contextlib.suppress(OSError): + os.unlink(tmp_path) + raise + + def _schedule_save(self) -> None: + """Schedule a debounced save. Caller must hold self._lock.""" + self._dirty = True + if self._save_timer is not None: + self._save_timer.cancel() + self._save_timer = None + self._save_timer = threading.Timer( + self._save_debounce_secs, self._save_from_timer + ) + self._save_timer.daemon = True + self._save_timer.start() + + def _save_from_timer(self) -> None: + """Timer callback: save if dirty. Runs in timer thread.""" + with self._lock: + if not self._dirty: + self._save_timer = None + return + snapshot = self._snapshot() + self._dirty = False + self._save_timer = None + try: + self._write_data(snapshot) + except Exception as e: + logger.error(f"Failed to save sessions: {e}") + with self._lock: + self._dirty = True + + def _flush_save(self) -> dict: + """Cancel pending timer and snapshot current state. Caller must hold self._lock. + Returns snapshot dict; caller must call _write_data(snapshot) outside the lock.""" + if self._save_timer is not None: + self._save_timer.cancel() + self._save_timer = None + self._dirty = False + return self._snapshot() + + def flush_pending_save(self) -> None: + """Flush any pending debounced save. 
Call on shutdown to avoid losing data.""" + with self._lock: + snapshot = self._flush_save() + try: + self._write_data(snapshot) + except Exception as e: + logger.error(f"Failed to save sessions: {e}") + with self._lock: + self._dirty = True + + def record_message_id( + self, + platform: str, + chat_id: str, + message_id: str, + direction: str, + kind: str, + ) -> None: + """Record a message_id for later best-effort deletion (/clear).""" + if message_id is None: + return + + chat_key = self._make_chat_key(str(platform), str(chat_id)) + mid = str(message_id) + + with self._lock: + seen = self._message_log_ids.setdefault(chat_key, set()) + if mid in seen: + return + + rec = { + "message_id": mid, + "ts": datetime.now(UTC).isoformat(), + "direction": str(direction), + "kind": str(kind), + } + self._message_log.setdefault(chat_key, []).append(rec) + seen.add(mid) + + # Optional cap to prevent unbounded growth if configured. + if self._message_log_cap is not None and self._message_log_cap > 0: + items = self._message_log.get(chat_key, []) + if len(items) > self._message_log_cap: + self._message_log[chat_key] = items[-self._message_log_cap :] + self._message_log_ids[chat_key] = { + str(x.get("message_id")) for x in self._message_log[chat_key] + } + + self._schedule_save() + + def get_message_ids_for_chat(self, platform: str, chat_id: str) -> list[str]: + """Get all recorded message IDs for a chat (in insertion order).""" + chat_key = self._make_chat_key(str(platform), str(chat_id)) + with self._lock: + items = self._message_log.get(chat_key, []) + return [ + str(x.get("message_id")) + for x in items + if x.get("message_id") is not None + ] + + def clear_all(self) -> None: + """Clear all stored sessions/trees/mappings and persist an empty store.""" + with self._lock: + self._trees.clear() + self._node_to_tree.clear() + self._message_log.clear() + self._message_log_ids.clear() + snapshot = self._flush_save() + try: + self._write_data(snapshot) + except Exception as e: + 
logger.error(f"Failed to save sessions: {e}") + with self._lock: + self._dirty = True + + # ==================== Tree Methods ==================== + + def save_tree(self, root_id: str, tree_data: dict) -> None: + """ + Save a message tree. + + Args: + root_id: Root node ID of the tree + tree_data: Serialized tree data from tree.to_dict() + """ + with self._lock: + self._trees[root_id] = tree_data + + # Update node-to-tree mapping + for node_id in tree_data.get("nodes", {}): + self._node_to_tree[node_id] = root_id + + self._schedule_save() + logger.debug(f"Saved tree {root_id}") + + def get_tree(self, root_id: str) -> dict | None: + """Get a tree by its root ID.""" + with self._lock: + return self._trees.get(root_id) + + def register_node(self, node_id: str, root_id: str) -> None: + """Register a node ID to a tree root.""" + with self._lock: + self._node_to_tree[node_id] = root_id + self._schedule_save() + + def remove_node_mappings(self, node_ids: list[str]) -> None: + """Remove node IDs from the node-to-tree mapping.""" + with self._lock: + for nid in node_ids: + self._node_to_tree.pop(nid, None) + self._schedule_save() + + def remove_tree(self, root_id: str) -> None: + """Remove a tree and all its node mappings from the store.""" + with self._lock: + tree_data = self._trees.pop(root_id, None) + if tree_data: + for node_id in tree_data.get("nodes", {}): + self._node_to_tree.pop(node_id, None) + self._schedule_save() + + def get_all_trees(self) -> dict[str, dict]: + """Get all stored trees (public accessor).""" + with self._lock: + return dict(self._trees) + + def get_node_mapping(self) -> dict[str, str]: + """Get the node-to-tree mapping (public accessor).""" + with self._lock: + return dict(self._node_to_tree) + + def sync_from_tree_data( + self, trees: dict[str, dict], node_to_tree: dict[str, str] + ) -> None: + """Sync internal tree state from external data and persist.""" + with self._lock: + self._trees = trees + self._node_to_tree = node_to_tree + 
self._schedule_save() diff --git a/messaging/transcript.py b/messaging/transcript.py new file mode 100644 index 0000000000000000000000000000000000000000..f1a015d0600e34be19e45665939baa8f7e48c7d6 --- /dev/null +++ b/messaging/transcript.py @@ -0,0 +1,581 @@ +"""Ordered transcript builder for messaging UIs (Telegram, etc.). + +This module maintains an ordered list of "segments" that represent what the user +should see in the chat transcript: thinking, tool calls, tool results, subagent +headers, and assistant text. It is designed for in-place message editing where +the transcript grows over time and older content must be truncated. +""" + +from __future__ import annotations + +import json +from abc import ABC, abstractmethod +from collections import deque +from collections.abc import Callable, Iterable +from dataclasses import dataclass, field +from typing import Any + +from loguru import logger + + +def _safe_json_dumps(obj: Any) -> str: + try: + return json.dumps(obj, indent=2, ensure_ascii=False, sort_keys=True) + except Exception: + return str(obj) + + +@dataclass +class Segment(ABC): + kind: str + + @abstractmethod + def render(self, ctx: RenderCtx) -> str: ... + + +@dataclass +class ThinkingSegment(Segment): + def __init__(self) -> None: + super().__init__(kind="thinking") + self._parts: list[str] = [] + + def append(self, t: str) -> None: + if t: + self._parts.append(t) + + @property + def text(self) -> str: + return "".join(self._parts) + + def render(self, ctx: RenderCtx) -> str: + raw = self.text or "" + if ctx.thinking_tail_max is not None and len(raw) > ctx.thinking_tail_max: + raw = "..." 
+ raw[-(ctx.thinking_tail_max - 3) :] + inner = ctx.escape_code(raw) + return f"💭 {ctx.bold('Thinking')}\n```\n{inner}\n```" + + +@dataclass +class TextSegment(Segment): + def __init__(self) -> None: + super().__init__(kind="text") + self._parts: list[str] = [] + + def append(self, t: str) -> None: + if t: + self._parts.append(t) + + @property + def text(self) -> str: + return "".join(self._parts) + + def render(self, ctx: RenderCtx) -> str: + raw = self.text or "" + if ctx.text_tail_max is not None and len(raw) > ctx.text_tail_max: + raw = "..." + raw[-(ctx.text_tail_max - 3) :] + return ctx.render_markdown(raw) + + +@dataclass +class ToolCallSegment(Segment): + tool_use_id: str + name: str + closed: bool = False + indent_level: int = 0 + + def __init__(self, tool_use_id: str, name: str, *, indent_level: int = 0) -> None: + super().__init__(kind="tool_call") + self.tool_use_id = str(tool_use_id or "") + self.name = str(name or "tool") + self.indent_level = max(0, int(indent_level)) + + def render(self, ctx: RenderCtx) -> str: + name = ctx.code_inline(self.name) + # Per UX requirement: do not display tool args/results, only the tool call. + prefix = " " * self.indent_level + return f"{prefix}🛠 {ctx.bold('Tool call:')} {name}" + + +@dataclass +class ToolResultSegment(Segment): + tool_use_id: str + name: str | None + content_text: str + is_error: bool = False + + def __init__( + self, + tool_use_id: str, + content: Any, + *, + name: str | None = None, + is_error: bool = False, + ) -> None: + super().__init__(kind="tool_result") + self.tool_use_id = str(tool_use_id or "") + self.name = str(name) if name is not None else None + self.is_error = bool(is_error) + if isinstance(content, str): + self.content_text = content + else: + self.content_text = _safe_json_dumps(content) + + def render(self, ctx: RenderCtx) -> str: + raw = self.content_text or "" + if ctx.tool_output_tail_max is not None and len(raw) > ctx.tool_output_tail_max: + raw = "..." 
+ raw[-(ctx.tool_output_tail_max - 3) :] + inner = ctx.escape_code(raw) + label = "Tool error:" if self.is_error else "Tool result:" + maybe_name = f" {ctx.code_inline(self.name)}" if self.name else "" + return f"📤 {ctx.bold(label)}{maybe_name}\n```\n{inner}\n```" + + +@dataclass +class SubagentSegment(Segment): + description: str + tool_calls: int = 0 + tools_used: set[str] = field(default_factory=set) + current_tool: ToolCallSegment | None = None + + def __init__(self, description: str) -> None: + super().__init__(kind="subagent") + self.description = str(description or "Subagent") + self.tool_calls = 0 + self.tools_used = set() + self.current_tool = None + + def set_current_tool_call(self, tool_use_id: str, name: str) -> ToolCallSegment: + tool_use_id = str(tool_use_id or "") + name = str(name or "tool") + self.tools_used.add(name) + self.tool_calls += 1 + self.current_tool = ToolCallSegment(tool_use_id, name, indent_level=1) + return self.current_tool + + def render(self, ctx: RenderCtx) -> str: + inner_prefix = " " + + lines: list[str] = [ + f"🤖 {ctx.bold('Subagent:')} {ctx.code_inline(self.description)}" + ] + + if self.current_tool is not None: + try: + rendered = self.current_tool.render(ctx) + except Exception: + rendered = "" + if rendered: + lines.append(rendered) + + tools_used = sorted(self.tools_used) + tools_set_raw = "{{{}}}".format(", ".join(tools_used)) if tools_used else "{}" + + # Keep braces inside a code entity so MarkdownV2 doesn't require escaping them. 
+ lines.append( + f"{inner_prefix}{ctx.bold('Tools used:')} {ctx.code_inline(tools_set_raw)}" + ) + lines.append( + f"{inner_prefix}{ctx.bold('Tool calls:')} {ctx.code_inline(str(self.tool_calls))}" + ) + return "\n".join(lines) + + +@dataclass +class ErrorSegment(Segment): + message: str + + def __init__(self, message: str) -> None: + super().__init__(kind="error") + self.message = str(message or "Unknown error") + + def render(self, ctx: RenderCtx) -> str: + return f"⚠️ {ctx.bold('Error:')} {ctx.code_inline(self.message)}" + + +@dataclass +class RenderCtx: + bold: Callable[[str], str] + code_inline: Callable[[str], str] + escape_code: Callable[[str], str] + escape_text: Callable[[str], str] + render_markdown: Callable[[str], str] + + thinking_tail_max: int | None = 1000 + tool_input_tail_max: int | None = 1200 + tool_output_tail_max: int | None = 1600 + text_tail_max: int | None = 2000 + + +class TranscriptBuffer: + """Maintains an ordered, truncatable transcript of events.""" + + def __init__( + self, + *, + show_tool_results: bool = True, + debug_subagent_stack: bool = False, + ) -> None: + self._segments: list[Segment] = [] + self._open_thinking_by_index: dict[int, ThinkingSegment] = {} + self._open_text_by_index: dict[int, TextSegment] = {} + + # content_block index -> tool call segment (for streaming tool args) + self._open_tools_by_index: dict[int, ToolCallSegment] = {} + + # tool_use_id -> tool name (for tool_result labeling) + self._tool_name_by_id: dict[str, str] = {} + + self._show_tool_results = bool(show_tool_results) + + # subagent context stack. Each entry is the Task tool_use_id we are waiting to close. + self._subagent_stack: list[str] = [] + # Parallel stack of segments for rendering nested subagents. 
+ self._subagent_segments: list[SubagentSegment] = [] + self._debug_subagent_stack = debug_subagent_stack + + def _in_subagent(self) -> bool: + return bool(self._subagent_stack) + + def _subagent_current(self) -> SubagentSegment | None: + return self._subagent_segments[-1] if self._subagent_segments else None + + def _task_heading_from_input(self, inp: Any) -> str: + # We never display full JSON args; only extract a short heading. + if isinstance(inp, dict): + desc = str(inp.get("description", "") or "").strip() + if desc: + return desc + subagent_type = str(inp.get("subagent_type", "") or "").strip() + if subagent_type: + return subagent_type + typ = str(inp.get("type", "") or "").strip() + if typ: + return typ + return "Subagent" + + def _subagent_push(self, tool_id: str, seg: SubagentSegment) -> None: + # Some providers can omit ids; still track depth for UI suppression. + tool_id = ( + str(tool_id or "").strip() or f"__task_{len(self._subagent_stack) + 1}" + ) + self._subagent_stack.append(tool_id) + self._subagent_segments.append(seg) + if self._debug_subagent_stack: + logger.debug( + "SUBAGENT_STACK: push id={!r} depth={} heading={!r}", + tool_id, + len(self._subagent_stack), + getattr(seg, "description", None), + ) + + def _subagent_pop(self, tool_id: str) -> bool: + tool_id = str(tool_id or "").strip() + if not self._subagent_stack: + return False + + def _ids_roughly_match(stack_id: str, result_id: str) -> bool: + if not stack_id or not result_id: + return False + if stack_id == result_id: + return True + # Some providers emit Task result ids with a suffix/prefix variant. + # Treat those as the same logical Task invocation. + return stack_id.startswith(result_id) or result_id.startswith(stack_id) + + if tool_id: + # O(1) common case: LIFO - top of stack matches. 
+ if _ids_roughly_match(self._subagent_stack[-1], tool_id): + self._subagent_stack.pop() + if self._subagent_segments: + self._subagent_segments.pop() + if self._debug_subagent_stack: + logger.debug( + "SUBAGENT_STACK: pop id={!r} depth={} (LIFO)", + tool_id, + len(self._subagent_stack), + ) + return True + # Pop to the matching id (defensive against non-LIFO emissions). + idx = -1 + for i in range(len(self._subagent_stack) - 1, -1, -1): + if _ids_roughly_match(self._subagent_stack[i], tool_id): + idx = i + break + if idx < 0: + return False + while len(self._subagent_stack) > idx: + popped = self._subagent_stack.pop() + if self._subagent_segments: + self._subagent_segments.pop() + if self._debug_subagent_stack: + logger.debug( + "SUBAGENT_STACK: pop id={!r} depth={} (matched={!r})", + popped, + len(self._subagent_stack), + tool_id, + ) + return True + + # No id in result; only close if we have a synthetic top marker. + if self._subagent_stack and self._subagent_stack[-1].startswith("__task_"): + popped = self._subagent_stack.pop() + if self._subagent_segments: + self._subagent_segments.pop() + if self._debug_subagent_stack: + logger.debug( + "SUBAGENT_STACK: pop id={!r} depth={} (synthetic)", + popped, + len(self._subagent_stack), + ) + return True + return False + + def _ensure_thinking(self) -> ThinkingSegment: + seg = ThinkingSegment() + self._segments.append(seg) + return seg + + def _ensure_text(self) -> TextSegment: + seg = TextSegment() + self._segments.append(seg) + return seg + + def apply(self, ev: dict[str, Any]) -> None: + """Apply a parsed event to the transcript.""" + et = ev.get("type") + + # Subagent rules: inside a Task/subagent, we only show tool calls/results. 
+ if self._in_subagent() and et in ( + "thinking_start", + "thinking_delta", + "thinking_chunk", + "text_start", + "text_delta", + "text_chunk", + ): + return + + if et == "thinking_start": + idx = int(ev.get("index", -1)) + if idx >= 0: + # Defensive: if a provider reuses indices without emitting a stop, + # close the previous open segment first. + self.apply({"type": "block_stop", "index": idx}) + seg = self._ensure_thinking() + if idx >= 0: + self._open_thinking_by_index[idx] = seg + return + if et in ("thinking_delta", "thinking_chunk"): + idx = int(ev.get("index", -1)) + seg = self._open_thinking_by_index.get(idx) + if seg is None: + seg = self._ensure_thinking() + if idx >= 0: + self._open_thinking_by_index[idx] = seg + seg.append(str(ev.get("text", ""))) + return + if et == "thinking_stop": + idx = int(ev.get("index", -1)) + if idx >= 0: + self._open_thinking_by_index.pop(idx, None) + return + + if et == "text_start": + idx = int(ev.get("index", -1)) + if idx >= 0: + self.apply({"type": "block_stop", "index": idx}) + seg = self._ensure_text() + if idx >= 0: + self._open_text_by_index[idx] = seg + return + if et in ("text_delta", "text_chunk"): + idx = int(ev.get("index", -1)) + seg = self._open_text_by_index.get(idx) + if seg is None: + seg = self._ensure_text() + if idx >= 0: + self._open_text_by_index[idx] = seg + seg.append(str(ev.get("text", ""))) + return + if et == "text_stop": + idx = int(ev.get("index", -1)) + if idx >= 0: + self._open_text_by_index.pop(idx, None) + return + + if et == "tool_use_start": + idx = int(ev.get("index", -1)) + if idx >= 0: + self.apply({"type": "block_stop", "index": idx}) + tool_id = str(ev.get("id", "") or "").strip() + name = str(ev.get("name", "") or "tool") + if tool_id: + self._tool_name_by_id[tool_id] = name + + # Task tool indicates subagent. 
+ if name == "Task": + heading = self._task_heading_from_input(ev.get("input")) + seg = SubagentSegment(heading) + self._segments.append(seg) + self._subagent_push(tool_id, seg) + return + + # Normal tool call. + if self._in_subagent(): + parent = self._subagent_current() + if parent is not None: + seg = parent.set_current_tool_call(tool_id, name) + else: + seg = ToolCallSegment(tool_id, name) + self._segments.append(seg) + else: + seg = ToolCallSegment(tool_id, name) + self._segments.append(seg) + + if idx >= 0: + self._open_tools_by_index[idx] = seg + return + + if et == "tool_use_delta": + # Track open tool by index for tool_use_stop (closing state). + return + + if et == "tool_use_stop": + idx = int(ev.get("index", -1)) + seg = self._open_tools_by_index.pop(idx, None) + if seg is not None: + seg.closed = True + return + + if et == "block_stop": + idx = int(ev.get("index", -1)) + if idx in self._open_tools_by_index: + self.apply({"type": "tool_use_stop", "index": idx}) + return + if idx in self._open_thinking_by_index: + self.apply({"type": "thinking_stop", "index": idx}) + return + if idx in self._open_text_by_index: + self.apply({"type": "text_stop", "index": idx}) + return + return + + if et == "tool_use": + tool_id = str(ev.get("id", "") or "").strip() + name = str(ev.get("name", "") or "tool") + if tool_id: + self._tool_name_by_id[tool_id] = name + + if name == "Task": + heading = self._task_heading_from_input(ev.get("input")) + seg = SubagentSegment(heading) + self._segments.append(seg) + self._subagent_push(tool_id, seg) + return + + if self._in_subagent(): + parent = self._subagent_current() + if parent is not None: + seg = parent.set_current_tool_call(tool_id, name) + else: + seg = ToolCallSegment(tool_id, name) + self._segments.append(seg) + else: + seg = ToolCallSegment(tool_id, name) + self._segments.append(seg) + + seg.closed = True + return + + if et == "tool_result": + tool_id = str(ev.get("tool_use_id", "") or "").strip() + name = 
self._tool_name_by_id.get(tool_id) + + # If this was the Task tool result, close subagent context. + if self._subagent_stack: + popped = self._subagent_pop(tool_id) + top = self._subagent_stack[-1] if self._subagent_stack else "" + looks_like_task_id = "task" in tool_id.lower() + # Some streams omit Task tool_use ids (synthetic stack ids), but include + # a real Task id on tool_result (e.g. "functions.Task:0"). Reconcile that. + if ( + not popped + and tool_id + and top.startswith("__task_") + and (name in (None, "Task")) + and looks_like_task_id + ): + self._subagent_pop("") + + if not self._show_tool_results: + return + + seg = ToolResultSegment( + tool_id, + ev.get("content"), + name=name, + is_error=bool(ev.get("is_error", False)), + ) + self._segments.append(seg) + return + + if et == "error": + self._segments.append(ErrorSegment(str(ev.get("message", "")))) + return + + def render(self, ctx: RenderCtx, *, limit_chars: int, status: str | None) -> str: + """Render transcript with truncation (drop oldest segments).""" + # Filter out empty rendered segments. + rendered: list[str] = [] + for seg in self._segments: + try: + out = seg.render(ctx) + except Exception: + continue + if out: + rendered.append(out) + + status_text = f"\n\n{status}" if status else "" + prefix_marker = ctx.escape_text("... (truncated)\n") + + def _join(parts: Iterable[str], add_marker: bool) -> str: + body = "\n".join(parts) + if add_marker and body: + body = prefix_marker + body + return body + status_text if (body or status_text) else status_text + + # Fast path. + candidate = _join(rendered, add_marker=False) + if len(candidate) <= limit_chars: + return candidate + + # Drop oldest segments until under limit (keep the tail). + # Use deque for O(1) popleft; list.pop(0) would be O(n) per iteration. 
+ parts: deque[str] = deque(rendered) + dropped = False + last_part: str | None = None + while parts: + candidate = _join(parts, add_marker=True) + if len(candidate) <= limit_chars: + return candidate + last_part = parts.popleft() + dropped = True + + # Nothing fits - preserve tail of last segment instead of only marker+status. + if dropped and last_part: + budget = limit_chars - len(prefix_marker) - len(status_text) + if budget > 20: + if len(last_part) > budget: + tail = "..." + last_part[-(budget - 3) :] + else: + tail = last_part + candidate = prefix_marker + tail + status_text + if len(candidate) <= limit_chars: + return candidate + + # Fallback: marker + status only. + if dropped: + minimal = prefix_marker + status_text.lstrip("\n") + if len(minimal) <= limit_chars: + return minimal + return status or "" diff --git a/messaging/transcription.py b/messaging/transcription.py new file mode 100644 index 0000000000000000000000000000000000000000..9a5f01ff1d06424df2e94845ea11ca5978a70bdc --- /dev/null +++ b/messaging/transcription.py @@ -0,0 +1,164 @@ +"""Voice note transcription for messaging platforms. 
+ +Supports: +- Local Whisper (cpu/cuda): Hugging Face transformers pipeline +- NVIDIA NIM: NVIDIA NIM Whisper/Parakeet +""" + +from pathlib import Path +from typing import Any + +from loguru import logger + +from providers.nvidia_nim.voice import ( + transcribe_audio_file as transcribe_nvidia_nim_audio, +) + +# Max file size in bytes (25 MB) +MAX_AUDIO_SIZE_BYTES = 25 * 1024 * 1024 + +# Short model names -> full Hugging Face model IDs (for local Whisper) +_MODEL_MAP: dict[str, str] = { + "tiny": "openai/whisper-tiny", + "base": "openai/whisper-base", + "small": "openai/whisper-small", + "medium": "openai/whisper-medium", + "large-v2": "openai/whisper-large-v2", + "large-v3": "openai/whisper-large-v3", + "large-v3-turbo": "openai/whisper-large-v3-turbo", +} + +# Lazy-loaded pipelines: (model_id, device, hf_token_fingerprint) -> pipeline +_pipeline_cache: dict[tuple[str, str, str], Any] = {} + + +def _resolve_model_id(whisper_model: str) -> str: + """Resolve short name to full Hugging Face model ID.""" + return _MODEL_MAP.get(whisper_model, whisper_model) + + +def _get_pipeline(model_id: str, device: str, hf_token: str = "") -> Any: + """Lazy-load transformers Whisper pipeline. 
Raises ImportError if not installed.""" + global _pipeline_cache + if device not in ("cpu", "cuda"): + raise ValueError(f"whisper_device must be 'cpu' or 'cuda', got {device!r}") + resolved_token = hf_token or "" + cache_key = (model_id, device, resolved_token) + if cache_key not in _pipeline_cache: + try: + import torch + from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor, pipeline + + hf_auth_token = resolved_token or None + + use_cuda = device == "cuda" and torch.cuda.is_available() + pipe_device = "cuda:0" if use_cuda else "cpu" + model_dtype = torch.float16 if use_cuda else torch.float32 + + model = AutoModelForSpeechSeq2Seq.from_pretrained( + model_id, + dtype=model_dtype, + low_cpu_mem_usage=True, + attn_implementation="sdpa", + token=hf_auth_token, + ) + model = model.to(pipe_device) + processor = AutoProcessor.from_pretrained(model_id, token=hf_auth_token) + + pipe = pipeline( + "automatic-speech-recognition", + model=model, + tokenizer=processor.tokenizer, + feature_extractor=processor.feature_extractor, + device=pipe_device, + ) + _pipeline_cache[cache_key] = pipe + logger.debug( + f"Loaded Whisper pipeline: model={model_id} device={pipe_device}" + ) + except ImportError as e: + raise ImportError( + "Local Whisper requires the voice_local extra. Install with: uv sync --extra voice_local" + ) from e + return _pipeline_cache[cache_key] + + +def transcribe_audio( + file_path: Path, + mime_type: str, + *, + whisper_model: str = "base", + whisper_device: str = "cpu", + hf_token: str = "", + nvidia_nim_api_key: str = "", +) -> str: + """ + Transcribe audio file to text. + + Supports: + - whisper_device="cpu"/"cuda": local Whisper (requires voice_local extra) + - whisper_device="nvidia_nim": NVIDIA NIM Whisper API (requires voice extra) + + Args: + file_path: Path to audio file (OGG, MP3, MP4, WAV, M4A supported) + mime_type: MIME type of the audio (e.g. 
"audio/ogg") + whisper_model: Model ID or short name (local) or NVIDIA NIM model + whisper_device: "cpu" | "cuda" | "nvidia_nim" + + Returns: + Transcribed text + + Raises: + FileNotFoundError: If file does not exist + ValueError: If file too large + ImportError: If voice_local extra not installed (for local Whisper) + """ + + if not file_path.exists(): + raise FileNotFoundError(f"Audio file not found: {file_path}") + + size = file_path.stat().st_size + if size > MAX_AUDIO_SIZE_BYTES: + raise ValueError( + f"Audio file too large ({size} bytes). Max {MAX_AUDIO_SIZE_BYTES} bytes." + ) + + if whisper_device == "nvidia_nim": + return transcribe_nvidia_nim_audio( + file_path, whisper_model, api_key=nvidia_nim_api_key + ) + return _transcribe_local( + file_path, whisper_model, whisper_device, hf_token=hf_token + ) + + +# Whisper expects 16 kHz sample rate +_WHISPER_SAMPLE_RATE = 16000 + + +def _load_audio(file_path: Path) -> dict[str, Any]: + """Load audio file to waveform dict. No ffmpeg required.""" + import librosa + + waveform, sr = librosa.load(str(file_path), sr=_WHISPER_SAMPLE_RATE, mono=True) + return {"array": waveform, "sampling_rate": sr} + + +def _transcribe_local( + file_path: Path, + whisper_model: str, + whisper_device: str, + *, + hf_token: str = "", +) -> str: + """Transcribe using transformers Whisper pipeline.""" + model_id = _resolve_model_id(whisper_model) + pipe = _get_pipeline(model_id, whisper_device, hf_token=hf_token) + audio = _load_audio(file_path) + result = pipe(audio, generate_kwargs={"language": "en", "task": "transcribe"}) + text = result.get("text", "") or "" + if isinstance(text, list): + text = " ".join(text) if text else "" + result_text = text.strip() + logger.debug(f"Local transcription: {len(result_text)} chars") + return result_text or "(no speech detected)" diff --git a/messaging/trees/__init__.py b/messaging/trees/__init__.py new file mode 100644 index 
0000000000000000000000000000000000000000..b3b556eb232dd54403db36572f474d81e559c698 --- /dev/null +++ b/messaging/trees/__init__.py @@ -0,0 +1,11 @@ +"""Message tree data structures and queue management.""" + +from .data import MessageNode, MessageState, MessageTree +from .queue_manager import TreeQueueManager + +__all__ = [ + "MessageNode", + "MessageState", + "MessageTree", + "TreeQueueManager", +] diff --git a/messaging/trees/__pycache__/__init__.cpython-314.pyc b/messaging/trees/__pycache__/__init__.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1c8f5ec35faeefbab50ed6bd4c256459a55e8361 Binary files /dev/null and b/messaging/trees/__pycache__/__init__.cpython-314.pyc differ diff --git a/messaging/trees/__pycache__/data.cpython-314.pyc b/messaging/trees/__pycache__/data.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..90623af8de6621e57f62a41fe6f2f54f1f211469 Binary files /dev/null and b/messaging/trees/__pycache__/data.cpython-314.pyc differ diff --git a/messaging/trees/__pycache__/queue_manager.cpython-314.pyc b/messaging/trees/__pycache__/queue_manager.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fcc37444975473c5740da551448de48e4048d4bb Binary files /dev/null and b/messaging/trees/__pycache__/queue_manager.cpython-314.pyc differ diff --git a/messaging/trees/data.py b/messaging/trees/data.py new file mode 100644 index 0000000000000000000000000000000000000000..d5db01dbb37a789efc968a17c787301f3f5387c3 --- /dev/null +++ b/messaging/trees/data.py @@ -0,0 +1,482 @@ +"""Tree data structures for message queue. + +Contains MessageState, MessageNode, and MessageTree classes. 
+""" + +import asyncio +from collections import deque +from contextlib import asynccontextmanager +from dataclasses import dataclass, field +from datetime import UTC, datetime +from enum import Enum +from typing import Any + +from loguru import logger + +from ..models import IncomingMessage + + +class _SnapshotQueue: + """Queue with snapshot/remove helpers, backed by a deque and a set index.""" + + def __init__(self) -> None: + self._deque: deque[str] = deque() + self._set: set[str] = set() + + async def put(self, item: str) -> None: + self._deque.append(item) + self._set.add(item) + + def put_nowait(self, item: str) -> None: + self._deque.append(item) + self._set.add(item) + + def get_nowait(self) -> str: + if not self._deque: + raise asyncio.QueueEmpty() + item = self._deque.popleft() + self._set.discard(item) + return item + + def qsize(self) -> int: + return len(self._deque) + + def get_snapshot(self) -> list[str]: + """Return current queue contents in FIFO order (read-only copy).""" + return list(self._deque) + + def remove_if_present(self, item: str) -> bool: + """Remove item from queue if present (O(1) membership check). Returns True if removed.""" + if item not in self._set: + return False + self._set.discard(item) + self._deque = deque(x for x in self._deque if x != item) + return True + + +class MessageState(Enum): + """State of a message node in the tree.""" + + PENDING = "pending" # Queued, waiting to be processed + IN_PROGRESS = "in_progress" # Currently being processed by Claude + COMPLETED = "completed" # Processing finished successfully + ERROR = "error" # Processing failed + + +@dataclass +class MessageNode: + """ + A node in the message tree. 
+ + Each node represents a single message and tracks: + - Its relationship to parent/children + - Its processing state + - Claude session information + """ + + node_id: str # Unique ID (typically message_id) + incoming: IncomingMessage # The original message + status_message_id: str # Bot's status message ID + state: MessageState = MessageState.PENDING + parent_id: str | None = None # Parent node ID (None for root) + session_id: str | None = None # Claude session ID (forked from parent) + children_ids: list[str] = field(default_factory=list) + created_at: datetime = field(default_factory=lambda: datetime.now(UTC)) + completed_at: datetime | None = None + error_message: str | None = None + context: Any = None # Additional context if needed + + def set_context(self, context: Any) -> None: + self.context = context + + def to_dict(self) -> dict: + """Convert to dictionary for JSON serialization.""" + return { + "node_id": self.node_id, + "incoming": { + "text": self.incoming.text, + "chat_id": self.incoming.chat_id, + "user_id": self.incoming.user_id, + "message_id": self.incoming.message_id, + "platform": self.incoming.platform, + "reply_to_message_id": self.incoming.reply_to_message_id, + "message_thread_id": self.incoming.message_thread_id, + "username": self.incoming.username, + }, + "status_message_id": self.status_message_id, + "state": self.state.value, + "parent_id": self.parent_id, + "session_id": self.session_id, + "children_ids": self.children_ids, + "created_at": self.created_at.isoformat(), + "completed_at": self.completed_at.isoformat() + if self.completed_at + else None, + "error_message": self.error_message, + } + + @classmethod + def from_dict(cls, data: dict) -> MessageNode: + """Create from dictionary (JSON deserialization).""" + incoming_data = data["incoming"] + incoming = IncomingMessage( + text=incoming_data["text"], + chat_id=incoming_data["chat_id"], + user_id=incoming_data["user_id"], + message_id=incoming_data["message_id"], + 
platform=incoming_data["platform"], + reply_to_message_id=incoming_data.get("reply_to_message_id"), + message_thread_id=incoming_data.get("message_thread_id"), + username=incoming_data.get("username"), + ) + return cls( + node_id=data["node_id"], + incoming=incoming, + status_message_id=data["status_message_id"], + state=MessageState(data["state"]), + parent_id=data.get("parent_id"), + session_id=data.get("session_id"), + children_ids=data.get("children_ids", []), + created_at=datetime.fromisoformat(data["created_at"]), + completed_at=datetime.fromisoformat(data["completed_at"]) + if data.get("completed_at") + else None, + error_message=data.get("error_message"), + ) + + +class MessageTree: + """ + A tree of message nodes with queue functionality. + + Provides: + - O(1) node lookup via hashmap + - Per-tree message queue + - Thread-safe operations via asyncio.Lock + """ + + def __init__(self, root_node: MessageNode): + """ + Initialize tree with a root node. + + Args: + root_node: The root message node + """ + self.root_id = root_node.node_id + self._nodes: dict[str, MessageNode] = {root_node.node_id: root_node} + self._status_to_node: dict[str, str] = { + root_node.status_message_id: root_node.node_id + } + self._queue: _SnapshotQueue = _SnapshotQueue() + self._lock = asyncio.Lock() + self._is_processing = False + self._current_node_id: str | None = None + self._current_task: asyncio.Task | None = None + + logger.debug(f"Created MessageTree with root {self.root_id}") + + def set_current_task(self, task: asyncio.Task | None) -> None: + """Set the current processing task. Caller must hold lock.""" + self._current_task = task + + @property + def is_processing(self) -> bool: + """Check if tree is currently processing a message.""" + return self._is_processing + + async def add_node( + self, + node_id: str, + incoming: IncomingMessage, + status_message_id: str, + parent_id: str, + ) -> MessageNode: + """ + Add a child node to the tree. 
+ + Args: + node_id: Unique ID for the new node + incoming: The incoming message + status_message_id: Bot's status message ID + parent_id: Parent node ID + + Returns: + The created MessageNode + """ + async with self._lock: + if parent_id not in self._nodes: + raise ValueError(f"Parent node {parent_id} not found in tree") + + node = MessageNode( + node_id=node_id, + incoming=incoming, + status_message_id=status_message_id, + parent_id=parent_id, + state=MessageState.PENDING, + ) + + self._nodes[node_id] = node + self._status_to_node[status_message_id] = node_id + self._nodes[parent_id].children_ids.append(node_id) + + logger.debug(f"Added node {node_id} as child of {parent_id}") + return node + + def get_node(self, node_id: str) -> MessageNode | None: + """Get a node by ID (O(1) lookup).""" + return self._nodes.get(node_id) + + def get_root(self) -> MessageNode: + """Get the root node.""" + return self._nodes[self.root_id] + + def get_children(self, node_id: str) -> list[MessageNode]: + """Get all child nodes of a given node.""" + node = self._nodes.get(node_id) + if not node: + return [] + return [self._nodes[cid] for cid in node.children_ids if cid in self._nodes] + + def get_parent(self, node_id: str) -> MessageNode | None: + """Get the parent node.""" + node = self._nodes.get(node_id) + if not node or not node.parent_id: + return None + return self._nodes.get(node.parent_id) + + def get_parent_session_id(self, node_id: str) -> str | None: + """ + Get the parent's session ID for forking. + + Returns None for root nodes. 
+ """ + parent = self.get_parent(node_id) + return parent.session_id if parent else None + + async def update_state( + self, + node_id: str, + state: MessageState, + session_id: str | None = None, + error_message: str | None = None, + ) -> None: + """Update a node's state.""" + async with self._lock: + node = self._nodes.get(node_id) + if not node: + logger.warning(f"Node {node_id} not found for state update") + return + + node.state = state + if session_id: + node.session_id = session_id + if error_message: + node.error_message = error_message + if state in (MessageState.COMPLETED, MessageState.ERROR): + node.completed_at = datetime.now(UTC) + + logger.debug(f"Node {node_id} state -> {state.value}") + + async def enqueue(self, node_id: str) -> int: + """ + Add a node to the processing queue. + + Returns: + Queue position (1-indexed) + """ + async with self._lock: + await self._queue.put(node_id) + position = self._queue.qsize() + logger.debug(f"Enqueued node {node_id}, position {position}") + return position + + async def dequeue(self) -> str | None: + """ + Get the next node ID from the queue. + + Returns None if queue is empty. + """ + try: + return self._queue.get_nowait() + except asyncio.QueueEmpty: + return None + + async def get_queue_snapshot(self) -> list[str]: + """ + Get a snapshot of the current queue order. + + Returns: + List of node IDs in FIFO order. + """ + async with self._lock: + return self._queue.get_snapshot() + + def get_queue_size(self) -> int: + """Get number of messages waiting in queue.""" + return self._queue.qsize() + + def remove_from_queue(self, node_id: str) -> bool: + """ + Remove node_id from the internal queue if present. + + Caller must hold the tree lock (e.g. via with_lock). + Returns True if node was removed, False if not in queue. + """ + return self._queue.remove_if_present(node_id) + + @asynccontextmanager + async def with_lock(self): + """Async context manager for tree lock. 
Use when multiple operations need atomicity.""" + async with self._lock: + yield + + def set_processing_state(self, node_id: str | None, is_processing: bool) -> None: + """Set processing state. Caller must hold lock for consistency with queue operations.""" + self._is_processing = is_processing + self._current_node_id = node_id if is_processing else None + + def clear_current_node(self) -> None: + """Clear the currently processing node ID. Caller must hold lock.""" + self._current_node_id = None + + def is_current_node(self, node_id: str) -> bool: + """Check if node_id is the currently processing node.""" + return self._current_node_id == node_id + + def put_queue_unlocked(self, node_id: str) -> None: + """Add node to queue. Caller must hold lock (e.g. via with_lock).""" + self._queue.put_nowait(node_id) + + def cancel_current_task(self) -> bool: + """Cancel the currently running task. Returns True if a task was cancelled.""" + if self._current_task and not self._current_task.done(): + self._current_task.cancel() + return True + return False + + def set_node_error_sync(self, node: MessageNode, error_message: str) -> None: + """Synchronously mark a node as ERROR. Caller must ensure no concurrent access.""" + node.state = MessageState.ERROR + node.error_message = error_message + node.completed_at = datetime.now(UTC) + + def drain_queue_and_mark_cancelled( + self, error_message: str = "Cancelled by user" + ) -> list[MessageNode]: + """ + Drain the queue, mark each node as ERROR, and return affected nodes. + Does not acquire lock; caller must ensure no concurrent queue access. 
+ """ + nodes: list[MessageNode] = [] + while True: + try: + node_id = self._queue.get_nowait() + except asyncio.QueueEmpty: + break + node = self._nodes.get(node_id) + if node: + self.set_node_error_sync(node, error_message) + nodes.append(node) + return nodes + + def reset_processing_state(self) -> None: + """Reset processing flags after cancel/cleanup.""" + self._is_processing = False + self._current_node_id = None + + @property + def current_node_id(self) -> str | None: + """Get the ID of the node currently being processed.""" + return self._current_node_id + + def to_dict(self) -> dict: + """Serialize tree to dictionary.""" + return { + "root_id": self.root_id, + "nodes": {nid: node.to_dict() for nid, node in self._nodes.items()}, + } + + def _add_node_from_dict(self, node: MessageNode) -> None: + """Register a deserialized node into the tree's internal indices.""" + self._nodes[node.node_id] = node + self._status_to_node[node.status_message_id] = node.node_id + + @classmethod + def from_dict(cls, data: dict) -> MessageTree: + """Deserialize tree from dictionary.""" + root_id = data["root_id"] + nodes_data = data["nodes"] + + # Create root node first + root_node = MessageNode.from_dict(nodes_data[root_id]) + tree = cls(root_node) + + # Add remaining nodes and build status->node index + for node_id, node_data in nodes_data.items(): + if node_id != root_id: + node = MessageNode.from_dict(node_data) + tree._add_node_from_dict(node) + + return tree + + def all_nodes(self) -> list[MessageNode]: + """Get all nodes in the tree.""" + return list(self._nodes.values()) + + def has_node(self, node_id: str) -> bool: + """Check if a node exists in this tree.""" + return node_id in self._nodes + + def find_node_by_status_message(self, status_msg_id: str) -> MessageNode | None: + """Find the node that has this status message ID (O(1) lookup).""" + node_id = self._status_to_node.get(status_msg_id) + return self._nodes.get(node_id) if node_id else None + + def 
get_descendants(self, node_id: str) -> list[str]: + """ + Get node_id and all descendant IDs (subtree). + + Returns: + List of node IDs including the given node. + """ + if node_id not in self._nodes: + return [] + result: list[str] = [] + stack = [node_id] + while stack: + nid = stack.pop() + result.append(nid) + node = self._nodes.get(nid) + if node: + stack.extend(node.children_ids) + return result + + def remove_branch(self, branch_root_id: str) -> list[MessageNode]: + """ + Remove a subtree (branch_root and all descendants) from the tree. + + Updates parent's children_ids. Caller must hold lock for consistency. + Does not acquire lock internally. + + Returns: + List of removed nodes. + """ + if branch_root_id not in self._nodes: + return [] + + parent = self.get_parent(branch_root_id) + removed = [] + for nid in self.get_descendants(branch_root_id): + node = self._nodes.get(nid) + if node: + removed.append(node) + del self._nodes[nid] + del self._status_to_node[node.status_message_id] + + if parent and branch_root_id in parent.children_ids: + parent.children_ids = [ + c for c in parent.children_ids if c != branch_root_id + ] + + logger.debug(f"Removed branch {branch_root_id} ({len(removed)} nodes)") + return removed diff --git a/messaging/trees/queue_manager.py b/messaging/trees/queue_manager.py new file mode 100644 index 0000000000000000000000000000000000000000..2c0cecda8689d2e37599ad4fe315fcc123b99f90 --- /dev/null +++ b/messaging/trees/queue_manager.py @@ -0,0 +1,749 @@ +"""Tree-based message queue: index, async node processor, and public manager API.""" + +import asyncio +from collections.abc import Awaitable, Callable + +from loguru import logger + +from config.settings import get_settings +from core.anthropic import get_user_facing_error_message + +from ..models import IncomingMessage +from ..safe_diagnostics import format_exception_for_log +from .data import MessageNode, MessageState, MessageTree + + +class TreeRepository: + """ + In-memory index of 
class TreeRepository:
    """In-memory index of message trees and node-to-root mappings.

    Used only by :class:`TreeQueueManager`; kept as a named type for tests.
    """

    def __init__(self) -> None:
        self._trees: dict[str, MessageTree] = {}  # root_id -> tree
        self._node_to_tree: dict[str, str] = {}  # node_id -> root_id

    def get_tree(self, root_id: str) -> MessageTree | None:
        """Get a tree by its root ID."""
        return self._trees.get(root_id)

    def get_tree_for_node(self, node_id: str) -> MessageTree | None:
        """Get the tree containing a given node, or None if unregistered."""
        root_id = self._node_to_tree.get(node_id)
        if not root_id:
            return None
        return self._trees.get(root_id)

    def get_node(self, node_id: str) -> MessageNode | None:
        """Get a node from any tree."""
        tree = self.get_tree_for_node(node_id)
        return tree.get_node(node_id) if tree else None

    def add_tree(self, root_id: str, tree: MessageTree) -> None:
        """Add a new tree to the repository (the root node maps to itself)."""
        self._trees[root_id] = tree
        self._node_to_tree[root_id] = root_id
        logger.debug("TREE_REPO: add_tree root_id={}", root_id)

    def register_node(self, node_id: str, root_id: str) -> None:
        """Register a node ID to a tree."""
        self._node_to_tree[node_id] = root_id
        logger.debug("TREE_REPO: register_node node_id={} root_id={}", node_id, root_id)

    def has_node(self, node_id: str) -> bool:
        """Check if a node is registered in any tree."""
        return node_id in self._node_to_tree

    def tree_count(self) -> int:
        """Get the number of trees in the repository."""
        return len(self._trees)

    def is_tree_busy(self, root_id: str) -> bool:
        """Check if a tree is currently processing."""
        tree = self._trees.get(root_id)
        return tree.is_processing if tree else False

    def is_node_tree_busy(self, node_id: str) -> bool:
        """Check if the tree containing a node is busy."""
        tree = self.get_tree_for_node(node_id)
        return tree.is_processing if tree else False

    def get_queue_size(self, node_id: str) -> int:
        """Get queue size for the tree containing a node (0 if unknown)."""
        tree = self.get_tree_for_node(node_id)
        return tree.get_queue_size() if tree else 0

    def resolve_parent_node_id(self, msg_id: str) -> str | None:
        """
        Resolve a message ID to the actual parent node ID.

        Handles the case where msg_id is a status message ID
        (which maps to the tree but isn't an actual node).

        Returns:
            The node_id to use as parent, or None if not found
        """
        tree = self.get_tree_for_node(msg_id)
        if not tree:
            return None

        if tree.has_node(msg_id):
            return msg_id

        # msg_id maps to the tree but is not a node: look it up as a
        # bot status message and use the node it belongs to.
        node = tree.find_node_by_status_message(msg_id)
        if node:
            return node.node_id

        return None

    def get_pending_children(self, node_id: str) -> list[MessageNode]:
        """
        Get all pending child nodes (recursively) of a given node.

        Used for error propagation - when a node fails, its pending
        children should also be marked as failed.
        """
        tree = self.get_tree_for_node(node_id)
        if not tree:
            return []

        # Iterative DFS; only PENDING descendants are collected, and the
        # walk continues through a child regardless of its own state.
        pending: list[MessageNode] = []
        stack = [node_id]

        while stack:
            current_id = stack.pop()
            node = tree.get_node(current_id)
            if not node:
                continue
            for child_id in node.children_ids:
                child = tree.get_node(child_id)
                if child and child.state == MessageState.PENDING:
                    pending.append(child)
                stack.append(child_id)

        return pending

    def all_trees(self) -> list[MessageTree]:
        """Get all trees in the repository."""
        return list(self._trees.values())

    def tree_ids(self) -> list[str]:
        """Get all tree root IDs."""
        return list(self._trees.keys())

    def unregister_nodes(self, node_ids: list[str]) -> None:
        """Remove node IDs from the node-to-tree mapping (missing ids are ignored)."""
        for nid in node_ids:
            self._node_to_tree.pop(nid, None)

    def remove_tree(self, root_id: str) -> MessageTree | None:
        """
        Remove a tree and all its node mappings from the repository.

        Returns:
            The removed tree, or None if not found.
        """
        tree = self._trees.pop(root_id, None)
        if not tree:
            return None
        for node in tree.all_nodes():
            self._node_to_tree.pop(node.node_id, None)
        logger.debug("TREE_REPO: remove_tree root_id={}", root_id)
        return tree

    def get_message_ids_for_chat(self, platform: str, chat_id: str) -> set[str]:
        """Get all message IDs (incoming + status) for a given platform/chat."""
        msg_ids: set[str] = set()
        for tree in self._trees.values():
            for node in tree.all_nodes():
                # str() on both sides so int/str ids compare consistently
                # (presumably ids can arrive as either type - TODO confirm).
                if str(node.incoming.platform) == str(platform) and str(
                    node.incoming.chat_id
                ) == str(chat_id):
                    if node.incoming.message_id is not None:
                        msg_ids.add(str(node.incoming.message_id))
                    if node.status_message_id:
                        msg_ids.add(str(node.status_message_id))
        return msg_ids

    def to_dict(self) -> dict:
        """Serialize all trees."""
        return {
            "trees": {rid: tree.to_dict() for rid, tree in self._trees.items()},
            "node_to_tree": self._node_to_tree.copy(),
        }

    @classmethod
    def from_dict(cls, data: dict) -> TreeRepository:
        """Deserialize from dictionary."""
        repo = cls()
        for root_id, tree_data in data.get("trees", {}).items():
            repo._trees[root_id] = MessageTree.from_dict(tree_data)
        # Defensive copy: previously the caller-owned dict was stored by
        # reference, so later repository mutations aliased (and were visible
        # through) the deserialization input.
        repo._node_to_tree = dict(data.get("node_to_tree", {}))
        return repo
class TreeQueueProcessor:
    """
    Per-tree async queue processing (one manager owns one processor instance).
    """

    def __init__(
        self,
        queue_update_callback: Callable[[MessageTree], Awaitable[None]] | None = None,
        node_started_callback: Callable[[MessageTree, str], Awaitable[None]]
        | None = None,
    ) -> None:
        self._queue_update_callback = queue_update_callback
        self._node_started_callback = node_started_callback

    def set_queue_update_callback(
        self,
        queue_update_callback: Callable[[MessageTree], Awaitable[None]] | None,
    ) -> None:
        """Update the callback used to refresh queue positions."""
        self._queue_update_callback = queue_update_callback

    def set_node_started_callback(
        self,
        node_started_callback: Callable[[MessageTree, str], Awaitable[None]] | None,
    ) -> None:
        """Update the callback used when a queued node starts processing."""
        self._node_started_callback = node_started_callback

    async def _notify_queue_updated(self, tree: MessageTree) -> None:
        """Invoke queue update callback if set; callback errors never break processing."""
        if not self._queue_update_callback:
            return
        try:
            await self._queue_update_callback(tree)
        except Exception as e:
            d = get_settings().log_messaging_error_details
            logger.warning(
                "Queue update callback failed: {}",
                format_exception_for_log(e, log_full_message=d),
            )

    async def _notify_node_started(self, tree: MessageTree, node_id: str) -> None:
        """Invoke node started callback if set; callback errors never break processing."""
        if not self._node_started_callback:
            return
        try:
            await self._node_started_callback(tree, node_id)
        except Exception as e:
            d = get_settings().log_messaging_error_details
            logger.warning(
                "Node started callback failed: {}",
                format_exception_for_log(e, log_full_message=d),
            )

    async def process_node(
        self,
        tree: MessageTree,
        node: MessageNode,
        processor: Callable[[str, MessageNode], Awaitable[None]],
    ) -> None:
        """Process a single node and then check the queue.

        Args:
            tree: The tree owning ``node``.
            node: The node to process.
            processor: Async function invoked with (node_id, node).
        """
        # Nodes already failed (e.g. cancelled while queued) are skipped,
        # but the queue must still advance.
        if node.state == MessageState.ERROR:
            logger.info(
                f"Skipping node {node.node_id} as it is already in state {node.state}"
            )
            await self._process_next(tree, processor)
            return

        try:
            await processor(node.node_id, node)
        except asyncio.CancelledError:
            logger.info(f"Task for node {node.node_id} was cancelled")
            raise
        except Exception as e:
            d = get_settings().log_messaging_error_details
            logger.error(
                "Error processing node {}: {}",
                node.node_id,
                format_exception_for_log(e, log_full_message=d),
            )
            await tree.update_state(
                node.node_id,
                MessageState.ERROR,
                error_message=get_user_facing_error_message(e),
            )
        finally:
            async with tree.with_lock():
                tree.clear_current_node()
            await self._process_next(tree, processor)

    async def _process_next(
        self,
        tree: MessageTree,
        processor: Callable[[str, MessageNode], Awaitable[None]],
    ) -> None:
        """Process the next message in queue, if any."""
        # Dequeue AND flip the processing flag under the same lock hold.
        # enqueue_and_start() checks ``tree.is_processing`` under this lock,
        # so flipping the flag only after releasing the lock (as before)
        # opened a window in which a new message could be queued onto a tree
        # that had just decided it was idle - and never get picked up.
        async with tree.with_lock():
            next_node_id = await tree.dequeue()
            if not next_node_id:
                tree.set_processing_state(None, False)
            else:
                tree.set_processing_state(next_node_id, True)

        if not next_node_id:
            logger.debug(f"Tree {tree.root_id} queue empty, marking as free")
            return

        logger.info(f"Processing next queued node {next_node_id}")

        node = tree.get_node(next_node_id)
        if node:
            tree.set_current_task(
                asyncio.create_task(self.process_node(tree, node, processor))
            )

        await self._notify_node_started(tree, next_node_id)
        await self._notify_queue_updated(tree)

    async def enqueue_and_start(
        self,
        tree: MessageTree,
        node_id: str,
        processor: Callable[[str, MessageNode], Awaitable[None]],
    ) -> bool:
        """
        Enqueue a node or start processing immediately.

        Returns:
            True if queued, False if processing immediately
        """
        async with tree.with_lock():
            if tree.is_processing:
                tree.put_queue_unlocked(node_id)
                queue_size = tree.get_queue_size()
                logger.info(f"Queued node {node_id}, position {queue_size}")
                return True
            else:
                tree.set_processing_state(node_id, True)

        # Task creation happens outside the lock; the task itself re-acquires
        # the tree lock when it finishes (see process_node's finally block).
        node = tree.get_node(node_id)
        if node:
            tree.set_current_task(
                asyncio.create_task(self.process_node(tree, node, processor))
            )
        return False

    def cancel_current(self, tree: MessageTree) -> bool:
        """Cancel the currently running task in a tree."""
        return tree.cancel_current_task()
class TreeQueueManager:
    """
    Manages multiple message trees: index + async processing.

    Each new conversation creates a new tree.
    Replies to existing messages add nodes to existing trees.
    """

    def __init__(
        self,
        queue_update_callback: Callable[[MessageTree], Awaitable[None]] | None = None,
        node_started_callback: Callable[[MessageTree, str], Awaitable[None]]
        | None = None,
        _repository: TreeRepository | None = None,
    ) -> None:
        # _repository is injectable for tests and for from_dict() restore.
        self._repository = _repository or TreeRepository()
        self._processor = TreeQueueProcessor(
            queue_update_callback=queue_update_callback,
            node_started_callback=node_started_callback,
        )
        # Guards tree creation/registration in the repository.
        self._lock = asyncio.Lock()

        logger.info("TreeQueueManager initialized")

    async def create_tree(
        self,
        node_id: str,
        incoming: IncomingMessage,
        status_message_id: str,
    ) -> MessageTree:
        """
        Create a new tree with a root node.

        Args:
            node_id: ID for the root node
            incoming: The incoming message
            status_message_id: Bot's status message ID

        Returns:
            The created MessageTree
        """
        async with self._lock:
            root_node = MessageNode(
                node_id=node_id,
                incoming=incoming,
                status_message_id=status_message_id,
                state=MessageState.PENDING,
            )

            tree = MessageTree(root_node)
            self._repository.add_tree(node_id, tree)

            logger.info(f"Created new tree with root {node_id}")
            return tree

    async def add_to_tree(
        self,
        parent_node_id: str,
        node_id: str,
        incoming: IncomingMessage,
        status_message_id: str,
    ) -> tuple[MessageTree, MessageNode]:
        """
        Add a reply as a child node to an existing tree.

        Args:
            parent_node_id: ID of the parent message
            node_id: ID for the new node
            incoming: The incoming reply message
            status_message_id: Bot's status message ID

        Returns:
            Tuple of (tree, new_node)

        Raises:
            ValueError: If the parent node is not registered in any tree.
        """
        # Hold the manager lock across lookup -> add_node -> register_node.
        # The previous two separate critical sections released the lock
        # between adding the node and registering its mapping, so a
        # concurrent tree removal in that gap could leave a dangling
        # node -> root mapping. Lock order here is manager lock then tree
        # lock (inside add_node), the same order used elsewhere.
        async with self._lock:
            tree = self._repository.get_tree_for_node(parent_node_id)
            if not tree:
                raise ValueError(f"Parent node {parent_node_id} not found in any tree")

            node = await tree.add_node(
                node_id=node_id,
                incoming=incoming,
                status_message_id=status_message_id,
                parent_id=parent_node_id,
            )
            self._repository.register_node(node_id, tree.root_id)

        logger.info(f"Added node {node_id} to tree {tree.root_id}")
        return tree, node

    def get_tree(self, root_id: str) -> MessageTree | None:
        """Get a tree by its root ID."""
        return self._repository.get_tree(root_id)

    def get_tree_for_node(self, node_id: str) -> MessageTree | None:
        """Get the tree containing a given node."""
        return self._repository.get_tree_for_node(node_id)

    def get_node(self, node_id: str) -> MessageNode | None:
        """Get a node from any tree."""
        return self._repository.get_node(node_id)

    def resolve_parent_node_id(self, msg_id: str) -> str | None:
        """Resolve a message ID to the actual parent node ID."""
        return self._repository.resolve_parent_node_id(msg_id)

    def is_tree_busy(self, root_id: str) -> bool:
        """Check if a tree is currently processing."""
        return self._repository.is_tree_busy(root_id)

    def is_node_tree_busy(self, node_id: str) -> bool:
        """Check if the tree containing a node is busy."""
        return self._repository.is_node_tree_busy(node_id)

    async def enqueue(
        self,
        node_id: str,
        processor: Callable[[str, MessageNode], Awaitable[None]],
    ) -> bool:
        """
        Enqueue a node for processing.

        If the tree is not busy, processing starts immediately.
        If busy, the message is queued.

        Args:
            node_id: Node to process
            processor: Async function to process the node

        Returns:
            True if queued, False if processing immediately
        """
        tree = self._repository.get_tree_for_node(node_id)
        if not tree:
            logger.error(f"No tree found for node {node_id}")
            return False

        return await self._processor.enqueue_and_start(tree, node_id, processor)

    def get_queue_size(self, node_id: str) -> int:
        """Get queue size for the tree containing a node."""
        return self._repository.get_queue_size(node_id)

    def get_pending_children(self, node_id: str) -> list[MessageNode]:
        """Get all pending child nodes (recursively) of a given node."""
        return self._repository.get_pending_children(node_id)

    async def mark_node_error(
        self,
        node_id: str,
        error_message: str,
        propagate_to_children: bool = True,
    ) -> list[MessageNode]:
        """
        Mark a node as ERROR and optionally propagate to pending children.

        Args:
            node_id: The node to mark as error
            error_message: Error description
            propagate_to_children: If True, also mark pending children as error

        Returns:
            List of all nodes marked as error (including children)
        """
        tree = self._repository.get_tree_for_node(node_id)
        if not tree:
            return []

        affected: list[MessageNode] = []
        node = tree.get_node(node_id)
        if node:
            await tree.update_state(
                node_id, MessageState.ERROR, error_message=error_message
            )
            affected.append(node)

        if propagate_to_children:
            pending_children = self._repository.get_pending_children(node_id)
            for child in pending_children:
                await tree.update_state(
                    child.node_id,
                    MessageState.ERROR,
                    error_message=f"Parent failed: {error_message}",
                )
                affected.append(child)

        return affected

    async def cancel_tree(self, root_id: str) -> list[MessageNode]:
        """
        Cancel all queued and in-progress messages in a tree.

        Updates node states to ERROR and returns list of affected nodes
        that were actually active or in the current processing queue.
        """
        tree = self._repository.get_tree(root_id)
        if not tree:
            return []

        cancelled_nodes: list[MessageNode] = []
        cleanup_count = 0
        async with tree.with_lock():
            # The current task (if any) is cancelled first; its node is only
            # reported if it was still live (not COMPLETED/ERROR).
            if tree.cancel_current_task():
                current_id = tree.current_node_id
                if current_id:
                    node = tree.get_node(current_id)
                    if node and node.state not in (
                        MessageState.COMPLETED,
                        MessageState.ERROR,
                    ):
                        tree.set_node_error_sync(node, "Cancelled by user")
                        cancelled_nodes.append(node)

            queue_nodes = tree.drain_queue_and_mark_cancelled()
            cancelled_nodes.extend(queue_nodes)
            cancelled_ids = {n.node_id for n in cancelled_nodes}

            # Any remaining live nodes are stale (neither running nor queued).
            for node in tree.all_nodes():
                if (
                    node.state in (MessageState.PENDING, MessageState.IN_PROGRESS)
                    and node.node_id not in cancelled_ids
                ):
                    tree.set_node_error_sync(node, "Stale task cleaned up")
                    cleanup_count += 1

            tree.reset_processing_state()

        if cancelled_nodes:
            logger.info(
                f"Cancelled {len(cancelled_nodes)} active nodes in tree {root_id}"
            )
        if cleanup_count:
            logger.info(f"Cleaned up {cleanup_count} stale nodes in tree {root_id}")

        return cancelled_nodes

    async def cancel_node(self, node_id: str) -> list[MessageNode]:
        """
        Cancel a single node (queued or in-progress) without affecting other nodes.

        Returns:
            List containing the cancelled node if it was cancellable, else empty list.
        """
        tree = self._repository.get_tree_for_node(node_id)
        if not tree:
            return []

        async with tree.with_lock():
            node = tree.get_node(node_id)
            if not node:
                return []

            if node.state in (MessageState.COMPLETED, MessageState.ERROR):
                return []

            if tree.is_current_node(node_id):
                self._processor.cancel_current(tree)

            # Best-effort dequeue; state=ERROR below guarantees the node is
            # skipped even if removal fails.
            try:
                tree.remove_from_queue(node_id)
            except Exception:
                logger.debug(
                    "Failed to remove node from queue; will rely on state=ERROR"
                )

            tree.set_node_error_sync(node, "Cancelled by user")

        return [node]

    async def cancel_all(self) -> list[MessageNode]:
        """Cancel all messages in all trees."""
        async with self._lock:
            root_ids = list(self._repository.tree_ids())
            all_cancelled: list[MessageNode] = []
            for root_id in root_ids:
                all_cancelled.extend(await self.cancel_tree(root_id))
            return all_cancelled

    def cleanup_stale_nodes(self) -> int:
        """
        Mark any PENDING or IN_PROGRESS nodes in all trees as ERROR.
        Used on startup to reconcile restored state.
        """
        count = 0
        for tree in self._repository.all_trees():
            for node in tree.all_nodes():
                if node.state in (MessageState.PENDING, MessageState.IN_PROGRESS):
                    tree.set_node_error_sync(node, "Lost during server restart")
                    count += 1
        if count:
            logger.info(f"Cleaned up {count} stale nodes during startup")
        return count

    def get_tree_count(self) -> int:
        """Get the number of active message trees."""
        return self._repository.tree_count()

    def set_queue_update_callback(
        self,
        queue_update_callback: Callable[[MessageTree], Awaitable[None]] | None,
    ) -> None:
        """Set callback for queue position updates."""
        self._processor.set_queue_update_callback(queue_update_callback)

    def set_node_started_callback(
        self,
        node_started_callback: Callable[[MessageTree, str], Awaitable[None]] | None,
    ) -> None:
        """Set callback for when a queued node starts processing."""
        self._processor.set_node_started_callback(node_started_callback)

    def register_node(self, node_id: str, root_id: str) -> None:
        """Register a node ID to a tree (for external mapping)."""
        self._repository.register_node(node_id, root_id)

    async def cancel_branch(self, branch_root_id: str) -> list[MessageNode]:
        """
        Cancel all PENDING/IN_PROGRESS nodes in the subtree (branch_root + descendants).
        """
        tree = self._repository.get_tree_for_node(branch_root_id)
        if not tree:
            return []

        branch_ids = set(tree.get_descendants(branch_root_id))
        cancelled: list[MessageNode] = []

        async with tree.with_lock():
            for nid in branch_ids:
                node = tree.get_node(nid)
                if not node or node.state in (
                    MessageState.COMPLETED,
                    MessageState.ERROR,
                ):
                    continue

                if tree.is_current_node(nid):
                    self._processor.cancel_current(tree)
                    tree.set_node_error_sync(node, "Cancelled by user")
                    cancelled.append(node)
                else:
                    tree.remove_from_queue(nid)
                    tree.set_node_error_sync(node, "Cancelled by user")
                    cancelled.append(node)

        if cancelled:
            logger.info(f"Cancelled {len(cancelled)} nodes in branch {branch_root_id}")
        return cancelled

    async def remove_branch(
        self, branch_root_id: str
    ) -> tuple[list[MessageNode], str, bool]:
        """
        Remove a branch (subtree) from the tree.

        If branch_root is the tree root, removes the entire tree.

        Returns:
            (removed_nodes, root_id, removed_entire_tree)
        """
        tree = self._repository.get_tree_for_node(branch_root_id)
        if not tree:
            return ([], "", False)

        root_id = tree.root_id

        if branch_root_id == root_id:
            # Whole-tree removal: cancel everything first, then drop the tree.
            cancelled = await self.cancel_tree(root_id)
            removed_tree = self._repository.remove_tree(root_id)
            if removed_tree:
                return (removed_tree.all_nodes(), root_id, True)
            return (cancelled, root_id, True)

        async with tree.with_lock():
            removed = tree.remove_branch(branch_root_id)

        self._repository.unregister_nodes([n.node_id for n in removed])
        return (removed, root_id, False)

    def get_message_ids_for_chat(self, platform: str, chat_id: str) -> set[str]:
        """Get all message IDs for a given platform/chat."""
        return self._repository.get_message_ids_for_chat(platform, chat_id)

    def to_dict(self) -> dict:
        """Serialize all trees."""
        return self._repository.to_dict()

    @classmethod
    def from_dict(
        cls,
        data: dict,
        queue_update_callback: Callable[[MessageTree], Awaitable[None]] | None = None,
        node_started_callback: Callable[[MessageTree, str], Awaitable[None]]
        | None = None,
    ) -> TreeQueueManager:
        """Deserialize from dictionary."""
        return cls(
            queue_update_callback=queue_update_callback,
            node_started_callback=node_started_callback,
            _repository=TreeRepository.from_dict(data),
        )
class ThrottledTranscriptEditor:
    """Rate-limited status message edits from a growing transcript."""

    def __init__(
        self,
        *,
        platform: MessagingPlatform,
        parse_mode: str | None,
        get_limit_chars: Callable[[], int],
        transcript: TranscriptBuffer,
        render_ctx: RenderCtx,
        node_id: str,
        chat_id: str,
        status_msg_id: str,
        debug_platform_edits: bool,
        log_messaging_error_details: bool = False,
    ) -> None:
        # Collaborators used to render and deliver the message text.
        self._platform = platform
        self._transcript = transcript
        self._render_ctx = render_ctx
        # Rendering / logging options.
        self._parse_mode = parse_mode
        self._get_limit_chars = get_limit_chars
        self._debug_platform_edits = debug_platform_edits
        self._log_messaging_error_details = log_messaging_error_details
        # Coordinates of the status message being edited.
        self._node_id = node_id
        self._chat_id = chat_id
        self._status_msg_id = status_msg_id
        # Throttle and dedupe state.
        self._last_ui_update = 0.0
        self._last_displayed_text: str | None = None
        self._last_status: str | None = None

    @property
    def last_status(self) -> str | None:
        """Most recent non-None status passed to :meth:`update`."""
        return self._last_status

    async def update(self, status: str | None = None, *, force: bool = False) -> None:
        """Render transcript + optional status line and edit the platform message."""
        timestamp = time.time()
        throttled = (timestamp - self._last_ui_update) < 1.0
        if throttled and not force:
            return

        self._last_ui_update = timestamp
        if status is not None:
            self._last_status = status

        try:
            rendered = self._transcript.render(
                self._render_ctx,
                limit_chars=self._get_limit_chars(),
                status=status,
            )
        except Exception as exc:
            logger.warning(
                "Transcript render failed for node {}: {}",
                self._node_id,
                format_exception_for_log(
                    exc, log_full_message=self._log_messaging_error_details
                ),
            )
            return

        # Skip the platform round-trip when there is nothing new to show.
        if not rendered or rendered == self._last_displayed_text:
            return

        logger.debug(
            "PLATFORM_EDIT: node_id={} chat_id={} msg_id={} force={} status={!r} chars={}",
            self._node_id,
            self._chat_id,
            self._status_msg_id,
            bool(force),
            status,
            len(rendered),
        )
        if self._debug_platform_edits:
            logger.debug("PLATFORM_EDIT_TEXT:\n{}", rendered)

        self._last_displayed_text = rendered
        try:
            await self._platform.queue_edit_message(
                self._chat_id,
                self._status_msg_id,
                rendered,
                parse_mode=self._parse_mode,
            )
        except Exception as exc:
            logger.warning(
                "Failed to update platform for node {}: {}",
                self._node_id,
                format_exception_for_log(
                    exc, log_full_message=self._log_messaging_error_details
                ),
            )
chat_id: str, voice_msg_id: str, status_msg_id: str + ) -> None: + async with self._lock: + entry = (voice_msg_id, status_msg_id) + self._pending[(chat_id, voice_msg_id)] = entry + self._pending[(chat_id, status_msg_id)] = entry + + async def cancel(self, chat_id: str, reply_id: str) -> tuple[str, str] | None: + async with self._lock: + entry = self._pending.pop((chat_id, reply_id), None) + if entry is None: + return None + voice_msg_id, status_msg_id = entry + self._pending.pop((chat_id, voice_msg_id), None) + self._pending.pop((chat_id, status_msg_id), None) + return entry + + async def is_pending(self, chat_id: str, voice_msg_id: str) -> bool: + async with self._lock: + return (chat_id, voice_msg_id) in self._pending + + async def complete( + self, chat_id: str, voice_msg_id: str, status_msg_id: str + ) -> None: + async with self._lock: + self._pending.pop((chat_id, voice_msg_id), None) + self._pending.pop((chat_id, status_msg_id), None) + + +class VoiceTranscriptionService: + """Run configured transcription backends off the event loop.""" + + def __init__( + self, + *, + hf_token: str = "", + nvidia_nim_api_key: str = "", + ) -> None: + self._hf_token = hf_token + self._nvidia_nim_api_key = nvidia_nim_api_key + + async def transcribe( + self, + file_path: Path, + mime_type: str, + *, + whisper_model: str, + whisper_device: str, + ) -> str: + from .transcription import transcribe_audio + + return await asyncio.to_thread( + transcribe_audio, + file_path, + mime_type, + whisper_model=whisper_model, + whisper_device=whisper_device, + hf_token=self._hf_token, + nvidia_nim_api_key=self._nvidia_nim_api_key, + ) diff --git a/providers/__init__.py b/providers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9741e498ebccfb837ac2ebb93119c8f9194acf74 --- /dev/null +++ b/providers/__init__.py @@ -0,0 +1,31 @@ +"""Providers package - implement your own provider by extending BaseProvider. + +Concrete adapters (e.g. 
``NvidiaNimProvider``) live in subpackages; import them +from ``providers.nvidia_nim`` etc. to avoid loading every adapter when the +``providers`` package is imported. +""" + +from .base import BaseProvider, ProviderConfig +from .exceptions import ( + APIError, + AuthenticationError, + InvalidRequestError, + ModelListResponseError, + OverloadedError, + ProviderError, + RateLimitError, + UnknownProviderTypeError, +) + +__all__ = [ + "APIError", + "AuthenticationError", + "BaseProvider", + "InvalidRequestError", + "ModelListResponseError", + "OverloadedError", + "ProviderConfig", + "ProviderError", + "RateLimitError", + "UnknownProviderTypeError", +] diff --git a/providers/__pycache__/__init__.cpython-314.pyc b/providers/__pycache__/__init__.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..96fbdfd1aa45938497919705cd643fcef6bfe9ee Binary files /dev/null and b/providers/__pycache__/__init__.cpython-314.pyc differ diff --git a/providers/__pycache__/base.cpython-314.pyc b/providers/__pycache__/base.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9e9d131fc36234f69f0356cf20153b72e7878831 Binary files /dev/null and b/providers/__pycache__/base.cpython-314.pyc differ diff --git a/providers/__pycache__/defaults.cpython-314.pyc b/providers/__pycache__/defaults.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8f57e025d35bd65634bed5963bbb889a2482af32 Binary files /dev/null and b/providers/__pycache__/defaults.cpython-314.pyc differ diff --git a/providers/__pycache__/error_mapping.cpython-314.pyc b/providers/__pycache__/error_mapping.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..efc9c02b65ee663ac9946188f402dcde2165dec1 Binary files /dev/null and b/providers/__pycache__/error_mapping.cpython-314.pyc differ diff --git a/providers/__pycache__/exceptions.cpython-314.pyc b/providers/__pycache__/exceptions.cpython-314.pyc new file mode 
100644 index 0000000000000000000000000000000000000000..1588048c671957399703ea95e6bbd897f68cea6c Binary files /dev/null and b/providers/__pycache__/exceptions.cpython-314.pyc differ diff --git a/providers/__pycache__/model_listing.cpython-314.pyc b/providers/__pycache__/model_listing.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c3db21fe0f3ccf2af1f3ab1f3eadf0b457f6dfd6 Binary files /dev/null and b/providers/__pycache__/model_listing.cpython-314.pyc differ diff --git a/providers/__pycache__/openai_compat.cpython-314.pyc b/providers/__pycache__/openai_compat.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..96116a97fdae4eb383055d04b1ffbf20f7c80ec8 Binary files /dev/null and b/providers/__pycache__/openai_compat.cpython-314.pyc differ diff --git a/providers/__pycache__/rate_limit.cpython-314.pyc b/providers/__pycache__/rate_limit.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5576d928d5eb7851079f90890b457c83e3c29fa7 Binary files /dev/null and b/providers/__pycache__/rate_limit.cpython-314.pyc differ diff --git a/providers/__pycache__/registry.cpython-314.pyc b/providers/__pycache__/registry.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ec8018ab552f6eb9a692ec53630a04a0de5be595 Binary files /dev/null and b/providers/__pycache__/registry.cpython-314.pyc differ diff --git a/providers/base.py b/providers/base.py new file mode 100644 index 0000000000000000000000000000000000000000..a009ff1e71925e56241707256e40f86d6d24279d --- /dev/null +++ b/providers/base.py @@ -0,0 +1,130 @@ +"""Base provider interface - extend this to implement your own provider.""" + +from abc import ABC, abstractmethod +from collections.abc import AsyncIterator +from typing import Any + +from pydantic import BaseModel + +from config.constants import HTTP_CONNECT_TIMEOUT_DEFAULT +from providers.model_listing import ProviderModelInfo, model_infos_from_ids + 
class ProviderConfig(BaseModel):
    """Configuration for a provider.

    Base fields apply to all providers. Provider-specific parameters
    (e.g. NIM temperature, top_p) are passed by the provider constructor.
    """

    api_key: str
    base_url: str | None = None
    rate_limit: int | None = None
    rate_window: int = 60
    max_concurrency: int = 5
    http_read_timeout: float = 300.0
    http_write_timeout: float = 10.0
    http_connect_timeout: float = HTTP_CONNECT_TIMEOUT_DEFAULT
    enable_thinking: bool = True
    proxy: str = ""
    log_raw_sse_events: bool = False
    log_api_error_tracebacks: bool = False


class BaseProvider(ABC):
    """Base class for all providers. Extend this to add your own."""

    def __init__(self, config: ProviderConfig):
        self._config = config

    def _is_thinking_enabled(
        self, request: Any, thinking_enabled: bool | None = None
    ) -> bool:
        """Return whether thinking should be enabled for this request.

        The config-level switch (or the explicit ``thinking_enabled``
        override) is ANDed with whatever the request itself says.
        """
        if thinking_enabled is None:
            config_enabled = self._config.enable_thinking
        else:
            config_enabled = thinking_enabled

        request_enabled = True
        thinking = getattr(request, "thinking", None)
        if thinking is not None:
            # The thinking payload may be a dict or an attribute object.
            if isinstance(thinking, dict):
                thinking_type = thinking.get("type")
                explicit_enabled = thinking.get("enabled")
            else:
                thinking_type = getattr(thinking, "type", None)
                explicit_enabled = getattr(thinking, "enabled", None)

            if thinking_type == "disabled":
                request_enabled = False
            # An explicit enabled flag takes precedence over the type field.
            if explicit_enabled is not None:
                request_enabled = bool(explicit_enabled)

        return config_enabled and request_enabled

    def preflight_stream(
        self, request: Any, *, thinking_enabled: bool | None = None
    ) -> None:
        """Eagerly validate/build the upstream request before opening an SSE stream.

        Subclasses with ``_build_request_body`` (OpenAI and native) raise
        :class:`providers.exceptions.InvalidRequestError` on conversion failures.
        """
        build = getattr(self, "_build_request_body", None)
        if build is not None:
            build(request, thinking_enabled=thinking_enabled)

    def _log_stream_transport_error(
        self, tag: str, req_tag: str, error: Exception
    ) -> None:
        """Log streaming transport failures (metadata-only unless verbose is enabled)."""
        from loguru import logger

        if self._config.log_api_error_tracebacks:
            logger.error(
                "{}_ERROR:{} {}: {}", tag, req_tag, type(error).__name__, error
            )
            return

        # Metadata-only path: just the exception type and HTTP status, if any.
        response = getattr(error, "response", None)
        status_code = (
            None if response is None else getattr(response, "status_code", None)
        )
        logger.error(
            "{}_ERROR:{} exc_type={} http_status={}",
            tag,
            req_tag,
            type(error).__name__,
            status_code,
        )

    @abstractmethod
    async def cleanup(self) -> None:
        """Release any resources held by this provider."""

    @abstractmethod
    async def list_model_ids(self) -> frozenset[str]:
        """Return the model ids currently advertised by this provider."""

    async def list_model_infos(self) -> frozenset[ProviderModelInfo]:
        """Return advertised model ids with optional provider capability metadata."""
        return model_infos_from_ids(await self.list_model_ids())

    @abstractmethod
    async def stream_response(
        self,
        request: Any,
        input_tokens: int = 0,
        *,
        request_id: str | None = None,
        thinking_enabled: bool | None = None,
    ) -> AsyncIterator[str]:
        """Stream response in Anthropic SSE format."""
        # Typing: abstract async generators need a yield for AsyncIterator[str]
        # inference; this branch is never executed.
        if False:
            yield ""
+ if False: + yield "" diff --git a/providers/cerebras.py b/providers/cerebras.py new file mode 100644 index 0000000000000000000000000000000000000000..191229c6fc6dab9f5cc4fe613defb05da52c4991 --- /dev/null +++ b/providers/cerebras.py @@ -0,0 +1,28 @@ +"""Cerebras provider implementation.""" + +from __future__ import annotations + +from typing import Any + +from config.settings import Settings +from providers.openai_compat import OpenAICompatProvider + + +class CerebrasProvider(OpenAICompatProvider): + """Provider for Cerebras AI API.""" + + def __init__(self, settings: Settings): + super().__init__(settings) + self._api_key = settings.cerebras_api_key.strip() + # Cerebras uses the standard OpenAI-compatible endpoint + self._base_url = "https://api.cerebras.ai/v1" + + @property + def provider_id(self) -> str: + return "cerebras" + + def _get_api_key(self, model_id: str) -> str: + return self._api_key + + def _get_base_url(self, model_id: str) -> str: + return self._base_url diff --git a/providers/defaults.py b/providers/defaults.py new file mode 100644 index 0000000000000000000000000000000000000000..dad32deca9c50b88f6482eb5a9a2666ff54cfe77 --- /dev/null +++ b/providers/defaults.py @@ -0,0 +1,11 @@ +"""Re-exports default upstream base URLs from the config provider catalog.""" + +from config.provider_catalog import ( + NVIDIA_NIM_DEFAULT_BASE, + ZEN_DEFAULT_BASE, +) + +__all__ = ( + "NVIDIA_NIM_DEFAULT_BASE", + "ZEN_DEFAULT_BASE", +) diff --git a/providers/error_mapping.py b/providers/error_mapping.py new file mode 100644 index 0000000000000000000000000000000000000000..44c33a5087e71acbb46dec028dc89096b70c0b3a --- /dev/null +++ b/providers/error_mapping.py @@ -0,0 +1,76 @@ +"""Provider-specific exception mapping.""" + +import httpx +import openai + +from core.anthropic import get_user_facing_error_message +from providers.exceptions import ( + APIError, + AuthenticationError, + InvalidRequestError, + OverloadedError, + RateLimitError, +) +from providers.rate_limit 
def user_visible_message_for_mapped_provider_error(
    mapped: Exception,
    *,
    provider_name: str,
    read_timeout_s: float | None,
) -> str:
    """Return the user-visible string after :func:`map_error` (405 + mapped types).

    HTTP 405 gets a provider-specific hint because it usually indicates a
    misconfigured endpoint/method rather than a bad request payload.
    """
    if getattr(mapped, "status_code", None) == 405:
        return (
            f"Upstream provider {provider_name} rejected the request method "
            "or endpoint (HTTP 405)."
        )
    return get_user_facing_error_message(mapped, read_timeout_s=read_timeout_s)


def map_error(
    e: Exception, *, rate_limiter: GlobalRateLimiter | None = None
) -> Exception:
    """Map OpenAI or HTTPX exception to specific ProviderError.

    Streaming transports should pass their scoped limiter
    (``self._global_rate_limiter``) so reactive 429 handling applies to the
    correct provider. Tests may omit ``rate_limiter`` to use the
    process-wide singleton. Unrecognized exceptions are returned unchanged.
    """
    message = get_user_facing_error_message(e)
    limiter = rate_limiter or GlobalRateLimiter.get_instance()

    # --- OpenAI SDK exceptions (checked most-specific first) ---
    if isinstance(e, openai.AuthenticationError):
        return AuthenticationError(message, raw_error=str(e))
    if isinstance(e, openai.RateLimitError):
        # Reactively back off for a minute so we stop hammering the upstream.
        limiter.set_blocked(60)
        return RateLimitError(message, raw_error=str(e))
    if isinstance(e, openai.BadRequestError):
        return InvalidRequestError(message, raw_error=str(e))
    if isinstance(e, openai.InternalServerError):
        raw_message = str(e)
        # Heuristic: capacity-style 5xx messages map to "overloaded".
        if "overloaded" in raw_message.lower() or "capacity" in raw_message.lower():
            return OverloadedError(message, raw_error=raw_message)
        return APIError(message, status_code=500, raw_error=raw_message)
    if isinstance(e, openai.APIError):
        return APIError(
            message, status_code=getattr(e, "status_code", 500), raw_error=str(e)
        )

    # --- Raw HTTPX status errors ---
    if isinstance(e, httpx.HTTPStatusError):
        status = e.response.status_code
        if status in (401, 403):
            return AuthenticationError(message, raw_error=str(e))
        if status == 429:
            limiter.set_blocked(60)
            return RateLimitError(message, raw_error=str(e))
        if status == 400:
            return InvalidRequestError(message, raw_error=str(e))
        # Gateway-style statuses read as "overloaded"; everything else becomes
        # a generic APIError. (The original had a redundant nested
        # ``status >= 500`` check with two duplicated APIError returns —
        # flattened here with identical behavior.)
        if status in (502, 503, 504):
            return OverloadedError(message, raw_error=str(e))
        return APIError(message, status_code=status, raw_error=str(e))

    return e
return InvalidRequestError(message, raw_error=str(e)) + if status >= 500: + if status in (502, 503, 504): + return OverloadedError(message, raw_error=str(e)) + return APIError(message, status_code=status, raw_error=str(e)) + return APIError(message, status_code=status, raw_error=str(e)) + + return e diff --git a/providers/exceptions.py b/providers/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..31d80d3960d93ac4df8a12dc1939dff76d0059fb --- /dev/null +++ b/providers/exceptions.py @@ -0,0 +1,113 @@ +"""Unified exception hierarchy for providers.""" + +from typing import Any + + +class ProviderError(Exception): + """Base exception for all provider errors.""" + + def __init__( + self, + message: str, + status_code: int = 500, + error_type: str = "api_error", + raw_error: Any = None, + ): + super().__init__(message) + self.message = message + self.status_code = status_code + self.error_type = error_type + self.raw_error = raw_error + + def to_anthropic_format(self) -> dict: + """Convert to Anthropic-compatible error response.""" + return { + "type": "error", + "error": { + "type": self.error_type, + "message": self.message, + }, + } + + +class AuthenticationError(ProviderError): + """Raised when API key is invalid or missing.""" + + def __init__(self, message: str, raw_error: Any = None): + super().__init__( + message, + status_code=401, + error_type="authentication_error", + raw_error=raw_error, + ) + + +class InvalidRequestError(ProviderError): + """Raised when the request parameters are invalid.""" + + def __init__(self, message: str, raw_error: Any = None): + super().__init__( + message, + status_code=400, + error_type="invalid_request_error", + raw_error=raw_error, + ) + + +class RateLimitError(ProviderError): + """Raised when rate limit is exceeded.""" + + def __init__(self, message: str, raw_error: Any = None): + super().__init__( + message, + status_code=429, + error_type="rate_limit_error", + raw_error=raw_error, + ) + + +class 
OverloadedError(ProviderError): + """Raised when the provider is overloaded.""" + + def __init__(self, message: str, raw_error: Any = None): + super().__init__( + message, + status_code=529, + error_type="overloaded_error", + raw_error=raw_error, + ) + + +class APIError(ProviderError): + """Raised when the provider returns a generic API error.""" + + def __init__(self, message: str, status_code: int = 500, raw_error: Any = None): + super().__init__( + message, + status_code=status_code, + error_type="api_error", + raw_error=raw_error, + ) + + +class UnknownProviderTypeError(InvalidRequestError): + """Raised when ``provider_id`` is not registered in the provider map.""" + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class ServiceUnavailableError(ProviderError): + """Raised when the server is not ready (e.g. app lifespan did not wire state).""" + + def __init__(self, message: str, raw_error: Any = None): + super().__init__( + message, + status_code=503, + error_type="api_error", + raw_error=raw_error, + ) + + +class ModelListResponseError(ServiceUnavailableError): + """Raised when a provider model-list response cannot be parsed safely.""" diff --git a/providers/groq.py b/providers/groq.py new file mode 100644 index 0000000000000000000000000000000000000000..ede46671388ccd53e82a1765add0887c29561b68 --- /dev/null +++ b/providers/groq.py @@ -0,0 +1,28 @@ +"""Groq provider implementation.""" + +from __future__ import annotations + +from typing import Any + +from config.settings import Settings +from providers.openai_compat import OpenAICompatProvider + + +class GroqProvider(OpenAICompatProvider): + """Provider for Groq Cloud API.""" + + def __init__(self, settings: Settings): + super().__init__(settings) + self._api_key = settings.groq_api_key.strip() + # Groq uses the standard OpenAI-compatible endpoint + self._base_url = "https://api.groq.com/openai/v1" + + @property + def provider_id(self) -> str: + return "groq" + + def 
_get_api_key(self, model_id: str) -> str: + return self._api_key + + def _get_base_url(self, model_id: str) -> str: + return self._base_url diff --git a/providers/model_listing.py b/providers/model_listing.py new file mode 100644 index 0000000000000000000000000000000000000000..a4435f332b52271aaa08a779b080e561952d098e --- /dev/null +++ b/providers/model_listing.py @@ -0,0 +1,133 @@ +"""Provider model-list response parsing helpers.""" + +from __future__ import annotations + +from collections.abc import Iterable, Mapping, Sequence +from dataclasses import dataclass +from typing import Any + +from providers.exceptions import ModelListResponseError + + +@dataclass(frozen=True, slots=True) +class ProviderModelInfo: + """Internal provider model metadata used for gateway model-list shaping.""" + + model_id: str + supports_thinking: bool | None = None + + +def model_infos_from_ids( + model_ids: Iterable[str], *, supports_thinking: bool | None = None +) -> frozenset[ProviderModelInfo]: + """Build unknown-capability model metadata from plain provider model ids.""" + return frozenset( + ProviderModelInfo(model_id=model_id, supports_thinking=supports_thinking) + for model_id in model_ids + if model_id.strip() + ) + + +def extract_openai_model_ids(payload: Any, *, provider_name: str) -> frozenset[str]: + """Extract model ids from an OpenAI-compatible ``/models`` response.""" + data = _field(payload, "data") + if not _is_sequence(data): + raise _malformed(provider_name, "expected top-level data array") + + model_ids: set[str] = set() + for item in data: + model_id = _field(item, "id") + if not isinstance(model_id, str) or not model_id.strip(): + raise _malformed(provider_name, "expected every data item to include id") + model_ids.add(model_id) + + if not model_ids: + raise _malformed(provider_name, "response did not include any model ids") + return frozenset(model_ids) + + +def extract_openrouter_tool_model_ids( + payload: Any, *, provider_name: str +) -> frozenset[str]: + 
"""Extract OpenRouter model ids that advertise tool-use support.""" + return frozenset( + info.model_id + for info in extract_openrouter_tool_model_infos( + payload, provider_name=provider_name + ) + ) + + +def extract_openrouter_tool_model_infos( + payload: Any, *, provider_name: str +) -> frozenset[ProviderModelInfo]: + """Extract OpenRouter tool-capable model ids with thinking capability metadata.""" + data = _field(payload, "data") + if not _is_sequence(data): + raise _malformed(provider_name, "expected top-level data array") + + model_infos: set[ProviderModelInfo] = set() + for item in data: + model_id = _field(item, "id") + if not isinstance(model_id, str) or not model_id.strip(): + raise _malformed(provider_name, "expected every data item to include id") + + supported_parameters = _field(item, "supported_parameters") + if not _is_sequence(supported_parameters): + continue + supported_parameter_names = { + param for param in supported_parameters if isinstance(param, str) + } + if supported_parameter_names.isdisjoint({"tools", "tool_choice"}): + continue + model_infos.add( + ProviderModelInfo( + model_id=model_id, + supports_thinking="reasoning" in supported_parameter_names, + ) + ) + + return frozenset(model_infos) + + +def extract_ollama_model_ids(payload: Any, *, provider_name: str) -> frozenset[str]: + """Extract model ids from Ollama's native ``/api/tags`` response.""" + models = _field(payload, "models") + if not _is_sequence(models): + raise _malformed(provider_name, "expected top-level models array") + + model_ids: set[str] = set() + for item in models: + item_ids: list[str] = [] + for key in ("model", "name"): + value = _field(item, key) + if isinstance(value, str) and value.strip(): + item_ids.append(value) + if not item_ids: + raise _malformed( + provider_name, + "expected every models item to include model or name", + ) + model_ids.update(item_ids) + + if not model_ids: + raise _malformed(provider_name, "response did not include any model ids") + 
return frozenset(model_ids) + + +def _field(item: Any, name: str) -> Any: + if isinstance(item, Mapping): + return item.get(name) + return getattr(item, name, None) + + +def _is_sequence(value: Any) -> bool: + return isinstance(value, Sequence) and not isinstance( + value, str | bytes | bytearray + ) + + +def _malformed(provider_name: str, reason: str) -> ModelListResponseError: + return ModelListResponseError( + f"{provider_name} model-list response is malformed: {reason}" + ) diff --git a/providers/nvidia_nim/__init__.py b/providers/nvidia_nim/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..253acd1cd04af49d5a94a68c6c10cccd0a30fece --- /dev/null +++ b/providers/nvidia_nim/__init__.py @@ -0,0 +1,7 @@ +"""NVIDIA NIM provider package.""" + +from providers.defaults import NVIDIA_NIM_DEFAULT_BASE + +from .client import NvidiaNimProvider + +__all__ = ["NVIDIA_NIM_DEFAULT_BASE", "NvidiaNimProvider"] diff --git a/providers/nvidia_nim/__pycache__/__init__.cpython-314.pyc b/providers/nvidia_nim/__pycache__/__init__.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7e363b1cf263fc00e32e31fde0cbc1d2f49e0ac4 Binary files /dev/null and b/providers/nvidia_nim/__pycache__/__init__.cpython-314.pyc differ diff --git a/providers/nvidia_nim/__pycache__/client.cpython-314.pyc b/providers/nvidia_nim/__pycache__/client.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b3d731355e8c71fa8634d0c449ef6db383c5bffc Binary files /dev/null and b/providers/nvidia_nim/__pycache__/client.cpython-314.pyc differ diff --git a/providers/nvidia_nim/__pycache__/metrics.cpython-314.pyc b/providers/nvidia_nim/__pycache__/metrics.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aa9dbb001864795288a3762f927d2ebad7bf7072 Binary files /dev/null and b/providers/nvidia_nim/__pycache__/metrics.cpython-314.pyc differ diff --git 
# --- providers/nvidia_nim/client.py ---
"""NVIDIA NIM provider implementation."""

import json
from typing import Any

import openai
from loguru import logger
from openai import AsyncOpenAI

from config.nim import NimSettings
from config.settings import Settings
from providers.base import ProviderConfig
from providers.defaults import NVIDIA_NIM_DEFAULT_BASE
from providers.openai_compat import OpenAIChatTransport

from . import metrics as nim_metrics
from .request import (
    build_request_body,
    clone_body_without_chat_template,
    clone_body_without_reasoning_budget,
    clone_body_without_reasoning_content,
)


class NvidiaNimProvider(OpenAIChatTransport):
    """NVIDIA NIM provider using the official OpenAI client."""

    def __init__(
        self,
        config: ProviderConfig,
        *,
        nim_settings: NimSettings,
        settings: Settings,
    ):
        super().__init__(
            config,
            provider_name="NIM",
            base_url=config.base_url or NVIDIA_NIM_DEFAULT_BASE,
            api_key=config.api_key,
        )
        self._nim_settings = nim_settings
        self._settings = settings

    def _api_key_for_model(self, model_name: str) -> str:
        """Resolve the API key for *model_name* via global settings."""
        return self._settings.nvidia_nim_api_key_for_model(model_name)

    def _client_for_body(self, body: dict[str, Any]) -> AsyncOpenAI:
        """Return a client keyed by the API key the body's model resolves to."""
        target_model = str(body.get("model") or "")
        return self._client_for_api_key(self._api_key_for_model(target_model))

    def _build_request_body(
        self, request: Any, thinking_enabled: bool | None = None
    ) -> dict:
        """Internal helper for tests and shared building."""
        return build_request_body(
            request,
            self._nim_settings,
            thinking_enabled=self._is_thinking_enabled(request, thinking_enabled),
        )

    def _get_retry_request_body(self, error: Exception, body: dict) -> dict | None:
        """Retry once with a downgraded body when NIM rejects a known field.

        Only 400-class errors are considered; the error text (plus the JSON
        error body, when present) is scanned for the offending field name and
        the matching clone helper strips it. Returns None when no downgrade
        applies (or when stripping changed nothing).
        """
        status_code = getattr(error, "status_code", None)
        if not isinstance(error, openai.BadRequestError) and status_code != 400:
            return None

        error_text = str(error)
        error_body = getattr(error, "body", None)
        if error_body is not None:
            error_text = f"{error_text} {json.dumps(error_body, default=str)}"
        error_text = error_text.lower()

        # Checked in order; first matching field wins (same order as before).
        downgrades = (
            ("reasoning_budget", clone_body_without_reasoning_budget),
            ("chat_template", clone_body_without_chat_template),
            ("reasoning_content", clone_body_without_reasoning_content),
        )
        for marker, strip in downgrades:
            if marker not in error_text:
                continue
            retry_body = strip(body)
            if retry_body is None:
                return None
            logger.warning(f"NIM_STREAM: retrying without {marker} after 400 error")
            return retry_body

        return None
without chat_template after 400 error") + return retry_body + + if "reasoning_content" in error_text: + retry_body = clone_body_without_reasoning_content(body) + if retry_body is None: + return None + logger.warning( + "NIM_STREAM: retrying without reasoning_content after 400 error" + ) + return retry_body + + return None + + async def _create_stream(self, body: dict) -> tuple[Any, dict]: + """Override to support fallback models on transient failures (429/connection/timeouts). + + Attempts the primary model first; on certain transient errors, will iterate + configured fallback models from settings `nvidia_nim_fallback_models`. + """ + from config.settings import get_settings + import httpx + import asyncio + + connect_timeout_s = 30 + first_chunk_timeout_s = 60 + fallback_first_chunk_timeout_s = 60 + + try: + client = self._client_for_body(body) + stream = await asyncio.wait_for( + self._global_rate_limiter.execute_with_retry( + client.chat.completions.create, + **body, + stream=True, + max_retries=1, + ), + timeout=connect_timeout_s, + ) + used_body = body + # Probe for initial content; if no chunk arrives in time, treat as transient + try: + first = await asyncio.wait_for( + stream.__anext__(), timeout=first_chunk_timeout_s + ) + except asyncio.TimeoutError: + # try to close original stream if possible + try: + await getattr(stream, "aclose", lambda: None)() + except Exception: + pass + raise + + async def _wrapped(): + # yield the already-received first chunk, then the rest + yield first + async for c in stream: + yield c + + return _wrapped(), used_body + except Exception as error: # primary model failed + # Decide whether to attempt fallbacks + status_code = getattr(error, "status_code", None) + text = str(error).lower() + transient = False + if status_code == 429: + transient = True + if "rate limit" in text or "too many requests" in text: + transient = True + if "connection" in text and ("refused" in text or "reset" in text): + transient = True + if 
isinstance(error, (httpx.ConnectError, httpx.ReadTimeout, asyncio.TimeoutError)): + transient = True + + if not transient: + raise + + settings = get_settings() + csv = (settings.nvidia_nim_fallback_models or "").strip() + if not csv: + raise + + candidates = [c.strip() for c in csv.split(",") if c.strip()] + # normalize: for entries like 'nvidia_nim/model/name' -> use only model part + def model_for_candidate(cand: str) -> str: + if "/" in cand: + parts = cand.split("/", 1) + # if provider prefix present and not this provider, skip later + return parts[1] + return cand + + last_exc = error + for cand in candidates: + # skip self model if identical + try_model = model_for_candidate(cand) + if try_model == body.get("model"): + continue + # If candidate specified a different provider, ensure it's for NIM + if "/" in cand: + provider = cand.split("/", 1)[0] + if provider != "nvidia_nim": + # Not applicable to this provider + continue + + retry_body = dict(body) + retry_body["model"] = try_model + client = self._client_for_body(retry_body) + logger.warning( + "NIM_STREAM: primary model failed (%s); attempting fallback %s", + type(error).__name__, + cand, + ) + try: + # record attempt + try: + nim_metrics.record_attempt(cand) + except Exception: + logger.debug("NIM_METRICS: failed to record attempt for %s", cand) + + stream = await self._global_rate_limiter.execute_with_retry( + client.chat.completions.create, + **retry_body, + stream=True, + max_retries=1, + ) + # Probe for initial content on fallback stream as well + try: + first = await asyncio.wait_for( + stream.__anext__(), timeout=fallback_first_chunk_timeout_s + ) + except asyncio.TimeoutError: + try: + await getattr(stream, "aclose", lambda: None)() + except Exception: + pass + raise + + async def _wrapped_fallback(): + yield first + async for c in stream: + yield c + + try: + nim_metrics.record_success(cand) + except Exception: + logger.debug("NIM_METRICS: failed to record success for %s", cand) + return 
_wrapped_fallback(), retry_body + except Exception as e2: + logger.warning("NIM_STREAM: fallback %s failed: %s", cand, e2) + try: + nim_metrics.record_failure(cand) + except Exception: + logger.debug("NIM_METRICS: failed to record failure for %s", cand) + last_exc = e2 + + # No fallback succeeded; re-raise last exception + raise last_exc diff --git a/providers/nvidia_nim/metrics.py b/providers/nvidia_nim/metrics.py new file mode 100644 index 0000000000000000000000000000000000000000..f162ecf2e569019a9157674d6fb1527c91cb57fc --- /dev/null +++ b/providers/nvidia_nim/metrics.py @@ -0,0 +1,43 @@ +"""Lightweight in-memory metrics for NVIDIA NIM fallback usage. + +Keep metrics local to the process. Simple API for recording attempts, +successes, and failures per candidate model. +""" +from __future__ import annotations + +from threading import Lock +from typing import Dict + + +_lock = Lock() +# stats: model_id -> {attempts: int, successes: int, failures: int} +_stats: Dict[str, Dict[str, int]] = {} + + +def _ensure_model(model: str) -> None: + with _lock: + if model not in _stats: + _stats[model] = {"attempts": 0, "successes": 0, "failures": 0} + + +def record_attempt(model: str) -> None: + _ensure_model(model) + with _lock: + _stats[model]["attempts"] += 1 + + +def record_success(model: str) -> None: + _ensure_model(model) + with _lock: + _stats[model]["successes"] += 1 + + +def record_failure(model: str) -> None: + _ensure_model(model) + with _lock: + _stats[model]["failures"] += 1 + + +def snapshot() -> Dict[str, Dict[str, int]]: + with _lock: + return {k: v.copy() for k, v in _stats.items()} diff --git a/providers/nvidia_nim/request.py b/providers/nvidia_nim/request.py new file mode 100644 index 0000000000000000000000000000000000000000..6cc816cca3c51da0c4c581cfc4e1bc7b3797e33e --- /dev/null +++ b/providers/nvidia_nim/request.py @@ -0,0 +1,277 @@ +"""Request builder for NVIDIA NIM provider.""" + +from collections.abc import Callable +from copy import deepcopy +from 
typing import Any + +from loguru import logger + +from config.nim import NimSettings +from core.anthropic import ( + ReasoningReplayMode, + build_base_request_body, + set_if_not_none, +) +from core.anthropic.conversion import OpenAIConversionError +from providers.exceptions import InvalidRequestError + +# Models known to support NIM chat_template_kwargs for thinking/reasoning. +# Models NOT in this set will get thinking disabled to avoid BadRequestError. +_THINKING_CAPABLE_MODEL_PREFIXES: tuple[str, ...] = ( + "qwen/qwen3-", + "mistralai/mistral-nemotron", +) + +_SCHEMA_VALUE_KEYS = frozenset( + { + "additionalProperties", + "additionalItems", + "unevaluatedProperties", + "unevaluatedItems", + "items", + "contains", + "propertyNames", + "if", + "then", + "else", + "not", + } +) +_SCHEMA_LIST_KEYS = frozenset({"allOf", "anyOf", "oneOf", "prefixItems"}) +_SCHEMA_MAP_KEYS = frozenset( + {"properties", "patternProperties", "$defs", "definitions", "dependentSchemas"} +) + + +def _clone_strip_extra_body( + body: dict[str, Any], + strip: Callable[[dict[str, Any]], bool], +) -> dict[str, Any] | None: + """Deep-clone ``body`` and remove fields via ``strip`` on ``extra_body`` only. + + Returns ``None`` when there is no ``extra_body`` dict or ``strip`` reports no change. 
+ """ + cloned_body = deepcopy(body) + extra_body = cloned_body.get("extra_body") + if not isinstance(extra_body, dict): + return None + if not strip(extra_body): + return None + if not extra_body: + cloned_body.pop("extra_body", None) + return cloned_body + + +def _strip_reasoning_budget_fields(extra_body: dict[str, Any]) -> bool: + removed = extra_body.pop("reasoning_budget", None) is not None + chat_template_kwargs = extra_body.get("chat_template_kwargs") + if ( + isinstance(chat_template_kwargs, dict) + and chat_template_kwargs.pop("reasoning_budget", None) is not None + ): + removed = True + return removed + + +def _strip_chat_template_field(extra_body: dict[str, Any]) -> bool: + return extra_body.pop("chat_template", None) is not None + + +def _strip_message_reasoning_content(body: dict[str, Any]) -> bool: + removed = False + messages = body.get("messages") + if not isinstance(messages, list): + return False + for message in messages: + if ( + isinstance(message, dict) + and message.pop("reasoning_content", None) is not None + ): + removed = True + return removed + + +def _sanitize_nim_schema_node(value: Any) -> tuple[bool, Any]: + """Remove boolean JSON Schema subschemas that hosted NIM rejects.""" + if isinstance(value, bool): + return False, None + if isinstance(value, dict): + sanitized: dict[str, Any] = {} + for key, item in value.items(): + if key in _SCHEMA_VALUE_KEYS: + keep, sanitized_item = _sanitize_nim_schema_node(item) + if keep: + sanitized[key] = sanitized_item + elif key in _SCHEMA_LIST_KEYS and isinstance(item, list): + sanitized_items: list[Any] = [] + for schema_item in item: + keep, sanitized_item = _sanitize_nim_schema_node(schema_item) + if keep: + sanitized_items.append(sanitized_item) + if sanitized_items: + sanitized[key] = sanitized_items + elif key in _SCHEMA_MAP_KEYS and isinstance(item, dict): + sanitized_map: dict[str, Any] = {} + for map_key, schema_item in item.items(): + keep, sanitized_item = 
_sanitize_nim_schema_node(schema_item) + if keep: + sanitized_map[map_key] = sanitized_item + sanitized[key] = sanitized_map + else: + sanitized[key] = item + return True, sanitized + if isinstance(value, list): + sanitized_items = [] + for item in value: + keep, sanitized_item = _sanitize_nim_schema_node(item) + if keep: + sanitized_items.append(sanitized_item) + return True, sanitized_items + return True, value + + +def _sanitize_nim_tool_schemas(body: dict[str, Any]) -> None: + """Sanitize only tool parameter schemas, preserving tool calls/history.""" + tools = body.get("tools") + if not isinstance(tools, list): + return + + sanitized_tools: list[Any] = [] + for tool in tools: + if not isinstance(tool, dict): + sanitized_tools.append(tool) + continue + sanitized_tool = dict(tool) + function = tool.get("function") + if isinstance(function, dict): + sanitized_function = dict(function) + parameters = function.get("parameters") + if isinstance(parameters, dict): + _, sanitized_parameters = _sanitize_nim_schema_node(parameters) + sanitized_function["parameters"] = sanitized_parameters + sanitized_tool["function"] = sanitized_function + sanitized_tools.append(sanitized_tool) + + body["tools"] = sanitized_tools + + +def _set_extra( + extra_body: dict[str, Any], key: str, value: Any, ignore_value: Any = None +) -> None: + if key in extra_body: + return + if value is None: + return + if ignore_value is not None and value == ignore_value: + return + extra_body[key] = value + + +def clone_body_without_reasoning_budget(body: dict[str, Any]) -> dict[str, Any] | None: + """Clone a request body and strip only reasoning_budget fields.""" + return _clone_strip_extra_body(body, _strip_reasoning_budget_fields) + + +def clone_body_without_chat_template(body: dict[str, Any]) -> dict[str, Any] | None: + """Clone a request body and strip only chat_template.""" + return _clone_strip_extra_body(body, _strip_chat_template_field) + + +def clone_body_without_reasoning_content(body: 
dict[str, Any]) -> dict[str, Any] | None: + """Clone a request body and strip assistant message ``reasoning_content`` fields.""" + cloned_body = deepcopy(body) + if not _strip_message_reasoning_content(cloned_body): + return None + return cloned_body + + +def build_request_body( + request_data: Any, nim: NimSettings, *, thinking_enabled: bool +) -> dict: + """Build OpenAI-format request body from Anthropic request.""" + logger.debug( + "NIM_REQUEST: conversion start model={} msgs={}", + getattr(request_data, "model", "?"), + len(getattr(request_data, "messages", [])), + ) + try: + body = build_base_request_body( + request_data, + reasoning_replay=ReasoningReplayMode.REASONING_CONTENT + if thinking_enabled + else ReasoningReplayMode.DISABLED, + ) + except OpenAIConversionError as exc: + raise InvalidRequestError(str(exc)) from exc + + _sanitize_nim_tool_schemas(body) + + # NIM-specific max_tokens: cap against nim.max_tokens + max_tokens = body.get("max_tokens") or getattr(request_data, "max_tokens", None) + if max_tokens is None: + max_tokens = nim.max_tokens + elif nim.max_tokens: + max_tokens = min(max_tokens, nim.max_tokens) + set_if_not_none(body, "max_tokens", max_tokens) + + # NIM-specific temperature/top_p: fall back to NIM defaults if request didn't set + if body.get("temperature") is None and nim.temperature is not None: + body["temperature"] = nim.temperature + if body.get("top_p") is None and nim.top_p is not None: + body["top_p"] = nim.top_p + + # NIM-specific stop sequences fallback + if "stop" not in body and nim.stop: + body["stop"] = nim.stop + + if nim.presence_penalty != 0.0: + body["presence_penalty"] = nim.presence_penalty + if nim.frequency_penalty != 0.0: + body["frequency_penalty"] = nim.frequency_penalty + if nim.seed is not None: + body["seed"] = nim.seed + + body["parallel_tool_calls"] = nim.parallel_tool_calls + + # Handle non-standard parameters via extra_body + extra_body: dict[str, Any] = {} + request_extra = getattr(request_data, 
"extra_body", None) + if request_extra: + extra_body.update(request_extra) + + # Only inject chat_template_kwargs for models that support it. + # Other models (Mistral-Large, Dracarys, GLM4, StepFun, Seed-OSS) + # reject these params with BadRequestError. + model_name = str(body.get("model", "")).lower() + model_supports_thinking = any( + model_name.startswith(prefix) for prefix in _THINKING_CAPABLE_MODEL_PREFIXES + ) + if thinking_enabled and model_supports_thinking: + chat_template_kwargs = extra_body.setdefault( + "chat_template_kwargs", {"thinking": True, "enable_thinking": True} + ) + if isinstance(chat_template_kwargs, dict): + chat_template_kwargs.setdefault("reasoning_budget", max_tokens) + + req_top_k = getattr(request_data, "top_k", None) + top_k = req_top_k if req_top_k is not None else nim.top_k + _set_extra(extra_body, "top_k", top_k, ignore_value=-1) + _set_extra(extra_body, "min_p", nim.min_p, ignore_value=0.0) + _set_extra( + extra_body, "repetition_penalty", nim.repetition_penalty, ignore_value=1.0 + ) + _set_extra(extra_body, "min_tokens", nim.min_tokens, ignore_value=0) + _set_extra(extra_body, "chat_template", nim.chat_template) + _set_extra(extra_body, "request_id", nim.request_id) + _set_extra(extra_body, "ignore_eos", nim.ignore_eos) + + if extra_body: + body["extra_body"] = extra_body + + logger.debug( + "NIM_REQUEST: conversion done model={} msgs={} tools={}", + body.get("model"), + len(body.get("messages", [])), + len(body.get("tools", [])), + ) + return body diff --git a/providers/nvidia_nim/voice.py b/providers/nvidia_nim/voice.py new file mode 100644 index 0000000000000000000000000000000000000000..f26255a63c28cb32c2d81056dc9c14675459eccf --- /dev/null +++ b/providers/nvidia_nim/voice.py @@ -0,0 +1,95 @@ +"""NVIDIA NIM / Riva offline ASR for voice notes (provider-owned transport).""" + +from __future__ import annotations + +from pathlib import Path + +from loguru import logger + +# NVIDIA NIM Whisper model mapping: (function_id, 
_NIM_ASR_MODEL_MAP: dict[str, tuple[str, str]] = {
    "nvidia/parakeet-ctc-0.6b-zh-tw": ("8473f56d-51ef-473c-bb26-efd4f5def2bf", "zh-TW"),
    "nvidia/parakeet-ctc-0.6b-zh-cn": ("9add5ef7-322e-47e0-ad7a-5653fb8d259b", "zh-CN"),
    # function-id from NVIDIA NIM API docs (parakeet-ctc-0.6b-es).
    "nvidia/parakeet-ctc-0.6b-es": ("a9eeee8f-b509-4712-b19d-194361fa5f31", "es-US"),
    "nvidia/parakeet-ctc-0.6b-vi": ("f3dff2bb-99f9-403d-a5f1-f574a757deb0", "vi-VN"),
    "nvidia/parakeet-ctc-1.1b-asr": ("1598d209-5e27-4d3c-8079-4751568b1081", "en-US"),
    "nvidia/parakeet-ctc-0.6b-asr": ("d8dd4e9b-fbf5-4fb0-9dba-8cf436c8d965", "en-US"),
    "nvidia/parakeet-1.1b-rnnt-multilingual-asr": (
        "71203149-d3b7-4460-8231-1be2543a1fca",
        "",
    ),
    "openai/whisper-large-v3": ("b702f636-f60c-4a3d-a6f4-f3568c13bd7d", "multi"),
}

_RIVA_SERVER = "grpc.nvcf.nvidia.com:443"


def transcribe_audio_file(
    file_path: Path,
    model: str,
    *,
    api_key: str,
) -> str:
    """Transcribe audio using NVIDIA NIM / Riva gRPC (offline recognition).

    Args:
        file_path: Path to encoded audio bytes readable by Riva.
        model: Hugging Face-style NIM model id (see ``_NIM_ASR_MODEL_MAP``).
        api_key: NVIDIA API key (Bearer token); must be non-empty.

    Returns:
        Transcript text, or ``(no speech detected)`` when empty.

    Raises:
        ValueError: empty key or unknown model id.
        ImportError: the ``riva`` client extra is not installed.
    """
    key = (api_key or "").strip()
    if not key:
        raise ValueError(
            "NVIDIA NIM transcription requires a non-empty nvidia_nim_api_key "
            "(configure NVIDIA_NIM_API_KEY or pass api_key explicitly)."
        )

    # Imported lazily so the voice extra stays optional.
    try:
        import riva.client
    except ImportError as e:
        raise ImportError(
            "NVIDIA NIM transcription requires the voice extra. "
            "Install with: uv sync --extra voice"
        ) from e

    model_config = _NIM_ASR_MODEL_MAP.get(model)
    if not model_config:
        raise ValueError(
            f"No NVIDIA NIM config found for model: {model}. "
            f"Supported models: {', '.join(_NIM_ASR_MODEL_MAP.keys())}"
        )
    function_id, language_code = model_config

    # function-id routes the request to the right hosted model on NVCF.
    auth = riva.client.Auth(
        use_ssl=True,
        uri=_RIVA_SERVER,
        metadata_args=[
            ["function-id", function_id],
            ["authorization", f"Bearer {key}"],
        ],
    )

    asr_service = riva.client.ASRService(auth)

    config = riva.client.RecognitionConfig(
        language_code=language_code,
        max_alternatives=1,
        verbatim_transcripts=True,
    )

    # pathlib idiom replaces the original open()/read() pair; voice notes are
    # small enough to hold in memory for offline recognition.
    data = file_path.read_bytes()

    response = asr_service.offline_recognize(data, config)

    # Take the top alternative of the first result, when any speech was found.
    transcript = ""
    results = getattr(response, "results", None)
    if results and results[0].alternatives:
        transcript = results[0].alternatives[0].transcript

    logger.debug(f"NIM transcription: {len(transcript)} chars")
    return transcript or "(no speech detected)"
+""" + +import asyncio +import json +import uuid +from abc import abstractmethod +from collections.abc import AsyncIterator, Iterator +from typing import Any + +import httpx +from loguru import logger +from openai import AsyncOpenAI + +from core.anthropic import ( + ContentType, + HeuristicToolParser, + SSEBuilder, + ThinkTagParser, + append_request_id, + map_stop_reason, +) +from providers.base import BaseProvider, ProviderConfig +from providers.error_mapping import ( + map_error, + user_visible_message_for_mapped_provider_error, +) +from providers.rate_limit import GlobalRateLimiter +from providers.model_listing import ( + ProviderModelInfo, + extract_openai_model_ids, + model_infos_from_ids, +) + + +def _iter_heuristic_tool_use_sse( + sse: SSEBuilder, tool_use: dict[str, Any] +) -> Iterator[str]: + """Emit SSE for one heuristic tool_use block (closes open text/thinking first).""" + if tool_use.get("name") == "Task" and isinstance(tool_use.get("input"), dict): + task_input = tool_use["input"] + if task_input.get("run_in_background") is not False: + task_input["run_in_background"] = False + yield from sse.close_content_blocks() + block_idx = sse.blocks.allocate_index() + yield sse.content_block_start( + block_idx, + "tool_use", + id=tool_use["id"], + name=tool_use["name"], + ) + yield sse.content_block_delta( + block_idx, + "input_json_delta", + json.dumps(tool_use["input"]), + ) + yield sse.content_block_stop(block_idx) + + +class OpenAIChatTransport(BaseProvider): + """Base for OpenAI-compatible ``/chat/completions`` adapters (NIM, …).""" + + def __init__( + self, + config: ProviderConfig, + *, + provider_name: str, + base_url: str, + api_key: str, + ): + super().__init__(config) + self._provider_name = provider_name + self._api_key = api_key + self._base_url = base_url.rstrip("/") + self._http_client = None + self._client_cache: dict[str, AsyncOpenAI] = {} + self._global_rate_limiter = GlobalRateLimiter.get_scoped_instance( + provider_name.lower(), + 
rate_limit=config.rate_limit, + rate_window=config.rate_window, + max_concurrency=config.max_concurrency, + ) + # Always create an explicit httpx.AsyncClient with trust_env=False to avoid + # slow system proxy detection on Windows during initialization. + http_client_args = { + "timeout": httpx.Timeout( + config.http_read_timeout, + connect=config.http_connect_timeout, + read=config.http_read_timeout, + write=config.http_write_timeout, + ), + "trust_env": False, + "http2": True, + } + if config.proxy: + http_client_args["proxy"] = config.proxy + + self._http_client = httpx.AsyncClient(**http_client_args) + + self._client = AsyncOpenAI( + api_key=self._api_key, + base_url=self._base_url, + max_retries=0, + timeout=httpx.Timeout( + config.http_read_timeout, + connect=config.http_connect_timeout, + read=config.http_read_timeout, + write=config.http_write_timeout, + ), + http_client=self._http_client, + ) + self._client_cache[self._api_key] = self._client + + def _client_for_api_key(self, api_key: str) -> AsyncOpenAI: + """Return a cached OpenAI client for the given API key.""" + if api_key == self._api_key: + return self._client + client = self._client_cache.get(api_key) + if client is not None: + return client + client = AsyncOpenAI( + api_key=api_key, + base_url=self._base_url, + max_retries=0, + timeout=httpx.Timeout( + self._config.http_read_timeout, + connect=self._config.http_connect_timeout, + read=self._config.http_read_timeout, + write=self._config.http_write_timeout, + ), + http_client=self._http_client, + ) + self._client_cache[api_key] = client + return client + + async def cleanup(self) -> None: + """Release HTTP client resources.""" + seen: set[int] = set() + for client in list(self._client_cache.values()): + client_id = id(client) + if client_id in seen: + continue + seen.add(client_id) + await client.aclose() + + async def list_model_infos(self) -> frozenset[ProviderModelInfo]: + """Return model metadata from the provider's OpenAI-compatible models 
endpoint.""" + model_ids = await self.list_model_ids() + # Default all models to supports_thinking=None (unknown) unless provider overrides + return model_infos_from_ids(model_ids, supports_thinking=None) + + async def list_model_ids(self) -> frozenset[str]: + """Return model ids from the provider's OpenAI-compatible models endpoint.""" + payload = await self._client.models.list() + return extract_openai_model_ids(payload, provider_name=self._provider_name) + + @abstractmethod + def _build_request_body( + self, request: Any, thinking_enabled: bool | None = None + ) -> dict: + """Build request body. Must be implemented by subclasses.""" + + def _handle_extra_reasoning( + self, delta: Any, sse: SSEBuilder, *, thinking_enabled: bool + ) -> Iterator[str]: + """Hook for provider-specific reasoning (e.g. OpenRouter reasoning_details).""" + return iter(()) + + def _get_retry_request_body(self, error: Exception, body: dict) -> dict | None: + """Return a modified request body for one retry, or None.""" + return None + + async def _create_stream(self, body: dict) -> tuple[Any, dict]: + """Create a streaming chat completion, optionally retrying once.""" + try: + stream = await self._global_rate_limiter.execute_with_retry( + self._client.chat.completions.create, + **body, + stream=True, + max_retries=1, + ) + return stream, body + except Exception as error: + retry_body = self._get_retry_request_body(error, body) + if retry_body is None: + raise + + stream = await self._global_rate_limiter.execute_with_retry( + self._client.chat.completions.create, + **retry_body, + stream=True, + max_retries=1, + ) + return stream, retry_body + + def _emit_tool_arg_delta( + self, sse: SSEBuilder, tc_index: int, args: str + ) -> Iterator[str]: + """Emit one argument fragment for a started tool block (Task buffer or raw JSON).""" + if not args: + return + state = sse.blocks.tool_states.get(tc_index) + if state is None: + return + if state.name == "Task": + parsed = 
sse.blocks.buffer_task_args(tc_index, args) + if parsed is not None: + yield sse.emit_tool_delta(tc_index, json.dumps(parsed)) + return + yield sse.emit_tool_delta(tc_index, args) + + def _process_tool_call(self, tc: dict, sse: SSEBuilder) -> Iterator[str]: + """Process a single tool call delta and yield SSE events.""" + tc_index = tc.get("index", 0) + if tc_index < 0: + tc_index = len(sse.blocks.tool_states) + + fn_delta = tc.get("function", {}) + incoming_name = fn_delta.get("name") + arguments = fn_delta.get("arguments", "") or "" + + if tc.get("id") is not None: + sse.blocks.set_stream_tool_id(tc_index, tc.get("id")) + + if incoming_name is not None: + sse.blocks.register_tool_name(tc_index, incoming_name) + + state = sse.blocks.tool_states.get(tc_index) + resolved_id = (state.tool_id if state and state.tool_id else None) or tc.get( + "id" + ) + resolved_name = (state.name if state else "") or "" + + if not state or not state.started: + name_ok = bool((resolved_name or "").strip()) + if name_ok: + tool_id = str(resolved_id) if resolved_id else f"tool_{uuid.uuid4()}" + display_name = (resolved_name or "").strip() or "tool_call" + yield sse.start_tool_block(tc_index, tool_id, display_name) + state = sse.blocks.tool_states[tc_index] + if state.pre_start_args: + pre = state.pre_start_args + state.pre_start_args = "" + yield from self._emit_tool_arg_delta(sse, tc_index, pre) + + state = sse.blocks.tool_states.get(tc_index) + if not arguments: + return + if state is None or not state.started: + state = sse.blocks.ensure_tool_state(tc_index) + if not (resolved_name or "").strip(): + state.pre_start_args += arguments + return + + yield from self._emit_tool_arg_delta(sse, tc_index, arguments) + + def _flush_task_arg_buffers(self, sse: SSEBuilder) -> Iterator[str]: + """Emit buffered Task args as a single JSON delta (best-effort).""" + for tool_index, out in sse.blocks.flush_task_arg_buffers(): + yield sse.emit_tool_delta(tool_index, out) + + async def stream_response( + 
self, + request: Any, + input_tokens: int = 0, + *, + request_id: str | None = None, + thinking_enabled: bool | None = None, + ) -> AsyncIterator[str]: + """Stream response in Anthropic SSE format.""" + with logger.contextualize(request_id=request_id): + async for event in self._stream_response_impl( + request, input_tokens, request_id, thinking_enabled=thinking_enabled + ): + yield event + + async def _stream_response_impl( + self, + request: Any, + input_tokens: int, + request_id: str | None, + *, + thinking_enabled: bool | None, + ) -> AsyncIterator[str]: + """Shared streaming implementation.""" + tag = self._provider_name + message_id = f"msg_{uuid.uuid4()}" + sse = SSEBuilder( + message_id, + request.model, + input_tokens, + log_raw_events=self._config.log_raw_sse_events, + ) + + body = self._build_request_body(request, thinking_enabled=thinking_enabled) + thinking_enabled = self._is_thinking_enabled(request, thinking_enabled) + req_tag = f" request_id={request_id}" if request_id else "" + logger.info( + "{}_STREAM:{} model={} msgs={} tools={}", + tag, + req_tag, + body.get("model"), + len(body.get("messages", [])), + len(body.get("tools", [])), + ) + + think_parser = ThinkTagParser() + heuristic_parser = HeuristicToolParser() + finish_reason = None + usage_info = None + + async with self._global_rate_limiter.concurrency_slot(): + try: + yield sse.message_start() + stream, body = await self._create_stream(body) + async for chunk in stream: + if getattr(chunk, "usage", None): + usage_info = chunk.usage + + if not chunk.choices: + continue + + choice = chunk.choices[0] + delta = choice.delta + if delta is None: + continue + + if choice.finish_reason: + finish_reason = choice.finish_reason + logger.debug("{} finish_reason: {}", tag, finish_reason) + + # Handle reasoning_content (OpenAI extended format) + reasoning = getattr(delta, "reasoning_content", None) + if thinking_enabled and reasoning: + for event in sse.ensure_thinking_block(): + yield event + yield 
sse.emit_thinking_delta(reasoning) + + # Provider-specific extra reasoning (e.g. OpenRouter reasoning_details) + for event in self._handle_extra_reasoning( + delta, + sse, + thinking_enabled=thinking_enabled, + ): + yield event + + # Handle text content + if delta.content: + for part in think_parser.feed(delta.content): + if part.type == ContentType.THINKING: + if not thinking_enabled: + continue + for event in sse.ensure_thinking_block(): + yield event + yield sse.emit_thinking_delta(part.content) + else: + filtered_text, detected_tools = heuristic_parser.feed( + part.content + ) + + if filtered_text: + for event in sse.ensure_text_block(): + yield event + yield sse.emit_text_delta(filtered_text) + + for tool_use in detected_tools: + for event in _iter_heuristic_tool_use_sse( + sse, tool_use + ): + yield event + + # Handle native tool calls + if delta.tool_calls: + for event in sse.close_content_blocks(): + yield event + for tc in delta.tool_calls: + tc_info = { + "index": tc.index, + "id": tc.id, + "function": { + "name": tc.function.name, + "arguments": tc.function.arguments, + }, + } + for event in self._process_tool_call(tc_info, sse): + yield event + + except asyncio.CancelledError: + raise + except Exception as e: + self._log_stream_transport_error(tag, req_tag, e) + mapped_e = map_error(e, rate_limiter=self._global_rate_limiter) + + has_started_tool = any(s.started for s in sse.blocks.tool_states.values()) + has_content_blocks = ( + sse.blocks.text_index != -1 + or sse.blocks.thinking_index != -1 + or has_started_tool + or len(sse._accumulated_text_parts) > 0 + or len(sse._accumulated_reasoning_parts) > 0 + ) + + if has_content_blocks and isinstance(e, (httpx.RemoteProtocolError, httpx.ReadTimeout, asyncio.TimeoutError, httpx.ConnectError)): + logger.warning("{}_STREAM: Transient error mid-stream. Faking max_tokens to resume. 
{}", tag, e) + for event in sse.close_all_blocks(): + yield event + yield sse.message_delta("max_tokens", sse.estimate_output_tokens()) + yield sse.message_stop() + return + + base_message = user_visible_message_for_mapped_provider_error( + mapped_e, + provider_name=tag, + read_timeout_s=self._config.http_read_timeout, + ) + error_message = append_request_id(base_message, request_id) + logger.info( + "{}_STREAM: Emitting SSE error event for {}{}", + tag, + type(e).__name__, + req_tag, + ) + for event in sse.close_all_blocks(): + yield event + if sse.blocks.has_emitted_tool_block(): + # Avoid a second assistant text block after an emitted tool_use, which + # breaks OpenAI history replay (issue #206) when Claude Code stores it. + yield sse.emit_top_level_error(error_message) + else: + for event in sse.emit_error(error_message): + yield event + yield sse.message_delta("end_turn", 1) + yield sse.message_stop() + return + + # Flush remaining content + remaining = think_parser.flush() + if remaining: + if remaining.type == ContentType.THINKING: + if not thinking_enabled: + remaining = None + else: + for event in sse.ensure_thinking_block(): + yield event + yield sse.emit_thinking_delta(remaining.content) + if remaining and remaining.type == ContentType.TEXT: + for event in sse.ensure_text_block(): + yield event + yield sse.emit_text_delta(remaining.content) + + for tool_use in heuristic_parser.flush(): + for event in _iter_heuristic_tool_use_sse(sse, tool_use): + yield event + + has_started_tool = any(s.started for s in sse.blocks.tool_states.values()) + has_content_blocks = ( + sse.blocks.text_index != -1 + or sse.blocks.thinking_index != -1 + or has_started_tool + ) + if not has_content_blocks: + for event in sse.ensure_text_block(): + yield event + yield sse.emit_text_delta(" ") + elif ( + not has_started_tool + and not sse.accumulated_text.strip() + and sse.accumulated_reasoning.strip() + ): + # Some OpenAI-compatible models (e.g. 
NIM reasoning templates) stream only + # ``reasoning_content`` with no ``content``; emit a minimal text block so + # clients and smoke ``text_content()`` see a completed assistant message. + for event in sse.ensure_text_block(): + yield event + yield sse.emit_text_delta(" ") + + for event in self._flush_task_arg_buffers(sse): + yield event + + for event in sse.close_all_blocks(): + yield event + + completion = ( + getattr(usage_info, "completion_tokens", None) + if usage_info is not None + else None + ) + if isinstance(completion, int): + output_tokens = completion + else: + output_tokens = sse.estimate_output_tokens() + if usage_info and hasattr(usage_info, "prompt_tokens"): + provider_input = usage_info.prompt_tokens + if isinstance(provider_input, int): + logger.debug( + "TOKEN_ESTIMATE: our={} provider={} diff={:+d}", + input_tokens, + provider_input, + provider_input - input_tokens, + ) + yield sse.message_delta(map_stop_reason(finish_reason), output_tokens) + yield sse.message_stop() diff --git a/providers/rate_limit.py b/providers/rate_limit.py new file mode 100644 index 0000000000000000000000000000000000000000..1c5206ebca0faf7e4a41e3b20d6659fbb0d169d8 --- /dev/null +++ b/providers/rate_limit.py @@ -0,0 +1,267 @@ +"""Global rate limiter for API requests.""" + +import asyncio +import random +import time +from collections.abc import AsyncIterator, Callable +from contextlib import asynccontextmanager +from typing import Any, ClassVar, TypeVar + +import httpx +import openai +from loguru import logger + +from core.rate_limit import StrictSlidingWindowLimiter + +T = TypeVar("T") + + +class GlobalRateLimiter: + """ + Global singleton rate limiter that blocks all requests + when a rate limit error is encountered (reactive) and + throttles requests (proactive) using a strict rolling window. + + Optionally enforces a max_concurrency cap: at most N provider streams + may be open simultaneously, independent of the sliding window. 

    Proactive limits - throttles requests to stay within API limits.
    Reactive limits - pauses all requests when a 429 is hit.
    Concurrency limit - caps simultaneously open streams.
    """

    # Process-wide default instance (get_instance) and per-provider scoped
    # instances (get_scoped_instance), keyed by scope string.
    _instance: ClassVar[GlobalRateLimiter | None] = None
    _scoped_instances: ClassVar[dict[str, GlobalRateLimiter]] = {}

    def __init__(
        self,
        rate_limit: int = 40,
        rate_window: float = 60.0,
        max_concurrency: int = 5,
    ):
        # Prevent re-initialization on singleton reuse
        # (cls(...) creates fresh objects in this module, so this only fires
        # if __init__ is re-invoked on an already-configured instance).
        if hasattr(self, "_initialized"):
            return

        # Validate eagerly so a misconfigured limiter fails at construction,
        # not on the first throttled request.
        if rate_limit <= 0:
            raise ValueError("rate_limit must be > 0")
        if rate_window <= 0:
            raise ValueError("rate_window must be > 0")
        if max_concurrency <= 0:
            raise ValueError("max_concurrency must be > 0")

        self._rate_limit = rate_limit
        self._rate_window = float(rate_window)
        self._max_concurrency = max_concurrency
        # Proactive throttle: strict sliding window over the last rate_window seconds.
        self._proactive_limiter = StrictSlidingWindowLimiter(
            self._rate_limit, self._rate_window
        )
        # Reactive block deadline (monotonic clock); 0 means "not blocked".
        self._blocked_until: float = 0
        self._concurrency_sem = asyncio.Semaphore(max_concurrency)
        self._initialized = True

        logger.info(
            f"GlobalRateLimiter (Provider) initialized ({rate_limit} req / {rate_window}s, max_concurrency={max_concurrency})"
        )

    @classmethod
    def get_instance(
        cls,
        rate_limit: int | None = None,
        rate_window: float | None = None,
        max_concurrency: int = 5,
    ) -> GlobalRateLimiter:
        """Get or create the singleton instance.

        Args:
            rate_limit: Requests per window (only used on first creation)
            rate_window: Window in seconds (only used on first creation)
            max_concurrency: Max simultaneous open streams (only used on first creation)
        """
        if cls._instance is None:
            cls._instance = cls(
                rate_limit=rate_limit or 40,
                rate_window=rate_window or 60.0,
                max_concurrency=max_concurrency,
            )
        return cls._instance

    @classmethod
    def get_scoped_instance(
        cls,
        scope: str,
        *,
        rate_limit: int | None = None,
        rate_window: float | None = None,
        max_concurrency: int = 5,
    ) -> GlobalRateLimiter:
        """Get or create a provider-scoped limiter instance."""
        if not scope:
            raise ValueError("scope must be non-empty")
        desired_rate_limit = rate_limit or 40
        desired_rate_window = float(rate_window or 60.0)
        existing = cls._scoped_instances.get(scope)
        if existing and existing.matches_config(
            desired_rate_limit, desired_rate_window, max_concurrency
        ):
            return existing
        # Config changed for this scope: build a replacement limiter.
        # NOTE(review): rebuilding discards the old limiter's reactive block
        # state (_blocked_until) and in-flight window history — confirm intended.
        if existing:
            logger.info(
                "Rebuilding provider rate limiter for updated scope '{}'", scope
            )
        cls._scoped_instances[scope] = cls(
            rate_limit=desired_rate_limit,
            rate_window=desired_rate_window,
            max_concurrency=max_concurrency,
        )
        return cls._scoped_instances[scope]

    @classmethod
    def reset_instance(cls) -> None:
        """Reset singleton (for testing)."""
        cls._instance = None
        cls._scoped_instances = {}

    async def wait_if_blocked(self) -> bool:
        """
        Wait if currently rate limited or throttle to meet quota.

        Returns:
            True if was reactively blocked and waited, False otherwise.
        """
        # 1. Reactive check: Wait if someone hit a 429
        waited_reactively = False
        now = time.monotonic()
        if now < self._blocked_until:
            wait_time = self._blocked_until - now
            logger.warning(
                f"Global provider rate limit active (reactive), waiting {wait_time:.1f}s..."
            )
            await asyncio.sleep(wait_time)
            waited_reactively = True

        # 2. Proactive check: strict rolling window (no bursts beyond N in last W seconds)
        await self._acquire_proactive_slot()
        return waited_reactively

    async def _acquire_proactive_slot(self) -> None:
        """
        Acquire a proactive slot enforcing a strict rolling window.

        Guarantees: at most `self._rate_limit` acquisitions in any interval of length
        `self._rate_window` (seconds).
        """
        await self._proactive_limiter.acquire()

    def set_blocked(self, seconds: float = 60) -> None:
        """
        Set global block for specified seconds (reactive).

        Args:
            seconds: How long to block (default 60s)
        """
        # Shared state: every coroutine using this limiter instance will wait
        # in wait_if_blocked() until this deadline passes.
        self._blocked_until = time.monotonic() + seconds
        logger.warning(f"Global provider rate limit set for {seconds:.1f}s (reactive)")

    def is_blocked(self) -> bool:
        """Check if currently reactively blocked."""
        return time.monotonic() < self._blocked_until

    def matches_config(
        self, rate_limit: int, rate_window: float, max_concurrency: int
    ) -> bool:
        """Return whether this limiter matches the requested runtime config."""
        return (
            self._rate_limit == rate_limit
            and self._rate_window == float(rate_window)
            and self._max_concurrency == max_concurrency
        )

    def remaining_wait(self) -> float:
        """Get remaining reactive wait time in seconds."""
        return max(0.0, self._blocked_until - time.monotonic())

    @asynccontextmanager
    async def concurrency_slot(self) -> AsyncIterator[None]:
        """Async context manager that holds one concurrency slot for a stream.

        Blocks until a slot is available (controlled by max_concurrency).
        """
        await self._concurrency_sem.acquire()
        try:
            yield
        finally:
            self._concurrency_sem.release()

    async def execute_with_retry(
        self,
        fn: Callable[..., Any],
        *args: Any,
        max_retries: int = 3,
        base_delay: float = 1.0,
        max_delay: float = 60.0,
        jitter: float = 0.5,
        **kwargs: Any,
    ) -> Any:
        """Execute an async callable with rate limiting and retry on 429.

        Waits for the proactive limiter before each attempt. On 429, applies
        exponential backoff with jitter before retrying.

        Args:
            fn: Async callable to execute.
            max_retries: Maximum number of retry attempts after the first failure.
            base_delay: Base delay in seconds for exponential backoff.
            max_delay: Maximum delay cap in seconds.
            jitter: Maximum random jitter in seconds added to each delay.

        Returns:
            The result of the callable.

        Raises:
            The last exception if all retries are exhausted.
        """
        last_exc: Exception | None = None

        # First attempt + max_retries retries.
        for attempt in range(1 + max_retries):
            await self.wait_if_blocked()

            try:
                return await fn(*args, **kwargs)
            except openai.RateLimitError as e:
                last_exc = e
                if attempt >= max_retries:
                    logger.warning(
                        f"Rate limit retry exhausted after {max_retries} retries"
                    )
                    break

                delay = min(base_delay * (2**attempt), max_delay)
                delay += random.uniform(0, jitter)
                logger.warning(
                    f"Rate limited (429), attempt {attempt + 1}/{max_retries + 1}. "
                    f"Retrying in {delay:.1f}s..."
                )
                # set_blocked shares the backoff with every other caller of
                # this limiter instance, not just this coroutine.
                self.set_blocked(delay)
                await asyncio.sleep(delay)
            except httpx.HTTPStatusError as e:
                # Only 429 is retryable here; any other status propagates.
                if e.response.status_code != 429:
                    raise
                last_exc = e
                if attempt >= max_retries:
                    logger.warning(
                        f"HTTP 429 retry exhausted after {max_retries} retries"
                    )
                    break

                delay = min(base_delay * (2**attempt), max_delay)
                delay += random.uniform(0, jitter)
                logger.warning(
                    f"HTTP 429 from upstream, attempt {attempt + 1}/{max_retries + 1}. "
                    f"Retrying in {delay:.1f}s..."
                )
                self.set_blocked(delay)
                await asyncio.sleep(delay)

        # Reachable only via the break paths above, where last_exc was set.
        assert last_exc is not None
        raise last_exc
diff --git a/providers/registry.py b/providers/registry.py
new file mode 100644
index 0000000000000000000000000000000000000000..d2bc98ba95fc350d78529530db96265fb7e99d6d
--- /dev/null
+++ b/providers/registry.py
@@ -0,0 +1,422 @@
"""Provider descriptors, factory, and runtime registry."""

from __future__ import annotations

import asyncio
from collections import defaultdict
from collections.abc import Callable, Iterable, MutableMapping
from contextlib import suppress

import httpx
from loguru import logger

from config.provider_catalog import (
    PROVIDER_CATALOG,
    SUPPORTED_PROVIDER_IDS,
    ProviderDescriptor,
)
from config.settings import ConfiguredChatModelRef, Settings
from providers.base import BaseProvider, ProviderConfig
from providers.exceptions import (
    AuthenticationError,
    ModelListResponseError,
    ProviderError,
    ServiceUnavailableError,
    UnknownProviderTypeError,
)
from providers.model_listing import ProviderModelInfo, model_infos_from_ids

ProviderFactory = Callable[[ProviderConfig, Settings], BaseProvider]

# Backwards-compatible name for the catalog (single source: ``config.provider_catalog``).
+PROVIDER_DESCRIPTORS: dict[str, ProviderDescriptor] = PROVIDER_CATALOG + + +def _create_nvidia_nim(config: ProviderConfig, settings: Settings) -> BaseProvider: + from providers.nvidia_nim import NvidiaNimProvider + + return NvidiaNimProvider(config, nim_settings=settings.nim, settings=settings) + + +def _create_zen(config: ProviderConfig, settings: Settings) -> BaseProvider: + from providers.zen import ZenProvider + + return ZenProvider(config, settings=settings) + + +PROVIDER_FACTORIES: dict[str, ProviderFactory] = { + "nvidia_nim": _create_nvidia_nim, + "zen": _create_zen, +} + +if set(PROVIDER_DESCRIPTORS) != set(SUPPORTED_PROVIDER_IDS) or set( + PROVIDER_FACTORIES +) != set(SUPPORTED_PROVIDER_IDS): + raise AssertionError( + "PROVIDER_DESCRIPTORS, PROVIDER_FACTORIES, and SUPPORTED_PROVIDER_IDS are out of sync: " + f"descriptors={set(PROVIDER_DESCRIPTORS)!r} factories={set(PROVIDER_FACTORIES)!r} " + f"ids={set(SUPPORTED_PROVIDER_IDS)!r}" + ) + + +def _string_attr(settings: Settings, attr_name: str | None, default: str = "") -> str: + if attr_name is None: + return default + value = getattr(settings, attr_name, default) + return value if isinstance(value, str) else default + + +def _credential_for(descriptor: ProviderDescriptor, settings: Settings) -> str: + if descriptor.static_credential is not None: + return descriptor.static_credential + if descriptor.credential_attr: + return _string_attr(settings, descriptor.credential_attr) + return "" + + +def _require_credential(descriptor: ProviderDescriptor, credential: str) -> None: + if descriptor.credential_env is None: + return + if credential and credential.strip(): + return + message = f"{descriptor.credential_env} is not set. Add it to your .env file." 
+ if descriptor.credential_url: + message = f"{message} Get a key at {descriptor.credential_url}" + raise AuthenticationError(message) + + +def build_provider_config( + descriptor: ProviderDescriptor, settings: Settings +) -> ProviderConfig: + credential = _credential_for(descriptor, settings) + _require_credential(descriptor, credential) + base_url = _string_attr( + settings, descriptor.base_url_attr, descriptor.default_base_url or "" + ) + proxy = _string_attr(settings, descriptor.proxy_attr) + return ProviderConfig( + api_key=credential, + base_url=base_url or descriptor.default_base_url, + rate_limit=settings.provider_rate_limit, + rate_window=settings.provider_rate_window, + max_concurrency=settings.provider_max_concurrency, + http_read_timeout=settings.http_read_timeout, + http_write_timeout=settings.http_write_timeout, + http_connect_timeout=settings.http_connect_timeout, + enable_thinking=settings.enable_model_thinking, + proxy=proxy, + log_raw_sse_events=settings.log_raw_sse_events, + log_api_error_tracebacks=settings.log_api_error_tracebacks, + ) + + +def create_provider(provider_id: str, settings: Settings) -> BaseProvider: + descriptor = PROVIDER_DESCRIPTORS.get(provider_id) + if descriptor is None: + supported = "', '".join(PROVIDER_DESCRIPTORS) + raise UnknownProviderTypeError( + f"Unknown provider_type: '{provider_id}'. 
Supported: '{supported}'" + ) + + config = build_provider_config(descriptor, settings) + factory = PROVIDER_FACTORIES.get(provider_id) + if factory is None: + raise AssertionError(f"Unhandled provider descriptor: {provider_id}") + return factory(config, settings) + + +def _format_provider_query_failures( + refs: list[ConfiguredChatModelRef], + exc: BaseException, + settings: Settings, +) -> list[str]: + reason = _provider_query_failure_reason(exc, settings) + return [_format_model_validation_failure(ref, reason) for ref in refs] + + +def _format_missing_model_failure(ref: ConfiguredChatModelRef) -> str: + return _format_model_validation_failure(ref, "missing model") + + +def _format_model_validation_failure(ref: ConfiguredChatModelRef, problem: str) -> str: + return ( + f"sources={','.join(ref.sources)} provider={ref.provider_id} " + f"model={ref.model_id} problem={problem}" + ) + + +def _provider_query_failure_reason( + exc: BaseException, + settings: Settings, +) -> str: + if isinstance(exc, ModelListResponseError): + return f"malformed model-list response: {exc.message}" + if isinstance(exc, httpx.HTTPStatusError): + return f"query failure: HTTP {exc.response.status_code}" + if isinstance(exc, AuthenticationError): + return f"query failure: {exc.message}" + if isinstance(exc, ProviderError) and settings.log_api_error_tracebacks: + return f"query failure: {exc.message}" + return f"query failure: {type(exc).__name__}" + + +def _referenced_provider_ids(settings: Settings) -> frozenset[str]: + return frozenset(ref.provider_id for ref in settings.configured_chat_model_refs()) + + +def _model_list_provider_ids_for_settings(settings: Settings) -> tuple[str, ...]: + """Return providers worth discovering for this process configuration.""" + referenced_provider_ids = _referenced_provider_ids(settings) + provider_ids: list[str] = [] + for provider_id, descriptor in PROVIDER_DESCRIPTORS.items(): + if descriptor.static_credential is not None: + if provider_id in 
referenced_provider_ids: + provider_ids.append(provider_id) + continue + if ( + descriptor.credential_env is not None + and _credential_for(descriptor, settings).strip() + ): + provider_ids.append(provider_id) + return tuple(provider_ids) + + +def _log_model_discovery_failure( + provider_id: str, exc: BaseException, settings: Settings +) -> None: + logger.warning( + "Provider model discovery skipped: provider={} reason={}", + provider_id, + _provider_query_failure_reason(exc, settings), + ) + + +class ProviderRegistry: + """Cache and clean up provider instances by provider id.""" + + def __init__(self, providers: MutableMapping[str, BaseProvider] | None = None): + self._providers = providers if providers is not None else {} + self._model_ids_by_provider: dict[str, frozenset[str]] = {} + self._model_infos_by_provider: dict[str, dict[str, ProviderModelInfo]] = {} + self._model_list_refresh_task: asyncio.Task[None] | None = None + + def is_cached(self, provider_id: str) -> bool: + """Return whether a provider for this id is already in the cache.""" + return provider_id in self._providers + + def get(self, provider_id: str, settings: Settings) -> BaseProvider: + if provider_id not in self._providers: + self._providers[provider_id] = create_provider(provider_id, settings) + return self._providers[provider_id] + + def cache_model_ids(self, provider_id: str, model_ids: Iterable[str]) -> None: + """Store a provider model-list result for later instant API responses.""" + self.cache_model_infos(provider_id, model_infos_from_ids(model_ids)) + + def cache_model_infos( + self, provider_id: str, model_infos: Iterable[ProviderModelInfo] + ) -> None: + """Store provider model metadata for later instant API responses.""" + clean_infos = { + info.model_id: info for info in model_infos if info.model_id.strip() + } + self._model_infos_by_provider[provider_id] = clean_infos + self._model_ids_by_provider[provider_id] = frozenset(clean_infos) + + def cached_model_ids(self) -> dict[str, 
    def cached_model_ids(self) -> dict[str, frozenset[str]]:
        """Return a copy of cached raw provider model ids."""
        return dict(self._model_ids_by_provider)

    def cached_model_supports_thinking(
        self, provider_id: str, model_id: str
    ) -> bool | None:
        """Return cached thinking support when a provider exposes it.

        ``None`` means "not cached / unknown", which is distinct from an
        explicit ``False``.
        """
        info = self._model_infos_by_provider.get(provider_id, {}).get(model_id)
        if info is None:
            return None
        return info.supports_thinking

    def cached_prefixed_model_refs(self) -> tuple[str, ...]:
        """Return cached provider models in user-selectable ``provider/model`` form."""
        return tuple(info.model_id for info in self.cached_prefixed_model_infos())

    def cached_prefixed_model_infos(self) -> tuple[ProviderModelInfo, ...]:
        """Return cached provider models with user-selectable prefixed ids.

        Order is deterministic: providers in ``SUPPORTED_PROVIDER_IDS`` order,
        models sorted by id within each provider.
        """
        infos: list[ProviderModelInfo] = []
        for provider_id in SUPPORTED_PROVIDER_IDS:
            provider_infos = self._model_infos_by_provider.get(provider_id, {})
            infos.extend(
                ProviderModelInfo(
                    model_id=f"{provider_id}/{info.model_id}",
                    supports_thinking=info.supports_thinking,
                )
                for info in sorted(
                    provider_infos.values(), key=lambda item: item.model_id
                )
            )
        return tuple(infos)

    async def refresh_model_list_cache(
        self, settings: Settings, *, only_missing: bool = False
    ) -> None:
        """Best-effort refresh of model lists for providers usable in this process.

        With ``only_missing=True``, providers that already have a cached list
        are left untouched and only never-cached providers are queried.
        """
        provider_ids = _model_list_provider_ids_for_settings(settings)
        if only_missing:
            provider_ids = tuple(
                provider_id
                for provider_id in provider_ids
                if provider_id not in self._model_ids_by_provider
            )
        await self._refresh_model_ids(settings, provider_ids)
    def start_model_list_refresh(self, settings: Settings) -> None:
        """Start a non-blocking cache warmup for missing eligible provider lists."""
        # Never run two warmups concurrently.
        if (
            self._model_list_refresh_task is not None
            and not self._model_list_refresh_task.done()
        ):
            return

        provider_ids = tuple(
            provider_id
            for provider_id in _model_list_provider_ids_for_settings(settings)
            if provider_id not in self._model_ids_by_provider
        )
        if not provider_ids:
            logger.info(
                "Provider model discovery cache already warm: providers={}",
                len(self._model_ids_by_provider),
            )
            return

        self._model_list_refresh_task = asyncio.create_task(
            self._run_model_list_refresh(settings, provider_ids)
        )

    async def _run_model_list_refresh(
        self, settings: Settings, provider_ids: tuple[str, ...]
    ) -> None:
        # Shield the task from refresh errors; cancellation must propagate.
        try:
            await self._refresh_model_ids(settings, provider_ids)
        except asyncio.CancelledError:
            raise
        except Exception as exc:
            logger.warning(
                "Provider model discovery task failed: exc_type={}",
                type(exc).__name__,
            )

    async def _refresh_model_ids(
        self, settings: Settings, provider_ids: tuple[str, ...]
    ) -> None:
        """Query each provider's model list concurrently and cache the results.

        A failure (provider construction or model-list query) affects only
        that provider: it is logged and skipped, except cancellation which
        is re-raised.
        """
        tasks: dict[str, asyncio.Task[frozenset[ProviderModelInfo]]] = {}
        for provider_id in provider_ids:
            try:
                provider = self.get(provider_id, settings)
            except Exception as exc:
                _log_model_discovery_failure(provider_id, exc, settings)
                continue
            tasks[provider_id] = asyncio.create_task(provider.list_model_infos())

        if not tasks:
            return

        logger.info("Starting model discovery for providers: {}", ", ".join(tasks.keys()))
        # return_exceptions=True so one provider's failure does not cancel the rest.
        results = await asyncio.gather(*tasks.values(), return_exceptions=True)
        logger.info("Model discovery finished for all providers.")
        for (provider_id, _task), result in zip(tasks.items(), results, strict=True):
            if isinstance(result, BaseException):
                if isinstance(result, asyncio.CancelledError):
                    raise result
                _log_model_discovery_failure(provider_id, result, settings)
                continue
            self.cache_model_infos(provider_id, result)
            logger.info(
                "Provider model discovery cached: provider={} models={}",
                provider_id,
                len(result),
            )
    async def validate_configured_models(self, settings: Settings) -> None:
        """Fail fast unless every configured chat model exists upstream.

        Raises :exc:`ServiceUnavailableError` listing one failure line per
        configured ref that could not be validated.
        """
        refs = settings.configured_chat_model_refs()
        # Group refs so each provider's model list is fetched only once.
        refs_by_provider: dict[str, list[ConfiguredChatModelRef]] = defaultdict(list)
        for ref in refs:
            refs_by_provider[ref.provider_id].append(ref)

        failures: list[str] = []
        tasks: dict[str, asyncio.Task[frozenset[ProviderModelInfo]]] = {}
        for provider_id, provider_refs in refs_by_provider.items():
            try:
                provider = self.get(provider_id, settings)
            except Exception as exc:
                # Provider construction failed: every ref on it fails.
                failures.extend(
                    _format_provider_query_failures(provider_refs, exc, settings)
                )
                continue
            tasks[provider_id] = asyncio.create_task(provider.list_model_infos())

        if tasks:
            results = await asyncio.gather(*tasks.values(), return_exceptions=True)
            for (provider_id, _task), result in zip(
                tasks.items(), results, strict=True
            ):
                provider_refs = refs_by_provider[provider_id]
                if isinstance(result, BaseException):
                    if isinstance(result, asyncio.CancelledError):
                        raise result
                    failures.extend(
                        _format_provider_query_failures(provider_refs, result, settings)
                    )
                    continue
                # A successful query doubles as a model-list cache warmup.
                self.cache_model_infos(provider_id, result)
                model_ids = self._model_ids_by_provider[provider_id]
                failures.extend(
                    _format_missing_model_failure(ref)
                    for ref in provider_refs
                    if ref.model_id not in model_ids
                )

        if failures:
            message = "Configured model validation failed:\n" + "\n".join(
                f"- {failure}" for failure in failures
            )
            raise ServiceUnavailableError(message)

        logger.info(
            "Configured provider models validated: models={} providers={}",
            len(refs),
            len(refs_by_provider),
        )

    async def cleanup(self) -> None:
        """Call ``cleanup`` on every cached provider, then clear the cache.

        Attempts all providers even if one fails. A single failure is re-raised
        as-is; multiple failures are wrapped in :exc:`ExceptionGroup`.
        """
        # Cancel the background warmup first and wait for it to settle.
        if (
            self._model_list_refresh_task is not None
            and not self._model_list_refresh_task.done()
        ):
            self._model_list_refresh_task.cancel()
            with suppress(asyncio.CancelledError):
                await self._model_list_refresh_task

        items = list(self._providers.items())
        errors: list[Exception] = []
        try:
            for _pid, provider in items:
                try:
                    await provider.cleanup()
                except Exception as e:
                    errors.append(e)
        finally:
            # Caches are cleared even if a provider cleanup raised.
            self._providers.clear()
            self._model_ids_by_provider.clear()
            self._model_infos_by_provider.clear()
        if len(errors) == 1:
            raise errors[0]
        if len(errors) > 1:
            msg = "One or more provider cleanups failed"
            raise ExceptionGroup(msg, errors)
+ """ + if ( + self._model_list_refresh_task is not None + and not self._model_list_refresh_task.done() + ): + self._model_list_refresh_task.cancel() + with suppress(asyncio.CancelledError): + await self._model_list_refresh_task + + items = list(self._providers.items()) + errors: list[Exception] = [] + try: + for _pid, provider in items: + try: + await provider.cleanup() + except Exception as e: + errors.append(e) + finally: + self._providers.clear() + self._model_ids_by_provider.clear() + self._model_infos_by_provider.clear() + if len(errors) == 1: + raise errors[0] + if len(errors) > 1: + msg = "One or more provider cleanups failed" + raise ExceptionGroup(msg, errors) diff --git a/providers/zen/__init__.py b/providers/zen/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0e0c3cae9281f43de43d712f6daa138e3be0b3cb --- /dev/null +++ b/providers/zen/__init__.py @@ -0,0 +1,7 @@ +"""Zen/OpenCode provider package.""" + +from providers.defaults import ZEN_DEFAULT_BASE + +from .client import ZenProvider + +__all__ = ["ZEN_DEFAULT_BASE", "ZenProvider"] \ No newline at end of file diff --git a/providers/zen/__pycache__/__init__.cpython-314.pyc b/providers/zen/__pycache__/__init__.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f358e43a2d3a09bbbf2956e41e2f9f3e4e91d016 Binary files /dev/null and b/providers/zen/__pycache__/__init__.cpython-314.pyc differ diff --git a/providers/zen/__pycache__/client.cpython-314.pyc b/providers/zen/__pycache__/client.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b809ee1bd90df3e2b1f883e7627759d235d41b36 Binary files /dev/null and b/providers/zen/__pycache__/client.cpython-314.pyc differ diff --git a/providers/zen/client.py b/providers/zen/client.py new file mode 100644 index 0000000000000000000000000000000000000000..afb1fff55003dddc13d2350de1a89b626f6a00b9 --- /dev/null +++ b/providers/zen/client.py @@ -0,0 +1,46 @@ +"""Zen/OpenCode 
class ZenProvider(OpenAIChatTransport):
    """Zen/OpenCode provider using OpenAI-compatible API."""

    def __init__(
        self,
        config: ProviderConfig,
        *,
        settings: Settings,
    ):
        """Initialize the transport with a base URL normalized to end in ``/v1``."""
        # Zen uses /v1/chat/completions so append /v1 to base URL
        root = (config.base_url or ZEN_DEFAULT_BASE).rstrip("/")
        if not root.endswith("/v1"):
            root = f"{root}/v1"
        super().__init__(
            config,
            provider_name="Zen",
            base_url=root,
            api_key=config.api_key,
        )
        self._settings = settings

    def _build_request_body(
        self, request: Any, thinking_enabled: bool | None = None
    ) -> dict:
        """Build request body for Zen API.

        Reasoning replay is enabled only when thinking is enabled for this
        request; otherwise it is disabled.
        """
        if self._is_thinking_enabled(request, thinking_enabled):
            replay_mode = ReasoningReplayMode.REASONING_CONTENT
        else:
            replay_mode = ReasoningReplayMode.DISABLED
        return build_base_request_body(request, reasoning_replay=replay_mode)
"python-telegram-bot>=22.7", + "discord.py>=2.7.1", + "pydantic-settings>=2.14.0", + "openai>=2.32.0", + "loguru>=0.7.0", + "aiohttp>=3.13.4", + "sqlalchemy>=2.0.49", + "asyncpg>=0.31.0", + "redis>=7.4.0", +] + +[project.scripts] +free-claude-code = "cli.entrypoints:serve" +fcc-init = "cli.entrypoints:init" + +[project.optional-dependencies] +voice = [ + "grpcio>=1.80.0", + "grpcio-tools>=1.80.0", + "nvidia-riva-client>=2.25.1", +] +voice_local = [ + "torch>=2.11.0", + "transformers>=5.6.2", + "accelerate>=1.13.0", + "librosa>=0.10.0", +] + +[tool.hatch.build.targets.wheel] +packages = ["api", "cli", "config", "core", "messaging", "providers"] + +[tool.hatch.build.targets.wheel.force-include] +".env.example" = "cli/env.example" + +[tool.uv.sources] +torch = { index = "pytorch-cu130" } + +[[tool.uv.index]] +name = "pytorch-cu130" +url = "https://download.pytorch.org/whl/cu130" +explicit = true + +[dependency-groups] +dev = [ + "pytest>=9.0.3", + "pytest-asyncio>=1.3.0", + "pytest-cov>=7.1.0", + "ty>=0.0.32", + "ruff>=0.15.12", + "pytest-xdist>=3.8.0", +] + +[tool.ruff] +target-version = "py314" +line-length = 88 + +[tool.ruff.lint] +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # Pyflakes (undefined names, unused imports) + "I", # isort (import ordering) + "UP", # pyupgrade (modernise syntax for target Python version) + "B", # flake8-bugbear (common bugs and anti-patterns) + "C4", # flake8-comprehensions (idiomatic comprehensions) + "SIM", # flake8-simplify (simplifiable code patterns) + "PERF", # Perflint (performance anti-patterns) + "RUF", # Ruff-specific rules +] +ignore = [ + "E501", # line too long — enforced by the formatter instead + "B008", # FastAPI Depends() in argument defaults is intentional + "RUF006", # fire-and-forget tasks intentionally not awaited +] + +[tool.ruff.lint.isort] +known-first-party = ["api", "cli", "config", "core", "messaging", "providers", "smoke"] + +[tool.ruff.format] +quote-style = "double" 
+indent-style = "space" +line-ending = "auto" +skip-magic-trailing-comma = false + +[tool.pytest.ini_options] +pythonpath = ["."] +addopts = "-n auto" +testpaths = ["tests"] +markers = [ + "live: opt-in local smoke tests that can touch real services", + "interactive: smoke tests requiring manual user interaction", + "provider: live provider checks", + "messaging: live messaging platform checks", + "cli: CLI integration checks", + "clients: client compatibility checks", + "voice: voice transcription checks", + "contract: deterministic feature contract checks", + "smoke_target(name): route a smoke test behind FCC_SMOKE_TARGETS", +] + +[tool.ty.environment] +python-version = "3.14" + +[tool.ty.analysis] +# Optional voice_local extra: torch, transformers, librosa for local whisper transcription +# Optional voice extra: nvidia-riva-client for nvidia_nim transcription provider +allowed-unresolved-imports = ["torch", "transformers", "librosa", "riva.client"] diff --git a/server.py b/server.py new file mode 100644 index 0000000000000000000000000000000000000000..ae53cfb9a3c3d01b898f16a300ade22ad74b931e --- /dev/null +++ b/server.py @@ -0,0 +1,32 @@ +""" +Claude Code Proxy - Entry Point + +Minimal entry point that builds the ASGI app via :func:`api.app.create_app`. +Run with: uv run uvicorn server:app --host 0.0.0.0 --port 8082 --timeout-graceful-shutdown 5 +""" + +from api.app import create_app, create_asgi_app + +app = create_asgi_app() + +__all__ = ["app", "create_app"] + +if __name__ == "__main__": + import uvicorn + + from cli.process_registry import kill_all_best_effort + from config.settings import get_settings + + settings = get_settings() + try: + # timeout_graceful_shutdown ensures uvicorn doesn't hang on task cleanup. + uvicorn.run( + app, + host=settings.host, + port=settings.port, + log_level="debug", + timeout_graceful_shutdown=5, + ) + finally: + # Safety net: cleanup subprocesses if lifespan shutdown doesn't fully run. 
"""
Claude Code Proxy - Entry Point

Minimal entry point that builds the ASGI app via :func:`api.app.create_app`.
Run with: uv run uvicorn server:app --host 0.0.0.0 --port 8082 --timeout-graceful-shutdown 5
"""

from api.app import create_app, create_asgi_app

# Module-level ASGI application picked up by ``uvicorn server:app``.
app = create_asgi_app()

__all__ = ["app", "create_app"]


def _serve() -> None:
    """Run uvicorn in-process, then reap leftover subprocesses."""
    import uvicorn

    from cli.process_registry import kill_all_best_effort
    from config.settings import get_settings

    settings = get_settings()
    try:
        # timeout_graceful_shutdown ensures uvicorn doesn't hang on task cleanup.
        uvicorn.run(
            app,
            host=settings.host,
            port=settings.port,
            log_level="debug",
            timeout_graceful_shutdown=5,
        )
    finally:
        # Safety net: cleanup subprocesses if lifespan shutdown doesn't fully run.
        kill_all_best_effort()


if __name__ == "__main__":
    _serve()
"2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.13.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/77/9a/152096d4808df8e4268befa55fba462f440f14beab85e8ad9bf990516918/aiohttp-3.13.5.tar.gz", hash = "sha256:9d98cc980ecc96be6eb4c1994ce35d28d8b1f5e5208a23b421187d1209dbb7d1", size = 7858271, upload-time = "2026-03-31T22:01:03.343Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/6f/353954c29e7dcce7cf00280a02c75f30e133c00793c7a2ed3776d7b2f426/aiohttp-3.13.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:023ecba036ddd840b0b19bf195bfae970083fd7024ce1ac22e9bba90464620e9", size = 748876, upload-time = "2026-03-31T21:57:36.319Z" }, + { url = "https://files.pythonhosted.org/packages/f5/1b/428a7c64687b3b2e9cd293186695affc0e1e54a445d0361743b231f11066/aiohttp-3.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15c933ad7920b7d9a20de151efcd05a6e38302cbf0e10c9b2acb9a42210a2416", size = 499557, upload-time = "2026-03-31T21:57:38.236Z" }, + { url = "https://files.pythonhosted.org/packages/29/47/7be41556bfbb6917069d6a6634bb7dd5e163ba445b783a90d40f5ac7e3a7/aiohttp-3.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab2899f9fa2f9f741896ebb6fa07c4c883bfa5c7f2ddd8cf2aafa86fa981b2d2", size = 500258, upload-time = "2026-03-31T21:57:39.923Z" }, + { url = "https://files.pythonhosted.org/packages/67/84/c9ecc5828cb0b3695856c07c0a6817a99d51e2473400f705275a2b3d9239/aiohttp-3.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60eaa2d440cd4707696b52e40ed3e2b0f73f65be07fd0ef23b6b539c9c0b0b4", size = 1749199, upload-time = "2026-03-31T21:57:41.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/d3/3c6d610e66b495657622edb6ae7c7fd31b2e9086b4ec50b47897ad6042a9/aiohttp-3.13.5-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:55b3bdd3292283295774ab585160c4004f4f2f203946997f49aac032c84649e9", size = 1721013, upload-time = "2026-03-31T21:57:43.904Z" }, + { url = "https://files.pythonhosted.org/packages/49/a0/24409c12217456df0bae7babe3b014e460b0b38a8e60753d6cb339f6556d/aiohttp-3.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2b2355dc094e5f7d45a7bb262fe7207aa0460b37a0d87027dcf21b5d890e7d5", size = 1781501, upload-time = "2026-03-31T21:57:46.285Z" }, + { url = "https://files.pythonhosted.org/packages/98/9d/b65ec649adc5bccc008b0957a9a9c691070aeac4e41cea18559fef49958b/aiohttp-3.13.5-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b38765950832f7d728297689ad78f5f2cf79ff82487131c4d26fe6ceecdc5f8e", size = 1878981, upload-time = "2026-03-31T21:57:48.734Z" }, + { url = "https://files.pythonhosted.org/packages/57/d8/8d44036d7eb7b6a8ec4c5494ea0c8c8b94fbc0ed3991c1a7adf230df03bf/aiohttp-3.13.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b18f31b80d5a33661e08c89e202edabf1986e9b49c42b4504371daeaa11b47c1", size = 1767934, upload-time = "2026-03-31T21:57:51.171Z" }, + { url = "https://files.pythonhosted.org/packages/31/04/d3f8211f273356f158e3464e9e45484d3fb8c4ce5eb2f6fe9405c3273983/aiohttp-3.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:33add2463dde55c4f2d9635c6ab33ce154e5ecf322bd26d09af95c5f81cfa286", size = 1566671, upload-time = "2026-03-31T21:57:53.326Z" }, + { url = "https://files.pythonhosted.org/packages/41/db/073e4ebe00b78e2dfcacff734291651729a62953b48933d765dc513bf798/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:327cc432fdf1356fb4fbc6fe833ad4e9f6aacb71a8acaa5f1855e4b25910e4a9", size = 1705219, upload-time = "2026-03-31T21:57:55.385Z" }, + { url = "https://files.pythonhosted.org/packages/48/45/7dfba71a2f9fd97b15c95c06819de7eb38113d2cdb6319669195a7d64270/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7c35b0bf0b48a70b4cb4fc5d7bed9b932532728e124874355de1a0af8ec4bc88", size = 1743049, upload-time = "2026-03-31T21:57:57.341Z" }, + { url = "https://files.pythonhosted.org/packages/18/71/901db0061e0f717d226386a7f471bb59b19566f2cae5f0d93874b017271f/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:df23d57718f24badef8656c49743e11a89fd6f5358fa8a7b96e728fda2abf7d3", size = 1749557, upload-time = "2026-03-31T21:57:59.626Z" }, + { url = "https://files.pythonhosted.org/packages/08/d5/41eebd16066e59cd43728fe74bce953d7402f2b4ddfdfef2c0e9f17ca274/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:02e048037a6501a5ec1f6fc9736135aec6eb8a004ce48838cb951c515f32c80b", size = 1558931, upload-time = "2026-03-31T21:58:01.972Z" }, + { url = "https://files.pythonhosted.org/packages/30/e6/4a799798bf05740e66c3a1161079bda7a3dd8e22ca392481d7a7f9af82a6/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31cebae8b26f8a615d2b546fee45d5ffb76852ae6450e2a03f42c9102260d6fe", size = 1774125, upload-time = "2026-03-31T21:58:04.007Z" }, + { url = "https://files.pythonhosted.org/packages/84/63/7749337c90f92bc2cb18f9560d67aa6258c7060d1397d21529b8004fcf6f/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:888e78eb5ca55a615d285c3c09a7a91b42e9dd6fc699b166ebd5dee87c9ccf14", size = 1732427, upload-time = "2026-03-31T21:58:06.337Z" }, + { url = "https://files.pythonhosted.org/packages/98/de/cf2f44ff98d307e72fb97d5f5bbae3bfcb442f0ea9790c0bf5c5c2331404/aiohttp-3.13.5-cp312-cp312-win32.whl", hash = "sha256:8bd3ec6376e68a41f9f95f5ed170e2fcf22d4eb27a1f8cb361d0508f6e0557f3", size = 433534, upload-time = 
"2026-03-31T21:58:08.712Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ca/eadf6f9c8fa5e31d40993e3db153fb5ed0b11008ad5d9de98a95045bed84/aiohttp-3.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:110e448e02c729bcebb18c60b9214a87ba33bac4a9fa5e9a5f139938b56c6cb1", size = 460446, upload-time = "2026-03-31T21:58:10.945Z" }, + { url = "https://files.pythonhosted.org/packages/78/e9/d76bf503005709e390122d34e15256b88f7008e246c4bdbe915cd4f1adce/aiohttp-3.13.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5029cc80718bbd545123cd8fe5d15025eccaaaace5d0eeec6bd556ad6163d61", size = 742930, upload-time = "2026-03-31T21:58:13.155Z" }, + { url = "https://files.pythonhosted.org/packages/57/00/4b7b70223deaebd9bb85984d01a764b0d7bd6526fcdc73cca83bcbe7243e/aiohttp-3.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4bb6bf5811620003614076bdc807ef3b5e38244f9d25ca5fe888eaccea2a9832", size = 496927, upload-time = "2026-03-31T21:58:15.073Z" }, + { url = "https://files.pythonhosted.org/packages/9c/f5/0fb20fb49f8efdcdce6cd8127604ad2c503e754a8f139f5e02b01626523f/aiohttp-3.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a84792f8631bf5a94e52d9cc881c0b824ab42717165a5579c760b830d9392ac9", size = 497141, upload-time = "2026-03-31T21:58:17.009Z" }, + { url = "https://files.pythonhosted.org/packages/3b/86/b7c870053e36a94e8951b803cb5b909bfbc9b90ca941527f5fcafbf6b0fa/aiohttp-3.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57653eac22c6a4c13eb22ecf4d673d64a12f266e72785ab1c8b8e5940d0e8090", size = 1732476, upload-time = "2026-03-31T21:58:18.925Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e5/4e161f84f98d80c03a238671b4136e6530453d65262867d989bbe78244d0/aiohttp-3.13.5-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5e5f7debc7a57af53fdf5c5009f9391d9f4c12867049d509bf7bb164a6e295b", size = 1706507, upload-time = "2026-03-31T21:58:21.094Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/56/ea11a9f01518bd5a2a2fcee869d248c4b8a0cfa0bb13401574fa31adf4d4/aiohttp-3.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c719f65bebcdf6716f10e9eff80d27567f7892d8988c06de12bbbd39307c6e3a", size = 1773465, upload-time = "2026-03-31T21:58:23.159Z" }, + { url = "https://files.pythonhosted.org/packages/eb/40/333ca27fb74b0383f17c90570c748f7582501507307350a79d9f9f3c6eb1/aiohttp-3.13.5-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d97f93fdae594d886c5a866636397e2bcab146fd7a132fd6bb9ce182224452f8", size = 1873523, upload-time = "2026-03-31T21:58:25.59Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d2/e2f77eef1acb7111405433c707dc735e63f67a56e176e72e9e7a2cd3f493/aiohttp-3.13.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3df334e39d4c2f899a914f1dba283c1aadc311790733f705182998c6f7cae665", size = 1754113, upload-time = "2026-03-31T21:58:27.624Z" }, + { url = "https://files.pythonhosted.org/packages/fb/56/3f653d7f53c89669301ec9e42c95233e2a0c0a6dd051269e6e678db4fdb0/aiohttp-3.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe6970addfea9e5e081401bcbadf865d2b6da045472f58af08427e108d618540", size = 1562351, upload-time = "2026-03-31T21:58:29.918Z" }, + { url = "https://files.pythonhosted.org/packages/ec/a6/9b3e91eb8ae791cce4ee736da02211c85c6f835f1bdfac0594a8a3b7018c/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7becdf835feff2f4f335d7477f121af787e3504b48b449ff737afb35869ba7bb", size = 1693205, upload-time = "2026-03-31T21:58:32.214Z" }, + { url = "https://files.pythonhosted.org/packages/98/fc/bfb437a99a2fcebd6b6eaec609571954de2ed424f01c352f4b5504371dd3/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:676e5651705ad5d8a70aeb8eb6936c436d8ebbd56e63436cb7dd9bb36d2a9a46", size = 1730618, upload-time 
= "2026-03-31T21:58:34.728Z" }, + { url = "https://files.pythonhosted.org/packages/e4/b6/c8534862126191a034f68153194c389addc285a0f1347d85096d349bbc15/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9b16c653d38eb1a611cc898c41e76859ca27f119d25b53c12875fd0474ae31a8", size = 1745185, upload-time = "2026-03-31T21:58:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/0b/93/4ca8ee2ef5236e2707e0fd5fecb10ce214aee1ff4ab307af9c558bda3b37/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:999802d5fa0389f58decd24b537c54aa63c01c3219ce17d1214cbda3c2b22d2d", size = 1557311, upload-time = "2026-03-31T21:58:39.38Z" }, + { url = "https://files.pythonhosted.org/packages/57/ae/76177b15f18c5f5d094f19901d284025db28eccc5ae374d1d254181d33f4/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ec707059ee75732b1ba130ed5f9580fe10ff75180c812bc267ded039db5128c6", size = 1773147, upload-time = "2026-03-31T21:58:41.476Z" }, + { url = "https://files.pythonhosted.org/packages/01/a4/62f05a0a98d88af59d93b7fcac564e5f18f513cb7471696ac286db970d6a/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2d6d44a5b48132053c2f6cd5c8cb14bc67e99a63594e336b0f2af81e94d5530c", size = 1730356, upload-time = "2026-03-31T21:58:44.049Z" }, + { url = "https://files.pythonhosted.org/packages/e4/85/fc8601f59dfa8c9523808281f2da571f8b4699685f9809a228adcc90838d/aiohttp-3.13.5-cp313-cp313-win32.whl", hash = "sha256:329f292ed14d38a6c4c435e465f48bebb47479fd676a0411936cc371643225cc", size = 432637, upload-time = "2026-03-31T21:58:46.167Z" }, + { url = "https://files.pythonhosted.org/packages/c0/1b/ac685a8882896acf0f6b31d689e3792199cfe7aba37969fa91da63a7fa27/aiohttp-3.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:69f571de7500e0557801c0b51f4780482c0ec5fe2ac851af5a92cfce1af1cb83", size = 458896, upload-time = "2026-03-31T21:58:48.119Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/ce/46572759afc859e867a5bc8ec3487315869013f59281ce61764f76d879de/aiohttp-3.13.5-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:eb4639f32fd4a9904ab8fb45bf3383ba71137f3d9d4ba25b3b3f3109977c5b8c", size = 745721, upload-time = "2026-03-31T21:58:50.229Z" }, + { url = "https://files.pythonhosted.org/packages/13/fe/8a2efd7626dbe6049b2ef8ace18ffda8a4dfcbe1bcff3ac30c0c7575c20b/aiohttp-3.13.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:7e5dc4311bd5ac493886c63cbf76ab579dbe4641268e7c74e48e774c74b6f2be", size = 497663, upload-time = "2026-03-31T21:58:52.232Z" }, + { url = "https://files.pythonhosted.org/packages/9b/91/cc8cc78a111826c54743d88651e1687008133c37e5ee615fee9b57990fac/aiohttp-3.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:756c3c304d394977519824449600adaf2be0ccee76d206ee339c5e76b70ded25", size = 499094, upload-time = "2026-03-31T21:58:54.566Z" }, + { url = "https://files.pythonhosted.org/packages/0a/33/a8362cb15cf16a3af7e86ed11962d5cd7d59b449202dc576cdc731310bde/aiohttp-3.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecc26751323224cf8186efcf7fbcbc30f4e1d8c7970659daf25ad995e4032a56", size = 1726701, upload-time = "2026-03-31T21:58:56.864Z" }, + { url = "https://files.pythonhosted.org/packages/45/0c/c091ac5c3a17114bd76cbf85d674650969ddf93387876cf67f754204bd77/aiohttp-3.13.5-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10a75acfcf794edf9d8db50e5a7ec5fc818b2a8d3f591ce93bc7b1210df016d2", size = 1683360, upload-time = "2026-03-31T21:58:59.072Z" }, + { url = "https://files.pythonhosted.org/packages/23/73/bcee1c2b79bc275e964d1446c55c54441a461938e70267c86afaae6fba27/aiohttp-3.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f7a18f258d124cd678c5fe072fe4432a4d5232b0657fca7c1847f599233c83a", size = 1773023, upload-time = 
"2026-03-31T21:59:01.776Z" }, + { url = "https://files.pythonhosted.org/packages/c7/ef/720e639df03004fee2d869f771799d8c23046dec47d5b81e396c7cda583a/aiohttp-3.13.5-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:df6104c009713d3a89621096f3e3e88cc323fd269dbd7c20afe18535094320be", size = 1853795, upload-time = "2026-03-31T21:59:04.568Z" }, + { url = "https://files.pythonhosted.org/packages/bd/c9/989f4034fb46841208de7aeeac2c6d8300745ab4f28c42f629ba77c2d916/aiohttp-3.13.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:241a94f7de7c0c3b616627aaad530fe2cb620084a8b144d3be7b6ecfe95bae3b", size = 1730405, upload-time = "2026-03-31T21:59:07.221Z" }, + { url = "https://files.pythonhosted.org/packages/ce/75/ee1fd286ca7dc599d824b5651dad7b3be7ff8d9a7e7b3fe9820d9180f7db/aiohttp-3.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c974fb66180e58709b6fc402846f13791240d180b74de81d23913abe48e96d94", size = 1558082, upload-time = "2026-03-31T21:59:09.484Z" }, + { url = "https://files.pythonhosted.org/packages/c3/20/1e9e6650dfc436340116b7aa89ff8cb2bbdf0abc11dfaceaad8f74273a10/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6e27ea05d184afac78aabbac667450c75e54e35f62238d44463131bd3f96753d", size = 1692346, upload-time = "2026-03-31T21:59:12.068Z" }, + { url = "https://files.pythonhosted.org/packages/d8/40/8ebc6658d48ea630ac7903912fe0dd4e262f0e16825aa4c833c56c9f1f56/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a79a6d399cef33a11b6f004c67bb07741d91f2be01b8d712d52c75711b1e07c7", size = 1698891, upload-time = "2026-03-31T21:59:14.552Z" }, + { url = "https://files.pythonhosted.org/packages/d8/78/ea0ae5ec8ba7a5c10bdd6e318f1ba5e76fcde17db8275188772afc7917a4/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c632ce9c0b534fbe25b52c974515ed674937c5b99f549a92127c85f771a78772", size = 1742113, upload-time = 
"2026-03-31T21:59:17.068Z" }, + { url = "https://files.pythonhosted.org/packages/8a/66/9d308ed71e3f2491be1acb8769d96c6f0c47d92099f3bc9119cada27b357/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:fceedde51fbd67ee2bcc8c0b33d0126cc8b51ef3bbde2f86662bd6d5a6f10ec5", size = 1553088, upload-time = "2026-03-31T21:59:19.541Z" }, + { url = "https://files.pythonhosted.org/packages/da/a6/6cc25ed8dfc6e00c90f5c6d126a98e2cf28957ad06fa1036bd34b6f24a2c/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f92995dfec9420bb69ae629abf422e516923ba79ba4403bc750d94fb4a6c68c1", size = 1757976, upload-time = "2026-03-31T21:59:22.311Z" }, + { url = "https://files.pythonhosted.org/packages/c1/2b/cce5b0ffe0de99c83e5e36d8f828e4161e415660a9f3e58339d07cce3006/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20ae0ff08b1f2c8788d6fb85afcb798654ae6ba0b747575f8562de738078457b", size = 1712444, upload-time = "2026-03-31T21:59:24.635Z" }, + { url = "https://files.pythonhosted.org/packages/6c/cf/9e1795b4160c58d29421eafd1a69c6ce351e2f7c8d3c6b7e4ca44aea1a5b/aiohttp-3.13.5-cp314-cp314-win32.whl", hash = "sha256:b20df693de16f42b2472a9c485e1c948ee55524786a0a34345511afdd22246f3", size = 438128, upload-time = "2026-03-31T21:59:27.291Z" }, + { url = "https://files.pythonhosted.org/packages/22/4d/eaedff67fc805aeba4ba746aec891b4b24cebb1a7d078084b6300f79d063/aiohttp-3.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:f85c6f327bf0b8c29da7d93b1cabb6363fb5e4e160a32fa241ed2dce21b73162", size = 464029, upload-time = "2026-03-31T21:59:29.429Z" }, + { url = "https://files.pythonhosted.org/packages/79/11/c27d9332ee20d68dd164dc12a6ecdef2e2e35ecc97ed6cf0d2442844624b/aiohttp-3.13.5-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:1efb06900858bb618ff5cee184ae2de5828896c448403d51fb633f09e109be0a", size = 778758, upload-time = "2026-03-31T21:59:31.547Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/fb/377aead2e0a3ba5f09b7624f702a964bdf4f08b5b6728a9799830c80041e/aiohttp-3.13.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:fee86b7c4bd29bdaf0d53d14739b08a106fdda809ca5fe032a15f52fae5fe254", size = 512883, upload-time = "2026-03-31T21:59:34.098Z" }, + { url = "https://files.pythonhosted.org/packages/bb/a6/aa109a33671f7a5d3bd78b46da9d852797c5e665bfda7d6b373f56bff2ec/aiohttp-3.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:20058e23909b9e65f9da62b396b77dfa95965cbe840f8def6e572538b1d32e36", size = 516668, upload-time = "2026-03-31T21:59:36.497Z" }, + { url = "https://files.pythonhosted.org/packages/79/b3/ca078f9f2fa9563c36fb8ef89053ea2bb146d6f792c5104574d49d8acb63/aiohttp-3.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cf20a8d6868cb15a73cab329ffc07291ba8c22b1b88176026106ae39aa6df0f", size = 1883461, upload-time = "2026-03-31T21:59:38.723Z" }, + { url = "https://files.pythonhosted.org/packages/b7/e3/a7ad633ca1ca497b852233a3cce6906a56c3225fb6d9217b5e5e60b7419d/aiohttp-3.13.5-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:330f5da04c987f1d5bdb8ae189137c77139f36bd1cb23779ca1a354a4b027800", size = 1747661, upload-time = "2026-03-31T21:59:41.187Z" }, + { url = "https://files.pythonhosted.org/packages/33/b9/cd6fe579bed34a906d3d783fe60f2fa297ef55b27bb4538438ee49d4dc41/aiohttp-3.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f1cbf0c7926d315c3c26c2da41fd2b5d2fe01ac0e157b78caefc51a782196cf", size = 1863800, upload-time = "2026-03-31T21:59:43.84Z" }, + { url = "https://files.pythonhosted.org/packages/c0/3f/2c1e2f5144cefa889c8afd5cf431994c32f3b29da9961698ff4e3811b79a/aiohttp-3.13.5-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:53fc049ed6390d05423ba33103ded7281fe897cf97878f369a527070bd95795b", size = 
1958382, upload-time = "2026-03-31T21:59:46.187Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/f31ec3f1013723b3babe3609e7f119c2c2fb6ef33da90061a705ef3e1bc8/aiohttp-3.13.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:898703aa2667e3c5ca4c54ca36cd73f58b7a38ef87a5606414799ebce4d3fd3a", size = 1803724, upload-time = "2026-03-31T21:59:48.656Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b4/57712dfc6f1542f067daa81eb61da282fab3e6f1966fca25db06c4fc62d5/aiohttp-3.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0494a01ca9584eea1e5fbd6d748e61ecff218c51b576ee1999c23db7066417d8", size = 1640027, upload-time = "2026-03-31T21:59:51.284Z" }, + { url = "https://files.pythonhosted.org/packages/25/3c/734c878fb43ec083d8e31bf029daae1beafeae582d1b35da234739e82ee7/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6cf81fe010b8c17b09495cbd15c1d35afbc8fb405c0c9cf4738e5ae3af1d65be", size = 1806644, upload-time = "2026-03-31T21:59:53.753Z" }, + { url = "https://files.pythonhosted.org/packages/20/a5/f671e5cbec1c21d044ff3078223f949748f3a7f86b14e34a365d74a5d21f/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:c564dd5f09ddc9d8f2c2d0a301cd30a79a2cc1b46dd1a73bef8f0038863d016b", size = 1791630, upload-time = "2026-03-31T21:59:56.239Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/fb8d0ad63a0b8a99be97deac8c04dacf0785721c158bdf23d679a87aa99e/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:2994be9f6e51046c4f864598fd9abeb4fba6e88f0b2152422c9666dcd4aea9c6", size = 1809403, upload-time = "2026-03-31T21:59:59.103Z" }, + { url = "https://files.pythonhosted.org/packages/59/0c/bfed7f30662fcf12206481c2aac57dedee43fe1c49275e85b3a1e1742294/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:157826e2fa245d2ef46c83ea8a5faf77ca19355d278d425c29fda0beb3318037", size = 1634924, upload-time = 
"2026-03-31T22:00:02.116Z" }, + { url = "https://files.pythonhosted.org/packages/17/d6/fd518d668a09fd5a3319ae5e984d4d80b9a4b3df4e21c52f02251ef5a32e/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:a8aca50daa9493e9e13c0f566201a9006f080e7c50e5e90d0b06f53146a54500", size = 1836119, upload-time = "2026-03-31T22:00:04.756Z" }, + { url = "https://files.pythonhosted.org/packages/78/b7/15fb7a9d52e112a25b621c67b69c167805cb1f2ab8f1708a5c490d1b52fe/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3b13560160d07e047a93f23aaa30718606493036253d5430887514715b67c9d9", size = 1772072, upload-time = "2026-03-31T22:00:07.494Z" }, + { url = "https://files.pythonhosted.org/packages/7e/df/57ba7f0c4a553fc2bd8b6321df236870ec6fd64a2a473a8a13d4f733214e/aiohttp-3.13.5-cp314-cp314t-win32.whl", hash = "sha256:9a0f4474b6ea6818b41f82172d799e4b3d29e22c2c520ce4357856fced9af2f8", size = 471819, upload-time = "2026-03-31T22:00:10.277Z" }, + { url = "https://files.pythonhosted.org/packages/62/29/2f8418269e46454a26171bfdd6a055d74febf32234e474930f2f60a17145/aiohttp-3.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:18a2f6c1182c51baa1d28d68fea51513cb2a76612f038853c0ad3c145423d3d9", size = 505441, upload-time = "2026-03-31T22:00:12.791Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = 
"sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + +[[package]] +name = "annotated-doc" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.12.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = 
"sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, +] + +[[package]] +name = "asyncpg" +version = "0.31.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cc/d18065ce2380d80b1bcce927c24a2642efd38918e33fd724bc4bca904877/asyncpg-0.31.0.tar.gz", hash = "sha256:c989386c83940bfbd787180f2b1519415e2d3d6277a70d9d0f0145ac73500735", size = 993667, upload-time = "2025-11-24T23:27:00.812Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/a6/59d0a146e61d20e18db7396583242e32e0f120693b67a8de43f1557033e2/asyncpg-0.31.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b44c31e1efc1c15188ef183f287c728e2046abb1d26af4d20858215d50d91fad", size = 662042, upload-time = "2025-11-24T23:25:49.578Z" }, + { url = "https://files.pythonhosted.org/packages/36/01/ffaa189dcb63a2471720615e60185c3f6327716fdc0fc04334436fbb7c65/asyncpg-0.31.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0c89ccf741c067614c9b5fc7f1fc6f3b61ab05ae4aaa966e6fd6b93097c7d20d", size = 638504, upload-time = "2025-11-24T23:25:51.501Z" }, + { url = "https://files.pythonhosted.org/packages/9f/62/3f699ba45d8bd24c5d65392190d19656d74ff0185f42e19d0bbd973bb371/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:12b3b2e39dc5470abd5e98c8d3373e4b1d1234d9fbdedf538798b2c13c64460a", size = 3426241, upload-time = "2025-11-24T23:25:53.278Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d1/a867c2150f9c6e7af6462637f613ba67f78a314b00db220cd26ff559d532/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = 
"sha256:aad7a33913fb8bcb5454313377cc330fbb19a0cd5faa7272407d8a0c4257b671", size = 3520321, upload-time = "2025-11-24T23:25:54.982Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1a/cce4c3f246805ecd285a3591222a2611141f1669d002163abef999b60f98/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3df118d94f46d85b2e434fd62c84cb66d5834d5a890725fe625f498e72e4d5ec", size = 3316685, upload-time = "2025-11-24T23:25:57.43Z" }, + { url = "https://files.pythonhosted.org/packages/40/ae/0fc961179e78cc579e138fad6eb580448ecae64908f95b8cb8ee2f241f67/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5b6efff3c17c3202d4b37189969acf8927438a238c6257f66be3c426beba20", size = 3471858, upload-time = "2025-11-24T23:25:59.636Z" }, + { url = "https://files.pythonhosted.org/packages/52/b2/b20e09670be031afa4cbfabd645caece7f85ec62d69c312239de568e058e/asyncpg-0.31.0-cp312-cp312-win32.whl", hash = "sha256:027eaa61361ec735926566f995d959ade4796f6a49d3bde17e5134b9964f9ba8", size = 527852, upload-time = "2025-11-24T23:26:01.084Z" }, + { url = "https://files.pythonhosted.org/packages/b5/f0/f2ed1de154e15b107dc692262395b3c17fc34eafe2a78fc2115931561730/asyncpg-0.31.0-cp312-cp312-win_amd64.whl", hash = "sha256:72d6bdcbc93d608a1158f17932de2321f68b1a967a13e014998db87a72ed3186", size = 597175, upload-time = "2025-11-24T23:26:02.564Z" }, + { url = "https://files.pythonhosted.org/packages/95/11/97b5c2af72a5d0b9bc3fa30cd4b9ce22284a9a943a150fdc768763caf035/asyncpg-0.31.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c204fab1b91e08b0f47e90a75d1b3c62174dab21f670ad6c5d0f243a228f015b", size = 661111, upload-time = "2025-11-24T23:26:04.467Z" }, + { url = "https://files.pythonhosted.org/packages/1b/71/157d611c791a5e2d0423f09f027bd499935f0906e0c2a416ce712ba51ef3/asyncpg-0.31.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:54a64f91839ba59008eccf7aad2e93d6e3de688d796f35803235ea1c4898ae1e", size = 636928, upload-time = "2025-11-24T23:26:05.944Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/fc/9e3486fb2bbe69d4a867c0b76d68542650a7ff1574ca40e84c3111bb0c6e/asyncpg-0.31.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0e0822b1038dc7253b337b0f3f676cadc4ac31b126c5d42691c39691962e403", size = 3424067, upload-time = "2025-11-24T23:26:07.957Z" }, + { url = "https://files.pythonhosted.org/packages/12/c6/8c9d076f73f07f995013c791e018a1cd5f31823c2a3187fc8581706aa00f/asyncpg-0.31.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bef056aa502ee34204c161c72ca1f3c274917596877f825968368b2c33f585f4", size = 3518156, upload-time = "2025-11-24T23:26:09.591Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3b/60683a0baf50fbc546499cfb53132cb6835b92b529a05f6a81471ab60d0c/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0bfbcc5b7ffcd9b75ab1558f00db2ae07db9c80637ad1b2469c43df79d7a5ae2", size = 3319636, upload-time = "2025-11-24T23:26:11.168Z" }, + { url = "https://files.pythonhosted.org/packages/50/dc/8487df0f69bd398a61e1792b3cba0e47477f214eff085ba0efa7eac9ce87/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:22bc525ebbdc24d1261ecbf6f504998244d4e3be1721784b5f64664d61fbe602", size = 3472079, upload-time = "2025-11-24T23:26:13.164Z" }, + { url = "https://files.pythonhosted.org/packages/13/a1/c5bbeeb8531c05c89135cb8b28575ac2fac618bcb60119ee9696c3faf71c/asyncpg-0.31.0-cp313-cp313-win32.whl", hash = "sha256:f890de5e1e4f7e14023619399a471ce4b71f5418cd67a51853b9910fdfa73696", size = 527606, upload-time = "2025-11-24T23:26:14.78Z" }, + { url = "https://files.pythonhosted.org/packages/91/66/b25ccb84a246b470eb943b0107c07edcae51804912b824054b3413995a10/asyncpg-0.31.0-cp313-cp313-win_amd64.whl", hash = "sha256:dc5f2fa9916f292e5c5c8b2ac2813763bcd7f58e130055b4ad8a0531314201ab", size = 596569, upload-time = "2025-11-24T23:26:16.189Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/36/e9450d62e84a13aea6580c83a47a437f26c7ca6fa0f0fd40b6670793ea30/asyncpg-0.31.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f6b56b91bb0ffc328c4e3ed113136cddd9deefdf5f79ab448598b9772831df44", size = 660867, upload-time = "2025-11-24T23:26:17.631Z" }, + { url = "https://files.pythonhosted.org/packages/82/4b/1d0a2b33b3102d210439338e1beea616a6122267c0df459ff0265cd5807a/asyncpg-0.31.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:334dec28cf20d7f5bb9e45b39546ddf247f8042a690bff9b9573d00086e69cb5", size = 638349, upload-time = "2025-11-24T23:26:19.689Z" }, + { url = "https://files.pythonhosted.org/packages/41/aa/e7f7ac9a7974f08eff9183e392b2d62516f90412686532d27e196c0f0eeb/asyncpg-0.31.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98cc158c53f46de7bb677fd20c417e264fc02b36d901cc2a43bd6cb0dc6dbfd2", size = 3410428, upload-time = "2025-11-24T23:26:21.275Z" }, + { url = "https://files.pythonhosted.org/packages/6f/de/bf1b60de3dede5c2731e6788617a512bc0ebd9693eac297ee74086f101d7/asyncpg-0.31.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9322b563e2661a52e3cdbc93eed3be7748b289f792e0011cb2720d278b366ce2", size = 3471678, upload-time = "2025-11-24T23:26:23.627Z" }, + { url = "https://files.pythonhosted.org/packages/46/78/fc3ade003e22d8bd53aaf8f75f4be48f0b460fa73738f0391b9c856a9147/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19857a358fc811d82227449b7ca40afb46e75b33eb8897240c3839dd8b744218", size = 3313505, upload-time = "2025-11-24T23:26:25.235Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e9/73eb8a6789e927816f4705291be21f2225687bfa97321e40cd23055e903a/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ba5f8886e850882ff2c2ace5732300e99193823e8107e2c53ef01c1ebfa1e85d", size = 3434744, upload-time = "2025-11-24T23:26:26.944Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/4b/f10b880534413c65c5b5862f79b8e81553a8f364e5238832ad4c0af71b7f/asyncpg-0.31.0-cp314-cp314-win32.whl", hash = "sha256:cea3a0b2a14f95834cee29432e4ddc399b95700eb1d51bbc5bfee8f31fa07b2b", size = 532251, upload-time = "2025-11-24T23:26:28.404Z" }, + { url = "https://files.pythonhosted.org/packages/d3/2d/7aa40750b7a19efa5d66e67fc06008ca0f27ba1bd082e457ad82f59aba49/asyncpg-0.31.0-cp314-cp314-win_amd64.whl", hash = "sha256:04d19392716af6b029411a0264d92093b6e5e8285ae97a39957b9a9c14ea72be", size = 604901, upload-time = "2025-11-24T23:26:30.34Z" }, + { url = "https://files.pythonhosted.org/packages/ce/fe/b9dfe349b83b9dee28cc42360d2c86b2cdce4cb551a2c2d27e156bcac84d/asyncpg-0.31.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:bdb957706da132e982cc6856bb2f7b740603472b54c3ebc77fe60ea3e57e1bd2", size = 702280, upload-time = "2025-11-24T23:26:32Z" }, + { url = "https://files.pythonhosted.org/packages/6a/81/e6be6e37e560bd91e6c23ea8a6138a04fd057b08cf63d3c5055c98e81c1d/asyncpg-0.31.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6d11b198111a72f47154fa03b85799f9be63701e068b43f84ac25da0bda9cb31", size = 682931, upload-time = "2025-11-24T23:26:33.572Z" }, + { url = "https://files.pythonhosted.org/packages/a6/45/6009040da85a1648dd5bc75b3b0a062081c483e75a1a29041ae63a0bf0dc/asyncpg-0.31.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18c83b03bc0d1b23e6230f5bf8d4f217dc9bc08644ce0502a9d91dc9e634a9c7", size = 3581608, upload-time = "2025-11-24T23:26:35.638Z" }, + { url = "https://files.pythonhosted.org/packages/7e/06/2e3d4d7608b0b2b3adbee0d0bd6a2d29ca0fc4d8a78f8277df04e2d1fd7b/asyncpg-0.31.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e009abc333464ff18b8f6fd146addffd9aaf63e79aa3bb40ab7a4c332d0c5e9e", size = 3498738, upload-time = "2025-11-24T23:26:37.275Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/aa/7d75ede780033141c51d83577ea23236ba7d3a23593929b32b49db8ed36e/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3b1fbcb0e396a5ca435a8826a87e5c2c2cc0c8c68eb6fadf82168056b0e53a8c", size = 3401026, upload-time = "2025-11-24T23:26:39.423Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7a/15e37d45e7f7c94facc1e9148c0e455e8f33c08f0b8a0b1deb2c5171771b/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8df714dba348efcc162d2adf02d213e5fab1bd9f557e1305633e851a61814a7a", size = 3429426, upload-time = "2025-11-24T23:26:41.032Z" }, + { url = "https://files.pythonhosted.org/packages/13/d5/71437c5f6ae5f307828710efbe62163974e71237d5d46ebd2869ea052d10/asyncpg-0.31.0-cp314-cp314t-win32.whl", hash = "sha256:1b41f1afb1033f2b44f3234993b15096ddc9cd71b21a42dbd87fc6a57b43d65d", size = 614495, upload-time = "2025-11-24T23:26:42.659Z" }, + { url = "https://files.pythonhosted.org/packages/3c/d7/8fb3044eaef08a310acfe23dae9a8e2e07d305edc29a53497e52bc76eca7/asyncpg-0.31.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bd4107bb7cdd0e9e65fae66a62afd3a249663b844fa34d479f6d5b3bef9c04c3", size = 706062, upload-time = "2025-11-24T23:26:44.086Z" }, +] + +[[package]] +name = "attrs" +version = "25.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, +] + +[[package]] +name = "audioop-lts" +version = "0.2.2" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/38/53/946db57842a50b2da2e0c1e34bd37f36f5aadba1a929a3971c5d7841dbca/audioop_lts-0.2.2.tar.gz", hash = "sha256:64d0c62d88e67b98a1a5e71987b7aa7b5bcffc7dcee65b635823dbdd0a8dbbd0", size = 30686, upload-time = "2025-08-05T16:43:17.409Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/d4/94d277ca941de5a507b07f0b592f199c22454eeaec8f008a286b3fbbacd6/audioop_lts-0.2.2-cp313-abi3-macosx_10_13_universal2.whl", hash = "sha256:fd3d4602dc64914d462924a08c1a9816435a2155d74f325853c1f1ac3b2d9800", size = 46523, upload-time = "2025-08-05T16:42:20.836Z" }, + { url = "https://files.pythonhosted.org/packages/f8/5a/656d1c2da4b555920ce4177167bfeb8623d98765594af59702c8873f60ec/audioop_lts-0.2.2-cp313-abi3-macosx_10_13_x86_64.whl", hash = "sha256:550c114a8df0aafe9a05442a1162dfc8fec37e9af1d625ae6060fed6e756f303", size = 27455, upload-time = "2025-08-05T16:42:22.283Z" }, + { url = "https://files.pythonhosted.org/packages/1b/83/ea581e364ce7b0d41456fb79d6ee0ad482beda61faf0cab20cbd4c63a541/audioop_lts-0.2.2-cp313-abi3-macosx_11_0_arm64.whl", hash = "sha256:9a13dc409f2564de15dd68be65b462ba0dde01b19663720c68c1140c782d1d75", size = 26997, upload-time = "2025-08-05T16:42:23.849Z" }, + { url = "https://files.pythonhosted.org/packages/b8/3b/e8964210b5e216e5041593b7d33e97ee65967f17c282e8510d19c666dab4/audioop_lts-0.2.2-cp313-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:51c916108c56aa6e426ce611946f901badac950ee2ddaf302b7ed35d9958970d", size = 85844, upload-time = "2025-08-05T16:42:25.208Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2e/0a1c52faf10d51def20531a59ce4c706cb7952323b11709e10de324d6493/audioop_lts-0.2.2-cp313-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:47eba38322370347b1c47024defbd36374a211e8dd5b0dcbce7b34fdb6f8847b", size = 85056, upload-time = "2025-08-05T16:42:26.559Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/e8/cd95eef479656cb75ab05dfece8c1f8c395d17a7c651d88f8e6e291a63ab/audioop_lts-0.2.2-cp313-abi3-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba7c3a7e5f23e215cb271516197030c32aef2e754252c4c70a50aaff7031a2c8", size = 93892, upload-time = "2025-08-05T16:42:27.902Z" }, + { url = "https://files.pythonhosted.org/packages/5c/1e/a0c42570b74f83efa5cca34905b3eef03f7ab09fe5637015df538a7f3345/audioop_lts-0.2.2-cp313-abi3-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:def246fe9e180626731b26e89816e79aae2276f825420a07b4a647abaa84becc", size = 96660, upload-time = "2025-08-05T16:42:28.9Z" }, + { url = "https://files.pythonhosted.org/packages/50/d5/8a0ae607ca07dbb34027bac8db805498ee7bfecc05fd2c148cc1ed7646e7/audioop_lts-0.2.2-cp313-abi3-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e160bf9df356d841bb6c180eeeea1834085464626dc1b68fa4e1d59070affdc3", size = 79143, upload-time = "2025-08-05T16:42:29.929Z" }, + { url = "https://files.pythonhosted.org/packages/12/17/0d28c46179e7910bfb0bb62760ccb33edb5de973052cb2230b662c14ca2e/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:4b4cd51a57b698b2d06cb9993b7ac8dfe89a3b2878e96bc7948e9f19ff51dba6", size = 84313, upload-time = "2025-08-05T16:42:30.949Z" }, + { url = "https://files.pythonhosted.org/packages/84/ba/bd5d3806641564f2024e97ca98ea8f8811d4e01d9b9f9831474bc9e14f9e/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_ppc64le.whl", hash = "sha256:4a53aa7c16a60a6857e6b0b165261436396ef7293f8b5c9c828a3a203147ed4a", size = 93044, upload-time = "2025-08-05T16:42:31.959Z" }, + { url = "https://files.pythonhosted.org/packages/f9/5e/435ce8d5642f1f7679540d1e73c1c42d933331c0976eb397d1717d7f01a3/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_riscv64.whl", hash = "sha256:3fc38008969796f0f689f1453722a0f463da1b8a6fbee11987830bfbb664f623", size = 78766, upload-time = "2025-08-05T16:42:33.302Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/3b/b909e76b606cbfd53875693ec8c156e93e15a1366a012f0b7e4fb52d3c34/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_s390x.whl", hash = "sha256:15ab25dd3e620790f40e9ead897f91e79c0d3ce65fe193c8ed6c26cffdd24be7", size = 87640, upload-time = "2025-08-05T16:42:34.854Z" }, + { url = "https://files.pythonhosted.org/packages/30/e7/8f1603b4572d79b775f2140d7952f200f5e6c62904585d08a01f0a70393a/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:03f061a1915538fd96272bac9551841859dbb2e3bf73ebe4a23ef043766f5449", size = 86052, upload-time = "2025-08-05T16:42:35.839Z" }, + { url = "https://files.pythonhosted.org/packages/b5/96/c37846df657ccdda62ba1ae2b6534fa90e2e1b1742ca8dcf8ebd38c53801/audioop_lts-0.2.2-cp313-abi3-win32.whl", hash = "sha256:3bcddaaf6cc5935a300a8387c99f7a7fbbe212a11568ec6cf6e4bc458c048636", size = 26185, upload-time = "2025-08-05T16:42:37.04Z" }, + { url = "https://files.pythonhosted.org/packages/34/a5/9d78fdb5b844a83da8a71226c7bdae7cc638861085fff7a1d707cb4823fa/audioop_lts-0.2.2-cp313-abi3-win_amd64.whl", hash = "sha256:a2c2a947fae7d1062ef08c4e369e0ba2086049a5e598fda41122535557012e9e", size = 30503, upload-time = "2025-08-05T16:42:38.427Z" }, + { url = "https://files.pythonhosted.org/packages/34/25/20d8fde083123e90c61b51afb547bb0ea7e77bab50d98c0ab243d02a0e43/audioop_lts-0.2.2-cp313-abi3-win_arm64.whl", hash = "sha256:5f93a5db13927a37d2d09637ccca4b2b6b48c19cd9eda7b17a2e9f77edee6a6f", size = 24173, upload-time = "2025-08-05T16:42:39.704Z" }, + { url = "https://files.pythonhosted.org/packages/58/a7/0a764f77b5c4ac58dc13c01a580f5d32ae8c74c92020b961556a43e26d02/audioop_lts-0.2.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:73f80bf4cd5d2ca7814da30a120de1f9408ee0619cc75da87d0641273d202a09", size = 47096, upload-time = "2025-08-05T16:42:40.684Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/ed/ebebedde1a18848b085ad0fa54b66ceb95f1f94a3fc04f1cd1b5ccb0ed42/audioop_lts-0.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:106753a83a25ee4d6f473f2be6b0966fc1c9af7e0017192f5531a3e7463dce58", size = 27748, upload-time = "2025-08-05T16:42:41.992Z" }, + { url = "https://files.pythonhosted.org/packages/cb/6e/11ca8c21af79f15dbb1c7f8017952ee8c810c438ce4e2b25638dfef2b02c/audioop_lts-0.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fbdd522624141e40948ab3e8cdae6e04c748d78710e9f0f8d4dae2750831de19", size = 27329, upload-time = "2025-08-05T16:42:42.987Z" }, + { url = "https://files.pythonhosted.org/packages/84/52/0022f93d56d85eec5da6b9da6a958a1ef09e80c39f2cc0a590c6af81dcbb/audioop_lts-0.2.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:143fad0311e8209ece30a8dbddab3b65ab419cbe8c0dde6e8828da25999be911", size = 92407, upload-time = "2025-08-05T16:42:44.336Z" }, + { url = "https://files.pythonhosted.org/packages/87/1d/48a889855e67be8718adbc7a01f3c01d5743c325453a5e81cf3717664aad/audioop_lts-0.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dfbbc74ec68a0fd08cfec1f4b5e8cca3d3cd7de5501b01c4b5d209995033cde9", size = 91811, upload-time = "2025-08-05T16:42:45.325Z" }, + { url = "https://files.pythonhosted.org/packages/98/a6/94b7213190e8077547ffae75e13ed05edc488653c85aa5c41472c297d295/audioop_lts-0.2.2-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cfcac6aa6f42397471e4943e0feb2244549db5c5d01efcd02725b96af417f3fe", size = 100470, upload-time = "2025-08-05T16:42:46.468Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e9/78450d7cb921ede0cfc33426d3a8023a3bda755883c95c868ee36db8d48d/audioop_lts-0.2.2-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:752d76472d9804ac60f0078c79cdae8b956f293177acd2316cd1e15149aee132", 
size = 103878, upload-time = "2025-08-05T16:42:47.576Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e2/cd5439aad4f3e34ae1ee852025dc6aa8f67a82b97641e390bf7bd9891d3e/audioop_lts-0.2.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:83c381767e2cc10e93e40281a04852facc4cd9334550e0f392f72d1c0a9c5753", size = 84867, upload-time = "2025-08-05T16:42:49.003Z" }, + { url = "https://files.pythonhosted.org/packages/68/4b/9d853e9076c43ebba0d411e8d2aa19061083349ac695a7d082540bad64d0/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c0022283e9556e0f3643b7c3c03f05063ca72b3063291834cca43234f20c60bb", size = 90001, upload-time = "2025-08-05T16:42:50.038Z" }, + { url = "https://files.pythonhosted.org/packages/58/26/4bae7f9d2f116ed5593989d0e521d679b0d583973d203384679323d8fa85/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:a2d4f1513d63c795e82948e1305f31a6d530626e5f9f2605408b300ae6095093", size = 99046, upload-time = "2025-08-05T16:42:51.111Z" }, + { url = "https://files.pythonhosted.org/packages/b2/67/a9f4fb3e250dda9e9046f8866e9fa7d52664f8985e445c6b4ad6dfb55641/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:c9c8e68d8b4a56fda8c025e538e639f8c5953f5073886b596c93ec9b620055e7", size = 84788, upload-time = "2025-08-05T16:42:52.198Z" }, + { url = "https://files.pythonhosted.org/packages/70/f7/3de86562db0121956148bcb0fe5b506615e3bcf6e63c4357a612b910765a/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:96f19de485a2925314f5020e85911fb447ff5fbef56e8c7c6927851b95533a1c", size = 94472, upload-time = "2025-08-05T16:42:53.59Z" }, + { url = "https://files.pythonhosted.org/packages/f1/32/fd772bf9078ae1001207d2df1eef3da05bea611a87dd0e8217989b2848fa/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e541c3ef484852ef36545f66209444c48b28661e864ccadb29daddb6a4b8e5f5", size = 92279, upload-time = "2025-08-05T16:42:54.632Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/41/affea7181592ab0ab560044632571a38edaf9130b84928177823fbf3176a/audioop_lts-0.2.2-cp313-cp313t-win32.whl", hash = "sha256:d5e73fa573e273e4f2e5ff96f9043858a5e9311e94ffefd88a3186a910c70917", size = 26568, upload-time = "2025-08-05T16:42:55.627Z" }, + { url = "https://files.pythonhosted.org/packages/28/2b/0372842877016641db8fc54d5c88596b542eec2f8f6c20a36fb6612bf9ee/audioop_lts-0.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9191d68659eda01e448188f60364c7763a7ca6653ed3f87ebb165822153a8547", size = 30942, upload-time = "2025-08-05T16:42:56.674Z" }, + { url = "https://files.pythonhosted.org/packages/ee/ca/baf2b9cc7e96c179bb4a54f30fcd83e6ecb340031bde68f486403f943768/audioop_lts-0.2.2-cp313-cp313t-win_arm64.whl", hash = "sha256:c174e322bb5783c099aaf87faeb240c8d210686b04bd61dfd05a8e5a83d88969", size = 24603, upload-time = "2025-08-05T16:42:57.571Z" }, + { url = "https://files.pythonhosted.org/packages/5c/73/413b5a2804091e2c7d5def1d618e4837f1cb82464e230f827226278556b7/audioop_lts-0.2.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:f9ee9b52f5f857fbaf9d605a360884f034c92c1c23021fb90b2e39b8e64bede6", size = 47104, upload-time = "2025-08-05T16:42:58.518Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8c/daa3308dc6593944410c2c68306a5e217f5c05b70a12e70228e7dd42dc5c/audioop_lts-0.2.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:49ee1a41738a23e98d98b937a0638357a2477bc99e61b0f768a8f654f45d9b7a", size = 27754, upload-time = "2025-08-05T16:43:00.132Z" }, + { url = "https://files.pythonhosted.org/packages/4e/86/c2e0f627168fcf61781a8f72cab06b228fe1da4b9fa4ab39cfb791b5836b/audioop_lts-0.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5b00be98ccd0fc123dcfad31d50030d25fcf31488cde9e61692029cd7394733b", size = 27332, upload-time = "2025-08-05T16:43:01.666Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/bd/35dce665255434f54e5307de39e31912a6f902d4572da7c37582809de14f/audioop_lts-0.2.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a6d2e0f9f7a69403e388894d4ca5ada5c47230716a03f2847cfc7bd1ecb589d6", size = 92396, upload-time = "2025-08-05T16:43:02.991Z" }, + { url = "https://files.pythonhosted.org/packages/2d/d2/deeb9f51def1437b3afa35aeb729d577c04bcd89394cb56f9239a9f50b6f/audioop_lts-0.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9b0b8a03ef474f56d1a842af1a2e01398b8f7654009823c6d9e0ecff4d5cfbf", size = 91811, upload-time = "2025-08-05T16:43:04.096Z" }, + { url = "https://files.pythonhosted.org/packages/76/3b/09f8b35b227cee28cc8231e296a82759ed80c1a08e349811d69773c48426/audioop_lts-0.2.2-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2b267b70747d82125f1a021506565bdc5609a2b24bcb4773c16d79d2bb260bbd", size = 100483, upload-time = "2025-08-05T16:43:05.085Z" }, + { url = "https://files.pythonhosted.org/packages/0b/15/05b48a935cf3b130c248bfdbdea71ce6437f5394ee8533e0edd7cfd93d5e/audioop_lts-0.2.2-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0337d658f9b81f4cd0fdb1f47635070cc084871a3d4646d9de74fdf4e7c3d24a", size = 103885, upload-time = "2025-08-05T16:43:06.197Z" }, + { url = "https://files.pythonhosted.org/packages/83/80/186b7fce6d35b68d3d739f228dc31d60b3412105854edb975aa155a58339/audioop_lts-0.2.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:167d3b62586faef8b6b2275c3218796b12621a60e43f7e9d5845d627b9c9b80e", size = 84899, upload-time = "2025-08-05T16:43:07.291Z" }, + { url = "https://files.pythonhosted.org/packages/49/89/c78cc5ac6cb5828f17514fb12966e299c850bc885e80f8ad94e38d450886/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:0d9385e96f9f6da847f4d571ce3cb15b5091140edf3db97276872647ce37efd7", size = 89998, upload-time = "2025-08-05T16:43:08.335Z" }, + { url = "https://files.pythonhosted.org/packages/4c/4b/6401888d0c010e586c2ca50fce4c903d70a6bb55928b16cfbdfd957a13da/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:48159d96962674eccdca9a3df280e864e8ac75e40a577cc97c5c42667ffabfc5", size = 99046, upload-time = "2025-08-05T16:43:09.367Z" }, + { url = "https://files.pythonhosted.org/packages/de/f8/c874ca9bb447dae0e2ef2e231f6c4c2b0c39e31ae684d2420b0f9e97ee68/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8fefe5868cd082db1186f2837d64cfbfa78b548ea0d0543e9b28935ccce81ce9", size = 84843, upload-time = "2025-08-05T16:43:10.749Z" }, + { url = "https://files.pythonhosted.org/packages/3e/c0/0323e66f3daebc13fd46b36b30c3be47e3fc4257eae44f1e77eb828c703f/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:58cf54380c3884fb49fdd37dfb7a772632b6701d28edd3e2904743c5e1773602", size = 94490, upload-time = "2025-08-05T16:43:12.131Z" }, + { url = "https://files.pythonhosted.org/packages/98/6b/acc7734ac02d95ab791c10c3f17ffa3584ccb9ac5c18fd771c638ed6d1f5/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:088327f00488cdeed296edd9215ca159f3a5a5034741465789cad403fcf4bec0", size = 92297, upload-time = "2025-08-05T16:43:13.139Z" }, + { url = "https://files.pythonhosted.org/packages/13/c3/c3dc3f564ce6877ecd2a05f8d751b9b27a8c320c2533a98b0c86349778d0/audioop_lts-0.2.2-cp314-cp314t-win32.whl", hash = "sha256:068aa17a38b4e0e7de771c62c60bbca2455924b67a8814f3b0dee92b5820c0b3", size = 27331, upload-time = "2025-08-05T16:43:14.19Z" }, + { url = "https://files.pythonhosted.org/packages/72/bb/b4608537e9ffcb86449091939d52d24a055216a36a8bf66b936af8c3e7ac/audioop_lts-0.2.2-cp314-cp314t-win_amd64.whl", hash = "sha256:a5bf613e96f49712073de86f20dbdd4014ca18efd4d34ed18c75bd808337851b", size = 31697, upload-time = "2025-08-05T16:43:15.193Z" 
}, + { url = "https://files.pythonhosted.org/packages/f6/22/91616fe707a5c5510de2cac9b046a30defe7007ba8a0c04f9c08f27df312/audioop_lts-0.2.2-cp314-cp314t-win_arm64.whl", hash = "sha256:b492c3b040153e68b9fdaff5913305aaaba5bb433d8a7f73d5cf6a64ed3cc1dd", size = 25206, upload-time = "2025-08-05T16:43:16.444Z" }, +] + +[[package]] +name = "audioread" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "standard-aifc", marker = "python_full_version >= '3.13'" }, + { name = "standard-sunau", marker = "python_full_version >= '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/4a/874ecf9b472f998130c2b5e145dcdb9f6131e84786111489103b66772143/audioread-3.1.0.tar.gz", hash = "sha256:1c4ab2f2972764c896a8ac61ac53e261c8d29f0c6ccd652f84e18f08a4cab190", size = 20082, upload-time = "2025-10-26T19:44:13.484Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/16/fbe8e1e185a45042f7cd3a282def5bb8d95bb69ab9e9ef6a5368aa17e426/audioread-3.1.0-py3-none-any.whl", hash = "sha256:b30d1df6c5d3de5dcef0fb0e256f6ea17bdcf5f979408df0297d8a408e2971b4", size = 23143, upload-time = "2025-10-26T19:44:12.016Z" }, +] + +[[package]] +name = "certifi" +version = "2026.1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies 
= [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = 
"2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", 
hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, + { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", 
size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, + { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, + { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, + { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, + { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, + { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, + { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, + { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, + { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, + { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +] + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = 
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.13.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/56/95b7e30fa389756cb56630faa728da46a27b8c6eb46f9d557c68fff12b65/coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91", size = 827239, upload-time = "2026-02-09T12:59:03.86Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/81/4ce2fdd909c5a0ed1f6dedb88aa57ab79b6d1fbd9b588c1ac7ef45659566/coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459", size = 219449, upload-time = "2026-02-09T12:56:54.889Z" }, + { url = "https://files.pythonhosted.org/packages/5d/96/5238b1efc5922ddbdc9b0db9243152c09777804fb7c02ad1741eb18a11c0/coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3", size = 219810, upload-time = "2026-02-09T12:56:56.33Z" }, + { url = "https://files.pythonhosted.org/packages/78/72/2f372b726d433c9c35e56377cf1d513b4c16fe51841060d826b95caacec1/coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634", size = 251308, upload-time = "2026-02-09T12:56:57.858Z" }, + { url = "https://files.pythonhosted.org/packages/5d/a0/2ea570925524ef4e00bb6c82649f5682a77fac5ab910a65c9284de422600/coverage-7.13.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c048ea43875fbf8b45d476ad79f179809c590ec7b79e2035c662e7afa3192e3", size = 254052, upload-time = "2026-02-09T12:56:59.754Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/ac/45dc2e19a1939098d783c846e130b8f862fbb50d09e0af663988f2f21973/coverage-7.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b7b38448866e83176e28086674fe7368ab8590e4610fb662b44e345b86d63ffa", size = 255165, upload-time = "2026-02-09T12:57:01.287Z" }, + { url = "https://files.pythonhosted.org/packages/2d/4d/26d236ff35abc3b5e63540d3386e4c3b192168c1d96da5cb2f43c640970f/coverage-7.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:de6defc1c9badbf8b9e67ae90fd00519186d6ab64e5cc5f3d21359c2a9b2c1d3", size = 257432, upload-time = "2026-02-09T12:57:02.637Z" }, + { url = "https://files.pythonhosted.org/packages/ec/55/14a966c757d1348b2e19caf699415a2a4c4f7feaa4bbc6326a51f5c7dd1b/coverage-7.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7eda778067ad7ffccd23ecffce537dface96212576a07924cbf0d8799d2ded5a", size = 251716, upload-time = "2026-02-09T12:57:04.056Z" }, + { url = "https://files.pythonhosted.org/packages/77/33/50116647905837c66d28b2af1321b845d5f5d19be9655cb84d4a0ea806b4/coverage-7.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e87f6c587c3f34356c3759f0420693e35e7eb0e2e41e4c011cb6ec6ecbbf1db7", size = 253089, upload-time = "2026-02-09T12:57:05.503Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b4/8efb11a46e3665d92635a56e4f2d4529de6d33f2cb38afd47d779d15fc99/coverage-7.13.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8248977c2e33aecb2ced42fef99f2d319e9904a36e55a8a68b69207fb7e43edc", size = 251232, upload-time = "2026-02-09T12:57:06.879Z" }, + { url = "https://files.pythonhosted.org/packages/51/24/8cd73dd399b812cc76bb0ac260e671c4163093441847ffe058ac9fda1e32/coverage-7.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:25381386e80ae727608e662474db537d4df1ecd42379b5ba33c84633a2b36d47", size = 255299, upload-time = "2026-02-09T12:57:08.245Z" }, + { 
url = "https://files.pythonhosted.org/packages/03/94/0a4b12f1d0e029ce1ccc1c800944a9984cbe7d678e470bb6d3c6bc38a0da/coverage-7.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ee756f00726693e5ba94d6df2bdfd64d4852d23b09bb0bc700e3b30e6f333985", size = 250796, upload-time = "2026-02-09T12:57:10.142Z" }, + { url = "https://files.pythonhosted.org/packages/73/44/6002fbf88f6698ca034360ce474c406be6d5a985b3fdb3401128031eef6b/coverage-7.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fdfc1e28e7c7cdce44985b3043bc13bbd9c747520f94a4d7164af8260b3d91f0", size = 252673, upload-time = "2026-02-09T12:57:12.197Z" }, + { url = "https://files.pythonhosted.org/packages/de/c6/a0279f7c00e786be75a749a5674e6fa267bcbd8209cd10c9a450c655dfa7/coverage-7.13.4-cp312-cp312-win32.whl", hash = "sha256:01d4cbc3c283a17fc1e42d614a119f7f438eabb593391283adca8dc86eff1246", size = 221990, upload-time = "2026-02-09T12:57:14.085Z" }, + { url = "https://files.pythonhosted.org/packages/77/4e/c0a25a425fcf5557d9abd18419c95b63922e897bc86c1f327f155ef234a9/coverage-7.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:9401ebc7ef522f01d01d45532c68c5ac40fb27113019b6b7d8b208f6e9baa126", size = 222800, upload-time = "2026-02-09T12:57:15.944Z" }, + { url = "https://files.pythonhosted.org/packages/47/ac/92da44ad9a6f4e3a7debd178949d6f3769bedca33830ce9b1dcdab589a37/coverage-7.13.4-cp312-cp312-win_arm64.whl", hash = "sha256:b1ec7b6b6e93255f952e27ab58fbc68dcc468844b16ecbee881aeb29b6ab4d8d", size = 221415, upload-time = "2026-02-09T12:57:17.497Z" }, + { url = "https://files.pythonhosted.org/packages/db/23/aad45061a31677d68e47499197a131eea55da4875d16c1f42021ab963503/coverage-7.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b66a2da594b6068b48b2692f043f35d4d3693fb639d5ea8b39533c2ad9ac3ab9", size = 219474, upload-time = "2026-02-09T12:57:19.332Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/70/9b8b67a0945f3dfec1fd896c5cefb7c19d5a3a6d74630b99a895170999ae/coverage-7.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3599eb3992d814d23b35c536c28df1a882caa950f8f507cef23d1cbf334995ac", size = 219844, upload-time = "2026-02-09T12:57:20.66Z" }, + { url = "https://files.pythonhosted.org/packages/97/fd/7e859f8fab324cef6c4ad7cff156ca7c489fef9179d5749b0c8d321281c2/coverage-7.13.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:93550784d9281e374fb5a12bf1324cc8a963fd63b2d2f223503ef0fd4aa339ea", size = 250832, upload-time = "2026-02-09T12:57:22.007Z" }, + { url = "https://files.pythonhosted.org/packages/e4/dc/b2442d10020c2f52617828862d8b6ee337859cd8f3a1f13d607dddda9cf7/coverage-7.13.4-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b720ce6a88a2755f7c697c23268ddc47a571b88052e6b155224347389fdf6a3b", size = 253434, upload-time = "2026-02-09T12:57:23.339Z" }, + { url = "https://files.pythonhosted.org/packages/5a/88/6728a7ad17428b18d836540630487231f5470fb82454871149502f5e5aa2/coverage-7.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b322db1284a2ed3aa28ffd8ebe3db91c929b7a333c0820abec3d838ef5b3525", size = 254676, upload-time = "2026-02-09T12:57:24.774Z" }, + { url = "https://files.pythonhosted.org/packages/7c/bc/21244b1b8cedf0dff0a2b53b208015fe798d5f2a8d5348dbfece04224fff/coverage-7.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f4594c67d8a7c89cf922d9df0438c7c7bb022ad506eddb0fdb2863359ff78242", size = 256807, upload-time = "2026-02-09T12:57:26.125Z" }, + { url = "https://files.pythonhosted.org/packages/97/a0/ddba7ed3251cff51006737a727d84e05b61517d1784a9988a846ba508877/coverage-7.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:53d133df809c743eb8bce33b24bcababb371f4441340578cd406e084d94a6148", size = 251058, upload-time = "2026-02-09T12:57:27.614Z" }, + { url = "https://files.pythonhosted.org/packages/9b/55/e289addf7ff54d3a540526f33751951bf0878f3809b47f6dfb3def69c6f7/coverage-7.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76451d1978b95ba6507a039090ba076105c87cc76fc3efd5d35d72093964d49a", size = 252805, upload-time = "2026-02-09T12:57:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/13/4e/cc276b1fa4a59be56d96f1dabddbdc30f4ba22e3b1cd42504c37b3313255/coverage-7.13.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f57b33491e281e962021de110b451ab8a24182589be17e12a22c79047935e23", size = 250766, upload-time = "2026-02-09T12:57:30.522Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/1093b8f93018f8b41a8cf29636c9292502f05e4a113d4d107d14a3acd044/coverage-7.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1731dc33dc276dafc410a885cbf5992f1ff171393e48a21453b78727d090de80", size = 254923, upload-time = "2026-02-09T12:57:31.946Z" }, + { url = "https://files.pythonhosted.org/packages/8b/55/ea2796da2d42257f37dbea1aab239ba9263b31bd91d5527cdd6db5efe174/coverage-7.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:bd60d4fe2f6fa7dff9223ca1bbc9f05d2b6697bc5961072e5d3b952d46e1b1ea", size = 250591, upload-time = "2026-02-09T12:57:33.842Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fa/7c4bb72aacf8af5020675aa633e59c1fbe296d22aed191b6a5b711eb2bc7/coverage-7.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9181a3ccead280b828fae232df12b16652702b49d41e99d657f46cc7b1f6ec7a", size = 252364, upload-time = "2026-02-09T12:57:35.743Z" }, + { url = "https://files.pythonhosted.org/packages/5c/38/a8d2ec0146479c20bbaa7181b5b455a0c41101eed57f10dd19a78ab44c80/coverage-7.13.4-cp313-cp313-win32.whl", hash = "sha256:f53d492307962561ac7de4cd1de3e363589b000ab69617c6156a16ba7237998d", size = 222010, upload-time = "2026-02-09T12:57:37.25Z" 
}, + { url = "https://files.pythonhosted.org/packages/e2/0c/dbfafbe90a185943dcfbc766fe0e1909f658811492d79b741523a414a6cc/coverage-7.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:e6f70dec1cc557e52df5306d051ef56003f74d56e9c4dd7ddb07e07ef32a84dd", size = 222818, upload-time = "2026-02-09T12:57:38.734Z" }, + { url = "https://files.pythonhosted.org/packages/04/d1/934918a138c932c90d78301f45f677fb05c39a3112b96fd2c8e60503cdc7/coverage-7.13.4-cp313-cp313-win_arm64.whl", hash = "sha256:fb07dc5da7e849e2ad31a5d74e9bece81f30ecf5a42909d0a695f8bd1874d6af", size = 221438, upload-time = "2026-02-09T12:57:40.223Z" }, + { url = "https://files.pythonhosted.org/packages/52/57/ee93ced533bcb3e6df961c0c6e42da2fc6addae53fb95b94a89b1e33ebd7/coverage-7.13.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40d74da8e6c4b9ac18b15331c4b5ebc35a17069410cad462ad4f40dcd2d50c0d", size = 220165, upload-time = "2026-02-09T12:57:41.639Z" }, + { url = "https://files.pythonhosted.org/packages/c5/e0/969fc285a6fbdda49d91af278488d904dcd7651b2693872f0ff94e40e84a/coverage-7.13.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4223b4230a376138939a9173f1bdd6521994f2aff8047fae100d6d94d50c5a12", size = 220516, upload-time = "2026-02-09T12:57:44.215Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b8/9531944e16267e2735a30a9641ff49671f07e8138ecf1ca13db9fd2560c7/coverage-7.13.4-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1d4be36a5114c499f9f1f9195e95ebf979460dbe2d88e6816ea202010ba1c34b", size = 261804, upload-time = "2026-02-09T12:57:45.989Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f3/e63df6d500314a2a60390d1989240d5f27318a7a68fa30ad3806e2a9323e/coverage-7.13.4-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:200dea7d1e8095cc6e98cdabe3fd1d21ab17d3cee6dab00cadbb2fe35d9c15b9", size = 263885, upload-time = "2026-02-09T12:57:47.42Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/67/7654810de580e14b37670b60a09c599fa348e48312db5b216d730857ffe6/coverage-7.13.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8eb931ee8e6d8243e253e5ed7336deea6904369d2fd8ae6e43f68abbf167092", size = 266308, upload-time = "2026-02-09T12:57:49.345Z" }, + { url = "https://files.pythonhosted.org/packages/37/6f/39d41eca0eab3cc82115953ad41c4e77935286c930e8fad15eaed1389d83/coverage-7.13.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:75eab1ebe4f2f64d9509b984f9314d4aa788540368218b858dad56dc8f3e5eb9", size = 267452, upload-time = "2026-02-09T12:57:50.811Z" }, + { url = "https://files.pythonhosted.org/packages/50/6d/39c0fbb8fc5cd4d2090811e553c2108cf5112e882f82505ee7495349a6bf/coverage-7.13.4-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c35eb28c1d085eb7d8c9b3296567a1bebe03ce72962e932431b9a61f28facf26", size = 261057, upload-time = "2026-02-09T12:57:52.447Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a2/60010c669df5fa603bb5a97fb75407e191a846510da70ac657eb696b7fce/coverage-7.13.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb88b316ec33760714a4720feb2816a3a59180fd58c1985012054fa7aebee4c2", size = 263875, upload-time = "2026-02-09T12:57:53.938Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d9/63b22a6bdbd17f1f96e9ed58604c2a6b0e72a9133e37d663bef185877cf6/coverage-7.13.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d41eead3cc673cbd38a4417deb7fd0b4ca26954ff7dc6078e33f6ff97bed940", size = 261500, upload-time = "2026-02-09T12:57:56.012Z" }, + { url = "https://files.pythonhosted.org/packages/70/bf/69f86ba1ad85bc3ad240e4c0e57a2e620fbc0e1645a47b5c62f0e941ad7f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:fb26a934946a6afe0e326aebe0730cdff393a8bc0bbb65a2f41e30feddca399c", size = 265212, upload-time = "2026-02-09T12:57:57.5Z" }, + 
{ url = "https://files.pythonhosted.org/packages/ae/f2/5f65a278a8c2148731831574c73e42f57204243d33bedaaf18fa79c5958f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:dae88bc0fc77edaa65c14be099bd57ee140cf507e6bfdeea7938457ab387efb0", size = 260398, upload-time = "2026-02-09T12:57:59.027Z" }, + { url = "https://files.pythonhosted.org/packages/ef/80/6e8280a350ee9fea92f14b8357448a242dcaa243cb2c72ab0ca591f66c8c/coverage-7.13.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:845f352911777a8e722bfce168958214951e07e47e5d5d9744109fa5fe77f79b", size = 262584, upload-time = "2026-02-09T12:58:01.129Z" }, + { url = "https://files.pythonhosted.org/packages/22/63/01ff182fc95f260b539590fb12c11ad3e21332c15f9799cb5e2386f71d9f/coverage-7.13.4-cp313-cp313t-win32.whl", hash = "sha256:2fa8d5f8de70688a28240de9e139fa16b153cc3cbb01c5f16d88d6505ebdadf9", size = 222688, upload-time = "2026-02-09T12:58:02.736Z" }, + { url = "https://files.pythonhosted.org/packages/a9/43/89de4ef5d3cd53b886afa114065f7e9d3707bdb3e5efae13535b46ae483d/coverage-7.13.4-cp313-cp313t-win_amd64.whl", hash = "sha256:9351229c8c8407645840edcc277f4a2d44814d1bc34a2128c11c2a031d45a5dd", size = 223746, upload-time = "2026-02-09T12:58:05.362Z" }, + { url = "https://files.pythonhosted.org/packages/35/39/7cf0aa9a10d470a5309b38b289b9bb07ddeac5d61af9b664fe9775a4cb3e/coverage-7.13.4-cp313-cp313t-win_arm64.whl", hash = "sha256:30b8d0512f2dc8c8747557e8fb459d6176a2c9e5731e2b74d311c03b78451997", size = 222003, upload-time = "2026-02-09T12:58:06.952Z" }, + { url = "https://files.pythonhosted.org/packages/92/11/a9cf762bb83386467737d32187756a42094927150c3e107df4cb078e8590/coverage-7.13.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:300deaee342f90696ed186e3a00c71b5b3d27bffe9e827677954f4ee56969601", size = 219522, upload-time = "2026-02-09T12:58:08.623Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/28/56e6d892b7b052236d67c95f1936b6a7cf7c3e2634bf27610b8cbd7f9c60/coverage-7.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29e3220258d682b6226a9b0925bc563ed9a1ebcff3cad30f043eceea7eaf2689", size = 219855, upload-time = "2026-02-09T12:58:10.176Z" }, + { url = "https://files.pythonhosted.org/packages/e5/69/233459ee9eb0c0d10fcc2fe425a029b3fa5ce0f040c966ebce851d030c70/coverage-7.13.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:391ee8f19bef69210978363ca930f7328081c6a0152f1166c91f0b5fdd2a773c", size = 250887, upload-time = "2026-02-09T12:58:12.503Z" }, + { url = "https://files.pythonhosted.org/packages/06/90/2cdab0974b9b5bbc1623f7876b73603aecac11b8d95b85b5b86b32de5eab/coverage-7.13.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0dd7ab8278f0d58a0128ba2fca25824321f05d059c1441800e934ff2efa52129", size = 253396, upload-time = "2026-02-09T12:58:14.615Z" }, + { url = "https://files.pythonhosted.org/packages/ac/15/ea4da0f85bf7d7b27635039e649e99deb8173fe551096ea15017f7053537/coverage-7.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78cdf0d578b15148b009ccf18c686aa4f719d887e76e6b40c38ffb61d264a552", size = 254745, upload-time = "2026-02-09T12:58:16.162Z" }, + { url = "https://files.pythonhosted.org/packages/99/11/bb356e86920c655ca4d61daee4e2bbc7258f0a37de0be32d233b561134ff/coverage-7.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:48685fee12c2eb3b27c62f2658e7ea21e9c3239cba5a8a242801a0a3f6a8c62a", size = 257055, upload-time = "2026-02-09T12:58:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/c9/0f/9ae1f8cb17029e09da06ca4e28c9e1d5c1c0a511c7074592e37e0836c915/coverage-7.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:4e83efc079eb39480e6346a15a1bcb3e9b04759c5202d157e1dd4303cd619356", size = 250911, upload-time = "2026-02-09T12:58:19.495Z" }, + { url = "https://files.pythonhosted.org/packages/89/3a/adfb68558fa815cbc29747b553bc833d2150228f251b127f1ce97e48547c/coverage-7.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ecae9737b72408d6a950f7e525f30aca12d4bd8dd95e37342e5beb3a2a8c4f71", size = 252754, upload-time = "2026-02-09T12:58:21.064Z" }, + { url = "https://files.pythonhosted.org/packages/32/b1/540d0c27c4e748bd3cd0bd001076ee416eda993c2bae47a73b7cc9357931/coverage-7.13.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ae4578f8528569d3cf303fef2ea569c7f4c4059a38c8667ccef15c6e1f118aa5", size = 250720, upload-time = "2026-02-09T12:58:22.622Z" }, + { url = "https://files.pythonhosted.org/packages/c7/95/383609462b3ffb1fe133014a7c84fc0dd01ed55ac6140fa1093b5af7ebb1/coverage-7.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6fdef321fdfbb30a197efa02d48fcd9981f0d8ad2ae8903ac318adc653f5df98", size = 254994, upload-time = "2026-02-09T12:58:24.548Z" }, + { url = "https://files.pythonhosted.org/packages/f7/ba/1761138e86c81680bfc3c49579d66312865457f9fe405b033184e5793cb3/coverage-7.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b0f6ccf3dbe577170bebfce1318707d0e8c3650003cb4b3a9dd744575daa8b5", size = 250531, upload-time = "2026-02-09T12:58:26.271Z" }, + { url = "https://files.pythonhosted.org/packages/f8/8e/05900df797a9c11837ab59c4d6fe94094e029582aab75c3309a93e6fb4e3/coverage-7.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75fcd519f2a5765db3f0e391eb3b7d150cce1a771bf4c9f861aeab86c767a3c0", size = 252189, upload-time = "2026-02-09T12:58:27.807Z" }, + { url = "https://files.pythonhosted.org/packages/00/bd/29c9f2db9ea4ed2738b8a9508c35626eb205d51af4ab7bf56a21a2e49926/coverage-7.13.4-cp314-cp314-win32.whl", hash = "sha256:8e798c266c378da2bd819b0677df41ab46d78065fb2a399558f3f6cae78b2fbb", size = 222258, upload-time = 
"2026-02-09T12:58:29.441Z" }, + { url = "https://files.pythonhosted.org/packages/a7/4d/1f8e723f6829977410efeb88f73673d794075091c8c7c18848d273dc9d73/coverage-7.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:245e37f664d89861cf2329c9afa2c1fe9e6d4e1a09d872c947e70718aeeac505", size = 223073, upload-time = "2026-02-09T12:58:31.026Z" }, + { url = "https://files.pythonhosted.org/packages/51/5b/84100025be913b44e082ea32abcf1afbf4e872f5120b7a1cab1d331b1e13/coverage-7.13.4-cp314-cp314-win_arm64.whl", hash = "sha256:ad27098a189e5838900ce4c2a99f2fe42a0bf0c2093c17c69b45a71579e8d4a2", size = 221638, upload-time = "2026-02-09T12:58:32.599Z" }, + { url = "https://files.pythonhosted.org/packages/a7/e4/c884a405d6ead1370433dad1e3720216b4f9fd8ef5b64bfd984a2a60a11a/coverage-7.13.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:85480adfb35ffc32d40918aad81b89c69c9cc5661a9b8a81476d3e645321a056", size = 220246, upload-time = "2026-02-09T12:58:34.181Z" }, + { url = "https://files.pythonhosted.org/packages/81/5c/4d7ed8b23b233b0fffbc9dfec53c232be2e695468523242ea9fd30f97ad2/coverage-7.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:79be69cf7f3bf9b0deeeb062eab7ac7f36cd4cc4c4dd694bd28921ba4d8596cc", size = 220514, upload-time = "2026-02-09T12:58:35.704Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6f/3284d4203fd2f28edd73034968398cd2d4cb04ab192abc8cff007ea35679/coverage-7.13.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:caa421e2684e382c5d8973ac55e4f36bed6821a9bad5c953494de960c74595c9", size = 261877, upload-time = "2026-02-09T12:58:37.864Z" }, + { url = "https://files.pythonhosted.org/packages/09/aa/b672a647bbe1556a85337dc95bfd40d146e9965ead9cc2fe81bde1e5cbce/coverage-7.13.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14375934243ee05f56c45393fe2ce81fe5cc503c07cee2bdf1725fb8bef3ffaf", size = 264004, upload-time = "2026-02-09T12:58:39.492Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/a1/aa384dbe9181f98bba87dd23dda436f0c6cf2e148aecbb4e50fc51c1a656/coverage-7.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25a41c3104d08edb094d9db0d905ca54d0cd41c928bb6be3c4c799a54753af55", size = 266408, upload-time = "2026-02-09T12:58:41.852Z" }, + { url = "https://files.pythonhosted.org/packages/53/5e/5150bf17b4019bc600799f376bb9606941e55bd5a775dc1e096b6ffea952/coverage-7.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f01afcff62bf9a08fb32b2c1d6e924236c0383c02c790732b6537269e466a72", size = 267544, upload-time = "2026-02-09T12:58:44.093Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/f1de5c675987a4a7a672250d2c5c9d73d289dbf13410f00ed7181d8017dd/coverage-7.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eb9078108fbf0bcdde37c3f4779303673c2fa1fe8f7956e68d447d0dd426d38a", size = 260980, upload-time = "2026-02-09T12:58:45.721Z" }, + { url = "https://files.pythonhosted.org/packages/b3/e3/fe758d01850aa172419a6743fe76ba8b92c29d181d4f676ffe2dae2ba631/coverage-7.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e086334e8537ddd17e5f16a344777c1ab8194986ec533711cbe6c41cde841b6", size = 263871, upload-time = "2026-02-09T12:58:47.334Z" }, + { url = "https://files.pythonhosted.org/packages/b6/76/b829869d464115e22499541def9796b25312b8cf235d3bb00b39f1675395/coverage-7.13.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:725d985c5ab621268b2edb8e50dfe57633dc69bda071abc470fed55a14935fd3", size = 261472, upload-time = "2026-02-09T12:58:48.995Z" }, + { url = "https://files.pythonhosted.org/packages/14/9e/caedb1679e73e2f6ad240173f55218488bfe043e38da577c4ec977489915/coverage-7.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3c06f0f1337c667b971ca2f975523347e63ec5e500b9aa5882d91931cd3ef750", size = 265210, upload-time = "2026-02-09T12:58:51.178Z" }, 
+ { url = "https://files.pythonhosted.org/packages/3a/10/0dd02cb009b16ede425b49ec344aba13a6ae1dc39600840ea6abcb085ac4/coverage-7.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:590c0ed4bf8e85f745e6b805b2e1c457b2e33d5255dd9729743165253bc9ad39", size = 260319, upload-time = "2026-02-09T12:58:53.081Z" }, + { url = "https://files.pythonhosted.org/packages/92/8e/234d2c927af27c6d7a5ffad5bd2cf31634c46a477b4c7adfbfa66baf7ebb/coverage-7.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:eb30bf180de3f632cd043322dad5751390e5385108b2807368997d1a92a509d0", size = 262638, upload-time = "2026-02-09T12:58:55.258Z" }, + { url = "https://files.pythonhosted.org/packages/2f/64/e5547c8ff6964e5965c35a480855911b61509cce544f4d442caa759a0702/coverage-7.13.4-cp314-cp314t-win32.whl", hash = "sha256:c4240e7eded42d131a2d2c4dec70374b781b043ddc79a9de4d55ca71f8e98aea", size = 223040, upload-time = "2026-02-09T12:58:56.936Z" }, + { url = "https://files.pythonhosted.org/packages/c7/96/38086d58a181aac86d503dfa9c47eb20715a79c3e3acbdf786e92e5c09a8/coverage-7.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4c7d3cc01e7350f2f0f6f7036caaf5673fb56b6998889ccfe9e1c1fe75a9c932", size = 224148, upload-time = "2026-02-09T12:58:58.645Z" }, + { url = "https://files.pythonhosted.org/packages/ce/72/8d10abd3740a0beb98c305e0c3faf454366221c0f37a8bcf8f60020bb65a/coverage-7.13.4-cp314-cp314t-win_arm64.whl", hash = "sha256:23e3f687cf945070d1c90f85db66d11e3025665d8dafa831301a0e0038f3db9b", size = 222172, upload-time = "2026-02-09T12:59:00.396Z" }, + { url = "https://files.pythonhosted.org/packages/0d/4a/331fe2caf6799d591109bb9c08083080f6de90a823695d412a935622abb2/coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0", size = 211242, upload-time = "2026-02-09T12:59:02.032Z" }, +] + +[[package]] +name = "cuda-bindings" +version = "13.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cuda-pathfinder" }, +] 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/61/3c/c33fd3aa5fcc89aa1c135e477a0561f29142ab5fe028ca425fc87f7f0a74/cuda_bindings-13.0.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b899e5a513c11eaa18648f9bf5265d8de2a93f76ef66a6bfca0a2887303965cd", size = 11709086, upload-time = "2025-10-21T15:09:00.005Z" }, + { url = "https://files.pythonhosted.org/packages/21/ac/6b34452a3836c9fbabcd360689a353409d15f500dd9d9ced7f837549e383/cuda_bindings-13.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cf41d9e69019939aa15296fa66ea7d3fdb8d2c6383f729f4b1156c8b37808a06", size = 12128303, upload-time = "2025-10-21T15:09:02.889Z" }, + { url = "https://files.pythonhosted.org/packages/2f/36/41ccc303eb6be8ae82c5edd2ccae938876e8a794660e8bb96a193174a978/cuda_bindings-13.0.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb16a7f769c9c67469add7a1d9f6c14dd44637f6921cb6b9eb82cb5015b35c3d", size = 11537064, upload-time = "2025-10-21T15:09:07.84Z" }, + { url = "https://files.pythonhosted.org/packages/ab/ac/699889100536f1b63779646291e74eefa818087a0974eb271314d850f5dc/cuda_bindings-13.0.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:512d0d803a5e47a8a42d5a34ce0932802bf72fe952fdb11ac798715a35c6e5cb", size = 11910447, upload-time = "2025-10-21T15:09:09.942Z" }, + { url = "https://files.pythonhosted.org/packages/11/67/9656e003f18c5b32e1a2496998b24f4355ec978c5f3639b0eb9f6d0ff83f/cuda_bindings-13.0.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c859e326c776a47e66c50386a10c84fe34291eb6e711610c9fd7cc27d446334f", size = 11522409, upload-time = "2025-10-21T15:09:14.674Z" }, + { url = "https://files.pythonhosted.org/packages/18/d8/a83379caa7c1bed4195e704c24467a6c07fe8e29c7055ccd4f00c5702363/cuda_bindings-13.0.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:e675dbd009fb5e66d63fd13a8ff35f849120f01bcc4dafadbced3004605c3588", size = 11903148, upload-time = "2025-10-21T15:09:16.918Z" }, + { url = "https://files.pythonhosted.org/packages/e8/99/0042dc5e98e3364480b1aaabc0f5c150d037825b264bba35ac7a883e46ee/cuda_bindings-13.0.3-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7c7e6e89cdfc9b34f16a065cc6ad6c4bab19ce5dcef8da3ace8ad10bda899fa0", size = 11594384, upload-time = "2025-10-21T15:09:21.938Z" }, + { url = "https://files.pythonhosted.org/packages/7a/c4/a931a90ce763bd7d587e18e73e4ce246b8547c78247c4f50ee24efc0e984/cuda_bindings-13.0.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e93866465e7ff4b7ebdf711cf9cd680499cd875f992058c68be08d4775ac233d", size = 11920899, upload-time = "2025-10-21T15:09:26.306Z" }, + { url = "https://files.pythonhosted.org/packages/6f/2c/ec611e27ba48a9056f3b0610c5e27727e539f3905356cfe07acea18e772c/cuda_bindings-13.0.3-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ed06ef3507bd0aefb0da367e3d15676a8c7443bd68a88f298562d60b41078c20", size = 11521928, upload-time = "2025-10-21T15:09:30.714Z" }, + { url = "https://files.pythonhosted.org/packages/d4/2e/02cebf281ef5201b6bb9ea193b1a4d26e6233c46571cfb04c4a7dede12b9/cuda_bindings-13.0.3-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3ab845487ca2c14accdcb393a559a3070469ea4b591d05e6ef439471f47f3e24", size = 11902749, upload-time = "2025-10-21T15:09:32.688Z" }, +] + +[[package]] +name = "cuda-pathfinder" +version = "1.3.4" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/5e/db279a3bfbd18d59d0598922a3b3c1454908d0969e8372260afec9736376/cuda_pathfinder-1.3.4-py3-none-any.whl", hash = "sha256:fb983f6e0d43af27ef486e14d5989b5f904ef45cedf40538bfdcbffa6bb01fb2", size = 30878, upload-time = "2026-02-11T18:50:31.008Z" }, +] + +[[package]] +name = "cuda-toolkit" +version = 
"13.0.2" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/57/b2/453099f5f3b698d7d0eab38916aac44c7f76229f451709e2eb9db6615dcd/cuda_toolkit-13.0.2-py2.py3-none-any.whl", hash = "sha256:b198824cf2f54003f50d64ada3a0f184b42ca0846c1c94192fa269ecd97a66eb", size = 2364, upload-time = "2025-12-19T23:24:07.328Z" }, +] + +[package.optional-dependencies] +cublas = [ + { name = "nvidia-cublas", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, +] +cudart = [ + { name = "nvidia-cuda-runtime", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, +] +cufft = [ + { name = "nvidia-cufft", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, +] +cufile = [ + { name = "nvidia-cufile", marker = "sys_platform == 'linux'" }, +] +cupti = [ + { name = "nvidia-cuda-cupti", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, +] +curand = [ + { name = "nvidia-curand", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, +] +cusolver = [ + { name = "nvidia-cusolver", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, +] +cusparse = [ + { name = "nvidia-cusparse", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, +] +nvjitlink = [ + { name = "nvidia-nvjitlink", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, +] +nvrtc = [ + { name = "nvidia-cuda-nvrtc", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, +] +nvtx = [ + { name = "nvidia-nvtx", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, +] + +[[package]] +name = "decorator" +version = "5.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } +wheels = [ + 
{ url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, +] + +[[package]] +name = "discord-py" +version = "2.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "audioop-lts", marker = "python_full_version >= '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ef/57/9a2d9abdabdc9db8ef28ce0cf4129669e1c8717ba28d607b5ba357c4de3b/discord_py-2.7.1.tar.gz", hash = "sha256:24d5e6a45535152e4b98148a9dd6b550d25dc2c9fb41b6d670319411641249da", size = 1106326, upload-time = "2026-03-03T18:40:46.24Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/a7/17208c3b3f92319e7fad259f1c6d5a5baf8fd0654c54846ced329f83c3eb/discord_py-2.7.1-py3-none-any.whl", hash = "sha256:849dca2c63b171146f3a7f3f8acc04248098e9e6203412ce3cf2745f284f7439", size = 1227550, upload-time = "2026-03-03T18:40:44.492Z" }, +] + +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, +] + +[[package]] +name = "dnspython" +version = "2.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, +] + +[[package]] +name = "email-validator" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/22/900cb125c76b7aaa450ce02fd727f452243f2e91a61af068b40adba60ea9/email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426", size = 51238, upload-time = "2025-08-26T13:09:06.831Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4", size = 35604, upload-time = "2025-08-26T13:09:05.858Z" }, +] + +[[package]] +name = "execnet" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bf/89/780e11f9588d9e7128a3f87788354c7946a9cbb1401ad38a48c4db9a4f07/execnet-2.1.2.tar.gz", hash = "sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd", size = 166622, upload-time = "2025-11-12T09:56:37.75Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl", hash = 
"sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec", size = 40708, upload-time = "2025-11-12T09:56:36.333Z" }, +] + +[[package]] +name = "fastapi" +version = "0.136.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-doc" }, + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5d/45/c130091c2dfa061bbfe3150f2a5091ef1adf149f2a8d2ae769ecaf6e99a2/fastapi-0.136.1.tar.gz", hash = "sha256:7af665ad7acfa0a3baf8983d393b6b471b9da10ede59c60045f49fbc89a0fa7f", size = 397448, upload-time = "2026-04-23T16:49:44.046Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/ff/2e4eca3ade2c22fe1dea7043b8ee9dabe47753349eb1b56a202de8af6349/fastapi-0.136.1-py3-none-any.whl", hash = "sha256:a6e9d7eeada96c93a4d69cb03836b44fa34e2854accb7244a1ece36cd4781c3f", size = 117683, upload-time = "2026-04-23T16:49:42.437Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "email-validator" }, + { name = "fastapi-cli", extra = ["standard"] }, + { name = "fastar" }, + { name = "httpx" }, + { name = "jinja2" }, + { name = "pydantic-extra-types" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[[package]] +name = "fastapi-cli" +version = "0.0.21" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "rich-toolkit" }, + { name = "typer" }, + { name = "uvicorn", extra = ["standard"] }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4a/5a/500ec4deaa9a5d6bc7909cbd7b252fa37fe80d418c55a65ce5ed11c53505/fastapi_cli-0.0.21.tar.gz", hash = "sha256:457134b8f3e08d2d203a18db923a18bbc1a01d9de36fbe1fa7905c4d02a0e5c0", size = 19664, upload-time = "2026-02-11T15:27:59.65Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/de/cf/d1f3ea2a1661d80c62c7b1537184ec28ec832eefb7ad1ff3047813d19452/fastapi_cli-0.0.21-py3-none-any.whl", hash = "sha256:57c6e043694c68618eee04d00b4d93213c37f5a854b369d2871a77dfeff57e91", size = 12391, upload-time = "2026-02-11T15:27:58.181Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "fastapi-cloud-cli" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[[package]] +name = "fastapi-cloud-cli" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "fastar" }, + { name = "httpx" }, + { name = "pydantic", extra = ["email"] }, + { name = "rich-toolkit" }, + { name = "rignore" }, + { name = "sentry-sdk" }, + { name = "typer" }, + { name = "uvicorn", extra = ["standard"] }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1b/59/3def056ec8350df78a0786b7ca40a167cbf28ac26552ced4e19e1f83e872/fastapi_cloud_cli-0.12.0.tar.gz", hash = "sha256:c897d1d5e27f5b4148ed2601076785155ec8fb385a6a62d3e8801880f929629f", size = 38508, upload-time = "2026-02-13T19:39:57.877Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/6f/badabb5a21388b0af2b9cd0c2a5d81aaecfca57bf382872890e802eaed98/fastapi_cloud_cli-0.12.0-py3-none-any.whl", hash = "sha256:9c666c2ab1684cee48a5b0a29ac1ae0bd395b9a13bf6858448b4369ea68beda1", size = 27735, upload-time = "2026-02-13T19:39:58.705Z" }, +] + +[[package]] +name = "fastar" +version = "0.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/03/0f/0aeb3fc50046617702acc0078b277b58367fd62eb727b9ec733ae0e8bbcc/fastar-0.11.0.tar.gz", hash = "sha256:aa7f100f7313c03fdb20f1385927ba95671071ba308ad0c1763fef295e1895ce", size = 70238, upload-time = "2026-04-13T17:11:17.143Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/06/a5773706afc8bd496769786590bbc56d2d0ee419a299cc12ea3f5717fcf3/fastar-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:3c51f1c2cdddbd1420d2897ace7738e36c65e17f6ae84e0bfe763f8d1068bb97", size = 708394, upload-time = "2026-04-13T17:09:57.269Z" }, + { url = "https://files.pythonhosted.org/packages/cc/a6/d5e2a4e48495616440a21eed07558219ca90243ad00b0502586f95bd4833/fastar-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0d9d6b052baf5380baea866675dab6ccd04ec2460d12b1c46f10ce3f4ee6a820", size = 628417, upload-time = "2026-04-13T17:09:42.145Z" }, + { url = "https://files.pythonhosted.org/packages/ab/69/9816d69ac8265c9e50456637a487ccfb7a9c566efd9dbcd673df9c2558c2/fastar-0.11.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:bd2f05666d4df7e14885b5c38fefd92a785917387513d33d837ff42ec143a22f", size = 863950, upload-time = "2026-04-13T17:09:11.506Z" }, + { url = "https://files.pythonhosted.org/packages/5b/0d/f88daad53aff2e754b6b5ff2a7113f72447a34f6ef17cc23ca99988117b7/fastar-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1e6e74aba1ae77ca4aedcaf1697cd413319f4c88a5ccbe5b42c709517c5097e", size = 760737, upload-time = "2026-04-13T17:07:55.958Z" }, + { url = "https://files.pythonhosted.org/packages/2f/a6/82ef4ecd969d50d92ed3ed9dbd8fe77faa24be5e5736f716edc9f4ce8d62/fastar-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38ef77fe940bbc9b37a98bd838727f844b11731cd39358a2640ff864fb385086", size = 757603, upload-time = "2026-04-13T17:08:10.623Z" }, + { url = "https://files.pythonhosted.org/packages/03/35/50249f0d827251f8ac511495e2eacccebda80a00a0ad73e9615b8113b84f/fastar-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8955e61b32d6aff82c983217abf80933fd823b0e727586fc72f08043d996fd59", size = 923952, upload-time = "2026-04-13T17:08:25.526Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d8/faee41659e9c379d906d24eaee6d6833ac8cfef0a5df480e5c2a8d3efb33/fastar-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:483532442cdb08fbff0169510224eae0836f2f672cea6aacb52847d90fefdc46", size = 816574, upload-time = "2026-04-13T17:08:56.076Z" }, + { url = "https://files.pythonhosted.org/packages/22/47/0448ea7992b997dad2bf004bfd98eca74b5858630eae080b50c7b17d9ddc/fastar-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef5a6071121e05d8287fc75bccb054bcbac8bb0501200a0c0a8feeace5303ea4", size = 819382, upload-time = "2026-04-13T17:09:26.66Z" }, + { url = "https://files.pythonhosted.org/packages/33/ef/0d63eb43586831b7a6f8b22c4d77125a7c594423af1f4f090fa9541b9b40/fastar-0.11.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:e45e598af5afe8412197d4786efd6cf29be02e7d3d4f6a3461149eae5d7e94f1", size = 885254, upload-time = "2026-04-13T17:08:40.9Z" }, + { url = "https://files.pythonhosted.org/packages/01/25/edd584675d69e49a165052c3ee886df1c5d574f3e7d813c990306387c623/fastar-0.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2e160919b1c47ddb8538e7e8eb4cd527281b40f0bf75110a75993838ef61f286", size = 971239, upload-time = "2026-04-13T17:10:12.997Z" }, + { url = "https://files.pythonhosted.org/packages/a5/37/e8bb24f506ba2b08fbaf36c5800e843bd4d542954e9331f00418e2d23349/fastar-0.11.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:4bb4dc0fc8f7a6807febcebce8a2f3626ba4955a9263d81ecc630aad83be84c0", size = 1035185, upload-time = "2026-04-13T17:10:30.207Z" }, + { url = "https://files.pythonhosted.org/packages/9a/bf/be753736296338149ee4cb3e92e2b5423d6ba17c7b951d15218fd7e99bbf/fastar-0.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4ec95af56aa173f6e320e1183001bf108ba59beaf13edd1fc8200648db203588", size = 1072191, upload-time = "2026-04-13T17:10:47.072Z" }, + { url = "https://files.pythonhosted.org/packages/d2/cd/a81c1aaafb5a22ce57c98ae22f39c89413ed53e4ee6e1b1444b0bd666a6c/fastar-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:136cf342735464091c39dc3708168f9fdeb9ebea40b1ead937c61afaf46143d9", size = 1028054, upload-time = 
"2026-04-13T17:11:04.293Z" }, + { url = "https://files.pythonhosted.org/packages/ec/88/1ce4eed3d70627c95f49ca017f6bbbf2ddcc4b0c601d293259de7689bc20/fastar-0.11.0-cp312-cp312-win32.whl", hash = "sha256:35f23c11b556cc4d3704587faacbc0037f7bdf6c4525cd1d09c70bda4b1c6809", size = 454198, upload-time = "2026-04-13T17:11:45.168Z" }, + { url = "https://files.pythonhosted.org/packages/8f/1d/26ce92f4331cd61a69840db9ca6115829805eec24f285481a854f578e917/fastar-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:920bc56c3c0b8a8ca492904941d1883c1c947c858cd93343356c29122a38f44c", size = 486697, upload-time = "2026-04-13T17:11:31.084Z" }, + { url = "https://files.pythonhosted.org/packages/ed/96/e6eda4480559c69b05d466e7b5ea9170e81fef3795a73e059959a3258319/fastar-0.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:395248faf89e8a6bd5dc1fd544c8465113b627cb6d7c8b296796b60ebea33593", size = 462591, upload-time = "2026-04-13T17:11:20.577Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d6/3be260037e86fb694e88d47f583bac3a0188c99cee1a6b257ac26cb6b53c/fastar-0.11.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:33f544b08b4541b678e53749b4552a44720d96761fb79c172b005b1089c443ed", size = 707975, upload-time = "2026-04-13T17:09:58.866Z" }, + { url = "https://files.pythonhosted.org/packages/e1/cd/7867aefb1784662554a335f2952c75a50f0c70585ed0d2210d6cc15e5627/fastar-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:91c1c792447e4a642745f347ff9847c52af39633071c57ee67ed53c157fc3506", size = 628460, upload-time = "2026-04-13T17:09:43.776Z" }, + { url = "https://files.pythonhosted.org/packages/e5/2b/d11d84bdd5e0e377771b955755771e3460b290da5809cb78c1b735ee2228/fastar-0.11.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:881247e6b6eaea59fc6569f9b61447aa6b9fc2ee864e048b4643d69c52745805", size = 863054, upload-time = "2026-04-13T17:09:13.048Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/39/d3f428b318fa940b1b6e785b8d54fc895dfb5d5b945ef8d5442ffa904fb2/fastar-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:863b7929845c9fec92ef6c8d59579cf46af5136655e5342f8df5cebe46cab06c", size = 760247, upload-time = "2026-04-13T17:07:57.396Z" }, + { url = "https://files.pythonhosted.org/packages/9e/04/03949aee82aabb8ede06ac5a4a5579ffaf98a8fe59ce958494508ff15513/fastar-0.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:96b4a57df12bf3211662627a3ea29d62ecb314a2434a0d0843f9fc23e47536e5", size = 756512, upload-time = "2026-04-13T17:08:12.415Z" }, + { url = "https://files.pythonhosted.org/packages/3f/0c/2ca1ae0a3828ca51047962d932b80daca2522db73e8cb9d040cb6ebe28d5/fastar-0.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceef1c2c4df7b7b8ebd3f5d718bbf457b9bbdf25ce0bd07870211ec4fbd9aff4", size = 922183, upload-time = "2026-04-13T17:08:27.187Z" }, + { url = "https://files.pythonhosted.org/packages/65/68/7fe808b1f73a68e686f25434f538c6dc10ef4dfb3db0ace22cd861744bf8/fastar-0.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8e545918441910a779659d4759ad0eef349e935fbdb4668a666d3681567eb05", size = 816394, upload-time = "2026-04-13T17:08:57.657Z" }, + { url = "https://files.pythonhosted.org/packages/1f/17/07d086080f8a83b8d7966955e29bcdbd6a060f5bd949dc9d5abd3658cead/fastar-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28095bb8f821e85fc2764e1a55f03e5e2876dee2abe7cd0ee9420d929905d643", size = 818983, upload-time = "2026-04-13T17:09:28.46Z" }, + { url = "https://files.pythonhosted.org/packages/fb/e2/2c4edf0910af2e814ff6d65b77a91196d472ca8a9fb2033bd983f6856caa/fastar-0.11.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:0fafb95ecbe70f666a5e9b35dd63974ccdc9bb3d99ccdbd4014a823ec3e659b5", size = 884689, upload-time = "2026-04-13T17:08:42.763Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/ba/04fdcbd6558e60de4ced3b55230fac47675d181252582b2fcec3c74608e5/fastar-0.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:af48fed039b94016629dcdad1c95c90c486326dd068de2b0a4df419ee09b6821", size = 970677, upload-time = "2026-04-13T17:10:15.124Z" }, + { url = "https://files.pythonhosted.org/packages/df/b3/2b860a9658550167dbd5824c85e88d0b4b912bf493e42a6322544d6e483d/fastar-0.11.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:74cd96163f39b8638ab4e8d49708ca887959672a22871d8170d01f067319533b", size = 1034026, upload-time = "2026-04-13T17:10:32.318Z" }, + { url = "https://files.pythonhosted.org/packages/b7/9b/fa42ea1188b144bac4b1b60753dfd449974a4d5eda132029ee7711569f94/fastar-0.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4e8b993cb5613bab495ed482810bedc0986633fcb9a3b55c37ec88e0d6714f6a", size = 1071147, upload-time = "2026-04-13T17:10:48.833Z" }, + { url = "https://files.pythonhosted.org/packages/95/c8/d2e501556dca9f1fbc9246111a31792fb49ad908fa4927f34938a97a3604/fastar-0.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dfe39d91fc28e37e06162d94afe01050220edb7df554acb5b702b5503e564816", size = 1028377, upload-time = "2026-04-13T17:11:06.374Z" }, + { url = "https://files.pythonhosted.org/packages/db/33/5f11f23eca0a569cd052507bc45dda2e5468697f8665728d25be44120f7d/fastar-0.11.0-cp313-cp313-win32.whl", hash = "sha256:c5f63d4d99ff4bfb37c659982ec413358bdee747005348756cc50a04d412d989", size = 454089, upload-time = "2026-04-13T17:11:46.821Z" }, + { url = "https://files.pythonhosted.org/packages/da/2f/35ff03c939cba7a255a9132367873fec6c355fd06a7f84fedcbaf4c8129f/fastar-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:8690ed1928d31ded3ada308e1086525fb3871f5fa81e1b69601a3f7774004583", size = 486312, upload-time = "2026-04-13T17:11:32.86Z" }, + { url = "https://files.pythonhosted.org/packages/ef/71/ee9246cbfcbfd4144558f35e7e9a306ffe0a7564730a5188c45f21d2dab8/fastar-0.11.0-cp313-cp313-win_arm64.whl", 
hash = "sha256:d977ded9d98a0719a305e0a4d5ee811f1d3e856d853a50acb8ae833c3cd6d5d2", size = 461975, upload-time = "2026-04-13T17:11:22.589Z" }, + { url = "https://files.pythonhosted.org/packages/7a/cd/3644c48ecac456f928c12d47ec3bed36c36555b17c3859856f1ff860265d/fastar-0.11.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:71375bd6f03c2a43eb47bd949ea38ff45434917f9cdac79675c5b9f60de4fa73", size = 707860, upload-time = "2026-04-13T17:10:00.371Z" }, + { url = "https://files.pythonhosted.org/packages/69/ca/dee04476ae3626b2b040a60ad84628f77e1ffd8444232f2426b0ca1e0d7e/fastar-0.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:eddfd9cab16e19ae247fe44bf992cb403ccfe27d3931d6de29a4695d95ad386c", size = 628216, upload-time = "2026-04-13T17:09:45.355Z" }, + { url = "https://files.pythonhosted.org/packages/dc/5e/9395c7353d079cb4f5be0f7982ce0dc9f2e7dec5fd175eef466729d6023a/fastar-0.11.0-cp314-cp314-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c371f1d4386c699018bb64eb2fa785feacf32785559049d2bb72fe4af023f53", size = 864378, upload-time = "2026-04-13T17:09:14.611Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ba/1e4f67148223ff219612b6281a6000357abbcc2417964fa5c83f11d68fce/fastar-0.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cad7fa41e3e66554387481c1a09365e4638becd322904932674159d5f4046728", size = 760921, upload-time = "2026-04-13T17:07:59.138Z" }, + { url = "https://files.pythonhosted.org/packages/0f/82/09d11fb6d12f17993ffaf32ffd30c3c121a11e2966e84f19fb6f66430118/fastar-0.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf36652fa71b83761717c9899b98732498f8a2cb6327ff16bbf07f6be85c3437", size = 757012, upload-time = "2026-04-13T17:08:14.186Z" }, + { url = "https://files.pythonhosted.org/packages/52/1f/5aeeacc4cb65615e2c9292cd9c5b0cd6fb6d2e6ee472ca6adc6c1b1b22ef/fastar-0.11.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f68ff8c17833053da4841720e95edde80ce45bb994b6b7d51418dddaac70ee47", size = 924510, upload-time = "2026-04-13T17:08:28.741Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1a/1e5bdabbeaf2e856928956292609f2ff6a650f94480fb8afaca30229e483/fastar-0.11.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4563ed37a12ea1cdc398af8571258d24b988bf342b7b3bf5451bd5891243280c", size = 816602, upload-time = "2026-04-13T17:08:59.461Z" }, + { url = "https://files.pythonhosted.org/packages/87/24/f960147910da3bed41a3adfcb026e17d5f50f4cf467a3324237a7088f61a/fastar-0.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cee63c9875cba3b70dc44338c560facc5d6e763047dcc4a30501f9a68cf5f890", size = 819452, upload-time = "2026-04-13T17:09:29.926Z" }, + { url = "https://files.pythonhosted.org/packages/cc/f4/3e77d7901d5707fd7f8a352e153c8ae09ea974e6fabad0b7c4eb9944b8d4/fastar-0.11.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:bd76bfffae6d0a91f4ac4a612f721e7aec108db97dccdd120ae063cd66959f27", size = 885254, upload-time = "2026-04-13T17:08:44.285Z" }, + { url = "https://files.pythonhosted.org/packages/47/01/1585edd5ec47782ae93cd94edf05828e0ab02ef00aec00aea4194a600464/fastar-0.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8f5b707501ec01c1bc0518f741f01d322e50c9adc19a451aa24f67a2316e9397", size = 971496, upload-time = "2026-04-13T17:10:17.024Z" }, + { url = "https://files.pythonhosted.org/packages/f1/e9/6874c9d1236ded565a0bed54b320ac9f165f287b1d89490fb70f9f323c81/fastar-0.11.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:37c0b5a88a657839aad98b0a6c9e4ac4c2c15d6b49c44ee3935c6b08e9d3e479", size = 1034685, upload-time = "2026-04-13T17:10:34.063Z" }, + { url = "https://files.pythonhosted.org/packages/14/d8/4ab20613ce2983427aee958e39be878dba874aa227c530a845e32429c4f6/fastar-0.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6c55f536c62a6efb180c1af0d5182948bff576bbfe6276e8e1359c9c7d2215d8", size = 
1072675, upload-time = "2026-04-13T17:10:50.53Z" }, + { url = "https://files.pythonhosted.org/packages/1f/ae/5ac3b7c20ce4b08f011dd2b979f96caabe64f9b10b157f211ea91bdfadca/fastar-0.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3082eeca59e189b9039335862f4c2780c0c8871d656bfdf559db4414a105b251", size = 1029330, upload-time = "2026-04-13T17:11:08.138Z" }, + { url = "https://files.pythonhosted.org/packages/8a/e7/37cd6a1d4e288292170b64e19d79ecce2a7de8bb76790323399a2abc4619/fastar-0.11.0-cp314-cp314-win32.whl", hash = "sha256:b201a0a4e29f9fec2a177e13154b8725ec65ab9f83bd6415483efaa2aa18344b", size = 453940, upload-time = "2026-04-13T17:11:48.713Z" }, + { url = "https://files.pythonhosted.org/packages/ff/1c/795c878b1ee29d79021cf8ed81f18f2b25ccde58453b0d34b9bdc7e025ea/fastar-0.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:868fddb26072a43e870a8819134b9f80ee602931be5a76e6fb873e04da343637", size = 486334, upload-time = "2026-04-13T17:11:34.882Z" }, + { url = "https://files.pythonhosted.org/packages/ff/a4/113f104301df8bddcc0b3775b611a30cb7610baa3add933c7ccac9386467/fastar-0.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:3db39c9cc42abb0c780a26b299f24dfbc8be455985e969e15336d70d7b2f833b", size = 461534, upload-time = "2026-04-13T17:11:24.329Z" }, + { url = "https://files.pythonhosted.org/packages/5a/a6/5c5f2c2c8e0c63e56a5636ebc7721589c889e94c0092cec7eb28ae7207e6/fastar-0.11.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:49c3299dec5e125e7ebaa27545714da9c7391777366015427e0ae62d548b442b", size = 707156, upload-time = "2026-04-13T17:10:02.176Z" }, + { url = "https://files.pythonhosted.org/packages/df/f7/982c01b61f0fc135ad2b16d01e6d0ee53cf8791e68827f5f7c5a65b2e5b1/fastar-0.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3328ed1ed56d31f5198350b17dd60449b8d6b9d47abb4688bab6aef4450a165b", size = 627032, upload-time = "2026-04-13T17:09:46.978Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/c3/38f1dac77ae0c71c37b176277c96d830796b8ce2fe69705f917829b53829/fastar-0.11.0-cp314-cp314t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:bd3eca3bbfec84a614bcb4143b4ad4f784d0895babc26cfc88436af88ca23c7a", size = 864403, upload-time = "2026-04-13T17:09:16.58Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f0/e69c363bdb3e5a5848e937b662b5469581ee6682c51bc1c0556494773929/fastar-0.11.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff86a967acb0d621dd24063dda090daa67bf4993b9570e97fe156de88a9006ca", size = 759480, upload-time = "2026-04-13T17:08:00.599Z" }, + { url = "https://files.pythonhosted.org/packages/3b/29/4d8737590c2a6357d614d7cc7288e8f68e7e449680b8922997cc4349e65e/fastar-0.11.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:86eaf7c0e985d93a7734168be2fb232b2a8cca53e41431c2782d7c12b12c03b1", size = 756219, upload-time = "2026-04-13T17:08:15.699Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ec/400de7b3b7d48801908f19cf5462177104395799472671b3e8152b2b04ca/fastar-0.11.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91f07b0b8eb67e2f177733a1f884edad7dfb9f8977ffef15927b20cb9604027d", size = 923669, upload-time = "2026-04-13T17:08:30.574Z" }, + { url = "https://files.pythonhosted.org/packages/5d/01/8926c53da923fed7ab4b96e7fbf7f73b663beb4f02095b654d6fab46f9ad/fastar-0.11.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f85c896885eb4abf1a635d54dea22cac6ae48d04fc2ea26ae652fcf1febe1220", size = 815729, upload-time = "2026-04-13T17:09:01.204Z" }, + { url = "https://files.pythonhosted.org/packages/89/f0/5fef4c7946e352651b504b1a4235dac3505e7cfd24020788ab50552e84bf/fastar-0.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:075c07095c8de4b774ba8f28b9c0a02b1a2cd254da50cbe464dd3bb2432e9158", size = 819812, upload-time = "2026-04-13T17:09:31.907Z" }, + 
{ url = "https://files.pythonhosted.org/packages/b3/c8/0ebc3298b4a45e7bddc50b169ae6a6f5b80c939394d4befe6e60de535ee7/fastar-0.11.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:07f028933820c65750baf3383b807ecce1cd9385cf00ce192b79d263ad6b856c", size = 884074, upload-time = "2026-04-13T17:08:45.802Z" }, + { url = "https://files.pythonhosted.org/packages/ae/9f/7baa4cdff8d6fbca41fa5c764b48a941fed8a9ec6c4cc92de65895a28299/fastar-0.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:039f875efa0f01fa43c20bf4e2fc7305489c61d0ac76eda991acfba7820a0e63", size = 969450, upload-time = "2026-04-13T17:10:18.667Z" }, + { url = "https://files.pythonhosted.org/packages/d4/dc/1ebbfb58a47056ba866494f19efbcdd2ba2897096b94f36e796594b4d05b/fastar-0.11.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:fff12452a9a5c6814a012445f26365541cc3d99dcca61f09762e6a389f7a32ea", size = 1033775, upload-time = "2026-04-13T17:10:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/c2/5f/ce4e3914066f08c99eb8c32952cc07c1a013e81b1db1b0f598130bf6b974/fastar-0.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:2bf733e09f942b6fa876efe30a90508d1f4caef5630c00fb2a84fba355873712", size = 1072158, upload-time = "2026-04-13T17:10:52.497Z" }, + { url = "https://files.pythonhosted.org/packages/03/2a/6bca72992c84151c387cc6558f3867f5ebe5fb3684ee6fa9b76280ba4b8e/fastar-0.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d1531fa848fdd3677d2dce0a4b436ea64d9ae38fb8babe2ddbc180dd153cb7a3", size = 1028577, upload-time = "2026-04-13T17:11:09.934Z" }, + { url = "https://files.pythonhosted.org/packages/83/18/7a7c15657a3da5569b26fc51cde6a80f8d84cb54b3b1aea6d74a103db4ad/fastar-0.11.0-cp314-cp314t-win32.whl", hash = "sha256:5744551bc67c6fc6581cbd0e34a0fd6e2cd0bd30b43e94b1c3119cf35064b162", size = 453601, upload-time = "2026-04-13T17:11:53.726Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/d8/331b59a6de279f3ad75c10c02c40a12f21d64a437d9c3d6f1af2dcbd7a76/fastar-0.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f4ce44e3b56c47cf38244b98d29f269b259740a580c47a2552efa5b96a5458fb", size = 486436, upload-time = "2026-04-13T17:11:40.089Z" }, + { url = "https://files.pythonhosted.org/packages/6b/fd/5390ec4f49100f3ecb9968a392f9e6d039f1e3fe0ecd28443716ff01e589/fastar-0.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:76c1359314355eafbc6989f20fb1ad565a3d10200117923b9da765a17e2f6f11", size = 461049, upload-time = "2026-04-13T17:11:25.918Z" }, +] + +[[package]] +name = "filelock" +version = "3.24.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/a8/dae62680be63cbb3ff87cfa2f51cf766269514ea5488479d42fec5aa6f3a/filelock-3.24.2.tar.gz", hash = "sha256:c22803117490f156e59fafce621f0550a7a853e2bbf4f87f112b11d469b6c81b", size = 37601, upload-time = "2026-02-16T02:50:45.614Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/04/a94ebfb4eaaa08db56725a40de2887e95de4e8641b9e902c311bfa00aa39/filelock-3.24.2-py3-none-any.whl", hash = "sha256:667d7dc0b7d1e1064dd5f8f8e80bdac157a6482e8d2e02cd16fd3b6b33bd6556", size = 24152, upload-time = "2026-02-16T02:50:44Z" }, +] + +[[package]] +name = "free-claude-code" +version = "2.0.0" +source = { editable = "." 
} +dependencies = [ + { name = "aiohttp" }, + { name = "asyncpg" }, + { name = "discord-py" }, + { name = "fastapi", extra = ["standard"] }, + { name = "httpx", extra = ["http2", "socks"] }, + { name = "loguru" }, + { name = "markdown-it-py" }, + { name = "openai" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-dotenv" }, + { name = "python-telegram-bot" }, + { name = "redis" }, + { name = "sqlalchemy" }, + { name = "tiktoken" }, + { name = "uvicorn" }, +] + +[package.optional-dependencies] +voice = [ + { name = "grpcio" }, + { name = "grpcio-tools" }, + { name = "nvidia-riva-client" }, +] +voice-local = [ + { name = "accelerate" }, + { name = "librosa" }, + { name = "torch" }, + { name = "transformers" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, + { name = "pytest-xdist" }, + { name = "ruff" }, + { name = "ty" }, +] + +[package.metadata] +requires-dist = [ + { name = "accelerate", marker = "extra == 'voice-local'", specifier = ">=1.13.0" }, + { name = "aiohttp", specifier = ">=3.13.4" }, + { name = "asyncpg", specifier = ">=0.31.0" }, + { name = "discord-py", specifier = ">=2.7.1" }, + { name = "fastapi", extras = ["standard"], specifier = ">=0.136.1" }, + { name = "grpcio", marker = "extra == 'voice'", specifier = ">=1.80.0" }, + { name = "grpcio-tools", marker = "extra == 'voice'", specifier = ">=1.80.0" }, + { name = "httpx", extras = ["http2", "socks"], specifier = ">=0.28.1" }, + { name = "librosa", marker = "extra == 'voice-local'", specifier = ">=0.10.0" }, + { name = "loguru", specifier = ">=0.7.0" }, + { name = "markdown-it-py", specifier = ">=3.0.0" }, + { name = "nvidia-riva-client", marker = "extra == 'voice'", specifier = ">=2.25.1" }, + { name = "openai", specifier = ">=2.32.0" }, + { name = "pydantic", specifier = ">=2.13.3" }, + { name = "pydantic-settings", specifier = ">=2.14.0" }, + { name = "python-dotenv", specifier = ">=1.2.2" }, 
+ { name = "python-telegram-bot", specifier = ">=22.7" }, + { name = "redis", specifier = ">=7.4.0" }, + { name = "sqlalchemy", specifier = ">=2.0.49" }, + { name = "tiktoken", specifier = ">=0.7.0" }, + { name = "torch", marker = "extra == 'voice-local'", specifier = ">=2.11.0", index = "https://download.pytorch.org/whl/cu130" }, + { name = "transformers", marker = "extra == 'voice-local'", specifier = ">=5.6.2" }, + { name = "uvicorn", specifier = ">=0.46.0" }, +] +provides-extras = ["voice", "voice-local"] + +[package.metadata.requires-dev] +dev = [ + { name = "pytest", specifier = ">=9.0.3" }, + { name = "pytest-asyncio", specifier = ">=1.3.0" }, + { name = "pytest-cov", specifier = ">=7.1.0" }, + { name = "pytest-xdist", specifier = ">=3.8.0" }, + { name = "ruff", specifier = ">=0.15.12" }, + { name = "ty", specifier = ">=0.0.32" }, +] + +[[package]] +name = "frozenlist" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = 
"2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" }, + { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391, upload-time = "2025-10-06T05:36:31.301Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048, upload-time = "2025-10-06T05:36:32.531Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549, upload-time = "2025-10-06T05:36:33.706Z" }, + { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 239833, upload-time = "2025-10-06T05:36:34.947Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363, upload-time = "2025-10-06T05:36:36.534Z" }, + { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314, upload-time = "2025-10-06T05:36:38.582Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = 
"sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365, upload-time = "2025-10-06T05:36:40.152Z" }, + { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = "2025-10-06T05:36:41.355Z" }, + { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = "2025-10-06T05:36:42.716Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = "2025-10-06T05:36:45.423Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" }, + { url = "https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533, upload-time = "2025-10-06T05:36:51.898Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506, upload-time = "2025-10-06T05:36:53.101Z" }, + { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161, upload-time = 
"2025-10-06T05:36:54.309Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676, upload-time = "2025-10-06T05:36:55.566Z" }, + { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638, upload-time = "2025-10-06T05:36:56.758Z" }, + { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067, upload-time = "2025-10-06T05:36:57.965Z" }, + { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101, upload-time = "2025-10-06T05:36:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = "2025-10-06T05:37:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = 
"2025-10-06T05:37:02.115Z" }, + { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" }, + { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, + { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" }, + { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127, upload-time = "2025-10-06T05:37:08.438Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698, upload-time = "2025-10-06T05:37:09.48Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749, upload-time = "2025-10-06T05:37:10.569Z" }, + { url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 231298, upload-time = "2025-10-06T05:37:11.993Z" }, + { url = "https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015, upload-time = "2025-10-06T05:37:13.194Z" }, + { url = "https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038, upload-time = "2025-10-06T05:37:14.577Z" }, + { url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 240130, upload-time = "2025-10-06T05:37:15.781Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845, upload-time = "2025-10-06T05:37:17.037Z" }, + { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131, upload-time = "2025-10-06T05:37:18.221Z" }, + { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542, upload-time = "2025-10-06T05:37:19.771Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308, upload-time = "2025-10-06T05:37:20.969Z" }, + { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210, upload-time = "2025-10-06T05:37:22.252Z" }, + { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972, upload-time = "2025-10-06T05:37:23.5Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536, upload-time = 
"2025-10-06T05:37:25.581Z" }, + { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330, upload-time = "2025-10-06T05:37:26.928Z" }, + { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627, upload-time = "2025-10-06T05:37:28.075Z" }, + { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238, upload-time = "2025-10-06T05:37:29.373Z" }, + { url = "https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738, upload-time = "2025-10-06T05:37:30.792Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739, upload-time = "2025-10-06T05:37:32.127Z" }, + { url = "https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186, upload-time = "2025-10-06T05:37:33.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196, upload-time = "2025-10-06T05:37:36.107Z" }, + { url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830, upload-time = "2025-10-06T05:37:37.663Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", size = 294289, upload-time = "2025-10-06T05:37:39.261Z" }, + { url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318, upload-time = "2025-10-06T05:37:43.213Z" }, + { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814, upload-time = "2025-10-06T05:37:45.337Z" }, + { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = 
"sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762, upload-time = "2025-10-06T05:37:46.657Z" }, + { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470, upload-time = "2025-10-06T05:37:47.946Z" }, + { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042, upload-time = "2025-10-06T05:37:49.499Z" }, + { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148, upload-time = "2025-10-06T05:37:50.745Z" }, + { url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = "2025-10-06T05:37:52.222Z" }, + { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" }, + { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, +] + +[[package]] +name = "fsspec" +version = "2026.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/7c/f60c259dcbf4f0c47cc4ddb8f7720d2dcdc8888c8e5ad84c73ea4531cc5b/fsspec-2026.2.0.tar.gz", hash = "sha256:6544e34b16869f5aacd5b90bdf1a71acb37792ea3ddf6125ee69a22a53fb8bff", size = 313441, upload-time = "2026-02-05T21:50:53.743Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl", hash = "sha256:98de475b5cb3bd66bedd5c4679e87b4fdfe1a3bf4d707b151b3c07e58c9a2437", size = 202505, upload-time = "2026-02-05T21:50:51.819Z" }, +] + +[[package]] +name = "greenlet" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/3f/dbf99fb14bfeb88c28f16729215478c0e265cacd6dc22270c8f31bb6892f/greenlet-3.5.0.tar.gz", hash = "sha256:d419647372241bc68e957bf38d5c1f98852155e4146bd1e4121adea81f4f01e4", size = 196995, upload-time = "2026-04-27T13:37:15.544Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/32/f2ce6d4cac3e55bc6173f92dbe627e782e1850f89d986c3606feb63aafa7/greenlet-3.5.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:db2910d3c809444e0a20147361f343fe2798e106af8d9d8506f5305302655a9f", size = 286228, upload-time = "2026-04-27T12:20:34.421Z" }, + { url = "https://files.pythonhosted.org/packages/b7/aa/caed9e5adf742315fc7be2a84196373aab4816e540e38ba0d76cb7584d68/greenlet-3.5.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ec9ea74e7268ace7f9aab1b1a4e730193fc661b39a993cd91c606c32d4a3628", size = 
601775, upload-time = "2026-04-27T12:52:41.045Z" }, + { url = "https://files.pythonhosted.org/packages/c7/af/90ae08497400a941595d12774447f752d3dfe0fbb012e35b76bc5c0ff37e/greenlet-3.5.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:54d243512da35485fc7a6bf3c178fdda6327a9d6506fcdd62b1abd1e41b2927b", size = 614436, upload-time = "2026-04-27T12:59:41.595Z" }, + { url = "https://files.pythonhosted.org/packages/2b/e0/2e13df68f367e2f9960616927d60857dd7e56aaadd59a47c644216b2f920/greenlet-3.5.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d280a7f5c331622c69f97eb167f33577ff2d1df282c41cd15907fc0a3ca198c", size = 611388, upload-time = "2026-04-27T12:25:28.008Z" }, + { url = "https://files.pythonhosted.org/packages/82/f7/393c64055132ac0d488ef6be549253b7e6274194863967ddc0bc8f5b87b8/greenlet-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1eb67d5adefb5bd2e182d42678a328979a209e4e82eb93575708185d31d1f588", size = 1570768, upload-time = "2026-04-27T12:53:28.099Z" }, + { url = "https://files.pythonhosted.org/packages/b8/4b/eaf7735253522cf56d1b74d672a58f54fc114702ceaf05def59aae72f6e1/greenlet-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2628d6c86f6cb0cb45e0c3c54058bbec559f57eaae699447748cb3928150577e", size = 1635983, upload-time = "2026-04-27T12:25:26.903Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fe/4fb3a0805bd5165da5ebf858da7cc01cce8061674106d2cf5bdab32cbfde/greenlet-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:d4d9f0624c775f2dfc56ba54d515a8c771044346852a918b405914f6b19d7fd8", size = 238840, upload-time = "2026-04-27T12:23:54.806Z" }, + { url = "https://files.pythonhosted.org/packages/cb/cb/baa584cb00532126ffe12d9787db0a60c5a4f55c27bfe2666df5d4c30a32/greenlet-3.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:83ed9f27f1680b50e89f40f6df348a290ea234b249a4003d366663a12eab94f2", size = 235615, upload-time = "2026-04-27T12:21:38.57Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/58/fc576f99037ce19c5aa16628e4c3226b6d1419f72a62c79f5f40576e6eb3/greenlet-3.5.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:5a5ed18de6a0f6cc7087f1563f6bd93fc7df1c19165ca01e9bde5a5dc281d106", size = 285066, upload-time = "2026-04-27T12:23:05.033Z" }, + { url = "https://files.pythonhosted.org/packages/4a/ba/b28ddbe6bfad6a8ac196ef0e8cff37bc65b79735995b9e410923fffeeb70/greenlet-3.5.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a717fbc46d8a354fa675f7c1e813485b6ba3885f9bef0cd56e5ba27d758ff5b", size = 604414, upload-time = "2026-04-27T12:52:42.358Z" }, + { url = "https://files.pythonhosted.org/packages/09/06/4b69f8f0b67603a8be2790e55107a190b376f2627fe0eaf5695d85ffb3cd/greenlet-3.5.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ddc090c5c1792b10246a78e8c2163ebbe04cf877f9d785c230a7b27b39ad038e", size = 617349, upload-time = "2026-04-27T12:59:43.32Z" }, + { url = "https://files.pythonhosted.org/packages/8a/17/a3918541fd0ddefe024a69de6d16aa7b46d36ac19562adaa63c7fa180eff/greenlet-3.5.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2094acd54b272cb6eae8c03dd87b3fa1820a4cef18d6889c378d503500a1dc13", size = 613927, upload-time = "2026-04-27T12:25:30.28Z" }, + { url = "https://files.pythonhosted.org/packages/ee/e1/bd0af6213c7dd33175d8a462d4c1fe1175124ebed4855bc1475a5b5242c2/greenlet-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5e05ba267789ea87b5a155cf0e810b1ab88bf18e9e8740813945ceb8ee4350ba", size = 1570893, upload-time = "2026-04-27T12:53:29.483Z" }, + { url = "https://files.pythonhosted.org/packages/9b/2a/0789702f864f5382cb476b93d7a9c823c10472658102ccd65f415747d2e2/greenlet-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0ecec963079cd58cbd14723582384f11f166fd58883c15dcbfb342e0bc9b5846", size = 1636060, upload-time = "2026-04-27T12:25:28.845Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/8f/22bf9df92bbff0eb07842b60f7e63bf7675a9742df628437a9f02d09137f/greenlet-3.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:728d9667d8f2f586644b748dbd9bb67e50d6a9381767d1357714ea6825bb3bf5", size = 238740, upload-time = "2026-04-27T12:24:01.341Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b7/9c5c3d653bd4ff614277c049ac676422e2c557db47b4fe43e6313fc005dc/greenlet-3.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:47422135b1d308c14b2c6e758beedb1acd33bb91679f5670edf77bf46244722b", size = 235525, upload-time = "2026-04-27T12:23:12.308Z" }, + { url = "https://files.pythonhosted.org/packages/94/5e/a70f31e3e8d961c4ce589c15b28e4225d63704e431a23932a3808cbcc867/greenlet-3.5.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:f35807464c4c58c55f0d31dfa83c541a5615d825c2fe3d2b95360cf7c4e3c0a8", size = 285564, upload-time = "2026-04-27T12:23:08.555Z" }, + { url = "https://files.pythonhosted.org/packages/af/a6/046c0a28e21833e4086918218cfb3d8bed51c075a1b700f20b9d7861c0f4/greenlet-3.5.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55fa7ea52771be44af0de27d8b80c02cd18c2c3cddde6c847ecebdf72418b6a1", size = 651166, upload-time = "2026-04-27T12:52:43.644Z" }, + { url = "https://files.pythonhosted.org/packages/47/f8/4af27f71c5ff32a7fbc516adb46370d9c4ae2bc7bd3dc7d066ac542b4b15/greenlet-3.5.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a97e4821aa710603f94de0da25f25096454d78ffdace5dc77f3a006bc01abba3", size = 663792, upload-time = "2026-04-27T12:59:44.93Z" }, + { url = "https://files.pythonhosted.org/packages/a3/59/1bd6d7428d6ed9106efbb8c52310c60fd04f6672490f452aeaa3829aa436/greenlet-3.5.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f52a464e4ed91780bdfbbdd2b97197f3accaa629b98c200f4dffada759f3ae7", size = 660933, upload-time = "2026-04-27T12:25:33.276Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/e4/b903e5a5fae1e8a28cdd32a0cfbfd560b668c25b692f67768822ddc5f40f/greenlet-3.5.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:762612baf1161ccb8437c0161c668a688223cba28e1bf038f4eb47b13e39ccdf", size = 1618401, upload-time = "2026-04-27T12:53:31.062Z" }, + { url = "https://files.pythonhosted.org/packages/0e/e3/5ec408a329acb854fb607a122e1ee5fb3ff649f9a97952948a90803c0d8e/greenlet-3.5.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:57a43c6079a89713522bc4bcb9f75070ecf5d3dbad7792bfe42239362cbf2a16", size = 1682038, upload-time = "2026-04-27T12:25:31.838Z" }, + { url = "https://files.pythonhosted.org/packages/91/20/6b165108058767ee643c55c5c4904d591a830ee2b3c7dbd359828fbc829f/greenlet-3.5.0-cp314-cp314-win_amd64.whl", hash = "sha256:3bc59be3945ae9750b9e7d45067d01ae3fe90ea5f9ade99239dabdd6e28a5033", size = 239835, upload-time = "2026-04-27T12:24:54.136Z" }, + { url = "https://files.pythonhosted.org/packages/4e/62/1c498375cee177b55d980c1db319f26470e5309e54698c8f8fc06c0fd539/greenlet-3.5.0-cp314-cp314-win_arm64.whl", hash = "sha256:a96fcee45e03fe30a62669fd16ab5c9d3c172660d3085605cb1e2d1280d3c988", size = 236862, upload-time = "2026-04-27T12:23:24.957Z" }, + { url = "https://files.pythonhosted.org/packages/78/a8/4522939255bb5409af4e87132f915446bf3622c2c292d14d3c38d128ae82/greenlet-3.5.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:a10a732421ab4fec934783ce3e54763470d0181db6e3468f9103a275c3ed1853", size = 293614, upload-time = "2026-04-27T12:24:12.874Z" }, + { url = "https://files.pythonhosted.org/packages/15/5e/8744c52e2c027b5a8772a01561934c8835f869733e101f62075c60430340/greenlet-3.5.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fc391b1566f2907d17aaebe78f8855dc45675159a775fcf9e61f8ee0078e87f", size = 650723, upload-time = "2026-04-27T12:52:45.412Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/ef/7b4c39c03cf46ceca512c5d3f914afd85aa30b2cc9a93015b0dd73e4be6c/greenlet-3.5.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:680bd0e7ad5e8daa8a4aa89f68fd6adc834b8a8036dc256533f7e08f4a4b01f7", size = 656529, upload-time = "2026-04-27T12:59:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b5/c7768f352f5c010f92064d0063f987e7dc0cd290a6d92a34109015ce4aa1/greenlet-3.5.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddb36c7d6c9c0a65f18c7258634e0c416c6ab59caac8c987b96f80c2ebda0112", size = 654364, upload-time = "2026-04-27T12:25:35.64Z" }, + { url = "https://files.pythonhosted.org/packages/ef/d0/079ebe12e4b1fc758857ce5be1a5e73f06870f2101e52611d1e71925ce54/greenlet-3.5.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e5ddf316ced87539144621453c3aef229575825fe60c604e62bedc4003f372b2", size = 1614204, upload-time = "2026-04-27T12:53:32.618Z" }, + { url = "https://files.pythonhosted.org/packages/6d/89/6c2fb63df3596552d20e58fb4d96669243388cf680cff222758812c7bfaa/greenlet-3.5.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:4a448128607be0de65342dc9b31be7f948ef4cc0bc8832069350abefd310a8f2", size = 1675480, upload-time = "2026-04-27T12:25:34.168Z" }, + { url = "https://files.pythonhosted.org/packages/15/32/77ee8a6c1564fc345a491a4e85b3bf360e4cf26eac98c4532d2fdb96e01f/greenlet-3.5.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d60097128cb0a1cab9ea541186ea13cd7b847b8449a7787c2e2350da0cb82d86", size = 245324, upload-time = "2026-04-27T12:24:40.295Z" }, +] + +[[package]] +name = "grpcio" +version = "1.80.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b7/48/af6173dbca4454f4637a4678b67f52ca7e0c1ed7d5894d89d434fecede05/grpcio-1.80.0.tar.gz", hash = "sha256:29aca15edd0688c22ba01d7cc01cb000d72b2033f4a3c72a81a19b56fd143257", size = 
12978905, upload-time = "2026-03-30T08:49:10.502Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/e8/a2b749265eb3415abc94f2e619bbd9e9707bebdda787e61c593004ec927a/grpcio-1.80.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:c624cc9f1008361014378c9d776de7182b11fe8b2e5a81bc69f23a295f2a1ad0", size = 6015616, upload-time = "2026-03-30T08:47:13.428Z" }, + { url = "https://files.pythonhosted.org/packages/3e/97/b1282161a15d699d1e90c360df18d19165a045ce1c343c7f313f5e8a0b77/grpcio-1.80.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:f49eddcac43c3bf350c0385366a58f36bed8cc2c0ec35ef7b74b49e56552c0c2", size = 12014204, upload-time = "2026-03-30T08:47:15.873Z" }, + { url = "https://files.pythonhosted.org/packages/6e/5e/d319c6e997b50c155ac5a8cb12f5173d5b42677510e886d250d50264949d/grpcio-1.80.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d334591df610ab94714048e0d5b4f3dd5ad1bee74dfec11eee344220077a79de", size = 6563866, upload-time = "2026-03-30T08:47:18.588Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f6/fdd975a2cb4d78eb67769a7b3b3830970bfa2e919f1decf724ae4445f42c/grpcio-1.80.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0cb517eb1d0d0aaf1d87af7cc5b801d686557c1d88b2619f5e31fab3c2315921", size = 7273060, upload-time = "2026-03-30T08:47:21.113Z" }, + { url = "https://files.pythonhosted.org/packages/db/f0/a3deb5feba60d9538a962913e37bd2e69a195f1c3376a3dd44fe0427e996/grpcio-1.80.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4e78c4ac0d97dc2e569b2f4bcbbb447491167cb358d1a389fc4af71ab6f70411", size = 6782121, upload-time = "2026-03-30T08:47:23.827Z" }, + { url = "https://files.pythonhosted.org/packages/ca/84/36c6dcfddc093e108141f757c407902a05085e0c328007cb090d56646cdf/grpcio-1.80.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2ed770b4c06984f3b47eb0517b1c69ad0b84ef3f40128f51448433be904634cd", size = 7383811, upload-time = "2026-03-30T08:47:26.517Z" 
}, + { url = "https://files.pythonhosted.org/packages/7c/ef/f3a77e3dc5b471a0ec86c564c98d6adfa3510d38f8ee99010410858d591e/grpcio-1.80.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:256507e2f524092f1473071a05e65a5b10d84b82e3ff24c5b571513cfaa61e2f", size = 8393860, upload-time = "2026-03-30T08:47:29.439Z" }, + { url = "https://files.pythonhosted.org/packages/9b/8d/9d4d27ed7f33d109c50d6b5ce578a9914aa68edab75d65869a17e630a8d1/grpcio-1.80.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a6284a5d907c37db53350645567c522be314bac859a64a7a5ca63b77bb7958f", size = 7830132, upload-time = "2026-03-30T08:47:33.254Z" }, + { url = "https://files.pythonhosted.org/packages/14/e4/9990b41c6d7a44e1e9dee8ac11d7a9802ba1378b40d77468a7761d1ad288/grpcio-1.80.0-cp312-cp312-win32.whl", hash = "sha256:c71309cfce2f22be26aa4a847357c502db6c621f1a49825ae98aa0907595b193", size = 4140904, upload-time = "2026-03-30T08:47:35.319Z" }, + { url = "https://files.pythonhosted.org/packages/2f/2c/296f6138caca1f4b92a31ace4ae1b87dab692fc16a7a3417af3bb3c805bf/grpcio-1.80.0-cp312-cp312-win_amd64.whl", hash = "sha256:9fe648599c0e37594c4809d81a9e77bd138cc82eb8baa71b6a86af65426723ff", size = 4880944, upload-time = "2026-03-30T08:47:37.831Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3a/7c3c25789e3f069e581dc342e03613c5b1cb012c4e8c7d9d5cf960a75856/grpcio-1.80.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:e9e408fc016dffd20661f0126c53d8a31c2821b5c13c5d67a0f5ed5de93319ad", size = 6017243, upload-time = "2026-03-30T08:47:40.075Z" }, + { url = "https://files.pythonhosted.org/packages/04/19/21a9806eb8240e174fd1ab0cd5b9aa948bb0e05c2f2f55f9d5d7405e6d08/grpcio-1.80.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:92d787312e613754d4d8b9ca6d3297e69994a7912a32fa38c4c4e01c272974b0", size = 12010840, upload-time = "2026-03-30T08:47:43.11Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/3a/23347d35f76f639e807fb7a36fad3068aed100996849a33809591f26eca6/grpcio-1.80.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac393b58aa16991a2f1144ec578084d544038c12242da3a215966b512904d0f", size = 6567644, upload-time = "2026-03-30T08:47:46.806Z" }, + { url = "https://files.pythonhosted.org/packages/ff/40/96e07ecb604a6a67ae6ab151e3e35b132875d98bc68ec65f3e5ab3e781d7/grpcio-1.80.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:68e5851ac4b9afe07e7f84483803ad167852570d65326b34d54ca560bfa53fb6", size = 7277830, upload-time = "2026-03-30T08:47:49.643Z" }, + { url = "https://files.pythonhosted.org/packages/9b/e2/da1506ecea1f34a5e365964644b35edef53803052b763ca214ba3870c856/grpcio-1.80.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:873ff5d17d68992ef6605330127425d2fc4e77e612fa3c3e0ed4e668685e3140", size = 6783216, upload-time = "2026-03-30T08:47:52.817Z" }, + { url = "https://files.pythonhosted.org/packages/44/83/3b20ff58d0c3b7f6caaa3af9a4174d4023701df40a3f39f7f1c8e7c48f9d/grpcio-1.80.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2bea16af2750fd0a899bf1abd9022244418b55d1f37da2202249ba4ba673838d", size = 7385866, upload-time = "2026-03-30T08:47:55.687Z" }, + { url = "https://files.pythonhosted.org/packages/47/45/55c507599c5520416de5eefecc927d6a0d7af55e91cfffb2e410607e5744/grpcio-1.80.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ba0db34f7e1d803a878284cd70e4c63cb6ae2510ba51937bf8f45ba997cefcf7", size = 8391602, upload-time = "2026-03-30T08:47:58.303Z" }, + { url = "https://files.pythonhosted.org/packages/10/bb/dd06f4c24c01db9cf11341b547d0a016b2c90ed7dbbb086a5710df7dd1d7/grpcio-1.80.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8eb613f02d34721f1acf3626dfdb3545bd3c8505b0e52bf8b5710a28d02e8aa7", size = 7826752, upload-time = "2026-03-30T08:48:01.311Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/1e/9d67992ba23371fd63d4527096eb8c6b76d74d52b500df992a3343fd7251/grpcio-1.80.0-cp313-cp313-win32.whl", hash = "sha256:93b6f823810720912fd131f561f91f5fed0fda372b6b7028a2681b8194d5d294", size = 4142310, upload-time = "2026-03-30T08:48:04.594Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e6/283326a27da9e2c3038bc93eeea36fb118ce0b2d03922a9cda6688f53c5b/grpcio-1.80.0-cp313-cp313-win_amd64.whl", hash = "sha256:e172cf795a3ba5246d3529e4d34c53db70e888fa582a8ffebd2e6e48bc0cba50", size = 4882833, upload-time = "2026-03-30T08:48:07.363Z" }, + { url = "https://files.pythonhosted.org/packages/c5/6d/e65307ce20f5a09244ba9e9d8476e99fb039de7154f37fb85f26978b59c3/grpcio-1.80.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:3d4147a97c8344d065d01bbf8b6acec2cf86fb0400d40696c8bdad34a64ffc0e", size = 6017376, upload-time = "2026-03-30T08:48:10.005Z" }, + { url = "https://files.pythonhosted.org/packages/69/10/9cef5d9650c72625a699c549940f0abb3c4bfdb5ed45a5ce431f92f31806/grpcio-1.80.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:d8e11f167935b3eb089ac9038e1a063e6d7dbe995c0bb4a661e614583352e76f", size = 12018133, upload-time = "2026-03-30T08:48:12.927Z" }, + { url = "https://files.pythonhosted.org/packages/04/82/983aabaad82ba26113caceeb9091706a0696b25da004fe3defb5b346e15b/grpcio-1.80.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f14b618fc30de822681ee986cfdcc2d9327229dc4c98aed16896761cacd468b9", size = 6574748, upload-time = "2026-03-30T08:48:16.386Z" }, + { url = "https://files.pythonhosted.org/packages/07/d7/031666ef155aa0bf399ed7e19439656c38bbd143779ae0861b038ce82abd/grpcio-1.80.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4ed39fbdcf9b87370f6e8df4e39ca7b38b3e5e9d1b0013c7b6be9639d6578d14", size = 7277711, upload-time = "2026-03-30T08:48:19.627Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/43/f437a78f7f4f1d311804189e8f11fb311a01049b2e08557c1068d470cb2e/grpcio-1.80.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2dcc70e9f0ba987526e8e8603a610fb4f460e42899e74e7a518bf3c68fe1bf05", size = 6785372, upload-time = "2026-03-30T08:48:22.373Z" }, + { url = "https://files.pythonhosted.org/packages/93/3d/f6558e9c6296cb4227faa5c43c54a34c68d32654b829f53288313d16a86e/grpcio-1.80.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:448c884b668b868562b1bda833c5fce6272d26e1926ec46747cda05741d302c1", size = 7395268, upload-time = "2026-03-30T08:48:25.638Z" }, + { url = "https://files.pythonhosted.org/packages/06/21/0fdd77e84720b08843c371a2efa6f2e19dbebf56adc72df73d891f5506f0/grpcio-1.80.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a1dc80fe55685b4a543555e6eef975303b36c8db1023b1599b094b92aa77965f", size = 8392000, upload-time = "2026-03-30T08:48:28.974Z" }, + { url = "https://files.pythonhosted.org/packages/f5/68/67f4947ed55d2e69f2cc199ab9fd85e0a0034d813bbeef84df6d2ba4d4b7/grpcio-1.80.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:31b9ac4ad1aa28ffee5503821fafd09e4da0a261ce1c1281c6c8da0423c83b6e", size = 7828477, upload-time = "2026-03-30T08:48:32.054Z" }, + { url = "https://files.pythonhosted.org/packages/44/b6/8d4096691b2e385e8271911a0de4f35f0a6c7d05aff7098e296c3de86939/grpcio-1.80.0-cp314-cp314-win32.whl", hash = "sha256:367ce30ba67d05e0592470428f0ec1c31714cab9ef19b8f2e37be1f4c7d32fae", size = 4218563, upload-time = "2026-03-30T08:48:34.538Z" }, + { url = "https://files.pythonhosted.org/packages/e5/8c/bbe6baf2557262834f2070cf668515fa308b2d38a4bbf771f8f7872a7036/grpcio-1.80.0-cp314-cp314-win_amd64.whl", hash = "sha256:3b01e1f5464c583d2f567b2e46ff0d516ef979978f72091fd81f5ab7fa6e2e7f", size = 5019457, upload-time = "2026-03-30T08:48:37.308Z" }, +] + +[[package]] +name = "grpcio-tools" +version = "1.80.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { 
name = "grpcio" }, + { name = "protobuf" }, + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/94/c8/1223f29c84a143ae9a56c084fc96894de0ba84b6e8d60a26241abd81d278/grpcio_tools-1.80.0.tar.gz", hash = "sha256:26052b19c6ce0dcf52d1024496aea3e2bdfa864159f06dc7b97b22d041a94b26", size = 6133212, upload-time = "2026-03-30T08:52:39.077Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/b9/65929df8c9614792db900a8e45d4997fadbd1734c827da3f0eb1f2fe4866/grpcio_tools-1.80.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:d19d5a8244311947b96f749c417b32d144641c6953f1164824579e1f0a51d040", size = 2550856, upload-time = "2026-03-30T08:50:57.3Z" }, + { url = "https://files.pythonhosted.org/packages/28/17/af1557544d68d1aeca9d9ea53ed16524022d521fec6ba334ab3530e9c1a6/grpcio_tools-1.80.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:fb599a3dc89ed1bb24489a2724b2f6dd4cddbbf0f7bdd69c073477bab0dc7554", size = 5710883, upload-time = "2026-03-30T08:51:00.077Z" }, + { url = "https://files.pythonhosted.org/packages/cc/48/aa9b4f7519ca972bc40d315d5c28f05ca28fa08de13d4e8b69f551b798ab/grpcio_tools-1.80.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:623ee31fc2ff7df9a987b4f3d139c30af17ce46a861ae0e25fb8c112daa32dd8", size = 2598004, upload-time = "2026-03-30T08:51:02.102Z" }, + { url = "https://files.pythonhosted.org/packages/b4/b8/b01371c119924b3beca1fe3f047b1bc2cdc66b3d37f0f3acc9d10c567a43/grpcio_tools-1.80.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b46570a68378539ee2b75a5a43202561f8d753c832798b1047099e3c551cf5d6", size = 2909568, upload-time = "2026-03-30T08:51:04.159Z" }, + { url = "https://files.pythonhosted.org/packages/4f/7c/1108f7bdb58475a7e701ec89b55eb494538b6e76acd211ba0d4cc5fd28e8/grpcio_tools-1.80.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:51caf99c28999e7e0f97e9cea190c1405b7681a57bb2e0631205accd92b43fa4", size = 2660938, upload-time 
= "2026-03-30T08:51:06.126Z" }, + { url = "https://files.pythonhosted.org/packages/67/59/d1c0063d4cd3b85363c7044ff3e5159d6d5df96e2692a9a5312d9c8cb290/grpcio_tools-1.80.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cdaa1c9aa8d3a87891a96700cadd29beec214711d6522818d207277f6452567c", size = 3113814, upload-time = "2026-03-30T08:51:08.834Z" }, + { url = "https://files.pythonhosted.org/packages/76/21/18d34a4efe524c903cf66b0cfa5260d81f277b6ae668b647edf795df9ce5/grpcio_tools-1.80.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3399b5fd7b59bcffd59c6b9975a969d9f37a3c87f3e3d63c3a09c147907acb0d", size = 3662793, upload-time = "2026-03-30T08:51:11.094Z" }, + { url = "https://files.pythonhosted.org/packages/f3/40/cf2d9295a6bd593244ea703858f8fc2efd315046ca3ef7c6f9ebc5b810fa/grpcio_tools-1.80.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9c6abc08d3485b2aac99bb58afcd31dc6cd4316ce36cf263ff09cb6df15f287f", size = 3329149, upload-time = "2026-03-30T08:51:13.066Z" }, + { url = "https://files.pythonhosted.org/packages/0d/1d/fc34b32167966df20d69429b71dfca83c48434b047a5ac4fd6cd91ca4eed/grpcio_tools-1.80.0-cp312-cp312-win32.whl", hash = "sha256:18c51e07652ac7386fcdbd11866f8d55a795de073337c12447b5805575339f74", size = 997519, upload-time = "2026-03-30T08:51:14.87Z" }, + { url = "https://files.pythonhosted.org/packages/91/98/6d6563cdf51085b75f8ec24605c6f2ce84197571878ca8ab4af949c6be2d/grpcio_tools-1.80.0-cp312-cp312-win_amd64.whl", hash = "sha256:ac6fdd42d5bb18f0d903a067e2825be172deff70cf197164b6f65676cb506c9b", size = 1162407, upload-time = "2026-03-30T08:51:16.793Z" }, + { url = "https://files.pythonhosted.org/packages/44/d9/f7887a4805939e9a85d03744b66fc02575dc1df3c3e8b4d9ec000ee7a33d/grpcio_tools-1.80.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:e7046837859bbfd10b01786056145480155c16b222c9e209215b68d3be13060e", size = 2550319, upload-time = "2026-03-30T08:51:19.117Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/5a/c8a05b32bd7203f1b9f4c0151090a2d6179d6c97692d32f2066dc29c67a6/grpcio_tools-1.80.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:a447f28958a8fe84ff0d9d3d9473868feb27ee4a9c9c805e66f5b670121cec59", size = 5709681, upload-time = "2026-03-30T08:51:21.991Z" }, + { url = "https://files.pythonhosted.org/packages/82/6b/794350ed645c12c310008f97068f6a6fd927150b0d0d08aad1d909e880b1/grpcio_tools-1.80.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:75f00450e08fe648ad8a1eeb25bc52219679d54cdd02f04dfdddc747309d83f6", size = 2596820, upload-time = "2026-03-30T08:51:24.323Z" }, + { url = "https://files.pythonhosted.org/packages/f9/b2/b39e7b79f7c878135e0784a53cd7260ee77260c8c7f2c9e46bca8e05d017/grpcio_tools-1.80.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3db830eaff1f2c2797328f2fa86c9dcdbd7d81af573a68db81e27afa2182a611", size = 2909193, upload-time = "2026-03-30T08:51:27.025Z" }, + { url = "https://files.pythonhosted.org/packages/10/f3/abe089b058f87f9910c9a458409505cbeb0b3e1c2d993a79721d02ee6a32/grpcio_tools-1.80.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7982b5fe42f012686b667dda12916884de95c4b1c65ff64371fb7232a1474b23", size = 2660197, upload-time = "2026-03-30T08:51:29.392Z" }, + { url = "https://files.pythonhosted.org/packages/09/c3/3f7806ad8b731d8a89fe3c6ed496473abd1ef4c9c42c9e9a8836ce96e377/grpcio_tools-1.80.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6451b3f4eb52d12c7f32d04bf8e0185f80521f3f088ad04b8d222b3a4819c71e", size = 3113144, upload-time = "2026-03-30T08:51:31.671Z" }, + { url = "https://files.pythonhosted.org/packages/fe/f5/415ef205e0b7e75d2a2005df6120145c4f02fda28d7b3715b55d924fe1a4/grpcio_tools-1.80.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:258bc30654a9a2236be4ca8e2ad443e2ac6db7c8cc20454d34cce60265922726", size = 3661897, upload-time = "2026-03-30T08:51:34.849Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/d3/2ad54764c2a9547080dd8518f4a4dc7899c7e6e747a1b1de542ce6a12066/grpcio_tools-1.80.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:865a2b8e6334c838976ab02a322cbd55c863d2eaf3c1e1a0255883c63996772a", size = 3328786, upload-time = "2026-03-30T08:51:37.265Z" }, + { url = "https://files.pythonhosted.org/packages/eb/63/23ab7db01f9630ab4f3742a2fc9fbff38b0cfc30c976114f913950664a75/grpcio_tools-1.80.0-cp313-cp313-win32.whl", hash = "sha256:f760ac1722f33e774814c37b6aa0444143f612e85088ead7447a0e9cd306a1f1", size = 997087, upload-time = "2026-03-30T08:51:39.137Z" }, + { url = "https://files.pythonhosted.org/packages/9b/af/b1c1c4423fb49cb7c8e9d2c02196b038c44160b7028b425466743c6c81fa/grpcio_tools-1.80.0-cp313-cp313-win_amd64.whl", hash = "sha256:7843b9ac6ff8ca508424d0dd968bd9a1a4559967e4a290f26be5bd6f04af2234", size = 1162167, upload-time = "2026-03-30T08:51:41.498Z" }, + { url = "https://files.pythonhosted.org/packages/0e/44/7beeee2348f9f412804f5bf80b7d13b81d522bf926a338ae3da46b2213b7/grpcio_tools-1.80.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:12f950470449dbeec78317dbc090add7a00eb6ca812af7b0538ab7441e0a42c3", size = 2550303, upload-time = "2026-03-30T08:51:44.373Z" }, + { url = "https://files.pythonhosted.org/packages/2d/aa/f77dd85409a1855f8c6319ffc69d81e8c3ffe122ee3a7136653e1991d8b6/grpcio_tools-1.80.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:d3f9a376a29c9adf62bb56f7ff5bc81eb4abeaf53d1e7dde5015564832901a51", size = 5709778, upload-time = "2026-03-30T08:51:47.112Z" }, + { url = "https://files.pythonhosted.org/packages/9c/7c/ab7af4883ebdfdc228b853de89fed409703955e8d47285b321a5794856bd/grpcio_tools-1.80.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ba1ffbf2cff71533615e2c5a138ed5569611eec9ae7f9c67b8898e127b54ac0", size = 2597928, upload-time = "2026-03-30T08:51:49.494Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/e8/4381a963d472e3ab6690ba067ed2b1f1abf8518b10f402678bd2dcb79a54/grpcio_tools-1.80.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:13f60f8d9397c514c6745a967d22b5c8c698347e88deebca1ff2e1b94555e450", size = 2909333, upload-time = "2026-03-30T08:51:52.124Z" }, + { url = "https://files.pythonhosted.org/packages/94/cb/356b5fdf79dd99455b425fb16302fe60995554ceb721afbf3cf770a19208/grpcio_tools-1.80.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:88d77bad5dd3cd5e6f952c4ecdd0ee33e0c02ecfc2e4b0cbee3391ac19e0a431", size = 2660217, upload-time = "2026-03-30T08:51:55.066Z" }, + { url = "https://files.pythonhosted.org/packages/2b/d7/1752018cc2c36b2c5612051379e2e5f59f2dbe612de23e817d2f066a9487/grpcio_tools-1.80.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:017945c3e98a4ed1c4e21399781b4137fc08dfc1f802c8ace2e64ef52d32b142", size = 3113896, upload-time = "2026-03-30T08:51:57.3Z" }, + { url = "https://files.pythonhosted.org/packages/cc/17/695bbe454f70df35c03e22b48c5314683b913d3e6ed35ec90d065418c1ab/grpcio_tools-1.80.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a33e265d4db803495007a6c623eafb0f6b9bb123ff4a0af89e44567dad809b88", size = 3661950, upload-time = "2026-03-30T08:51:59.867Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d0/533d87629ec823c02c9169ee20228f734c264b209dcdf55268b5a14cde0a/grpcio_tools-1.80.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6c129da370c5f85f569be2e545317dda786a60dd51d7deea29b03b0c05f6aac3", size = 3328755, upload-time = "2026-03-30T08:52:02.942Z" }, + { url = "https://files.pythonhosted.org/packages/08/a1/504d7838770c73a9761e8a8ff4869dba1146b44f297ff0ac6641481942d3/grpcio_tools-1.80.0-cp314-cp314-win32.whl", hash = "sha256:25742de5958ae4325249a37e724e7c0e5120f8e302a24a977ebd1737b48a5e97", size = 1019620, upload-time = "2026-03-30T08:52:05.342Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/75/8b7cd281c5cdfb4ca2c308f7e9b2799bab2be6e7a9e9212ea5a82e2aecd4/grpcio_tools-1.80.0-cp314-cp314-win_amd64.whl", hash = "sha256:bbf8eeef78fda1966f732f79c1c802fadd5cfd203d845d2af4d314d18569069c", size = 1194210, upload-time = "2026-03-30T08:52:08.105Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "h2" +version = "4.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "hpack" }, + { name = "hyperframe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1d/17/afa56379f94ad0fe8defd37d6eb3f89a25404ffc71d4d848893d270325fc/h2-4.3.0.tar.gz", hash = "sha256:6c59efe4323fa18b47a632221a1888bd7fde6249819beda254aeca909f221bf1", size = 2152026, upload-time = "2025-08-23T18:12:19.778Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/b2/119f6e6dcbd96f9069ce9a2665e0146588dc9f88f29549711853645e736a/h2-4.3.0-py3-none-any.whl", hash = "sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd", size = 61779, upload-time = "2025-08-23T18:12:17.779Z" }, +] + +[[package]] +name = "hf-xet" +version = "1.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/53/92/ec9ad04d0b5728dca387a45af7bc98fbb0d73b2118759f5f6038b61a57e8/hf_xet-1.4.3.tar.gz", hash = 
"sha256:8ddedb73c8c08928c793df2f3401ec26f95be7f7e516a7bee2fbb546f6676113", size = 670477, upload-time = "2026-03-31T22:40:07.874Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/72/43/724d307b34e353da0abd476e02f72f735cdd2bc86082dee1b32ea0bfee1d/hf_xet-1.4.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:7551659ba4f1e1074e9623996f28c3873682530aee0a846b7f2f066239228144", size = 3800935, upload-time = "2026-03-31T22:39:49.618Z" }, + { url = "https://files.pythonhosted.org/packages/2b/d2/8bee5996b699262edb87dbb54118d287c0e1b2fc78af7cdc41857ba5e3c4/hf_xet-1.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:bee693ada985e7045997f05f081d0e12c4c08bd7626dc397f8a7c487e6c04f7f", size = 3558942, upload-time = "2026-03-31T22:39:47.938Z" }, + { url = "https://files.pythonhosted.org/packages/c3/a1/e993d09cbe251196fb60812b09a58901c468127b7259d2bf0f68bf6088eb/hf_xet-1.4.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:21644b404bb0100fe3857892f752c4d09642586fd988e61501c95bbf44b393a3", size = 4207657, upload-time = "2026-03-31T22:39:39.69Z" }, + { url = "https://files.pythonhosted.org/packages/64/44/9eb6d21e5c34c63e5e399803a6932fa983cabdf47c0ecbcfe7ea97684b8c/hf_xet-1.4.3-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:987f09cfe418237812896a6736b81b1af02a3a6dcb4b4944425c4c4fca7a7cf8", size = 3986765, upload-time = "2026-03-31T22:39:37.936Z" }, + { url = "https://files.pythonhosted.org/packages/ea/7b/8ad6f16fdb82f5f7284a34b5ec48645bd575bdcd2f6f0d1644775909c486/hf_xet-1.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:60cf7fc43a99da0a853345cf86d23738c03983ee5249613a6305d3e57a5dca74", size = 4188162, upload-time = "2026-03-31T22:39:58.382Z" }, + { url = "https://files.pythonhosted.org/packages/1b/c4/39d6e136cbeea9ca5a23aad4b33024319222adbdc059ebcda5fc7d9d5ff4/hf_xet-1.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2815a49a7a59f3e2edf0cf113ae88e8cb2ca2a221bf353fb60c609584f4884d4", size = 4424525, 
upload-time = "2026-03-31T22:40:00.225Z" }, + { url = "https://files.pythonhosted.org/packages/46/f2/adc32dae6bdbc367853118b9878139ac869419a4ae7ba07185dc31251b76/hf_xet-1.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:42ee323265f1e6a81b0e11094564fb7f7e0ec75b5105ffd91ae63f403a11931b", size = 3671610, upload-time = "2026-03-31T22:40:10.42Z" }, + { url = "https://files.pythonhosted.org/packages/e2/19/25d897dcc3f81953e0c2cde9ec186c7a0fee413eb0c9a7a9130d87d94d3a/hf_xet-1.4.3-cp313-cp313t-win_arm64.whl", hash = "sha256:27c976ba60079fb8217f485b9c5c7fcd21c90b0367753805f87cb9f3cdc4418a", size = 3528529, upload-time = "2026-03-31T22:40:09.106Z" }, + { url = "https://files.pythonhosted.org/packages/ec/36/3e8f85ca9fe09b8de2b2e10c63b3b3353d7dda88a0b3d426dffbe7b8313b/hf_xet-1.4.3-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:5251d5ece3a81815bae9abab41cf7ddb7bcb8f56411bce0827f4a3071c92fdc6", size = 3801019, upload-time = "2026-03-31T22:39:56.651Z" }, + { url = "https://files.pythonhosted.org/packages/b5/9c/defb6cb1de28bccb7bd8d95f6e60f72a3d3fa4cb3d0329c26fb9a488bfe7/hf_xet-1.4.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1feb0f3abeacee143367c326a128a2e2b60868ec12a36c225afb1d6c5a05e6d2", size = 3558746, upload-time = "2026-03-31T22:39:54.766Z" }, + { url = "https://files.pythonhosted.org/packages/c1/bd/8d001191893178ff8e826e46ad5299446e62b93cd164e17b0ffea08832ec/hf_xet-1.4.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8b301fc150290ca90b4fccd079829b84bb4786747584ae08b94b4577d82fb791", size = 4207692, upload-time = "2026-03-31T22:39:46.246Z" }, + { url = "https://files.pythonhosted.org/packages/ce/48/6790b402803250e9936435613d3a78b9aaeee7973439f0918848dde58309/hf_xet-1.4.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:d972fbe95ddc0d3c0fc49b31a8a69f47db35c1e3699bf316421705741aab6653", size = 3986281, upload-time = "2026-03-31T22:39:44.648Z" }, + { url = 
"https://files.pythonhosted.org/packages/51/56/ea62552fe53db652a9099eda600b032d75554d0e86c12a73824bfedef88b/hf_xet-1.4.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c5b48db1ee344a805a1b9bd2cda9b6b65fe77ed3787bd6e87ad5521141d317cd", size = 4187414, upload-time = "2026-03-31T22:40:04.951Z" }, + { url = "https://files.pythonhosted.org/packages/7d/f5/bc1456d4638061bea997e6d2db60a1a613d7b200e0755965ec312dc1ef79/hf_xet-1.4.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:22bdc1f5fb8b15bf2831440b91d1c9bbceeb7e10c81a12e8d75889996a5c9da8", size = 4424368, upload-time = "2026-03-31T22:40:06.347Z" }, + { url = "https://files.pythonhosted.org/packages/e4/76/ab597bae87e1f06d18d3ecb8ed7f0d3c9a37037fc32ce76233d369273c64/hf_xet-1.4.3-cp314-cp314t-win_amd64.whl", hash = "sha256:0392c79b7cf48418cd61478c1a925246cf10639f4cd9d94368d8ca1e8df9ea07", size = 3672280, upload-time = "2026-03-31T22:40:16.401Z" }, + { url = "https://files.pythonhosted.org/packages/62/05/2e462d34e23a09a74d73785dbed71cc5dbad82a72eee2ad60a72a554155d/hf_xet-1.4.3-cp314-cp314t-win_arm64.whl", hash = "sha256:681c92a07796325778a79d76c67011764ecc9042a8c3579332b61b63ae512075", size = 3528945, upload-time = "2026-03-31T22:40:14.995Z" }, + { url = "https://files.pythonhosted.org/packages/ac/9f/9c23e4a447b8f83120798f9279d0297a4d1360bdbf59ef49ebec78fe2545/hf_xet-1.4.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:d0da85329eaf196e03e90b84c2d0aca53bd4573d097a75f99609e80775f98025", size = 3805048, upload-time = "2026-03-31T22:39:53.105Z" }, + { url = "https://files.pythonhosted.org/packages/0b/f8/7aacb8e5f4a7899d39c787b5984e912e6c18b11be136ef13947d7a66d265/hf_xet-1.4.3-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:e23717ce4186b265f69afa66e6f0069fe7efbf331546f5c313d00e123dc84583", size = 3562178, upload-time = "2026-03-31T22:39:51.295Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/9a/a24b26dc8a65f0ecc0fe5be981a19e61e7ca963b85e062c083f3a9100529/hf_xet-1.4.3-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc360b70c815bf340ed56c7b8c63aacf11762a4b099b2fe2c9bd6d6068668c08", size = 4212320, upload-time = "2026-03-31T22:39:42.922Z" }, + { url = "https://files.pythonhosted.org/packages/53/60/46d493db155d2ee2801b71fb1b0fd67696359047fdd8caee2c914cc50c79/hf_xet-1.4.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:39f2d2e9654cd9b4319885733993807aab6de9dfbd34c42f0b78338d6617421f", size = 3991546, upload-time = "2026-03-31T22:39:41.335Z" }, + { url = "https://files.pythonhosted.org/packages/bc/f5/067363e1c96c6b17256910830d1b54099d06287e10f4ec6ec4e7e08371fc/hf_xet-1.4.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:49ad8a8cead2b56051aa84d7fce3e1335efe68df3cf6c058f22a65513885baac", size = 4193200, upload-time = "2026-03-31T22:40:01.936Z" }, + { url = "https://files.pythonhosted.org/packages/42/4b/53951592882d9c23080c7644542fda34a3813104e9e11fa1a7d82d419cb8/hf_xet-1.4.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7716d62015477a70ea272d2d68cd7cad140f61c52ee452e133e139abfe2c17ba", size = 4429392, upload-time = "2026-03-31T22:40:03.492Z" }, + { url = "https://files.pythonhosted.org/packages/8a/21/75a6c175b4e79662ad8e62f46a40ce341d8d6b206b06b4320d07d55b188c/hf_xet-1.4.3-cp37-abi3-win_amd64.whl", hash = "sha256:6b591fcad34e272a5b02607485e4f2a1334aebf1bc6d16ce8eb1eb8978ac2021", size = 3677359, upload-time = "2026-03-31T22:40:13.619Z" }, + { url = "https://files.pythonhosted.org/packages/8a/7c/44314ecd0e89f8b2b51c9d9e5e7a60a9c1c82024ac471d415860557d3cd8/hf_xet-1.4.3-cp37-abi3-win_arm64.whl", hash = "sha256:7c2c7e20bcfcc946dc67187c203463f5e932e395845d098cc2a93f5b67ca0b47", size = 3533664, upload-time = "2026-03-31T22:40:12.152Z" }, +] + +[[package]] +name = "hpack" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276, upload-time = "2025-01-22T21:44:58.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357, upload-time = "2025-01-22T21:44:56.92Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httptools" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", 
size = 206280, upload-time = "2025-10-10T03:54:39.274Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004, upload-time = "2025-10-10T03:54:40.403Z" }, + { url = "https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655, upload-time = "2025-10-10T03:54:41.347Z" }, + { url = "https://files.pythonhosted.org/packages/11/7d/71fee6f1844e6fa378f2eddde6c3e41ce3a1fb4b2d81118dd544e3441ec0/httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2", size = 511440, upload-time = "2025-10-10T03:54:42.452Z" }, + { url = "https://files.pythonhosted.org/packages/22/a5/079d216712a4f3ffa24af4a0381b108aa9c45b7a5cc6eb141f81726b1823/httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362", size = 495186, upload-time = "2025-10-10T03:54:43.937Z" }, + { url = "https://files.pythonhosted.org/packages/e9/9e/025ad7b65278745dee3bd0ebf9314934c4592560878308a6121f7f812084/httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c", size = 499192, upload-time = "2025-10-10T03:54:45.003Z" }, + { url = "https://files.pythonhosted.org/packages/6d/de/40a8f202b987d43afc4d54689600ff03ce65680ede2f31df348d7f368b8f/httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321", size = 86694, upload-time = 
"2025-10-10T03:54:45.923Z" }, + { url = "https://files.pythonhosted.org/packages/09/8f/c77b1fcbfd262d422f12da02feb0d218fa228d52485b77b953832105bb90/httptools-0.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6babce6cfa2a99545c60bfef8bee0cc0545413cb0018f617c8059a30ad985de3", size = 202889, upload-time = "2025-10-10T03:54:47.089Z" }, + { url = "https://files.pythonhosted.org/packages/0a/1a/22887f53602feaa066354867bc49a68fc295c2293433177ee90870a7d517/httptools-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:601b7628de7504077dd3dcb3791c6b8694bbd967148a6d1f01806509254fb1ca", size = 108180, upload-time = "2025-10-10T03:54:48.052Z" }, + { url = "https://files.pythonhosted.org/packages/32/6a/6aaa91937f0010d288d3d124ca2946d48d60c3a5ee7ca62afe870e3ea011/httptools-0.7.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:04c6c0e6c5fb0739c5b8a9eb046d298650a0ff38cf42537fc372b28dc7e4472c", size = 478596, upload-time = "2025-10-10T03:54:48.919Z" }, + { url = "https://files.pythonhosted.org/packages/6d/70/023d7ce117993107be88d2cbca566a7c1323ccbaf0af7eabf2064fe356f6/httptools-0.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69d4f9705c405ae3ee83d6a12283dc9feba8cc6aaec671b412917e644ab4fa66", size = 473268, upload-time = "2025-10-10T03:54:49.993Z" }, + { url = "https://files.pythonhosted.org/packages/32/4d/9dd616c38da088e3f436e9a616e1d0cc66544b8cdac405cc4e81c8679fc7/httptools-0.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:44c8f4347d4b31269c8a9205d8a5ee2df5322b09bbbd30f8f862185bb6b05346", size = 455517, upload-time = "2025-10-10T03:54:51.066Z" }, + { url = "https://files.pythonhosted.org/packages/1d/3a/a6c595c310b7df958e739aae88724e24f9246a514d909547778d776799be/httptools-0.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:465275d76db4d554918aba40bf1cbebe324670f3dfc979eaffaa5d108e2ed650", size = 458337, upload-time = "2025-10-10T03:54:52.196Z" 
}, + { url = "https://files.pythonhosted.org/packages/fd/82/88e8d6d2c51edc1cc391b6e044c6c435b6aebe97b1abc33db1b0b24cd582/httptools-0.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:322d00c2068d125bd570f7bf78b2d367dad02b919d8581d7476d8b75b294e3e6", size = 85743, upload-time = "2025-10-10T03:54:53.448Z" }, + { url = "https://files.pythonhosted.org/packages/34/50/9d095fcbb6de2d523e027a2f304d4551855c2f46e0b82befd718b8b20056/httptools-0.7.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:c08fe65728b8d70b6923ce31e3956f859d5e1e8548e6f22ec520a962c6757270", size = 203619, upload-time = "2025-10-10T03:54:54.321Z" }, + { url = "https://files.pythonhosted.org/packages/07/f0/89720dc5139ae54b03f861b5e2c55a37dba9a5da7d51e1e824a1f343627f/httptools-0.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7aea2e3c3953521c3c51106ee11487a910d45586e351202474d45472db7d72d3", size = 108714, upload-time = "2025-10-10T03:54:55.163Z" }, + { url = "https://files.pythonhosted.org/packages/b3/cb/eea88506f191fb552c11787c23f9a405f4c7b0c5799bf73f2249cd4f5228/httptools-0.7.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0e68b8582f4ea9166be62926077a3334064d422cf08ab87d8b74664f8e9058e1", size = 472909, upload-time = "2025-10-10T03:54:56.056Z" }, + { url = "https://files.pythonhosted.org/packages/e0/4a/a548bdfae6369c0d078bab5769f7b66f17f1bfaa6fa28f81d6be6959066b/httptools-0.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df091cf961a3be783d6aebae963cc9b71e00d57fa6f149025075217bc6a55a7b", size = 470831, upload-time = "2025-10-10T03:54:57.219Z" }, + { url = "https://files.pythonhosted.org/packages/4d/31/14df99e1c43bd132eec921c2e7e11cda7852f65619bc0fc5bdc2d0cb126c/httptools-0.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f084813239e1eb403ddacd06a30de3d3e09a9b76e7894dcda2b22f8a726e9c60", size = 452631, upload-time = "2025-10-10T03:54:58.219Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/d2/b7e131f7be8d854d48cb6d048113c30f9a46dca0c9a8b08fcb3fcd588cdc/httptools-0.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7347714368fb2b335e9063bc2b96f2f87a9ceffcd9758ac295f8bbcd3ffbc0ca", size = 452910, upload-time = "2025-10-10T03:54:59.366Z" }, + { url = "https://files.pythonhosted.org/packages/53/cf/878f3b91e4e6e011eff6d1fa9ca39f7eb17d19c9d7971b04873734112f30/httptools-0.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:cfabda2a5bb85aa2a904ce06d974a3f30fb36cc63d7feaddec05d2050acede96", size = 88205, upload-time = "2025-10-10T03:55:00.389Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[package.optional-dependencies] +http2 = [ + { name = "h2" }, +] +socks = [ + { name = "socksio" }, +] + +[[package]] +name = "huggingface-hub" +version = "1.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, + { name = "fsspec" }, + { name = "hf-xet", marker = "platform_machine == 'AMD64' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'" }, + { name = "httpx" }, + { name = "packaging" }, + { name = "pyyaml" }, + { name = "tqdm" }, + { name = 
"typer" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/56/52/1b54cb569509c725a32c1315261ac9fd0e6b91bbbf74d86fca10d3376164/huggingface_hub-1.12.0.tar.gz", hash = "sha256:7c3fe85e24b652334e5d456d7a812cd9a071e75630fac4365d9165ab5e4a34b6", size = 763091, upload-time = "2026-04-24T13:32:08.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/2b/ef03ddb96bd1123503c2bd6932001020292deea649e9bf4caa2cb65a85bf/huggingface_hub-1.12.0-py3-none-any.whl", hash = "sha256:d74939969585ee35748bd66de09baf84099d461bda7287cd9043bfb99b0e424d", size = 646806, upload-time = "2026-04-24T13:32:06.717Z" }, +] + +[[package]] +name = "hyperframe" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566, upload-time = "2025-01-22T21:41:49.302Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = "2025-01-22T21:41:47.295Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 
71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "jiter" +version = "0.13.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/5e/4ec91646aee381d01cdb9974e30882c9cd3b8c5d1079d6b5ff4af522439a/jiter-0.13.0.tar.gz", hash = "sha256:f2839f9c2c7e2dffc1bc5929a510e14ce0a946be9365fd1219e7ef342dae14f4", size = 164847, upload-time = "2026-02-02T12:37:56.441Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/2e/30/7687e4f87086829955013ca12a9233523349767f69653ebc27036313def9/jiter-0.13.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0a2bd69fc1d902e89925fc34d1da51b2128019423d7b339a45d9e99c894e0663", size = 307958, upload-time = "2026-02-02T12:35:57.165Z" }, + { url = "https://files.pythonhosted.org/packages/c3/27/e57f9a783246ed95481e6749cc5002a8a767a73177a83c63ea71f0528b90/jiter-0.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f917a04240ef31898182f76a332f508f2cc4b57d2b4d7ad2dbfebbfe167eb505", size = 318597, upload-time = "2026-02-02T12:35:58.591Z" }, + { url = "https://files.pythonhosted.org/packages/cf/52/e5719a60ac5d4d7c5995461a94ad5ef962a37c8bf5b088390e6fad59b2ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1e2b199f446d3e82246b4fd9236d7cb502dc2222b18698ba0d986d2fecc6152", size = 348821, upload-time = "2026-02-02T12:36:00.093Z" }, + { url = "https://files.pythonhosted.org/packages/61/db/c1efc32b8ba4c740ab3fc2d037d8753f67685f475e26b9d6536a4322bcdd/jiter-0.13.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04670992b576fa65bd056dbac0c39fe8bd67681c380cb2b48efa885711d9d726", size = 364163, upload-time = "2026-02-02T12:36:01.937Z" }, + { url = "https://files.pythonhosted.org/packages/55/8a/fb75556236047c8806995671a18e4a0ad646ed255276f51a20f32dceaeec/jiter-0.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a1aff1fbdb803a376d4d22a8f63f8e7ccbce0b4890c26cc7af9e501ab339ef0", size = 483709, upload-time = "2026-02-02T12:36:03.41Z" }, + { url = "https://files.pythonhosted.org/packages/7e/16/43512e6ee863875693a8e6f6d532e19d650779d6ba9a81593ae40a9088ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b3fb8c2053acaef8580809ac1d1f7481a0a0bdc012fd7f5d8b18fb696a5a089", size = 370480, upload-time = "2026-02-02T12:36:04.791Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/4c/09b93e30e984a187bc8aaa3510e1ec8dcbdcd71ca05d2f56aac0492453aa/jiter-0.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdaba7d87e66f26a2c45d8cbadcbfc4bf7884182317907baf39cfe9775bb4d93", size = 360735, upload-time = "2026-02-02T12:36:06.994Z" }, + { url = "https://files.pythonhosted.org/packages/1a/1b/46c5e349019874ec5dfa508c14c37e29864ea108d376ae26d90bee238cd7/jiter-0.13.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b88d649135aca526da172e48083da915ec086b54e8e73a425ba50999468cc08", size = 391814, upload-time = "2026-02-02T12:36:08.368Z" }, + { url = "https://files.pythonhosted.org/packages/15/9e/26184760e85baee7162ad37b7912797d2077718476bf91517641c92b3639/jiter-0.13.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e404ea551d35438013c64b4f357b0474c7abf9f781c06d44fcaf7a14c69ff9e2", size = 513990, upload-time = "2026-02-02T12:36:09.993Z" }, + { url = "https://files.pythonhosted.org/packages/e9/34/2c9355247d6debad57a0a15e76ab1566ab799388042743656e566b3b7de1/jiter-0.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f4748aad1b4a93c8bdd70f604d0f748cdc0e8744c5547798acfa52f10e79228", size = 548021, upload-time = "2026-02-02T12:36:11.376Z" }, + { url = "https://files.pythonhosted.org/packages/ac/4a/9f2c23255d04a834398b9c2e0e665382116911dc4d06b795710503cdad25/jiter-0.13.0-cp312-cp312-win32.whl", hash = "sha256:0bf670e3b1445fc4d31612199f1744f67f889ee1bbae703c4b54dc097e5dd394", size = 203024, upload-time = "2026-02-02T12:36:12.682Z" }, + { url = "https://files.pythonhosted.org/packages/09/ee/f0ae675a957ae5a8f160be3e87acea6b11dc7b89f6b7ab057e77b2d2b13a/jiter-0.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:15db60e121e11fe186c0b15236bd5d18381b9ddacdcf4e659feb96fc6c969c92", size = 205424, upload-time = "2026-02-02T12:36:13.93Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/02/ae611edf913d3cbf02c97cdb90374af2082c48d7190d74c1111dde08bcdd/jiter-0.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:41f92313d17989102f3cb5dd533a02787cdb99454d494344b0361355da52fcb9", size = 186818, upload-time = "2026-02-02T12:36:15.308Z" }, + { url = "https://files.pythonhosted.org/packages/91/9c/7ee5a6ff4b9991e1a45263bfc46731634c4a2bde27dfda6c8251df2d958c/jiter-0.13.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1f8a55b848cbabf97d861495cd65f1e5c590246fabca8b48e1747c4dfc8f85bf", size = 306897, upload-time = "2026-02-02T12:36:16.748Z" }, + { url = "https://files.pythonhosted.org/packages/7c/02/be5b870d1d2be5dd6a91bdfb90f248fbb7dcbd21338f092c6b89817c3dbf/jiter-0.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f556aa591c00f2c45eb1b89f68f52441a016034d18b65da60e2d2875bbbf344a", size = 317507, upload-time = "2026-02-02T12:36:18.351Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/b25d2ec333615f5f284f3a4024f7ce68cfa0604c322c6808b2344c7f5d2b/jiter-0.13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7e1d61da332ec412350463891923f960c3073cf1aae93b538f0bb4c8cd46efb", size = 350560, upload-time = "2026-02-02T12:36:19.746Z" }, + { url = "https://files.pythonhosted.org/packages/be/ec/74dcb99fef0aca9fbe56b303bf79f6bd839010cb18ad41000bf6cc71eec0/jiter-0.13.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3097d665a27bc96fd9bbf7f86178037db139f319f785e4757ce7ccbf390db6c2", size = 363232, upload-time = "2026-02-02T12:36:21.243Z" }, + { url = "https://files.pythonhosted.org/packages/1b/37/f17375e0bb2f6a812d4dd92d7616e41917f740f3e71343627da9db2824ce/jiter-0.13.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d01ecc3a8cbdb6f25a37bd500510550b64ddf9f7d64a107d92f3ccb25035d0f", size = 483727, upload-time = "2026-02-02T12:36:22.688Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/d2/a71160a5ae1a1e66c1395b37ef77da67513b0adba73b993a27fbe47eb048/jiter-0.13.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed9bbc30f5d60a3bdf63ae76beb3f9db280d7f195dfcfa61af792d6ce912d159", size = 370799, upload-time = "2026-02-02T12:36:24.106Z" }, + { url = "https://files.pythonhosted.org/packages/01/99/ed5e478ff0eb4e8aa5fd998f9d69603c9fd3f32de3bd16c2b1194f68361c/jiter-0.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fbafb6e88256f4454de33c1f40203d09fc33ed19162a68b3b257b29ca7f663", size = 359120, upload-time = "2026-02-02T12:36:25.519Z" }, + { url = "https://files.pythonhosted.org/packages/16/be/7ffd08203277a813f732ba897352797fa9493faf8dc7995b31f3d9cb9488/jiter-0.13.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5467696f6b827f1116556cb0db620440380434591e93ecee7fd14d1a491b6daa", size = 390664, upload-time = "2026-02-02T12:36:26.866Z" }, + { url = "https://files.pythonhosted.org/packages/d1/84/e0787856196d6d346264d6dcccb01f741e5f0bd014c1d9a2ebe149caf4f3/jiter-0.13.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2d08c9475d48b92892583df9da592a0e2ac49bcd41fae1fec4f39ba6cf107820", size = 513543, upload-time = "2026-02-02T12:36:28.217Z" }, + { url = "https://files.pythonhosted.org/packages/65/50/ecbd258181c4313cf79bca6c88fb63207d04d5bf5e4f65174114d072aa55/jiter-0.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:aed40e099404721d7fcaf5b89bd3b4568a4666358bcac7b6b15c09fb6252ab68", size = 547262, upload-time = "2026-02-02T12:36:29.678Z" }, + { url = "https://files.pythonhosted.org/packages/27/da/68f38d12e7111d2016cd198161b36e1f042bd115c169255bcb7ec823a3bf/jiter-0.13.0-cp313-cp313-win32.whl", hash = "sha256:36ebfbcffafb146d0e6ffb3e74d51e03d9c35ce7c625c8066cdbfc7b953bdc72", size = 200630, upload-time = "2026-02-02T12:36:31.808Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/65/3bd1a972c9a08ecd22eb3b08a95d1941ebe6938aea620c246cf426ae09c2/jiter-0.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:8d76029f077379374cf0dbc78dbe45b38dec4a2eb78b08b5194ce836b2517afc", size = 202602, upload-time = "2026-02-02T12:36:33.679Z" }, + { url = "https://files.pythonhosted.org/packages/15/fe/13bd3678a311aa67686bb303654792c48206a112068f8b0b21426eb6851e/jiter-0.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:bb7613e1a427cfcb6ea4544f9ac566b93d5bf67e0d48c787eca673ff9c9dff2b", size = 185939, upload-time = "2026-02-02T12:36:35.065Z" }, + { url = "https://files.pythonhosted.org/packages/49/19/a929ec002ad3228bc97ca01dbb14f7632fffdc84a95ec92ceaf4145688ae/jiter-0.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fa476ab5dd49f3bf3a168e05f89358c75a17608dbabb080ef65f96b27c19ab10", size = 316616, upload-time = "2026-02-02T12:36:36.579Z" }, + { url = "https://files.pythonhosted.org/packages/52/56/d19a9a194afa37c1728831e5fb81b7722c3de18a3109e8f282bfc23e587a/jiter-0.13.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade8cb6ff5632a62b7dbd4757d8c5573f7a2e9ae285d6b5b841707d8363205ef", size = 346850, upload-time = "2026-02-02T12:36:38.058Z" }, + { url = "https://files.pythonhosted.org/packages/36/4a/94e831c6bf287754a8a019cb966ed39ff8be6ab78cadecf08df3bb02d505/jiter-0.13.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9950290340acc1adaded363edd94baebcee7dabdfa8bee4790794cd5cfad2af6", size = 358551, upload-time = "2026-02-02T12:36:39.417Z" }, + { url = "https://files.pythonhosted.org/packages/a2/ec/a4c72c822695fa80e55d2b4142b73f0012035d9fcf90eccc56bc060db37c/jiter-0.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2b4972c6df33731aac0742b64fd0d18e0a69bc7d6e03108ce7d40c85fd9e3e6d", size = 201950, upload-time = "2026-02-02T12:36:40.791Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/00/393553ec27b824fbc29047e9c7cd4a3951d7fbe4a76743f17e44034fa4e4/jiter-0.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:701a1e77d1e593c1b435315ff625fd071f0998c5f02792038a5ca98899261b7d", size = 185852, upload-time = "2026-02-02T12:36:42.077Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f5/f1997e987211f6f9bd71b8083047b316208b4aca0b529bb5f8c96c89ef3e/jiter-0.13.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:cc5223ab19fe25e2f0bf2643204ad7318896fe3729bf12fde41b77bfc4fafff0", size = 308804, upload-time = "2026-02-02T12:36:43.496Z" }, + { url = "https://files.pythonhosted.org/packages/cd/8f/5482a7677731fd44881f0204981ce2d7175db271f82cba2085dd2212e095/jiter-0.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9776ebe51713acf438fd9b4405fcd86893ae5d03487546dae7f34993217f8a91", size = 318787, upload-time = "2026-02-02T12:36:45.071Z" }, + { url = "https://files.pythonhosted.org/packages/f3/b9/7257ac59778f1cd025b26a23c5520a36a424f7f1b068f2442a5b499b7464/jiter-0.13.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:879e768938e7b49b5e90b7e3fecc0dbec01b8cb89595861fb39a8967c5220d09", size = 353880, upload-time = "2026-02-02T12:36:47.365Z" }, + { url = "https://files.pythonhosted.org/packages/c3/87/719eec4a3f0841dad99e3d3604ee4cba36af4419a76f3cb0b8e2e691ad67/jiter-0.13.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:682161a67adea11e3aae9038c06c8b4a9a71023228767477d683f69903ebc607", size = 366702, upload-time = "2026-02-02T12:36:48.871Z" }, + { url = "https://files.pythonhosted.org/packages/d2/65/415f0a75cf6921e43365a1bc227c565cb949caca8b7532776e430cbaa530/jiter-0.13.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a13b68cd1cd8cc9de8f244ebae18ccb3e4067ad205220ef324c39181e23bbf66", size = 486319, upload-time = "2026-02-02T12:36:53.006Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/a2/9e12b48e82c6bbc6081fd81abf915e1443add1b13d8fc586e1d90bb02bb8/jiter-0.13.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87ce0f14c6c08892b610686ae8be350bf368467b6acd5085a5b65441e2bf36d2", size = 372289, upload-time = "2026-02-02T12:36:54.593Z" }, + { url = "https://files.pythonhosted.org/packages/4e/c1/e4693f107a1789a239c759a432e9afc592366f04e901470c2af89cfd28e1/jiter-0.13.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c365005b05505a90d1c47856420980d0237adf82f70c4aff7aebd3c1cc143ad", size = 360165, upload-time = "2026-02-02T12:36:56.112Z" }, + { url = "https://files.pythonhosted.org/packages/17/08/91b9ea976c1c758240614bd88442681a87672eebc3d9a6dde476874e706b/jiter-0.13.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1317fdffd16f5873e46ce27d0e0f7f4f90f0cdf1d86bf6abeaea9f63ca2c401d", size = 389634, upload-time = "2026-02-02T12:36:57.495Z" }, + { url = "https://files.pythonhosted.org/packages/18/23/58325ef99390d6d40427ed6005bf1ad54f2577866594bcf13ce55675f87d/jiter-0.13.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c05b450d37ba0c9e21c77fef1f205f56bcee2330bddca68d344baebfc55ae0df", size = 514933, upload-time = "2026-02-02T12:36:58.909Z" }, + { url = "https://files.pythonhosted.org/packages/5b/25/69f1120c7c395fd276c3996bb8adefa9c6b84c12bb7111e5c6ccdcd8526d/jiter-0.13.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:775e10de3849d0631a97c603f996f518159272db00fdda0a780f81752255ee9d", size = 548842, upload-time = "2026-02-02T12:37:00.433Z" }, + { url = "https://files.pythonhosted.org/packages/18/05/981c9669d86850c5fbb0d9e62bba144787f9fba84546ba43d624ee27ef29/jiter-0.13.0-cp314-cp314-win32.whl", hash = "sha256:632bf7c1d28421c00dd8bbb8a3bac5663e1f57d5cd5ed962bce3c73bf62608e6", size = 202108, upload-time = "2026-02-02T12:37:01.718Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/96/cdcf54dd0b0341db7d25413229888a346c7130bd20820530905fdb65727b/jiter-0.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:f22ef501c3f87ede88f23f9b11e608581c14f04db59b6a801f354397ae13739f", size = 204027, upload-time = "2026-02-02T12:37:03.075Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f9/724bcaaab7a3cd727031fe4f6995cb86c4bd344909177c186699c8dec51a/jiter-0.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:07b75fe09a4ee8e0c606200622e571e44943f47254f95e2436c8bdcaceb36d7d", size = 187199, upload-time = "2026-02-02T12:37:04.414Z" }, + { url = "https://files.pythonhosted.org/packages/62/92/1661d8b9fd6a3d7a2d89831db26fe3c1509a287d83ad7838831c7b7a5c7e/jiter-0.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:964538479359059a35fb400e769295d4b315ae61e4105396d355a12f7fef09f0", size = 318423, upload-time = "2026-02-02T12:37:05.806Z" }, + { url = "https://files.pythonhosted.org/packages/4f/3b/f77d342a54d4ebcd128e520fc58ec2f5b30a423b0fd26acdfc0c6fef8e26/jiter-0.13.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e104da1db1c0991b3eaed391ccd650ae8d947eab1480c733e5a3fb28d4313e40", size = 351438, upload-time = "2026-02-02T12:37:07.189Z" }, + { url = "https://files.pythonhosted.org/packages/76/b3/ba9a69f0e4209bd3331470c723c2f5509e6f0482e416b612431a5061ed71/jiter-0.13.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e3a5f0cde8ff433b8e88e41aa40131455420fb3649a3c7abdda6145f8cb7202", size = 364774, upload-time = "2026-02-02T12:37:08.579Z" }, + { url = "https://files.pythonhosted.org/packages/b3/16/6cdb31fa342932602458dbb631bfbd47f601e03d2e4950740e0b2100b570/jiter-0.13.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:57aab48f40be1db920a582b30b116fe2435d184f77f0e4226f546794cedd9cf0", size = 487238, upload-time = "2026-02-02T12:37:10.066Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/b1/956cc7abaca8d95c13aa8d6c9b3f3797241c246cd6e792934cc4c8b250d2/jiter-0.13.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7772115877c53f62beeb8fd853cab692dbc04374ef623b30f997959a4c0e7e95", size = 372892, upload-time = "2026-02-02T12:37:11.656Z" }, + { url = "https://files.pythonhosted.org/packages/26/c4/97ecde8b1e74f67b8598c57c6fccf6df86ea7861ed29da84629cdbba76c4/jiter-0.13.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1211427574b17b633cfceba5040de8081e5abf114f7a7602f73d2e16f9fdaa59", size = 360309, upload-time = "2026-02-02T12:37:13.244Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d7/eabe3cf46715854ccc80be2cd78dd4c36aedeb30751dbf85a1d08c14373c/jiter-0.13.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7beae3a3d3b5212d3a55d2961db3c292e02e302feb43fce6a3f7a31b90ea6dfe", size = 389607, upload-time = "2026-02-02T12:37:14.881Z" }, + { url = "https://files.pythonhosted.org/packages/df/2d/03963fc0804e6109b82decfb9974eb92df3797fe7222428cae12f8ccaa0c/jiter-0.13.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:e5562a0f0e90a6223b704163ea28e831bd3a9faa3512a711f031611e6b06c939", size = 514986, upload-time = "2026-02-02T12:37:16.326Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/8c83b45eb3eb1c1e18d841fe30b4b5bc5619d781267ca9bc03e005d8fd0a/jiter-0.13.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:6c26a424569a59140fb51160a56df13f438a2b0967365e987889186d5fc2f6f9", size = 548756, upload-time = "2026-02-02T12:37:17.736Z" }, + { url = "https://files.pythonhosted.org/packages/47/66/eea81dfff765ed66c68fd2ed8c96245109e13c896c2a5015c7839c92367e/jiter-0.13.0-cp314-cp314t-win32.whl", hash = "sha256:24dc96eca9f84da4131cdf87a95e6ce36765c3b156fc9ae33280873b1c32d5f6", size = 201196, upload-time = "2026-02-02T12:37:19.101Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/32/4ac9c7a76402f8f00d00842a7f6b83b284d0cf7c1e9d4227bc95aa6d17fa/jiter-0.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0a8d76c7524087272c8ae913f5d9d608bd839154b62c4322ef65723d2e5bb0b8", size = 204215, upload-time = "2026-02-02T12:37:20.495Z" }, + { url = "https://files.pythonhosted.org/packages/f9/8e/7def204fea9f9be8b3c21a6f2dd6c020cf56c7d5ff753e0e23ed7f9ea57e/jiter-0.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2c26cf47e2cad140fa23b6d58d435a7c0161f5c514284802f25e87fddfe11024", size = 187152, upload-time = "2026-02-02T12:37:22.124Z" }, + { url = "https://files.pythonhosted.org/packages/80/60/e50fa45dd7e2eae049f0ce964663849e897300433921198aef94b6ffa23a/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:3d744a6061afba08dd7ae375dcde870cffb14429b7477e10f67e9e6d68772a0a", size = 305169, upload-time = "2026-02-02T12:37:50.376Z" }, + { url = "https://files.pythonhosted.org/packages/d2/73/a009f41c5eed71c49bec53036c4b33555afcdee70682a18c6f66e396c039/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:ff732bd0a0e778f43d5009840f20b935e79087b4dc65bd36f1cd0f9b04b8ff7f", size = 303808, upload-time = "2026-02-02T12:37:52.092Z" }, + { url = "https://files.pythonhosted.org/packages/c4/10/528b439290763bff3d939268085d03382471b442f212dca4ff5f12802d43/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab44b178f7981fcaea7e0a5df20e773c663d06ffda0198f1a524e91b2fde7e59", size = 337384, upload-time = "2026-02-02T12:37:53.582Z" }, + { url = "https://files.pythonhosted.org/packages/67/8a/a342b2f0251f3dac4ca17618265d93bf244a2a4d089126e81e4c1056ac50/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bb00b6d26db67a05fe3e12c76edc75f32077fb51deed13822dc648fa373bc19", size = 343768, upload-time = "2026-02-02T12:37:55.055Z" }, +] + +[[package]] +name = "joblib" 
+version = "1.5.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/41/f2/d34e8b3a08a9cc79a50b2208a93dce981fe615b64d5a4d4abee421d898df/joblib-1.5.3.tar.gz", hash = "sha256:8561a3269e6801106863fd0d6d84bb737be9e7631e33aaed3fb9ce5953688da3", size = 331603, upload-time = "2025-12-15T08:41:46.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/91/984aca2ec129e2757d1e4e3c81c3fcda9d0f85b74670a094cc443d9ee949/joblib-1.5.3-py3-none-any.whl", hash = "sha256:5fc3c5039fc5ca8c0276333a188bbd59d6b7ab37fe6632daa76bc7f9ec18e713", size = 309071, upload-time = "2025-12-15T08:41:44.973Z" }, +] + +[[package]] +name = "lazy-loader" +version = "0.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/6b/c875b30a1ba490860c93da4cabf479e03f584eba06fe5963f6f6644653d8/lazy_loader-0.4.tar.gz", hash = "sha256:47c75182589b91a4e1a85a136c074285a5ad4d9f39c63e0d7fb76391c4574cd1", size = 15431, upload-time = "2024-04-05T13:03:12.261Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/60/d497a310bde3f01cb805196ac61b7ad6dc5dcf8dce66634dc34364b20b4f/lazy_loader-0.4-py3-none-any.whl", hash = "sha256:342aa8e14d543a154047afb4ba8ef17f5563baad3fc610d7b15b213b0f119efc", size = 12097, upload-time = "2024-04-05T13:03:10.514Z" }, +] + +[[package]] +name = "librosa" +version = "0.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "audioread" }, + { name = "decorator" }, + { name = "joblib" }, + { name = "lazy-loader" }, + { name = "msgpack" }, + { name = "numba" }, + { name = "numpy" }, + { name = "pooch" }, + { name = "scikit-learn" }, + { name = "scipy" }, + { name = "soundfile" }, + { name = "soxr" }, + { name = "standard-aifc", marker = "python_full_version >= '3.13'" }, + { name = "standard-sunau", marker = "python_full_version >= '3.13'" }, + { name = 
"typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/36/360b5aafa0238e29758729e9486c6ed92a6f37fa403b7875e06c115cdf4a/librosa-0.11.0.tar.gz", hash = "sha256:f5ed951ca189b375bbe2e33b2abd7e040ceeee302b9bbaeeffdfddb8d0ace908", size = 327001, upload-time = "2025-03-11T15:09:54.884Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/ba/c63c5786dfee4c3417094c4b00966e61e4a63efecee22cb7b4c0387dda83/librosa-0.11.0-py3-none-any.whl", hash = "sha256:0b6415c4fd68bff4c29288abe67c6d80b587e0e1e2cfb0aad23e4559504a7fa1", size = 260749, upload-time = "2025-03-11T15:09:52.982Z" }, +] + +[[package]] +name = "llvmlite" +version = "0.46.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/74/cd/08ae687ba099c7e3d21fe2ea536500563ef1943c5105bf6ab4ee3829f68e/llvmlite-0.46.0.tar.gz", hash = "sha256:227c9fd6d09dce2783c18b754b7cd9d9b3b3515210c46acc2d3c5badd9870ceb", size = 193456, upload-time = "2025-12-08T18:15:36.295Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2b/f8/4db016a5e547d4e054ff2f3b99203d63a497465f81ab78ec8eb2ff7b2304/llvmlite-0.46.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b9588ad4c63b4f0175a3984b85494f0c927c6b001e3a246a3a7fb3920d9a137", size = 37232767, upload-time = "2025-12-08T18:15:00.737Z" }, + { url = "https://files.pythonhosted.org/packages/aa/85/4890a7c14b4fa54400945cb52ac3cd88545bbdb973c440f98ca41591cdc5/llvmlite-0.46.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3535bd2bb6a2d7ae4012681ac228e5132cdb75fefb1bcb24e33f2f3e0c865ed4", size = 56275176, upload-time = "2025-12-08T18:15:03.936Z" }, + { url = "https://files.pythonhosted.org/packages/6a/07/3d31d39c1a1a08cd5337e78299fca77e6aebc07c059fbd0033e3edfab45c/llvmlite-0.46.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4cbfd366e60ff87ea6cc62f50bc4cd800ebb13ed4c149466f50cf2163a473d1e", size = 55128630, upload-time = 
"2025-12-08T18:15:07.196Z" }, + { url = "https://files.pythonhosted.org/packages/2a/6b/d139535d7590a1bba1ceb68751bef22fadaa5b815bbdf0e858e3875726b2/llvmlite-0.46.0-cp312-cp312-win_amd64.whl", hash = "sha256:398b39db462c39563a97b912d4f2866cd37cba60537975a09679b28fbbc0fb38", size = 38138940, upload-time = "2025-12-08T18:15:10.162Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ff/3eba7eb0aed4b6fca37125387cd417e8c458e750621fce56d2c541f67fa8/llvmlite-0.46.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:30b60892d034bc560e0ec6654737aaa74e5ca327bd8114d82136aa071d611172", size = 37232767, upload-time = "2025-12-08T18:15:13.22Z" }, + { url = "https://files.pythonhosted.org/packages/0e/54/737755c0a91558364b9200702c3c9c15d70ed63f9b98a2c32f1c2aa1f3ba/llvmlite-0.46.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6cc19b051753368a9c9f31dc041299059ee91aceec81bd57b0e385e5d5bf1a54", size = 56275176, upload-time = "2025-12-08T18:15:16.339Z" }, + { url = "https://files.pythonhosted.org/packages/e6/91/14f32e1d70905c1c0aa4e6609ab5d705c3183116ca02ac6df2091868413a/llvmlite-0.46.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bca185892908f9ede48c0acd547fe4dc1bafefb8a4967d47db6cf664f9332d12", size = 55128629, upload-time = "2025-12-08T18:15:19.493Z" }, + { url = "https://files.pythonhosted.org/packages/4a/a7/d526ae86708cea531935ae777b6dbcabe7db52718e6401e0fb9c5edea80e/llvmlite-0.46.0-cp313-cp313-win_amd64.whl", hash = "sha256:67438fd30e12349ebb054d86a5a1a57fd5e87d264d2451bcfafbbbaa25b82a35", size = 38138941, upload-time = "2025-12-08T18:15:22.536Z" }, + { url = "https://files.pythonhosted.org/packages/95/ae/af0ffb724814cc2ea64445acad05f71cff5f799bb7efb22e47ee99340dbc/llvmlite-0.46.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:d252edfb9f4ac1fcf20652258e3f102b26b03eef738dc8a6ffdab7d7d341d547", size = 37232768, upload-time = "2025-12-08T18:15:25.055Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/19/5018e5352019be753b7b07f7759cdabb69ca5779fea2494be8839270df4c/llvmlite-0.46.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:379fdd1c59badeff8982cb47e4694a6143bec3bb49aa10a466e095410522064d", size = 56275173, upload-time = "2025-12-08T18:15:28.109Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c9/d57877759d707e84c082163c543853245f91b70c804115a5010532890f18/llvmlite-0.46.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2e8cbfff7f6db0fa2c771ad24154e2a7e457c2444d7673e6de06b8b698c3b269", size = 55128628, upload-time = "2025-12-08T18:15:31.098Z" }, + { url = "https://files.pythonhosted.org/packages/30/a8/e61a8c2b3cc7a597073d9cde1fcbb567e9d827f1db30c93cf80422eac70d/llvmlite-0.46.0-cp314-cp314-win_amd64.whl", hash = "sha256:7821eda3ec1f18050f981819756631d60b6d7ab1a6cf806d9efefbe3f4082d61", size = 39153056, upload-time = "2025-12-08T18:15:33.938Z" }, +] + +[[package]] +name = "loguru" +version = "0.7.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "win32-setctime", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { 
url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "mpmath" +version = "1.3.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106, upload-time = "2023-03-07T16:47:11.061Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" }, +] + +[[package]] +name = "msgpack" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 173581, upload-time = "2025-10-08T09:15:56.596Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ad/bd/8b0d01c756203fbab65d265859749860682ccd2a59594609aeec3a144efa/msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa", size = 81939, upload-time = "2025-10-08T09:15:01.472Z" }, + { url = "https://files.pythonhosted.org/packages/34/68/ba4f155f793a74c1483d4bdef136e1023f7bcba557f0db4ef3db3c665cf1/msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb", size = 85064, upload-time = "2025-10-08T09:15:03.764Z" }, + { url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131, upload-time = "2025-10-08T09:15:05.136Z" 
}, + { url = "https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556, upload-time = "2025-10-08T09:15:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920, upload-time = "2025-10-08T09:15:08.179Z" }, + { url = "https://files.pythonhosted.org/packages/ff/41/8543ed2b8604f7c0d89ce066f42007faac1eaa7d79a81555f206a5cdb889/msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620", size = 415013, upload-time = "2025-10-08T09:15:09.83Z" }, + { url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029", size = 65096, upload-time = "2025-10-08T09:15:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b", size = 72708, upload-time = "2025-10-08T09:15:12.554Z" }, + { url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", size = 64119, upload-time = "2025-10-08T09:15:13.589Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/31/b46518ecc604d7edf3a4f94cb3bf021fc62aa301f0cb849936968164ef23/msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf", size = 81212, upload-time = "2025-10-08T09:15:14.552Z" }, + { url = "https://files.pythonhosted.org/packages/92/dc/c385f38f2c2433333345a82926c6bfa5ecfff3ef787201614317b58dd8be/msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7", size = 84315, upload-time = "2025-10-08T09:15:15.543Z" }, + { url = "https://files.pythonhosted.org/packages/d3/68/93180dce57f684a61a88a45ed13047558ded2be46f03acb8dec6d7c513af/msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999", size = 412721, upload-time = "2025-10-08T09:15:16.567Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ba/459f18c16f2b3fc1a1ca871f72f07d70c07bf768ad0a507a698b8052ac58/msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e", size = 424657, upload-time = "2025-10-08T09:15:17.825Z" }, + { url = "https://files.pythonhosted.org/packages/38/f8/4398c46863b093252fe67368b44edc6c13b17f4e6b0e4929dbf0bdb13f23/msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162", size = 402668, upload-time = "2025-10-08T09:15:19.003Z" }, + { url = "https://files.pythonhosted.org/packages/28/ce/698c1eff75626e4124b4d78e21cca0b4cc90043afb80a507626ea354ab52/msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794", size = 419040, upload-time = "2025-10-08T09:15:20.183Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/32/f3cd1667028424fa7001d82e10ee35386eea1408b93d399b09fb0aa7875f/msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c", size = 65037, upload-time = "2025-10-08T09:15:21.416Z" }, + { url = "https://files.pythonhosted.org/packages/74/07/1ed8277f8653c40ebc65985180b007879f6a836c525b3885dcc6448ae6cb/msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9", size = 72631, upload-time = "2025-10-08T09:15:22.431Z" }, + { url = "https://files.pythonhosted.org/packages/e5/db/0314e4e2db56ebcf450f277904ffd84a7988b9e5da8d0d61ab2d057df2b6/msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84", size = 64118, upload-time = "2025-10-08T09:15:23.402Z" }, + { url = "https://files.pythonhosted.org/packages/22/71/201105712d0a2ff07b7873ed3c220292fb2ea5120603c00c4b634bcdafb3/msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00", size = 81127, upload-time = "2025-10-08T09:15:24.408Z" }, + { url = "https://files.pythonhosted.org/packages/1b/9f/38ff9e57a2eade7bf9dfee5eae17f39fc0e998658050279cbb14d97d36d9/msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939", size = 84981, upload-time = "2025-10-08T09:15:25.812Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a9/3536e385167b88c2cc8f4424c49e28d49a6fc35206d4a8060f136e71f94c/msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e", size = 411885, upload-time = "2025-10-08T09:15:27.22Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/40/dc34d1a8d5f1e51fc64640b62b191684da52ca469da9cd74e84936ffa4a6/msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931", size = 419658, upload-time = "2025-10-08T09:15:28.4Z" }, + { url = "https://files.pythonhosted.org/packages/3b/ef/2b92e286366500a09a67e03496ee8b8ba00562797a52f3c117aa2b29514b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014", size = 403290, upload-time = "2025-10-08T09:15:29.764Z" }, + { url = "https://files.pythonhosted.org/packages/78/90/e0ea7990abea5764e4655b8177aa7c63cdfa89945b6e7641055800f6c16b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2", size = 415234, upload-time = "2025-10-08T09:15:31.022Z" }, + { url = "https://files.pythonhosted.org/packages/72/4e/9390aed5db983a2310818cd7d3ec0aecad45e1f7007e0cda79c79507bb0d/msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717", size = 66391, upload-time = "2025-10-08T09:15:32.265Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f1/abd09c2ae91228c5f3998dbd7f41353def9eac64253de3c8105efa2082f7/msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b", size = 73787, upload-time = "2025-10-08T09:15:33.219Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b0/9d9f667ab48b16ad4115c1935d94023b82b3198064cb84a123e97f7466c1/msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af", size = 66453, upload-time = "2025-10-08T09:15:34.225Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/67/93f80545eb1792b61a217fa7f06d5e5cb9e0055bed867f43e2b8e012e137/msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a", size = 85264, upload-time = "2025-10-08T09:15:35.61Z" }, + { url = "https://files.pythonhosted.org/packages/87/1c/33c8a24959cf193966ef11a6f6a2995a65eb066bd681fd085afd519a57ce/msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b", size = 89076, upload-time = "2025-10-08T09:15:36.619Z" }, + { url = "https://files.pythonhosted.org/packages/fc/6b/62e85ff7193663fbea5c0254ef32f0c77134b4059f8da89b958beb7696f3/msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245", size = 435242, upload-time = "2025-10-08T09:15:37.647Z" }, + { url = "https://files.pythonhosted.org/packages/c1/47/5c74ecb4cc277cf09f64e913947871682ffa82b3b93c8dad68083112f412/msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90", size = 432509, upload-time = "2025-10-08T09:15:38.794Z" }, + { url = "https://files.pythonhosted.org/packages/24/a4/e98ccdb56dc4e98c929a3f150de1799831c0a800583cde9fa022fa90602d/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20", size = 415957, upload-time = "2025-10-08T09:15:40.238Z" }, + { url = "https://files.pythonhosted.org/packages/da/28/6951f7fb67bc0a4e184a6b38ab71a92d9ba58080b27a77d3e2fb0be5998f/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27", size = 422910, upload-time = "2025-10-08T09:15:41.505Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/03/42106dcded51f0a0b5284d3ce30a671e7bd3f7318d122b2ead66ad289fed/msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b", size = 75197, upload-time = "2025-10-08T09:15:42.954Z" }, + { url = "https://files.pythonhosted.org/packages/15/86/d0071e94987f8db59d4eeb386ddc64d0bb9b10820a8d82bcd3e53eeb2da6/msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff", size = 85772, upload-time = "2025-10-08T09:15:43.954Z" }, + { url = "https://files.pythonhosted.org/packages/81/f2/08ace4142eb281c12701fc3b93a10795e4d4dc7f753911d836675050f886/msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46", size = 70868, upload-time = "2025-10-08T09:15:44.959Z" }, +] + +[[package]] +name = "multidict" +version = "6.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1a/c2/c2d94cbe6ac1753f3fc980da97b3d930efe1da3af3c9f5125354436c073d/multidict-6.7.1.tar.gz", hash = "sha256:ec6652a1bee61c53a3e5776b6049172c53b6aaba34f18c9ad04f82712bac623d", size = 102010, upload-time = "2026-01-26T02:46:45.979Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/9c/f20e0e2cf80e4b2e4b1c365bf5fe104ee633c751a724246262db8f1a0b13/multidict-6.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a90f75c956e32891a4eda3639ce6dd86e87105271f43d43442a3aedf3cddf172", size = 76893, upload-time = "2026-01-26T02:43:52.754Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cf/18ef143a81610136d3da8193da9d80bfe1cb548a1e2d1c775f26b23d024a/multidict-6.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fccb473e87eaa1382689053e4a4618e7ba7b9b9b8d6adf2027ee474597128cd", size = 45456, upload-time = "2026-01-26T02:43:53.893Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/65/1caac9d4cd32e8433908683446eebc953e82d22b03d10d41a5f0fefe991b/multidict-6.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0fa96985700739c4c7853a43c0b3e169360d6855780021bfc6d0f1ce7c123e7", size = 43872, upload-time = "2026-01-26T02:43:55.041Z" }, + { url = "https://files.pythonhosted.org/packages/cf/3b/d6bd75dc4f3ff7c73766e04e705b00ed6dbbaccf670d9e05a12b006f5a21/multidict-6.7.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb2a55f408c3043e42b40cc8eecd575afa27b7e0b956dfb190de0f8499a57a53", size = 251018, upload-time = "2026-01-26T02:43:56.198Z" }, + { url = "https://files.pythonhosted.org/packages/fd/80/c959c5933adedb9ac15152e4067c702a808ea183a8b64cf8f31af8ad3155/multidict-6.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb0ce7b2a32d09892b3dd6cc44877a0d02a33241fafca5f25c8b6b62374f8b75", size = 258883, upload-time = "2026-01-26T02:43:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/86/85/7ed40adafea3d4f1c8b916e3b5cc3a8e07dfcdcb9cd72800f4ed3ca1b387/multidict-6.7.1-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c3a32d23520ee37bf327d1e1a656fec76a2edd5c038bf43eddfa0572ec49c60b", size = 242413, upload-time = "2026-01-26T02:43:58.755Z" }, + { url = "https://files.pythonhosted.org/packages/d2/57/b8565ff533e48595503c785f8361ff9a4fde4d67de25c207cd0ba3befd03/multidict-6.7.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9c90fed18bffc0189ba814749fdcc102b536e83a9f738a9003e569acd540a733", size = 268404, upload-time = "2026-01-26T02:44:00.216Z" }, + { url = "https://files.pythonhosted.org/packages/e0/50/9810c5c29350f7258180dfdcb2e52783a0632862eb334c4896ac717cebcb/multidict-6.7.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:da62917e6076f512daccfbbde27f46fed1c98fee202f0559adec8ee0de67f71a", size = 269456, upload-time = "2026-01-26T02:44:02.202Z" }, + { url = "https://files.pythonhosted.org/packages/f3/8d/5e5be3ced1d12966fefb5c4ea3b2a5b480afcea36406559442c6e31d4a48/multidict-6.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfde23ef6ed9db7eaee6c37dcec08524cb43903c60b285b172b6c094711b3961", size = 256322, upload-time = "2026-01-26T02:44:03.56Z" }, + { url = "https://files.pythonhosted.org/packages/31/6e/d8a26d81ac166a5592782d208dd90dfdc0a7a218adaa52b45a672b46c122/multidict-6.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3758692429e4e32f1ba0df23219cd0b4fc0a52f476726fff9337d1a57676a582", size = 253955, upload-time = "2026-01-26T02:44:04.845Z" }, + { url = "https://files.pythonhosted.org/packages/59/4c/7c672c8aad41534ba619bcd4ade7a0dc87ed6b8b5c06149b85d3dd03f0cd/multidict-6.7.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:398c1478926eca669f2fd6a5856b6de9c0acf23a2cb59a14c0ba5844fa38077e", size = 251254, upload-time = "2026-01-26T02:44:06.133Z" }, + { url = "https://files.pythonhosted.org/packages/7b/bd/84c24de512cbafbdbc39439f74e967f19570ce7924e3007174a29c348916/multidict-6.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c102791b1c4f3ab36ce4101154549105a53dc828f016356b3e3bcae2e3a039d3", size = 252059, upload-time = "2026-01-26T02:44:07.518Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ba/f5449385510825b73d01c2d4087bf6d2fccc20a2d42ac34df93191d3dd03/multidict-6.7.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a088b62bd733e2ad12c50dad01b7d0166c30287c166e137433d3b410add807a6", size = 263588, upload-time = "2026-01-26T02:44:09.382Z" }, + { url = "https://files.pythonhosted.org/packages/d7/11/afc7c677f68f75c84a69fe37184f0f82fce13ce4b92f49f3db280b7e92b3/multidict-6.7.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:3d51ff4785d58d3f6c91bdbffcb5e1f7ddfda557727043aa20d20ec4f65e324a", size = 259642, upload-time = "2026-01-26T02:44:10.73Z" }, + { url = "https://files.pythonhosted.org/packages/2b/17/ebb9644da78c4ab36403739e0e6e0e30ebb135b9caf3440825001a0bddcb/multidict-6.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc5907494fccf3e7d3f94f95c91d6336b092b5fc83811720fae5e2765890dfba", size = 251377, upload-time = "2026-01-26T02:44:12.042Z" }, + { url = "https://files.pythonhosted.org/packages/ca/a4/840f5b97339e27846c46307f2530a2805d9d537d8b8bd416af031cad7fa0/multidict-6.7.1-cp312-cp312-win32.whl", hash = "sha256:28ca5ce2fd9716631133d0e9a9b9a745ad7f60bac2bccafb56aa380fc0b6c511", size = 41887, upload-time = "2026-01-26T02:44:14.245Z" }, + { url = "https://files.pythonhosted.org/packages/80/31/0b2517913687895f5904325c2069d6a3b78f66cc641a86a2baf75a05dcbb/multidict-6.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcee94dfbd638784645b066074b338bc9cc155d4b4bffa4adce1615c5a426c19", size = 46053, upload-time = "2026-01-26T02:44:15.371Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5b/aba28e4ee4006ae4c7df8d327d31025d760ffa992ea23812a601d226e682/multidict-6.7.1-cp312-cp312-win_arm64.whl", hash = "sha256:ba0a9fb644d0c1a2194cf7ffb043bd852cea63a57f66fbd33959f7dae18517bf", size = 43307, upload-time = "2026-01-26T02:44:16.852Z" }, + { url = "https://files.pythonhosted.org/packages/f2/22/929c141d6c0dba87d3e1d38fbdf1ba8baba86b7776469f2bc2d3227a1e67/multidict-6.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2b41f5fed0ed563624f1c17630cb9941cf2309d4df00e494b551b5f3e3d67a23", size = 76174, upload-time = "2026-01-26T02:44:18.509Z" }, + { url = "https://files.pythonhosted.org/packages/c7/75/bc704ae15fee974f8fccd871305e254754167dce5f9e42d88a2def741a1d/multidict-6.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84e61e3af5463c19b67ced91f6c634effb89ef8bfc5ca0267f954451ed4bb6a2", size = 45116, upload-time = "2026-01-26T02:44:19.745Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/76/55cd7186f498ed080a18440c9013011eb548f77ae1b297206d030eb1180a/multidict-6.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:935434b9853c7c112eee7ac891bc4cb86455aa631269ae35442cb316790c1445", size = 43524, upload-time = "2026-01-26T02:44:21.571Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3c/414842ef8d5a1628d68edee29ba0e5bcf235dbfb3ccd3ea303a7fe8c72ff/multidict-6.7.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:432feb25a1cb67fe82a9680b4d65fb542e4635cb3166cd9c01560651ad60f177", size = 249368, upload-time = "2026-01-26T02:44:22.803Z" }, + { url = "https://files.pythonhosted.org/packages/f6/32/befed7f74c458b4a525e60519fe8d87eef72bb1e99924fa2b0f9d97a221e/multidict-6.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e82d14e3c948952a1a85503817e038cba5905a3352de76b9a465075d072fba23", size = 256952, upload-time = "2026-01-26T02:44:24.306Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/c878a44ba877f366630c860fdf74bfb203c33778f12b6ac274936853c451/multidict-6.7.1-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4cfb48c6ea66c83bcaaf7e4dfa7ec1b6bbcf751b7db85a328902796dfde4c060", size = 240317, upload-time = "2026-01-26T02:44:25.772Z" }, + { url = "https://files.pythonhosted.org/packages/68/49/57421b4d7ad2e9e60e25922b08ceb37e077b90444bde6ead629095327a6f/multidict-6.7.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1d540e51b7e8e170174555edecddbd5538105443754539193e3e1061864d444d", size = 267132, upload-time = "2026-01-26T02:44:27.648Z" }, + { url = "https://files.pythonhosted.org/packages/b7/fe/ec0edd52ddbcea2a2e89e174f0206444a61440b40f39704e64dc807a70bd/multidict-6.7.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:273d23f4b40f3dce4d6c8a821c741a86dec62cded82e1175ba3d99be128147ed", size = 268140, upload-time = "2026-01-26T02:44:29.588Z" }, + { url = "https://files.pythonhosted.org/packages/b0/73/6e1b01cbeb458807aa0831742232dbdd1fa92bfa33f52a3f176b4ff3dc11/multidict-6.7.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d624335fd4fa1c08a53f8b4be7676ebde19cd092b3895c421045ca87895b429", size = 254277, upload-time = "2026-01-26T02:44:30.902Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b2/5fb8c124d7561a4974c342bc8c778b471ebbeb3cc17df696f034a7e9afe7/multidict-6.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:12fad252f8b267cc75b66e8fc51b3079604e8d43a75428ffe193cd9e2195dfd6", size = 252291, upload-time = "2026-01-26T02:44:32.31Z" }, + { url = "https://files.pythonhosted.org/packages/5a/96/51d4e4e06bcce92577fcd488e22600bd38e4fd59c20cb49434d054903bd2/multidict-6.7.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:03ede2a6ffbe8ef936b92cb4529f27f42be7f56afcdab5ab739cd5f27fb1cbf9", size = 250156, upload-time = "2026-01-26T02:44:33.734Z" }, + { url = "https://files.pythonhosted.org/packages/db/6b/420e173eec5fba721a50e2a9f89eda89d9c98fded1124f8d5c675f7a0c0f/multidict-6.7.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:90efbcf47dbe33dcf643a1e400d67d59abeac5db07dc3f27d6bdeae497a2198c", size = 249742, upload-time = "2026-01-26T02:44:35.222Z" }, + { url = "https://files.pythonhosted.org/packages/44/a3/ec5b5bd98f306bc2aa297b8c6f11a46714a56b1e6ef5ebda50a4f5d7c5fb/multidict-6.7.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5c4b9bfc148f5a91be9244d6264c53035c8a0dcd2f51f1c3c6e30e30ebaa1c84", size = 262221, upload-time = "2026-01-26T02:44:36.604Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f7/e8c0d0da0cd1e28d10e624604e1a36bcc3353aaebdfdc3a43c72bc683a12/multidict-6.7.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:401c5a650f3add2472d1d288c26deebc540f99e2fb83e9525007a74cd2116f1d", size = 258664, upload-time = "2026-01-26T02:44:38.008Z" }, + { url = "https://files.pythonhosted.org/packages/52/da/151a44e8016dd33feed44f730bd856a66257c1ee7aed4f44b649fb7edeb3/multidict-6.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:97891f3b1b3ffbded884e2916cacf3c6fc87b66bb0dde46f7357404750559f33", size = 249490, upload-time = "2026-01-26T02:44:39.386Z" }, + { url = "https://files.pythonhosted.org/packages/87/af/a3b86bf9630b732897f6fc3f4c4714b90aa4361983ccbdcd6c0339b21b0c/multidict-6.7.1-cp313-cp313-win32.whl", hash = "sha256:e1c5988359516095535c4301af38d8a8838534158f649c05dd1050222321bcb3", size = 41695, upload-time = "2026-01-26T02:44:41.318Z" }, + { url = "https://files.pythonhosted.org/packages/b2/35/e994121b0e90e46134673422dd564623f93304614f5d11886b1b3e06f503/multidict-6.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:960c83bf01a95b12b08fd54324a4eb1d5b52c88932b5cba5d6e712bb3ed12eb5", size = 45884, upload-time = "2026-01-26T02:44:42.488Z" }, + { url = "https://files.pythonhosted.org/packages/ca/61/42d3e5dbf661242a69c97ea363f2d7b46c567da8eadef8890022be6e2ab0/multidict-6.7.1-cp313-cp313-win_arm64.whl", hash = "sha256:563fe25c678aaba333d5399408f5ec3c383ca5b663e7f774dd179a520b8144df", size = 43122, upload-time = "2026-01-26T02:44:43.664Z" }, + { url = "https://files.pythonhosted.org/packages/6d/b3/e6b21c6c4f314bb956016b0b3ef2162590a529b84cb831c257519e7fde44/multidict-6.7.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:c76c4bec1538375dad9d452d246ca5368ad6e1c9039dadcf007ae59c70619ea1", size = 83175, upload-time = "2026-01-26T02:44:44.894Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/23ecd2abfe0957b234f6c960f4ade497f55f2c16aeb684d4ecdbf1c95791/multidict-6.7.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:57b46b24b5d5ebcc978da4ec23a819a9402b4228b8a90d9c656422b4bdd8a963", size = 48460, upload-time = "2026-01-26T02:44:46.106Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/57/a0ed92b23f3a042c36bc4227b72b97eca803f5f1801c1ab77c8a212d455e/multidict-6.7.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e954b24433c768ce78ab7929e84ccf3422e46deb45a4dc9f93438f8217fa2d34", size = 46930, upload-time = "2026-01-26T02:44:47.278Z" }, + { url = "https://files.pythonhosted.org/packages/b5/66/02ec7ace29162e447f6382c495dc95826bf931d3818799bbef11e8f7df1a/multidict-6.7.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3bd231490fa7217cc832528e1cd8752a96f0125ddd2b5749390f7c3ec8721b65", size = 242582, upload-time = "2026-01-26T02:44:48.604Z" }, + { url = "https://files.pythonhosted.org/packages/58/18/64f5a795e7677670e872673aca234162514696274597b3708b2c0d276cce/multidict-6.7.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:253282d70d67885a15c8a7716f3a73edf2d635793ceda8173b9ecc21f2fb8292", size = 250031, upload-time = "2026-01-26T02:44:50.544Z" }, + { url = "https://files.pythonhosted.org/packages/c8/ed/e192291dbbe51a8290c5686f482084d31bcd9d09af24f63358c3d42fd284/multidict-6.7.1-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b4c48648d7649c9335cf1927a8b87fa692de3dcb15faa676c6a6f1f1aabda43", size = 228596, upload-time = "2026-01-26T02:44:51.951Z" }, + { url = "https://files.pythonhosted.org/packages/1e/7e/3562a15a60cf747397e7f2180b0a11dc0c38d9175a650e75fa1b4d325e15/multidict-6.7.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:98bc624954ec4d2c7cb074b8eefc2b5d0ce7d482e410df446414355d158fe4ca", size = 257492, upload-time = "2026-01-26T02:44:53.902Z" }, + { url = "https://files.pythonhosted.org/packages/24/02/7d0f9eae92b5249bb50ac1595b295f10e263dd0078ebb55115c31e0eaccd/multidict-6.7.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:1b99af4d9eec0b49927b4402bcbb58dea89d3e0db8806a4086117019939ad3dd", size = 255899, upload-time = "2026-01-26T02:44:55.316Z" }, + { url = "https://files.pythonhosted.org/packages/00/e3/9b60ed9e23e64c73a5cde95269ef1330678e9c6e34dd4eb6b431b85b5a10/multidict-6.7.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6aac4f16b472d5b7dc6f66a0d49dd57b0e0902090be16594dc9ebfd3d17c47e7", size = 247970, upload-time = "2026-01-26T02:44:56.783Z" }, + { url = "https://files.pythonhosted.org/packages/3e/06/538e58a63ed5cfb0bd4517e346b91da32fde409d839720f664e9a4ae4f9d/multidict-6.7.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:21f830fe223215dffd51f538e78c172ed7c7f60c9b96a2bf05c4848ad49921c3", size = 245060, upload-time = "2026-01-26T02:44:58.195Z" }, + { url = "https://files.pythonhosted.org/packages/b2/2f/d743a3045a97c895d401e9bd29aaa09b94f5cbdf1bd561609e5a6c431c70/multidict-6.7.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f5dd81c45b05518b9aa4da4aa74e1c93d715efa234fd3e8a179df611cc85e5f4", size = 235888, upload-time = "2026-01-26T02:44:59.57Z" }, + { url = "https://files.pythonhosted.org/packages/38/83/5a325cac191ab28b63c52f14f1131f3b0a55ba3b9aa65a6d0bf2a9b921a0/multidict-6.7.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:eb304767bca2bb92fb9c5bd33cedc95baee5bb5f6c88e63706533a1c06ad08c8", size = 243554, upload-time = "2026-01-26T02:45:01.054Z" }, + { url = "https://files.pythonhosted.org/packages/20/1f/9d2327086bd15da2725ef6aae624208e2ef828ed99892b17f60c344e57ed/multidict-6.7.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c9035dde0f916702850ef66460bc4239d89d08df4d02023a5926e7446724212c", size = 252341, upload-time = "2026-01-26T02:45:02.484Z" }, + { url = "https://files.pythonhosted.org/packages/e8/2c/2a1aa0280cf579d0f6eed8ee5211c4f1730bd7e06c636ba2ee6aafda302e/multidict-6.7.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = 
"sha256:af959b9beeb66c822380f222f0e0a1889331597e81f1ded7f374f3ecb0fd6c52", size = 246391, upload-time = "2026-01-26T02:45:03.862Z" }, + { url = "https://files.pythonhosted.org/packages/e5/03/7ca022ffc36c5a3f6e03b179a5ceb829be9da5783e6fe395f347c0794680/multidict-6.7.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:41f2952231456154ee479651491e94118229844dd7226541788be783be2b5108", size = 243422, upload-time = "2026-01-26T02:45:05.296Z" }, + { url = "https://files.pythonhosted.org/packages/dc/1d/b31650eab6c5778aceed46ba735bd97f7c7d2f54b319fa916c0f96e7805b/multidict-6.7.1-cp313-cp313t-win32.whl", hash = "sha256:df9f19c28adcb40b6aae30bbaa1478c389efd50c28d541d76760199fc1037c32", size = 47770, upload-time = "2026-01-26T02:45:06.754Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/2d2d1d522e51285bd61b1e20df8f47ae1a9d80839db0b24ea783b3832832/multidict-6.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d54ecf9f301853f2c5e802da559604b3e95bb7a3b01a9c295c6ee591b9882de8", size = 53109, upload-time = "2026-01-26T02:45:08.044Z" }, + { url = "https://files.pythonhosted.org/packages/3d/a3/cc409ba012c83ca024a308516703cf339bdc4b696195644a7215a5164a24/multidict-6.7.1-cp313-cp313t-win_arm64.whl", hash = "sha256:5a37ca18e360377cfda1d62f5f382ff41f2b8c4ccb329ed974cc2e1643440118", size = 45573, upload-time = "2026-01-26T02:45:09.349Z" }, + { url = "https://files.pythonhosted.org/packages/91/cc/db74228a8be41884a567e88a62fd589a913708fcf180d029898c17a9a371/multidict-6.7.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8f333ec9c5eb1b7105e3b84b53141e66ca05a19a605368c55450b6ba208cb9ee", size = 75190, upload-time = "2026-01-26T02:45:10.651Z" }, + { url = "https://files.pythonhosted.org/packages/d5/22/492f2246bb5b534abd44804292e81eeaf835388901f0c574bac4eeec73c5/multidict-6.7.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a407f13c188f804c759fc6a9f88286a565c242a76b27626594c133b82883b5c2", size = 44486, upload-time = "2026-01-26T02:45:11.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/4f/733c48f270565d78b4544f2baddc2fb2a245e5a8640254b12c36ac7ac68e/multidict-6.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0e161ddf326db5577c3a4cc2d8648f81456e8a20d40415541587a71620d7a7d1", size = 43219, upload-time = "2026-01-26T02:45:14.346Z" }, + { url = "https://files.pythonhosted.org/packages/24/bb/2c0c2287963f4259c85e8bcbba9182ced8d7fca65c780c38e99e61629d11/multidict-6.7.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1e3a8bb24342a8201d178c3b4984c26ba81a577c80d4d525727427460a50c22d", size = 245132, upload-time = "2026-01-26T02:45:15.712Z" }, + { url = "https://files.pythonhosted.org/packages/a7/f9/44d4b3064c65079d2467888794dea218d1601898ac50222ab8a9a8094460/multidict-6.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97231140a50f5d447d3164f994b86a0bed7cd016e2682f8650d6a9158e14fd31", size = 252420, upload-time = "2026-01-26T02:45:17.293Z" }, + { url = "https://files.pythonhosted.org/packages/8b/13/78f7275e73fa17b24c9a51b0bd9d73ba64bb32d0ed51b02a746eb876abe7/multidict-6.7.1-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6b10359683bd8806a200fd2909e7c8ca3a7b24ec1d8132e483d58e791d881048", size = 233510, upload-time = "2026-01-26T02:45:19.356Z" }, + { url = "https://files.pythonhosted.org/packages/4b/25/8167187f62ae3cbd52da7893f58cb036b47ea3fb67138787c76800158982/multidict-6.7.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:283ddac99f7ac25a4acadbf004cb5ae34480bbeb063520f70ce397b281859362", size = 264094, upload-time = "2026-01-26T02:45:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e7/69a3a83b7b030cf283fb06ce074a05a02322359783424d7edf0f15fe5022/multidict-6.7.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:538cec1e18c067d0e6103aa9a74f9e832904c957adc260e61cd9d8cf0c3b3d37", size = 260786, upload-time = "2026-01-26T02:45:22.818Z" }, + { url = "https://files.pythonhosted.org/packages/fe/3b/8ec5074bcfc450fe84273713b4b0a0dd47c0249358f5d82eb8104ffe2520/multidict-6.7.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eee46ccb30ff48a1e35bb818cc90846c6be2b68240e42a78599166722cea709", size = 248483, upload-time = "2026-01-26T02:45:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/48/5a/d5a99e3acbca0e29c5d9cba8f92ceb15dce78bab963b308ae692981e3a5d/multidict-6.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa263a02f4f2dd2d11a7b1bb4362aa7cb1049f84a9235d31adf63f30143469a0", size = 248403, upload-time = "2026-01-26T02:45:25.982Z" }, + { url = "https://files.pythonhosted.org/packages/35/48/e58cd31f6c7d5102f2a4bf89f96b9cf7e00b6c6f3d04ecc44417c00a5a3c/multidict-6.7.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:2e1425e2f99ec5bd36c15a01b690a1a2456209c5deed58f95469ffb46039ccbb", size = 240315, upload-time = "2026-01-26T02:45:27.487Z" }, + { url = "https://files.pythonhosted.org/packages/94/33/1cd210229559cb90b6786c30676bb0c58249ff42f942765f88793b41fdce/multidict-6.7.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:497394b3239fc6f0e13a78a3e1b61296e72bf1c5f94b4c4eb80b265c37a131cd", size = 245528, upload-time = "2026-01-26T02:45:28.991Z" }, + { url = "https://files.pythonhosted.org/packages/64/f2/6e1107d226278c876c783056b7db43d800bb64c6131cec9c8dfb6903698e/multidict-6.7.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:233b398c29d3f1b9676b4b6f75c518a06fcb2ea0b925119fb2c1bc35c05e1601", size = 258784, upload-time = "2026-01-26T02:45:30.503Z" }, + { url = "https://files.pythonhosted.org/packages/4d/c1/11f664f14d525e4a1b5327a82d4de61a1db604ab34c6603bb3c2cc63ad34/multidict-6.7.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = 
"sha256:93b1818e4a6e0930454f0f2af7dfce69307ca03cdcfb3739bf4d91241967b6c1", size = 251980, upload-time = "2026-01-26T02:45:32.603Z" }, + { url = "https://files.pythonhosted.org/packages/e1/9f/75a9ac888121d0c5bbd4ecf4eead45668b1766f6baabfb3b7f66a410e231/multidict-6.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f33dc2a3abe9249ea5d8360f969ec7f4142e7ac45ee7014d8f8d5acddf178b7b", size = 243602, upload-time = "2026-01-26T02:45:34.043Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e7/50bf7b004cc8525d80dbbbedfdc7aed3e4c323810890be4413e589074032/multidict-6.7.1-cp314-cp314-win32.whl", hash = "sha256:3ab8b9d8b75aef9df299595d5388b14530839f6422333357af1339443cff777d", size = 40930, upload-time = "2026-01-26T02:45:36.278Z" }, + { url = "https://files.pythonhosted.org/packages/e0/bf/52f25716bbe93745595800f36fb17b73711f14da59ed0bb2eba141bc9f0f/multidict-6.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:5e01429a929600e7dab7b166062d9bb54a5eed752384c7384c968c2afab8f50f", size = 45074, upload-time = "2026-01-26T02:45:37.546Z" }, + { url = "https://files.pythonhosted.org/packages/97/ab/22803b03285fa3a525f48217963da3a65ae40f6a1b6f6cf2768879e208f9/multidict-6.7.1-cp314-cp314-win_arm64.whl", hash = "sha256:4885cb0e817aef5d00a2e8451d4665c1808378dc27c2705f1bf4ef8505c0d2e5", size = 42471, upload-time = "2026-01-26T02:45:38.889Z" }, + { url = "https://files.pythonhosted.org/packages/e0/6d/f9293baa6146ba9507e360ea0292b6422b016907c393e2f63fc40ab7b7b5/multidict-6.7.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:0458c978acd8e6ea53c81eefaddbbee9c6c5e591f41b3f5e8e194780fe026581", size = 82401, upload-time = "2026-01-26T02:45:40.254Z" }, + { url = "https://files.pythonhosted.org/packages/7a/68/53b5494738d83558d87c3c71a486504d8373421c3e0dbb6d0db48ad42ee0/multidict-6.7.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:c0abd12629b0af3cf590982c0b413b1e7395cd4ec026f30986818ab95bfaa94a", size = 48143, upload-time = "2026-01-26T02:45:41.635Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/e8/5284c53310dcdc99ce5d66563f6e5773531a9b9fe9ec7a615e9bc306b05f/multidict-6.7.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:14525a5f61d7d0c94b368a42cff4c9a4e7ba2d52e2672a7b23d84dc86fb02b0c", size = 46507, upload-time = "2026-01-26T02:45:42.99Z" }, + { url = "https://files.pythonhosted.org/packages/e4/fc/6800d0e5b3875568b4083ecf5f310dcf91d86d52573160834fb4bfcf5e4f/multidict-6.7.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:17307b22c217b4cf05033dabefe68255a534d637c6c9b0cc8382718f87be4262", size = 239358, upload-time = "2026-01-26T02:45:44.376Z" }, + { url = "https://files.pythonhosted.org/packages/41/75/4ad0973179361cdf3a113905e6e088173198349131be2b390f9fa4da5fc6/multidict-6.7.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a7e590ff876a3eaf1c02a4dfe0724b6e69a9e9de6d8f556816f29c496046e59", size = 246884, upload-time = "2026-01-26T02:45:47.167Z" }, + { url = "https://files.pythonhosted.org/packages/c3/9c/095bb28b5da139bd41fb9a5d5caff412584f377914bd8787c2aa98717130/multidict-6.7.1-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5fa6a95dfee63893d80a34758cd0e0c118a30b8dcb46372bf75106c591b77889", size = 225878, upload-time = "2026-01-26T02:45:48.698Z" }, + { url = "https://files.pythonhosted.org/packages/07/d0/c0a72000243756e8f5a277b6b514fa005f2c73d481b7d9e47cd4568aa2e4/multidict-6.7.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a0543217a6a017692aa6ae5cc39adb75e587af0f3a82288b1492eb73dd6cc2a4", size = 253542, upload-time = "2026-01-26T02:45:50.164Z" }, + { url = "https://files.pythonhosted.org/packages/c0/6b/f69da15289e384ecf2a68837ec8b5ad8c33e973aa18b266f50fe55f24b8c/multidict-6.7.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:f99fe611c312b3c1c0ace793f92464d8cd263cc3b26b5721950d977b006b6c4d", size = 252403, upload-time = "2026-01-26T02:45:51.779Z" }, + { url = "https://files.pythonhosted.org/packages/a2/76/b9669547afa5a1a25cd93eaca91c0da1c095b06b6d2d8ec25b713588d3a1/multidict-6.7.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9004d8386d133b7e6135679424c91b0b854d2d164af6ea3f289f8f2761064609", size = 244889, upload-time = "2026-01-26T02:45:53.27Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a9/a50d2669e506dad33cfc45b5d574a205587b7b8a5f426f2fbb2e90882588/multidict-6.7.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e628ef0e6859ffd8273c69412a2465c4be4a9517d07261b33334b5ec6f3c7489", size = 241982, upload-time = "2026-01-26T02:45:54.919Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bb/1609558ad8b456b4827d3c5a5b775c93b87878fd3117ed3db3423dfbce1b/multidict-6.7.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:841189848ba629c3552035a6a7f5bf3b02eb304e9fea7492ca220a8eda6b0e5c", size = 232415, upload-time = "2026-01-26T02:45:56.981Z" }, + { url = "https://files.pythonhosted.org/packages/d8/59/6f61039d2aa9261871e03ab9dc058a550d240f25859b05b67fd70f80d4b3/multidict-6.7.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce1bbd7d780bb5a0da032e095c951f7014d6b0a205f8318308140f1a6aba159e", size = 240337, upload-time = "2026-01-26T02:45:58.698Z" }, + { url = "https://files.pythonhosted.org/packages/a1/29/fdc6a43c203890dc2ae9249971ecd0c41deaedfe00d25cb6564b2edd99eb/multidict-6.7.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b26684587228afed0d50cf804cc71062cc9c1cdf55051c4c6345d372947b268c", size = 248788, upload-time = "2026-01-26T02:46:00.862Z" }, + { url = "https://files.pythonhosted.org/packages/a9/14/a153a06101323e4cf086ecee3faadba52ff71633d471f9685c42e3736163/multidict-6.7.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = 
"sha256:9f9af11306994335398293f9958071019e3ab95e9a707dc1383a35613f6abcb9", size = 242842, upload-time = "2026-01-26T02:46:02.824Z" }, + { url = "https://files.pythonhosted.org/packages/41/5f/604ae839e64a4a6efc80db94465348d3b328ee955e37acb24badbcd24d83/multidict-6.7.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b4938326284c4f1224178a560987b6cf8b4d38458b113d9b8c1db1a836e640a2", size = 240237, upload-time = "2026-01-26T02:46:05.898Z" }, + { url = "https://files.pythonhosted.org/packages/5f/60/c3a5187bf66f6fb546ff4ab8fb5a077cbdd832d7b1908d4365c7f74a1917/multidict-6.7.1-cp314-cp314t-win32.whl", hash = "sha256:98655c737850c064a65e006a3df7c997cd3b220be4ec8fe26215760b9697d4d7", size = 48008, upload-time = "2026-01-26T02:46:07.468Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f7/addf1087b860ac60e6f382240f64fb99f8bfb532bb06f7c542b83c29ca61/multidict-6.7.1-cp314-cp314t-win_amd64.whl", hash = "sha256:497bde6223c212ba11d462853cfa4f0ae6ef97465033e7dc9940cdb3ab5b48e5", size = 53542, upload-time = "2026-01-26T02:46:08.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/81/4629d0aa32302ef7b2ec65c75a728cc5ff4fa410c50096174c1632e70b3e/multidict-6.7.1-cp314-cp314t-win_arm64.whl", hash = "sha256:2bbd113e0d4af5db41d5ebfe9ccaff89de2120578164f86a5d17d5a576d1e5b2", size = 44719, upload-time = "2026-01-26T02:46:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/81/08/7036c080d7117f28a4af526d794aab6a84463126db031b007717c1a6676e/multidict-6.7.1-py3-none-any.whl", hash = "sha256:55d97cc6dae627efa6a6e548885712d4864b81110ac76fa4e534c03819fa4a56", size = 12319, upload-time = "2026-01-26T02:46:44.004Z" }, +] + +[[package]] +name = "networkx" +version = "3.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/51/63fe664f3908c97be9d2e4f1158eb633317598cfa6e1fc14af5383f17512/networkx-3.6.1.tar.gz", hash = "sha256:26b7c357accc0c8cde558ad486283728b65b6a95d85ee1cd66bafab4c8168509", size = 
2517025, upload-time = "2025-12-08T17:02:39.908Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/c9/b2622292ea83fbb4ec318f5b9ab867d0a28ab43c5717bb85b0a5f6b3b0a4/networkx-3.6.1-py3-none-any.whl", hash = "sha256:d47fbf302e7d9cbbb9e2555a0d267983d2aa476bac30e90dfbe5669bd57f3762", size = 2068504, upload-time = "2025-12-08T17:02:38.159Z" }, +] + +[[package]] +name = "numba" +version = "0.63.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "llvmlite" }, + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dc/60/0145d479b2209bd8fdae5f44201eceb8ce5a23e0ed54c71f57db24618665/numba-0.63.1.tar.gz", hash = "sha256:b320aa675d0e3b17b40364935ea52a7b1c670c9037c39cf92c49502a75902f4b", size = 2761666, upload-time = "2025-12-10T02:57:39.002Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/9c/c0974cd3d00ff70d30e8ff90522ba5fbb2bcee168a867d2321d8d0457676/numba-0.63.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2819cd52afa5d8d04e057bdfd54367575105f8829350d8fb5e4066fb7591cc71", size = 2680981, upload-time = "2025-12-10T02:57:17.579Z" }, + { url = "https://files.pythonhosted.org/packages/cb/70/ea2bc45205f206b7a24ee68a159f5097c9ca7e6466806e7c213587e0c2b1/numba-0.63.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5cfd45dbd3d409e713b1ccfdc2ee72ca82006860254429f4ef01867fdba5845f", size = 3801656, upload-time = "2025-12-10T02:57:19.106Z" }, + { url = "https://files.pythonhosted.org/packages/0d/82/4f4ba4fd0f99825cbf3cdefd682ca3678be1702b63362011de6e5f71f831/numba-0.63.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69a599df6976c03b7ecf15d05302696f79f7e6d10d620367407517943355bcb0", size = 3501857, upload-time = "2025-12-10T02:57:20.721Z" }, + { url = "https://files.pythonhosted.org/packages/af/fd/6540456efa90b5f6604a86ff50dabefb187e43557e9081adcad3be44f048/numba-0.63.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:bbad8c63e4fc7eb3cdb2c2da52178e180419f7969f9a685f283b313a70b92af3", size = 2750282, upload-time = "2025-12-10T02:57:22.474Z" }, + { url = "https://files.pythonhosted.org/packages/57/f7/e19e6eff445bec52dde5bed1ebb162925a8e6f988164f1ae4b3475a73680/numba-0.63.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:0bd4fd820ef7442dcc07da184c3f54bb41d2bdb7b35bacf3448e73d081f730dc", size = 2680954, upload-time = "2025-12-10T02:57:24.145Z" }, + { url = "https://files.pythonhosted.org/packages/e9/6c/1e222edba1e20e6b113912caa9b1665b5809433cbcb042dfd133c6f1fd38/numba-0.63.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:53de693abe4be3bd4dee38e1c55f01c55ff644a6a3696a3670589e6e4c39cde2", size = 3809736, upload-time = "2025-12-10T02:57:25.836Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/590bad11a8b3feeac30a24d01198d46bdb76ad15c70d3a530691ce3cae58/numba-0.63.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:81227821a72a763c3d4ac290abbb4371d855b59fdf85d5af22a47c0e86bf8c7e", size = 3508854, upload-time = "2025-12-10T02:57:27.438Z" }, + { url = "https://files.pythonhosted.org/packages/4e/f5/3800384a24eed1e4d524669cdbc0b9b8a628800bb1e90d7bd676e5f22581/numba-0.63.1-cp313-cp313-win_amd64.whl", hash = "sha256:eb227b07c2ac37b09432a9bda5142047a2d1055646e089d4a240a2643e508102", size = 2750228, upload-time = "2025-12-10T02:57:30.36Z" }, + { url = "https://files.pythonhosted.org/packages/36/2f/53be2aa8a55ee2608ebe1231789cbb217f6ece7f5e1c685d2f0752e95a5b/numba-0.63.1-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:f180883e5508940cc83de8a8bea37fc6dd20fbe4e5558d4659b8b9bef5ff4731", size = 2681153, upload-time = "2025-12-10T02:57:32.016Z" }, + { url = "https://files.pythonhosted.org/packages/13/91/53e59c86759a0648282368d42ba732c29524a745fd555ed1fb1df83febbe/numba-0.63.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0938764afa82a47c0e895637a6c55547a42c9e1d35cac42285b1fa60a8b02bb", 
size = 3778718, upload-time = "2025-12-10T02:57:33.764Z" }, + { url = "https://files.pythonhosted.org/packages/6c/0c/2be19eba50b0b7636f6d1f69dfb2825530537708a234ba1ff34afc640138/numba-0.63.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f90a929fa5094e062d4e0368ede1f4497d5e40f800e80aa5222c4734236a2894", size = 3478712, upload-time = "2025-12-10T02:57:35.518Z" }, + { url = "https://files.pythonhosted.org/packages/0d/5f/4d0c9e756732577a52211f31da13a3d943d185f7fb90723f56d79c696caa/numba-0.63.1-cp314-cp314-win_amd64.whl", hash = "sha256:8d6d5ce85f572ed4e1a135dbb8c0114538f9dd0e3657eeb0bb64ab204cbe2a8f", size = 2752161, upload-time = "2025-12-10T02:57:37.12Z" }, +] + +[[package]] +name = "numpy" +version = "2.3.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/76/65/21b3bc86aac7b8f2862db1e808f1ea22b028e30a225a34a5ede9bf8678f2/numpy-2.3.5.tar.gz", hash = "sha256:784db1dcdab56bf0517743e746dfb0f885fc68d948aba86eeec2cba234bdf1c0", size = 20584950, upload-time = "2025-11-16T22:52:42.067Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/37/e669fe6cbb2b96c62f6bbedc6a81c0f3b7362f6a59230b23caa673a85721/numpy-2.3.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:74ae7b798248fe62021dbf3c914245ad45d1a6b0cb4a29ecb4b31d0bfbc4cc3e", size = 16733873, upload-time = "2025-11-16T22:49:49.84Z" }, + { url = "https://files.pythonhosted.org/packages/c5/65/df0db6c097892c9380851ab9e44b52d4f7ba576b833996e0080181c0c439/numpy-2.3.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee3888d9ff7c14604052b2ca5535a30216aa0a58e948cdd3eeb8d3415f638769", size = 12259838, upload-time = "2025-11-16T22:49:52.863Z" }, + { url = "https://files.pythonhosted.org/packages/5b/e1/1ee06e70eb2136797abe847d386e7c0e830b67ad1d43f364dd04fa50d338/numpy-2.3.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:612a95a17655e213502f60cfb9bf9408efdc9eb1d5f50535cc6eb365d11b42b5", size = 5088378, upload-time = 
"2025-11-16T22:49:55.055Z" }, + { url = "https://files.pythonhosted.org/packages/6d/9c/1ca85fb86708724275103b81ec4cf1ac1d08f465368acfc8da7ab545bdae/numpy-2.3.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3101e5177d114a593d79dd79658650fe28b5a0d8abeb8ce6f437c0e6df5be1a4", size = 6628559, upload-time = "2025-11-16T22:49:57.371Z" }, + { url = "https://files.pythonhosted.org/packages/74/78/fcd41e5a0ce4f3f7b003da85825acddae6d7ecb60cf25194741b036ca7d6/numpy-2.3.5-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b973c57ff8e184109db042c842423ff4f60446239bd585a5131cc47f06f789d", size = 14250702, upload-time = "2025-11-16T22:49:59.632Z" }, + { url = "https://files.pythonhosted.org/packages/b6/23/2a1b231b8ff672b4c450dac27164a8b2ca7d9b7144f9c02d2396518352eb/numpy-2.3.5-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d8163f43acde9a73c2a33605353a4f1bc4798745a8b1d73183b28e5b435ae28", size = 16606086, upload-time = "2025-11-16T22:50:02.127Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c5/5ad26fbfbe2012e190cc7d5003e4d874b88bb18861d0829edc140a713021/numpy-2.3.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:51c1e14eb1e154ebd80e860722f9e6ed6ec89714ad2db2d3aa33c31d7c12179b", size = 16025985, upload-time = "2025-11-16T22:50:04.536Z" }, + { url = "https://files.pythonhosted.org/packages/d2/fa/dd48e225c46c819288148d9d060b047fd2a6fb1eb37eae25112ee4cb4453/numpy-2.3.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b46b4ec24f7293f23adcd2d146960559aaf8020213de8ad1909dba6c013bf89c", size = 18542976, upload-time = "2025-11-16T22:50:07.557Z" }, + { url = "https://files.pythonhosted.org/packages/05/79/ccbd23a75862d95af03d28b5c6901a1b7da4803181513d52f3b86ed9446e/numpy-2.3.5-cp312-cp312-win32.whl", hash = "sha256:3997b5b3c9a771e157f9aae01dd579ee35ad7109be18db0e85dbdbe1de06e952", size = 6285274, upload-time = "2025-11-16T22:50:10.746Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/57/8aeaf160312f7f489dea47ab61e430b5cb051f59a98ae68b7133ce8fa06a/numpy-2.3.5-cp312-cp312-win_amd64.whl", hash = "sha256:86945f2ee6d10cdfd67bcb4069c1662dd711f7e2a4343db5cecec06b87cf31aa", size = 12782922, upload-time = "2025-11-16T22:50:12.811Z" }, + { url = "https://files.pythonhosted.org/packages/78/a6/aae5cc2ca78c45e64b9ef22f089141d661516856cf7c8a54ba434576900d/numpy-2.3.5-cp312-cp312-win_arm64.whl", hash = "sha256:f28620fe26bee16243be2b7b874da327312240a7cdc38b769a697578d2100013", size = 10194667, upload-time = "2025-11-16T22:50:16.16Z" }, + { url = "https://files.pythonhosted.org/packages/db/69/9cde09f36da4b5a505341180a3f2e6fadc352fd4d2b7096ce9778db83f1a/numpy-2.3.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d0f23b44f57077c1ede8c5f26b30f706498b4862d3ff0a7298b8411dd2f043ff", size = 16728251, upload-time = "2025-11-16T22:50:19.013Z" }, + { url = "https://files.pythonhosted.org/packages/79/fb/f505c95ceddd7027347b067689db71ca80bd5ecc926f913f1a23e65cf09b/numpy-2.3.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa5bc7c5d59d831d9773d1170acac7893ce3a5e130540605770ade83280e7188", size = 12254652, upload-time = "2025-11-16T22:50:21.487Z" }, + { url = "https://files.pythonhosted.org/packages/78/da/8c7738060ca9c31b30e9301ee0cf6c5ffdbf889d9593285a1cead337f9a5/numpy-2.3.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:ccc933afd4d20aad3c00bcef049cb40049f7f196e0397f1109dba6fed63267b0", size = 5083172, upload-time = "2025-11-16T22:50:24.562Z" }, + { url = "https://files.pythonhosted.org/packages/a4/b4/ee5bb2537fb9430fd2ef30a616c3672b991a4129bb1c7dcc42aa0abbe5d7/numpy-2.3.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:afaffc4393205524af9dfa400fa250143a6c3bc646c08c9f5e25a9f4b4d6a903", size = 6622990, upload-time = "2025-11-16T22:50:26.47Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/03/dc0723a013c7d7c19de5ef29e932c3081df1c14ba582b8b86b5de9db7f0f/numpy-2.3.5-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c75442b2209b8470d6d5d8b1c25714270686f14c749028d2199c54e29f20b4d", size = 14248902, upload-time = "2025-11-16T22:50:28.861Z" }, + { url = "https://files.pythonhosted.org/packages/f5/10/ca162f45a102738958dcec8023062dad0cbc17d1ab99d68c4e4a6c45fb2b/numpy-2.3.5-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e06aa0af8c0f05104d56450d6093ee639e15f24ecf62d417329d06e522e017", size = 16597430, upload-time = "2025-11-16T22:50:31.56Z" }, + { url = "https://files.pythonhosted.org/packages/2a/51/c1e29be863588db58175175f057286900b4b3327a1351e706d5e0f8dd679/numpy-2.3.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed89927b86296067b4f81f108a2271d8926467a8868e554eaf370fc27fa3ccaf", size = 16024551, upload-time = "2025-11-16T22:50:34.242Z" }, + { url = "https://files.pythonhosted.org/packages/83/68/8236589d4dbb87253d28259d04d9b814ec0ecce7cb1c7fed29729f4c3a78/numpy-2.3.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:51c55fe3451421f3a6ef9a9c1439e82101c57a2c9eab9feb196a62b1a10b58ce", size = 18533275, upload-time = "2025-11-16T22:50:37.651Z" }, + { url = "https://files.pythonhosted.org/packages/40/56/2932d75b6f13465239e3b7b7e511be27f1b8161ca2510854f0b6e521c395/numpy-2.3.5-cp313-cp313-win32.whl", hash = "sha256:1978155dd49972084bd6ef388d66ab70f0c323ddee6f693d539376498720fb7e", size = 6277637, upload-time = "2025-11-16T22:50:40.11Z" }, + { url = "https://files.pythonhosted.org/packages/0c/88/e2eaa6cffb115b85ed7c7c87775cb8bcf0816816bc98ca8dbfa2ee33fe6e/numpy-2.3.5-cp313-cp313-win_amd64.whl", hash = "sha256:00dc4e846108a382c5869e77c6ed514394bdeb3403461d25a829711041217d5b", size = 12779090, upload-time = "2025-11-16T22:50:42.503Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/88/3f41e13a44ebd4034ee17baa384acac29ba6a4fcc2aca95f6f08ca0447d1/numpy-2.3.5-cp313-cp313-win_arm64.whl", hash = "sha256:0472f11f6ec23a74a906a00b48a4dcf3849209696dff7c189714511268d103ae", size = 10194710, upload-time = "2025-11-16T22:50:44.971Z" }, + { url = "https://files.pythonhosted.org/packages/13/cb/71744144e13389d577f867f745b7df2d8489463654a918eea2eeb166dfc9/numpy-2.3.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:414802f3b97f3c1eef41e530aaba3b3c1620649871d8cb38c6eaff034c2e16bd", size = 16827292, upload-time = "2025-11-16T22:50:47.715Z" }, + { url = "https://files.pythonhosted.org/packages/71/80/ba9dc6f2a4398e7f42b708a7fdc841bb638d353be255655498edbf9a15a8/numpy-2.3.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5ee6609ac3604fa7780e30a03e5e241a7956f8e2fcfe547d51e3afa5247ac47f", size = 12378897, upload-time = "2025-11-16T22:50:51.327Z" }, + { url = "https://files.pythonhosted.org/packages/2e/6d/db2151b9f64264bcceccd51741aa39b50150de9b602d98ecfe7e0c4bff39/numpy-2.3.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:86d835afea1eaa143012a2d7a3f45a3adce2d7adc8b4961f0b362214d800846a", size = 5207391, upload-time = "2025-11-16T22:50:54.542Z" }, + { url = "https://files.pythonhosted.org/packages/80/ae/429bacace5ccad48a14c4ae5332f6aa8ab9f69524193511d60ccdfdc65fa/numpy-2.3.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:30bc11310e8153ca664b14c5f1b73e94bd0503681fcf136a163de856f3a50139", size = 6721275, upload-time = "2025-11-16T22:50:56.794Z" }, + { url = "https://files.pythonhosted.org/packages/74/5b/1919abf32d8722646a38cd527bc3771eb229a32724ee6ba340ead9b92249/numpy-2.3.5-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1062fde1dcf469571705945b0f221b73928f34a20c904ffb45db101907c3454e", size = 14306855, upload-time = "2025-11-16T22:50:59.208Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/87/6831980559434973bebc30cd9c1f21e541a0f2b0c280d43d3afd909b66d0/numpy-2.3.5-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce581db493ea1a96c0556360ede6607496e8bf9b3a8efa66e06477267bc831e9", size = 16657359, upload-time = "2025-11-16T22:51:01.991Z" }, + { url = "https://files.pythonhosted.org/packages/dd/91/c797f544491ee99fd00495f12ebb7802c440c1915811d72ac5b4479a3356/numpy-2.3.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:cc8920d2ec5fa99875b670bb86ddeb21e295cb07aa331810d9e486e0b969d946", size = 16093374, upload-time = "2025-11-16T22:51:05.291Z" }, + { url = "https://files.pythonhosted.org/packages/74/a6/54da03253afcbe7a72785ec4da9c69fb7a17710141ff9ac5fcb2e32dbe64/numpy-2.3.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9ee2197ef8c4f0dfe405d835f3b6a14f5fee7782b5de51ba06fb65fc9b36e9f1", size = 18594587, upload-time = "2025-11-16T22:51:08.585Z" }, + { url = "https://files.pythonhosted.org/packages/80/e9/aff53abbdd41b0ecca94285f325aff42357c6b5abc482a3fcb4994290b18/numpy-2.3.5-cp313-cp313t-win32.whl", hash = "sha256:70b37199913c1bd300ff6e2693316c6f869c7ee16378faf10e4f5e3275b299c3", size = 6405940, upload-time = "2025-11-16T22:51:11.541Z" }, + { url = "https://files.pythonhosted.org/packages/d5/81/50613fec9d4de5480de18d4f8ef59ad7e344d497edbef3cfd80f24f98461/numpy-2.3.5-cp313-cp313t-win_amd64.whl", hash = "sha256:b501b5fa195cc9e24fe102f21ec0a44dffc231d2af79950b451e0d99cea02234", size = 12920341, upload-time = "2025-11-16T22:51:14.312Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ab/08fd63b9a74303947f34f0bd7c5903b9c5532c2d287bead5bdf4c556c486/numpy-2.3.5-cp313-cp313t-win_arm64.whl", hash = "sha256:a80afd79f45f3c4a7d341f13acbe058d1ca8ac017c165d3fa0d3de6bc1a079d7", size = 10262507, upload-time = "2025-11-16T22:51:16.846Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/97/1a914559c19e32d6b2e233cf9a6a114e67c856d35b1d6babca571a3e880f/numpy-2.3.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:bf06bc2af43fa8d32d30fae16ad965663e966b1a3202ed407b84c989c3221e82", size = 16735706, upload-time = "2025-11-16T22:51:19.558Z" }, + { url = "https://files.pythonhosted.org/packages/57/d4/51233b1c1b13ecd796311216ae417796b88b0616cfd8a33ae4536330748a/numpy-2.3.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:052e8c42e0c49d2575621c158934920524f6c5da05a1d3b9bab5d8e259e045f0", size = 12264507, upload-time = "2025-11-16T22:51:22.492Z" }, + { url = "https://files.pythonhosted.org/packages/45/98/2fe46c5c2675b8306d0b4a3ec3494273e93e1226a490f766e84298576956/numpy-2.3.5-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:1ed1ec893cff7040a02c8aa1c8611b94d395590d553f6b53629a4461dc7f7b63", size = 5093049, upload-time = "2025-11-16T22:51:25.171Z" }, + { url = "https://files.pythonhosted.org/packages/ce/0e/0698378989bb0ac5f1660c81c78ab1fe5476c1a521ca9ee9d0710ce54099/numpy-2.3.5-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:2dcd0808a421a482a080f89859a18beb0b3d1e905b81e617a188bd80422d62e9", size = 6626603, upload-time = "2025-11-16T22:51:27Z" }, + { url = "https://files.pythonhosted.org/packages/5e/a6/9ca0eecc489640615642a6cbc0ca9e10df70df38c4d43f5a928ff18d8827/numpy-2.3.5-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:727fd05b57df37dc0bcf1a27767a3d9a78cbbc92822445f32cc3436ba797337b", size = 14262696, upload-time = "2025-11-16T22:51:29.402Z" }, + { url = "https://files.pythonhosted.org/packages/c8/f6/07ec185b90ec9d7217a00eeeed7383b73d7e709dae2a9a021b051542a708/numpy-2.3.5-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fffe29a1ef00883599d1dc2c51aa2e5d80afe49523c261a74933df395c15c520", size = 16597350, upload-time = "2025-11-16T22:51:32.167Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/37/164071d1dde6a1a84c9b8e5b414fa127981bad47adf3a6b7e23917e52190/numpy-2.3.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8f7f0e05112916223d3f438f293abf0727e1181b5983f413dfa2fefc4098245c", size = 16040190, upload-time = "2025-11-16T22:51:35.403Z" }, + { url = "https://files.pythonhosted.org/packages/08/3c/f18b82a406b04859eb026d204e4e1773eb41c5be58410f41ffa511d114ae/numpy-2.3.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2e2eb32ddb9ccb817d620ac1d8dae7c3f641c1e5f55f531a33e8ab97960a75b8", size = 18536749, upload-time = "2025-11-16T22:51:39.698Z" }, + { url = "https://files.pythonhosted.org/packages/40/79/f82f572bf44cf0023a2fe8588768e23e1592585020d638999f15158609e1/numpy-2.3.5-cp314-cp314-win32.whl", hash = "sha256:66f85ce62c70b843bab1fb14a05d5737741e74e28c7b8b5a064de10142fad248", size = 6335432, upload-time = "2025-11-16T22:51:42.476Z" }, + { url = "https://files.pythonhosted.org/packages/a3/2e/235b4d96619931192c91660805e5e49242389742a7a82c27665021db690c/numpy-2.3.5-cp314-cp314-win_amd64.whl", hash = "sha256:e6a0bc88393d65807d751a614207b7129a310ca4fe76a74e5c7da5fa5671417e", size = 12919388, upload-time = "2025-11-16T22:51:45.275Z" }, + { url = "https://files.pythonhosted.org/packages/07/2b/29fd75ce45d22a39c61aad74f3d718e7ab67ccf839ca8b60866054eb15f8/numpy-2.3.5-cp314-cp314-win_arm64.whl", hash = "sha256:aeffcab3d4b43712bb7a60b65f6044d444e75e563ff6180af8f98dd4b905dfd2", size = 10476651, upload-time = "2025-11-16T22:51:47.749Z" }, + { url = "https://files.pythonhosted.org/packages/17/e1/f6a721234ebd4d87084cfa68d081bcba2f5cfe1974f7de4e0e8b9b2a2ba1/numpy-2.3.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:17531366a2e3a9e30762c000f2c43a9aaa05728712e25c11ce1dbe700c53ad41", size = 16834503, upload-time = "2025-11-16T22:51:50.443Z" }, + { url = "https://files.pythonhosted.org/packages/5c/1c/baf7ffdc3af9c356e1c135e57ab7cf8d247931b9554f55c467efe2c69eff/numpy-2.3.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = 
"sha256:d21644de1b609825ede2f48be98dfde4656aefc713654eeee280e37cadc4e0ad", size = 12381612, upload-time = "2025-11-16T22:51:53.609Z" }, + { url = "https://files.pythonhosted.org/packages/74/91/f7f0295151407ddc9ba34e699013c32c3c91944f9b35fcf9281163dc1468/numpy-2.3.5-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:c804e3a5aba5460c73955c955bdbd5c08c354954e9270a2c1565f62e866bdc39", size = 5210042, upload-time = "2025-11-16T22:51:56.213Z" }, + { url = "https://files.pythonhosted.org/packages/2e/3b/78aebf345104ec50dd50a4d06ddeb46a9ff5261c33bcc58b1c4f12f85ec2/numpy-2.3.5-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:cc0a57f895b96ec78969c34f682c602bf8da1a0270b09bc65673df2e7638ec20", size = 6724502, upload-time = "2025-11-16T22:51:58.584Z" }, + { url = "https://files.pythonhosted.org/packages/02/c6/7c34b528740512e57ef1b7c8337ab0b4f0bddf34c723b8996c675bc2bc91/numpy-2.3.5-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:900218e456384ea676e24ea6a0417f030a3b07306d29d7ad843957b40a9d8d52", size = 14308962, upload-time = "2025-11-16T22:52:01.698Z" }, + { url = "https://files.pythonhosted.org/packages/80/35/09d433c5262bc32d725bafc619e095b6a6651caf94027a03da624146f655/numpy-2.3.5-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:09a1bea522b25109bf8e6f3027bd810f7c1085c64a0c7ce050c1676ad0ba010b", size = 16655054, upload-time = "2025-11-16T22:52:04.267Z" }, + { url = "https://files.pythonhosted.org/packages/7a/ab/6a7b259703c09a88804fa2430b43d6457b692378f6b74b356155283566ac/numpy-2.3.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:04822c00b5fd0323c8166d66c701dc31b7fbd252c100acd708c48f763968d6a3", size = 16091613, upload-time = "2025-11-16T22:52:08.651Z" }, + { url = "https://files.pythonhosted.org/packages/c2/88/330da2071e8771e60d1038166ff9d73f29da37b01ec3eb43cb1427464e10/numpy-2.3.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d6889ec4ec662a1a37eb4b4fb26b6100841804dac55bd9df579e326cdc146227", size 
= 18591147, upload-time = "2025-11-16T22:52:11.453Z" }, + { url = "https://files.pythonhosted.org/packages/51/41/851c4b4082402d9ea860c3626db5d5df47164a712cb23b54be028b184c1c/numpy-2.3.5-cp314-cp314t-win32.whl", hash = "sha256:93eebbcf1aafdf7e2ddd44c2923e2672e1010bddc014138b229e49725b4d6be5", size = 6479806, upload-time = "2025-11-16T22:52:14.641Z" }, + { url = "https://files.pythonhosted.org/packages/90/30/d48bde1dfd93332fa557cff1972fbc039e055a52021fbef4c2c4b1eefd17/numpy-2.3.5-cp314-cp314t-win_amd64.whl", hash = "sha256:c8a9958e88b65c3b27e22ca2a076311636850b612d6bbfb76e8d156aacde2aaf", size = 13105760, upload-time = "2025-11-16T22:52:17.975Z" }, + { url = "https://files.pythonhosted.org/packages/2d/fd/4b5eb0b3e888d86aee4d198c23acec7d214baaf17ea93c1adec94c9518b9/numpy-2.3.5-cp314-cp314t-win_arm64.whl", hash = "sha256:6203fdf9f3dc5bdaed7319ad8698e685c7a3be10819f41d32a0723e611733b42", size = 10545459, upload-time = "2025-11-16T22:52:20.55Z" }, +] + +[[package]] +name = "nvidia-cublas" +version = "13.1.0.3" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/a5/fce49e2ae977e0ccc084e5adafceb4f0ac0c8333cb6863501618a7277f67/nvidia_cublas-13.1.0.3-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:c86fc7f7ae36d7528288c5d88098edcb7b02c633d262e7ddbb86b0ad91be5df2", size = 542851226, upload-time = "2025-10-09T08:59:04.818Z" }, + { url = "https://files.pythonhosted.org/packages/e7/44/423ac00af4dd95a5aeb27207e2c0d9b7118702149bf4704c3ddb55bb7429/nvidia_cublas-13.1.0.3-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:ee8722c1f0145ab246bccb9e452153b5e0515fd094c3678df50b2a0888b8b171", size = 423133236, upload-time = "2025-10-09T08:59:32.536Z" }, +] + +[[package]] +name = "nvidia-cuda-cupti" +version = "13.0.85" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/2a/2a/80353b103fc20ce05ef51e928daed4b6015db4aaa9162ed0997090fe2250/nvidia_cuda_cupti-13.0.85-py3-none-manylinux_2_25_aarch64.whl", hash = "sha256:796bd679890ee55fb14a94629b698b6db54bcfd833d391d5e94017dd9d7d3151", size = 10310827, upload-time = "2025-09-04T08:26:42.012Z" }, + { url = "https://files.pythonhosted.org/packages/33/6d/737d164b4837a9bbd202f5ae3078975f0525a55730fe871d8ed4e3b952b0/nvidia_cuda_cupti-13.0.85-py3-none-manylinux_2_25_x86_64.whl", hash = "sha256:4eb01c08e859bf924d222250d2e8f8b8ff6d3db4721288cf35d14252a4d933c8", size = 10715597, upload-time = "2025-09-04T08:26:51.312Z" }, +] + +[[package]] +name = "nvidia-cuda-nvrtc" +version = "13.0.88" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/68/483a78f5e8f31b08fb1bb671559968c0ca3a065ac7acabfc7cee55214fd6/nvidia_cuda_nvrtc-13.0.88-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:ad9b6d2ead2435f11cbb6868809d2adeeee302e9bb94bcf0539c7a40d80e8575", size = 90215200, upload-time = "2025-09-04T08:28:44.204Z" }, + { url = "https://files.pythonhosted.org/packages/b7/dc/6bb80850e0b7edd6588d560758f17e0550893a1feaf436807d64d2da040f/nvidia_cuda_nvrtc-13.0.88-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d27f20a0ca67a4bb34268a5e951033496c5b74870b868bacd046b1b8e0c3267b", size = 43015449, upload-time = "2025-09-04T08:28:20.239Z" }, +] + +[[package]] +name = "nvidia-cuda-runtime" +version = "13.0.96" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/4f/17d7b9b8e285199c58ce28e31b5c5bbaa4d8271af06a89b6405258245de2/nvidia_cuda_runtime-13.0.96-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ef9bcbe90493a2b9d810e43d249adb3d02e98dd30200d86607d8d02687c43f55", size = 2261060, upload-time = "2025-10-09T08:55:15.78Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/24/d1558f3b68b1d26e706813b1d10aa1d785e4698c425af8db8edc3dced472/nvidia_cuda_runtime-13.0.96-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7f82250d7782aa23b6cfe765ecc7db554bd3c2870c43f3d1821f1d18aebf0548", size = 2243632, upload-time = "2025-10-09T08:55:36.117Z" }, +] + +[[package]] +name = "nvidia-cudnn-cu13" +version = "9.19.0.56" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-cublas" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/f1/84/26025437c1e6b61a707442184fa0c03d083b661adf3a3eecfd6d21677740/nvidia_cudnn_cu13-9.19.0.56-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:6ed29ffaee1176c612daf442e4dd6cfeb6a0caa43ddcbeb59da94953030b1be4", size = 433781201, upload-time = "2026-02-03T20:40:53.805Z" }, + { url = "https://files.pythonhosted.org/packages/a3/22/0b4b932655d17a6da1b92fa92ab12844b053bb2ac2475e179ba6f043da1e/nvidia_cudnn_cu13-9.19.0.56-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:d20e1734305e9d68889a96e3f35094d733ff1f83932ebe462753973e53a572bf", size = 366066321, upload-time = "2026-02-03T20:44:52.837Z" }, +] + +[[package]] +name = "nvidia-cufft" +version = "12.0.0.61" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-nvjitlink" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/ae/f417a75c0259e85c1d2f83ca4e960289a5f814ed0cea74d18c353d3e989d/nvidia_cufft-12.0.0.61-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2708c852ef8cd89d1d2068bdbece0aa188813a0c934db3779b9b1faa8442e5f5", size = 214053554, upload-time = "2025-09-04T08:31:38.196Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2f/7b57e29836ea8714f81e9898409196f47d772d5ddedddf1592eadb8ab743/nvidia_cufft-12.0.0.61-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6c44f692dce8fd5ffd3e3df134b6cdb9c2f72d99cf40b62c32dde45eea9ddad3", size = 214085489, 
upload-time = "2025-09-04T08:31:56.044Z" }, +] + +[[package]] +name = "nvidia-cufile" +version = "1.15.1.6" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/70/4f193de89a48b71714e74602ee14d04e4019ad36a5a9f20c425776e72cd6/nvidia_cufile-1.15.1.6-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:08a3ecefae5a01c7f5117351c64f17c7c62efa5fffdbe24fc7d298da19cd0b44", size = 1223672, upload-time = "2025-09-04T08:32:22.779Z" }, + { url = "https://files.pythonhosted.org/packages/ab/73/cc4a14c9813a8a0d509417cf5f4bdaba76e924d58beb9864f5a7baceefbf/nvidia_cufile-1.15.1.6-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:bdc0deedc61f548bddf7733bdc216456c2fdb101d020e1ab4b88d232d5e2f6d1", size = 1136992, upload-time = "2025-09-04T08:32:14.119Z" }, +] + +[[package]] +name = "nvidia-curand" +version = "10.4.0.35" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/72/7c2ae24fb6b63a32e6ae5d241cc65263ea18d08802aaae087d9f013335a2/nvidia_curand-10.4.0.35-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:133df5a7509c3e292aaa2b477afd0194f06ce4ea24d714d616ff36439cee349a", size = 61962106, upload-time = "2025-08-04T10:21:41.128Z" }, + { url = "https://files.pythonhosted.org/packages/a5/9f/be0a41ca4a4917abf5cb9ae0daff1a6060cc5de950aec0396de9f3b52bc5/nvidia_curand-10.4.0.35-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:1aee33a5da6e1db083fe2b90082def8915f30f3248d5896bcec36a579d941bfc", size = 59544258, upload-time = "2025-08-04T10:22:03.992Z" }, +] + +[[package]] +name = "nvidia-cusolver" +version = "12.0.4.66" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-cublas" }, + { name = "nvidia-cusparse" }, + { name = "nvidia-nvjitlink" }, +] +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c8/c3/b30c9e935fc01e3da443ec0116ed1b2a009bb867f5324d3f2d7e533e776b/nvidia_cusolver-12.0.4.66-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:02c2457eaa9e39de20f880f4bd8820e6a1cfb9f9a34f820eb12a155aa5bc92d2", size = 223467760, upload-time = "2025-09-04T08:33:04.222Z" }, + { url = "https://files.pythonhosted.org/packages/5f/67/cba3777620cdacb99102da4042883709c41c709f4b6323c10781a9c3aa34/nvidia_cusolver-12.0.4.66-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:0a759da5dea5c0ea10fd307de75cdeb59e7ea4fcb8add0924859b944babf1112", size = 200941980, upload-time = "2025-09-04T08:33:22.767Z" }, +] + +[[package]] +name = "nvidia-cusparse" +version = "12.6.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-nvjitlink" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/94/5c26f33738ae35276672f12615a64bd008ed5be6d1ebcb23579285d960a9/nvidia_cusparse-12.6.3.3-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:80bcc4662f23f1054ee334a15c72b8940402975e0eab63178fc7e670aa59472c", size = 162155568, upload-time = "2025-09-04T08:33:42.864Z" }, + { url = "https://files.pythonhosted.org/packages/fa/18/623c77619c31d62efd55302939756966f3ecc8d724a14dab2b75f1508850/nvidia_cusparse-12.6.3.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2b3c89c88d01ee0e477cb7f82ef60a11a4bcd57b6b87c33f789350b59759360b", size = 145942937, upload-time = "2025-09-04T08:33:58.029Z" }, +] + +[[package]] +name = "nvidia-cusparselt-cu13" +version = "0.8.0" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/10/8dcd1175260706a2fc92a16a52e306b71d4c1ea0b0cc4a9484183399818a/nvidia_cusparselt_cu13-0.8.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:400c6ed1cf6780fc6efedd64ec9f1345871767e6a1a0a552a1ea0578117ea77c", size = 220791277, upload-time = "2025-08-13T19:22:40.982Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/53/43b0d71f4e702fa9733f8b4571fdca50a8813f1e450b656c239beff12315/nvidia_cusparselt_cu13-0.8.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:25e30a8a7323935d4ad0340b95a0b69926eee755767e8e0b1cf8dd85b197d3fd", size = 169884119, upload-time = "2025-08-13T19:23:41.967Z" }, +] + +[[package]] +name = "nvidia-nccl-cu13" +version = "2.28.9" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/55/1920646a2e43ffd4fc958536b276197ed740e9e0c54105b4bb3521591fc7/nvidia_nccl_cu13-2.28.9-py3-none-manylinux_2_18_aarch64.whl", hash = "sha256:01c873ba1626b54caa12272ed228dc5b2781545e0ae8ba3f432a8ef1c6d78643", size = 196561677, upload-time = "2025-11-18T05:49:03.45Z" }, + { url = "https://files.pythonhosted.org/packages/b0/b4/878fefaad5b2bcc6fcf8d474a25e3e3774bc5133e4b58adff4d0bca238bc/nvidia_nccl_cu13-2.28.9-py3-none-manylinux_2_18_x86_64.whl", hash = "sha256:e4553a30f34195f3fa1da02a6da3d6337d28f2003943aa0a3d247bbc25fefc42", size = 196493177, upload-time = "2025-11-18T05:49:17.677Z" }, +] + +[[package]] +name = "nvidia-nvjitlink" +version = "13.0.88" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/7a/123e033aaff487c77107195fa5a2b8686795ca537935a24efae476c41f05/nvidia_nvjitlink-13.0.88-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:13a74f429e23b921c1109976abefacc69835f2f433ebd323d3946e11d804e47b", size = 40713933, upload-time = "2025-09-04T08:35:43.553Z" }, + { url = "https://files.pythonhosted.org/packages/ab/2c/93c5250e64df4f894f1cbb397c6fd71f79813f9fd79d7cd61de3f97b3c2d/nvidia_nvjitlink-13.0.88-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e931536ccc7d467a98ba1d8b89ff7fa7f1fa3b13f2b0069118cd7f47bff07d0c", size = 38768748, upload-time = "2025-09-04T08:35:20.008Z" }, +] + +[[package]] +name = "nvidia-nvshmem-cu13" +version = "3.4.5" +source = { registry = 
"https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/0f/05cc9c720236dcd2db9c1ab97fff629e96821be2e63103569da0c9b72f19/nvidia_nvshmem_cu13-3.4.5-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6dc2a197f38e5d0376ad52cd1a2a3617d3cdc150fd5966f4aee9bcebb1d68fe9", size = 60215947, upload-time = "2025-09-06T00:32:20.022Z" }, + { url = "https://files.pythonhosted.org/packages/3c/35/a9bf80a609e74e3b000fef598933235c908fcefcef9026042b8e6dfde2a9/nvidia_nvshmem_cu13-3.4.5-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:290f0a2ee94c9f3687a02502f3b9299a9f9fe826e6d0287ee18482e78d495b80", size = 60412546, upload-time = "2025-09-06T00:32:41.564Z" }, +] + +[[package]] +name = "nvidia-nvtx" +version = "13.0.85" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/f3/d86c845465a2723ad7e1e5c36dcd75ddb82898b3f53be47ebd429fb2fa5d/nvidia_nvtx-13.0.85-py3-none-manylinux1_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4936d1d6780fbe68db454f5e72a42ff64d1fd6397df9f363ae786930fd5c1cd4", size = 148047, upload-time = "2025-09-04T08:29:01.761Z" }, + { url = "https://files.pythonhosted.org/packages/a8/64/3708a90d1ebe202ffdeb7185f878a3c84d15c2b2c31858da2ce0583e2def/nvidia_nvtx-13.0.85-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cb7780edb6b14107373c835bf8b72e7a178bac7367e23da7acb108f973f157a6", size = 148878, upload-time = "2025-09-04T08:28:53.627Z" }, +] + +[[package]] +name = "nvidia-riva-client" +version = "2.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "grpcio" }, + { name = "grpcio-tools" }, + { name = "protobuf" }, + { name = "websockets" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/f9/85f0bf863deed9078f3a25938a9f06206f98bcc39a6541a48cc97143db10/nvidia_riva_client-2.25.1-1-py3-none-any.whl", hash = 
"sha256:bde1232a8de3fe1561cccf49d3d0e6fe06190b1f0df4ad0ba118b9f5ae5a06aa", size = 55383, upload-time = "2026-04-30T10:27:58.381Z" }, + { url = "https://files.pythonhosted.org/packages/08/3b/b267af66a49c2e80e673b85ccd5484059b141be8031e4a4bb84ea4bcf31f/nvidia_riva_client-2.25.1-py3-none-any.whl", hash = "sha256:07c48c9cc7f3ca04cd988ad6d2205b0bcf3f6f25bb97d76b397e87cc696acc9f", size = 55371, upload-time = "2026-03-25T13:05:57.425Z" }, +] + +[[package]] +name = "openai" +version = "2.32.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/59/bdcc6b759b8c42dd73afaf5bf8f902c04b37987a5514dbc1c64dba390fef/openai-2.32.0.tar.gz", hash = "sha256:c54b27a9e4cb8d51f0dd94972ffd1a04437efeb259a9e60d8922b8bd26fe55e0", size = 693286, upload-time = "2026-04-15T22:28:19.434Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/c1/d6e64ccd0536bf616556f0cad2b6d94a8125f508d25cfd814b1d2db4e2f1/openai-2.32.0-py3-none-any.whl", hash = "sha256:4dcc9badeb4bf54ad0d187453742f290226d30150890b7890711bda4f32f192f", size = 1162570, upload-time = "2026-04-15T22:28:17.714Z" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 
74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.9.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/04/fea538adf7dbbd6d186f551d595961e564a3b6715bdf276b477460858672/platformdirs-4.9.2.tar.gz", hash = "sha256:9a33809944b9db043ad67ca0db94b14bf452cc6aeaac46a88ea55b26e2e9d291", size = 28394, upload-time = "2026-02-16T03:56:10.574Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/31/05e764397056194206169869b50cf2fee4dbbbc71b344705b9c0d878d4d8/platformdirs-4.9.2-py3-none-any.whl", hash = "sha256:9170634f126f8efdae22fb58ae8a0eaa86f38365bc57897a6c4f781d1f5875bd", size = 21168, upload-time = "2026-02-16T03:56:08.891Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pooch" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, + { name = "platformdirs" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/83/43/85ef45e8b36c6a48546af7b266592dc32d7f67837a6514d111bced6d7d75/pooch-1.9.0.tar.gz", hash = "sha256:de46729579b9857ffd3e741987a2f6d5e0e03219892c167c6578c0091fb511ed", size = 61788, upload-time = "2026-01-30T19:15:09.649Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/2a/2d/d4bf65e47cea8ff2c794a600c4fd1273a7902f268757c531e0ee9f18aa58/pooch-1.9.0-py3-none-any.whl", hash = "sha256:f265597baa9f760d25ceb29d0beb8186c243d6607b0f60b83ecf14078dbc703b", size = 67175, upload-time = "2026-01-30T19:15:08.36Z" }, +] + +[[package]] +name = "propcache" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = 
"2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" }, + { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, + { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" }, + { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = "2025-10-08T19:47:14.913Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 
204442, upload-time = "2025-10-08T19:47:16.277Z" }, + { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 199398, upload-time = "2025-10-08T19:47:17.962Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920, upload-time = "2025-10-08T19:47:19.355Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, + { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, + { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, + { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, + { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, + { url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" }, + { url = "https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = "2025-10-08T19:47:36.338Z" }, + { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254, upload-time = "2025-10-08T19:47:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205, upload-time = "2025-10-08T19:47:39.659Z" }, + { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" }, + { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = 
"2025-10-08T19:47:42.51Z" }, + { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, + { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, + { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, + { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, + { url = "https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" }, + { url = 
"https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869, upload-time = "2025-10-08T19:47:52.594Z" }, + { url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" }, + { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981, upload-time = "2025-10-08T19:47:55.715Z" }, + { url = "https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490, upload-time = "2025-10-08T19:47:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371, upload-time = "2025-10-08T19:47:59.317Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 
201424, upload-time = "2025-10-08T19:48:00.67Z" }, + { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 197566, upload-time = "2025-10-08T19:48:02.604Z" }, + { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130, upload-time = "2025-10-08T19:48:04.499Z" }, + { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = "2025-10-08T19:48:06.213Z" }, + { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" }, + { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" }, + { url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" }, + { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" }, + { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" }, + { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712, upload-time = "2025-10-08T19:48:18.901Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557, upload-time = "2025-10-08T19:48:20.762Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015, upload-time = "2025-10-08T19:48:22.592Z" }, + { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880, upload-time = "2025-10-08T19:48:23.947Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", size = 260938, upload-time = "2025-10-08T19:48:25.656Z" }, + { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641, upload-time = "2025-10-08T19:48:27.207Z" }, + { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = 
"2025-10-08T19:48:28.65Z" }, + { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" }, + { url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" }, + { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, +] + +[[package]] +name = "protobuf" +version = "6.33.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ba/25/7c72c307aafc96fa87062aa6291d9f7c94836e43214d43722e86037aac02/protobuf-6.33.5.tar.gz", hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c", size = 444465, upload-time = "2026-01-29T21:51:33.494Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/79/af92d0a8369732b027e6d6084251dd8e782c685c72da161bd4a2e00fbabb/protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b", size = 425769, upload-time = "2026-01-29T21:51:21.751Z" }, + { url = "https://files.pythonhosted.org/packages/55/75/bb9bc917d10e9ee13dee8607eb9ab963b7cf8be607c46e7862c748aa2af7/protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c", size = 437118, upload-time = "2026-01-29T21:51:24.022Z" }, + { url = "https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5", size = 427766, upload-time = "2026-01-29T21:51:25.413Z" }, + { url = "https://files.pythonhosted.org/packages/4e/b1/c79468184310de09d75095ed1314b839eb2f72df71097db9d1404a1b2717/protobuf-6.33.5-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:9b71e0281f36f179d00cbcb119cb19dec4d14a81393e5ea220f64b286173e190", size = 324638, upload-time = "2026-01-29T21:51:26.423Z" }, + { url = "https://files.pythonhosted.org/packages/c5/f5/65d838092fd01c44d16037953fd4c2cc851e783de9b8f02b27ec4ffd906f/protobuf-6.33.5-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8afa18e1d6d20af15b417e728e9f60f3aa108ee76f23c3b2c07a2c3b546d3afd", size = 339411, upload-time = "2026-01-29T21:51:27.446Z" }, + { url = "https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl", hash 
= "sha256:cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0", size = 323465, upload-time = "2026-01-29T21:51:28.925Z" }, + { url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" }, +] + +[[package]] +name = "psutil" +version = "7.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/c6/d1ddf4abb55e93cebc4f2ed8b5d6dbad109ecb8d63748dd2b20ab5e57ebe/psutil-7.2.2.tar.gz", hash = "sha256:0746f5f8d406af344fd547f1c8daa5f5c33dbc293bb8d6a16d80b4bb88f59372", size = 493740, upload-time = "2026-01-28T18:14:54.428Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/08/510cbdb69c25a96f4ae523f733cdc963ae654904e8db864c07585ef99875/psutil-7.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2edccc433cbfa046b980b0df0171cd25bcaeb3a68fe9022db0979e7aa74a826b", size = 130595, upload-time = "2026-01-28T18:14:57.293Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f5/97baea3fe7a5a9af7436301f85490905379b1c6f2dd51fe3ecf24b4c5fbf/psutil-7.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78c8603dcd9a04c7364f1a3e670cea95d51ee865e4efb3556a3a63adef958ea", size = 131082, upload-time = "2026-01-28T18:14:59.732Z" }, + { url = "https://files.pythonhosted.org/packages/37/d6/246513fbf9fa174af531f28412297dd05241d97a75911ac8febefa1a53c6/psutil-7.2.2-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a571f2330c966c62aeda00dd24620425d4b0cc86881c89861fbc04549e5dc63", size = 181476, upload-time = "2026-01-28T18:15:01.884Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/b5/9182c9af3836cca61696dabe4fd1304e17bc56cb62f17439e1154f225dd3/psutil-7.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:917e891983ca3c1887b4ef36447b1e0873e70c933afc831c6b6da078ba474312", size = 184062, upload-time = "2026-01-28T18:15:04.436Z" }, + { url = "https://files.pythonhosted.org/packages/16/ba/0756dca669f5a9300d0cbcbfae9a4c30e446dfc7440ffe43ded5724bfd93/psutil-7.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:ab486563df44c17f5173621c7b198955bd6b613fb87c71c161f827d3fb149a9b", size = 139893, upload-time = "2026-01-28T18:15:06.378Z" }, + { url = "https://files.pythonhosted.org/packages/1c/61/8fa0e26f33623b49949346de05ec1ddaad02ed8ba64af45f40a147dbfa97/psutil-7.2.2-cp313-cp313t-win_arm64.whl", hash = "sha256:ae0aefdd8796a7737eccea863f80f81e468a1e4cf14d926bd9b6f5f2d5f90ca9", size = 135589, upload-time = "2026-01-28T18:15:08.03Z" }, + { url = "https://files.pythonhosted.org/packages/81/69/ef179ab5ca24f32acc1dac0c247fd6a13b501fd5534dbae0e05a1c48b66d/psutil-7.2.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:eed63d3b4d62449571547b60578c5b2c4bcccc5387148db46e0c2313dad0ee00", size = 130664, upload-time = "2026-01-28T18:15:09.469Z" }, + { url = "https://files.pythonhosted.org/packages/7b/64/665248b557a236d3fa9efc378d60d95ef56dd0a490c2cd37dafc7660d4a9/psutil-7.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7b6d09433a10592ce39b13d7be5a54fbac1d1228ed29abc880fb23df7cb694c9", size = 131087, upload-time = "2026-01-28T18:15:11.724Z" }, + { url = "https://files.pythonhosted.org/packages/d5/2e/e6782744700d6759ebce3043dcfa661fb61e2fb752b91cdeae9af12c2178/psutil-7.2.2-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fa4ecf83bcdf6e6c8f4449aff98eefb5d0604bf88cb883d7da3d8d2d909546a", size = 182383, upload-time = "2026-01-28T18:15:13.445Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/49/0a41cefd10cb7505cdc04dab3eacf24c0c2cb158a998b8c7b1d27ee2c1f5/psutil-7.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e452c464a02e7dc7822a05d25db4cde564444a67e58539a00f929c51eddda0cf", size = 185210, upload-time = "2026-01-28T18:15:16.002Z" }, + { url = "https://files.pythonhosted.org/packages/dd/2c/ff9bfb544f283ba5f83ba725a3c5fec6d6b10b8f27ac1dc641c473dc390d/psutil-7.2.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c7663d4e37f13e884d13994247449e9f8f574bc4655d509c3b95e9ec9e2b9dc1", size = 141228, upload-time = "2026-01-28T18:15:18.385Z" }, + { url = "https://files.pythonhosted.org/packages/f2/fc/f8d9c31db14fcec13748d373e668bc3bed94d9077dbc17fb0eebc073233c/psutil-7.2.2-cp314-cp314t-win_arm64.whl", hash = "sha256:11fe5a4f613759764e79c65cf11ebdf26e33d6dd34336f8a337aa2996d71c841", size = 136284, upload-time = "2026-01-28T18:15:19.912Z" }, + { url = "https://files.pythonhosted.org/packages/e7/36/5ee6e05c9bd427237b11b3937ad82bb8ad2752d72c6969314590dd0c2f6e/psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ed0cace939114f62738d808fdcecd4c869222507e266e574799e9c0faa17d486", size = 129090, upload-time = "2026-01-28T18:15:22.168Z" }, + { url = "https://files.pythonhosted.org/packages/80/c4/f5af4c1ca8c1eeb2e92ccca14ce8effdeec651d5ab6053c589b074eda6e1/psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:1a7b04c10f32cc88ab39cbf606e117fd74721c831c98a27dc04578deb0c16979", size = 129859, upload-time = "2026-01-28T18:15:23.795Z" }, + { url = "https://files.pythonhosted.org/packages/b5/70/5d8df3b09e25bce090399cf48e452d25c935ab72dad19406c77f4e828045/psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:076a2d2f923fd4821644f5ba89f059523da90dc9014e85f8e45a5774ca5bc6f9", size = 155560, upload-time = "2026-01-28T18:15:25.976Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/65/37648c0c158dc222aba51c089eb3bdfa238e621674dc42d48706e639204f/psutil-7.2.2-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0726cecd84f9474419d67252add4ac0cd9811b04d61123054b9fb6f57df6e9e", size = 156997, upload-time = "2026-01-28T18:15:27.794Z" }, + { url = "https://files.pythonhosted.org/packages/8e/13/125093eadae863ce03c6ffdbae9929430d116a246ef69866dad94da3bfbc/psutil-7.2.2-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fd04ef36b4a6d599bbdb225dd1d3f51e00105f6d48a28f006da7f9822f2606d8", size = 148972, upload-time = "2026-01-28T18:15:29.342Z" }, + { url = "https://files.pythonhosted.org/packages/04/78/0acd37ca84ce3ddffaa92ef0f571e073faa6d8ff1f0559ab1272188ea2be/psutil-7.2.2-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b58fabe35e80b264a4e3bb23e6b96f9e45a3df7fb7eed419ac0e5947c61e47cc", size = 148266, upload-time = "2026-01-28T18:15:31.597Z" }, + { url = "https://files.pythonhosted.org/packages/b4/90/e2159492b5426be0c1fef7acba807a03511f97c5f86b3caeda6ad92351a7/psutil-7.2.2-cp37-abi3-win_amd64.whl", hash = "sha256:eb7e81434c8d223ec4a219b5fc1c47d0417b12be7ea866e24fb5ad6e84b3d988", size = 137737, upload-time = "2026-01-28T18:15:33.849Z" }, + { url = "https://files.pythonhosted.org/packages/8c/c7/7bb2e321574b10df20cbde462a94e2b71d05f9bbda251ef27d104668306a/psutil-7.2.2-cp37-abi3-win_arm64.whl", hash = "sha256:8c233660f575a5a89e6d4cb65d9f938126312bca76d8fe087b947b3a1aaac9ee", size = 134617, upload-time = "2026-01-28T18:15:36.514Z" }, +] + +[[package]] +name = "pycparser" +version = "3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, +] + +[[package]] +name = "pydantic" +version = "2.13.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d9/e4/40d09941a2cebcb20609b86a559817d5b9291c49dd6f8c87e5feffbe703a/pydantic-2.13.3.tar.gz", hash = "sha256:af09e9d1d09f4e7fe37145c1f577e1d61ceb9a41924bf0094a36506285d0a84d", size = 844068, upload-time = "2026-04-20T14:46:43.632Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/0a/fd7d723f8f8153418fb40cf9c940e82004fce7e987026b08a68a36dd3fe7/pydantic-2.13.3-py3-none-any.whl", hash = "sha256:6db14ac8dfc9a1e57f87ea2c0de670c251240f43cb0c30a5130e9720dc612927", size = 471981, upload-time = "2026-04-20T14:46:41.402Z" }, +] + +[package.optional-dependencies] +email = [ + { name = "email-validator" }, +] + +[[package]] +name = "pydantic-core" +version = "2.46.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2a/ef/f7abb56c49382a246fd2ce9c799691e3c3e7175ec74b14d99e798bcddb1a/pydantic_core-2.46.3.tar.gz", hash = "sha256:41c178f65b8c29807239d47e6050262eb6bf84eb695e41101e62e38df4a5bc2c", size = 471412, upload-time = "2026-04-20T14:40:56.672Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/cb/5b47425556ecc1f3fe18ed2a0083188aa46e1dd812b06e406475b3a5d536/pydantic_core-2.46.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b11b59b3eee90a80a36701ddb4576d9ae31f93f05cb9e277ceaa09e6bf074a67", size = 2101946, upload-time = 
"2026-04-20T14:40:52.581Z" }, + { url = "https://files.pythonhosted.org/packages/a1/4f/2fb62c2267cae99b815bbf4a7b9283812c88ca3153ef29f7707200f1d4e5/pydantic_core-2.46.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:af8653713055ea18a3abc1537fe2ebc42f5b0bbb768d1eb79fd74eb47c0ac089", size = 1951612, upload-time = "2026-04-20T14:42:42.996Z" }, + { url = "https://files.pythonhosted.org/packages/50/6e/b7348fd30d6556d132cddd5bd79f37f96f2601fe0608afac4f5fb01ec0b3/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75a519dab6d63c514f3a81053e5266c549679e4aa88f6ec57f2b7b854aceb1b0", size = 1977027, upload-time = "2026-04-20T14:42:02.001Z" }, + { url = "https://files.pythonhosted.org/packages/82/11/31d60ee2b45540d3fb0b29302a393dbc01cd771c473f5b5147bcd353e593/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6cd87cb1575b1ad05ba98894c5b5c96411ef678fa2f6ed2576607095b8d9789", size = 2063008, upload-time = "2026-04-20T14:44:17.952Z" }, + { url = "https://files.pythonhosted.org/packages/8a/db/3a9d1957181b59258f44a2300ab0f0be9d1e12d662a4f57bb31250455c52/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f80a55484b8d843c8ada81ebf70a682f3f00a3d40e378c06cf17ecb44d280d7d", size = 2233082, upload-time = "2026-04-20T14:40:57.934Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e1/3277c38792aeb5cfb18c2f0c5785a221d9ff4e149abbe1184d53d5f72273/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3861f1731b90c50a3266316b9044f5c9b405eecb8e299b0a7120596334e4fe9c", size = 2304615, upload-time = "2026-04-20T14:42:12.584Z" }, + { url = "https://files.pythonhosted.org/packages/5e/d5/e3d9717c9eba10855325650afd2a9cba8e607321697f18953af9d562da2f/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb528e295ed31570ac3dcc9bfdd6e0150bc11ce6168ac87a8082055cf1a67395", 
size = 2094380, upload-time = "2026-04-20T14:43:05.522Z" }, + { url = "https://files.pythonhosted.org/packages/a1/20/abac35dedcbfd66c6f0b03e4e3564511771d6c9b7ede10a362d03e110d9b/pydantic_core-2.46.3-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:367508faa4973b992b271ba1494acaab36eb7e8739d1e47be5035fb1ea225396", size = 2135429, upload-time = "2026-04-20T14:41:55.549Z" }, + { url = "https://files.pythonhosted.org/packages/6c/a5/41bfd1df69afad71b5cf0535055bccc73022715ad362edbc124bc1e021d7/pydantic_core-2.46.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ad3c826fe523e4becf4fe39baa44286cff85ef137c729a2c5e269afbfd0905d", size = 2174582, upload-time = "2026-04-20T14:41:45.96Z" }, + { url = "https://files.pythonhosted.org/packages/79/65/38d86ea056b29b2b10734eb23329b7a7672ca604df4f2b6e9c02d4ee22fe/pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ec638c5d194ef8af27db69f16c954a09797c0dc25015ad6123eb2c73a4d271ca", size = 2187533, upload-time = "2026-04-20T14:40:55.367Z" }, + { url = "https://files.pythonhosted.org/packages/b6/55/a1129141678a2026badc539ad1dee0a71d06f54c2f06a4bd68c030ac781b/pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:28ed528c45446062ee66edb1d33df5d88828ae167de76e773a3c7f64bd14e976", size = 2332985, upload-time = "2026-04-20T14:44:13.05Z" }, + { url = "https://files.pythonhosted.org/packages/d7/60/cb26f4077719f709e54819f4e8e1d43f4091f94e285eb6bd21e1190a7b7c/pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aed19d0c783886d5bd86d80ae5030006b45e28464218747dcf83dabfdd092c7b", size = 2373670, upload-time = "2026-04-20T14:41:53.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/7e/c3f21882bdf1d8d086876f81b5e296206c69c6082551d776895de7801fa0/pydantic_core-2.46.3-cp312-cp312-win32.whl", hash = "sha256:06d5d8820cbbdb4147578c1fe7ffcd5b83f34508cb9f9ab76e807be7db6ff0a4", size = 1966722, upload-time = "2026-04-20T14:44:30.588Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/be/6b5e757b859013ebfbd7adba02f23b428f37c86dcbf78b5bb0b4ffd36e99/pydantic_core-2.46.3-cp312-cp312-win_amd64.whl", hash = "sha256:c3212fda0ee959c1dd04c60b601ec31097aaa893573a3a1abd0a47bcac2968c1", size = 2072970, upload-time = "2026-04-20T14:42:54.248Z" }, + { url = "https://files.pythonhosted.org/packages/bf/f8/a989b21cc75e9a32d24192ef700eea606521221a89faa40c919ce884f2b1/pydantic_core-2.46.3-cp312-cp312-win_arm64.whl", hash = "sha256:f1f8338dd7a7f31761f1f1a3c47503a9a3b34eea3c8b01fa6ee96408affb5e72", size = 2035963, upload-time = "2026-04-20T14:44:20.4Z" }, + { url = "https://files.pythonhosted.org/packages/9b/3c/9b5e8eb9821936d065439c3b0fb1490ffa64163bfe7e1595985a47896073/pydantic_core-2.46.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:12bc98de041458b80c86c56b24df1d23832f3e166cbaff011f25d187f5c62c37", size = 2102109, upload-time = "2026-04-20T14:41:24.219Z" }, + { url = "https://files.pythonhosted.org/packages/91/97/1c41d1f5a19f241d8069f1e249853bcce378cdb76eec8ab636d7bc426280/pydantic_core-2.46.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:85348b8f89d2c3508b65b16c3c33a4da22b8215138d8b996912bb1532868885f", size = 1951820, upload-time = "2026-04-20T14:42:14.236Z" }, + { url = "https://files.pythonhosted.org/packages/30/b4/d03a7ae14571bc2b6b3c7b122441154720619afe9a336fa3a95434df5e2f/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1105677a6df914b1fb71a81b96c8cce7726857e1717d86001f29be06a25ee6f8", size = 1977785, upload-time = "2026-04-20T14:42:31.648Z" }, + { url = "https://files.pythonhosted.org/packages/ae/0c/4086f808834b59e3c8f1aa26df8f4b6d998cdcf354a143d18ef41529d1fe/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87082cd65669a33adeba5470769e9704c7cf026cc30afb9cc77fd865578ebaad", size = 2062761, upload-time = "2026-04-20T14:40:37.093Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/71/a649be5a5064c2df0db06e0a512c2281134ed2fcc981f52a657936a7527c/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60e5f66e12c4f5212d08522963380eaaeac5ebd795826cfd19b2dfb0c7a52b9c", size = 2232989, upload-time = "2026-04-20T14:42:59.254Z" }, + { url = "https://files.pythonhosted.org/packages/a2/84/7756e75763e810b3a710f4724441d1ecc5883b94aacb07ca71c5fb5cfb69/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6cdf19bf84128d5e7c37e8a73a0c5c10d51103a650ac585d42dd6ae233f2b7f", size = 2303975, upload-time = "2026-04-20T14:41:32.287Z" }, + { url = "https://files.pythonhosted.org/packages/6c/35/68a762e0c1e31f35fa0dac733cbd9f5b118042853698de9509c8e5bf128b/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031bb17f4885a43773c8c763089499f242aee2ea85cf17154168775dccdecf35", size = 2095325, upload-time = "2026-04-20T14:42:47.685Z" }, + { url = "https://files.pythonhosted.org/packages/77/bf/1bf8c9a8e91836c926eae5e3e51dce009bf495a60ca56060689d3df3f340/pydantic_core-2.46.3-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:bcf2a8b2982a6673693eae7348ef3d8cf3979c1d63b54fca7c397a635cc68687", size = 2133368, upload-time = "2026-04-20T14:41:22.766Z" }, + { url = "https://files.pythonhosted.org/packages/e5/50/87d818d6bab915984995157ceb2380f5aac4e563dddbed6b56f0ed057aba/pydantic_core-2.46.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28e8cf2f52d72ced402a137145923a762cbb5081e48b34312f7a0c8f55928ec3", size = 2173908, upload-time = "2026-04-20T14:42:52.044Z" }, + { url = "https://files.pythonhosted.org/packages/91/88/a311fb306d0bd6185db41fa14ae888fb81d0baf648a761ae760d30819d33/pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:17eaface65d9fc5abb940003020309c1bf7a211f5f608d7870297c367e6f9022", size = 2186422, upload-time = "2026-04-20T14:43:29.55Z" }, + { url 
= "https://files.pythonhosted.org/packages/8f/79/28fd0d81508525ab2054fef7c77a638c8b5b0afcbbaeee493cf7c3fef7e1/pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:93fd339f23408a07e98950a89644f92c54d8729719a40b30c0a30bb9ebc55d23", size = 2332709, upload-time = "2026-04-20T14:42:16.134Z" }, + { url = "https://files.pythonhosted.org/packages/b3/21/795bf5fe5c0f379308b8ef19c50dedab2e7711dbc8d0c2acf08f1c7daa05/pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:23cbdb3aaa74dfe0837975dbf69b469753bbde8eacace524519ffdb6b6e89eb7", size = 2372428, upload-time = "2026-04-20T14:41:10.974Z" }, + { url = "https://files.pythonhosted.org/packages/45/b3/ed14c659cbe7605e3ef063077680a64680aec81eb1a04763a05190d49b7f/pydantic_core-2.46.3-cp313-cp313-win32.whl", hash = "sha256:610eda2e3838f401105e6326ca304f5da1e15393ae25dacae5c5c63f2c275b13", size = 1965601, upload-time = "2026-04-20T14:41:42.128Z" }, + { url = "https://files.pythonhosted.org/packages/ef/bb/adb70d9a762ddd002d723fbf1bd492244d37da41e3af7b74ad212609027e/pydantic_core-2.46.3-cp313-cp313-win_amd64.whl", hash = "sha256:68cc7866ed863db34351294187f9b729964c371ba33e31c26f478471c52e1ed0", size = 2071517, upload-time = "2026-04-20T14:43:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/52/eb/66faefabebfe68bd7788339c9c9127231e680b11906368c67ce112fdb47f/pydantic_core-2.46.3-cp313-cp313-win_arm64.whl", hash = "sha256:f64b5537ac62b231572879cd08ec05600308636a5d63bcbdb15063a466977bec", size = 2035802, upload-time = "2026-04-20T14:43:38.507Z" }, + { url = "https://files.pythonhosted.org/packages/7f/db/a7bcb4940183fda36022cd18ba8dd12f2dff40740ec7b58ce7457befa416/pydantic_core-2.46.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:afa3aa644f74e290cdede48a7b0bee37d1c35e71b05105f6b340d484af536d9b", size = 2097614, upload-time = "2026-04-20T14:44:38.374Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/35/e4066358a22e3e99519db370494c7528f5a2aa1367370e80e27e20283543/pydantic_core-2.46.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ced3310e51aa425f7f77da8bbbb5212616655bedbe82c70944320bc1dbe5e018", size = 1951896, upload-time = "2026-04-20T14:40:53.996Z" }, + { url = "https://files.pythonhosted.org/packages/87/92/37cf4049d1636996e4b888c05a501f40a43ff218983a551d57f9d5e14f0d/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e29908922ce9da1a30b4da490bd1d3d82c01dcfdf864d2a74aacee674d0bfa34", size = 1979314, upload-time = "2026-04-20T14:41:49.446Z" }, + { url = "https://files.pythonhosted.org/packages/d8/36/9ff4d676dfbdfb2d591cf43f3d90ded01e15b1404fd101180ed2d62a2fd3/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0c9ff69140423eea8ed2d5477df3ba037f671f5e897d206d921bc9fdc39613e7", size = 2056133, upload-time = "2026-04-20T14:42:23.574Z" }, + { url = "https://files.pythonhosted.org/packages/bc/f0/405b442a4d7ba855b06eec8b2bf9c617d43b8432d099dfdc7bf999293495/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b675ab0a0d5b1c8fdb81195dc5bcefea3f3c240871cdd7ff9a2de8aa50772eb2", size = 2228726, upload-time = "2026-04-20T14:44:22.816Z" }, + { url = "https://files.pythonhosted.org/packages/e7/f8/65cd92dd5a0bd89ba277a98ecbfaf6fc36bbd3300973c7a4b826d6ab1391/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0087084960f209a9a4af50ecd1fb063d9ad3658c07bb81a7a53f452dacbfb2ba", size = 2301214, upload-time = "2026-04-20T14:44:48.792Z" }, + { url = "https://files.pythonhosted.org/packages/fd/86/ef96a4c6e79e7a2d0410826a68fbc0eccc0fd44aa733be199d5fcac3bb87/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed42e6cc8e1b0e2b9b96e2276bad70ae625d10d6d524aed0c93de974ae029f9f", size = 2099927, upload-time = 
"2026-04-20T14:41:40.196Z" }, + { url = "https://files.pythonhosted.org/packages/6d/53/269caf30e0096e0a8a8f929d1982a27b3879872cca2d917d17c2f9fdf4fe/pydantic_core-2.46.3-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:f1771ce258afb3e4201e67d154edbbae712a76a6081079fe247c2f53c6322c22", size = 2128789, upload-time = "2026-04-20T14:41:15.868Z" }, + { url = "https://files.pythonhosted.org/packages/00/b0/1a6d9b6a587e118482910c244a1c5acf4d192604174132efd12bf0ac486f/pydantic_core-2.46.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a7610b6a5242a6c736d8ad47fd5fff87fcfe8f833b281b1c409c3d6835d9227f", size = 2173815, upload-time = "2026-04-20T14:44:25.152Z" }, + { url = "https://files.pythonhosted.org/packages/87/56/e7e00d4041a7e62b5a40815590114db3b535bf3ca0bf4dca9f16cef25246/pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:ff5e7783bcc5476e1db448bf268f11cb257b1c276d3e89f00b5727be86dd0127", size = 2181608, upload-time = "2026-04-20T14:41:28.933Z" }, + { url = "https://files.pythonhosted.org/packages/e8/22/4bd23c3d41f7c185d60808a1de83c76cf5aeabf792f6c636a55c3b1ec7f9/pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:9d2e32edcc143bc01e95300671915d9ca052d4f745aa0a49c48d4803f8a85f2c", size = 2326968, upload-time = "2026-04-20T14:42:03.962Z" }, + { url = "https://files.pythonhosted.org/packages/24/ac/66cd45129e3915e5ade3b292cb3bc7fd537f58f8f8dbdaba6170f7cabb74/pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6e42d83d1c6b87fa56b521479cff237e626a292f3b31b6345c15a99121b454c1", size = 2369842, upload-time = "2026-04-20T14:41:35.52Z" }, + { url = "https://files.pythonhosted.org/packages/a2/51/dd4248abb84113615473aa20d5545b7c4cd73c8644003b5259686f93996c/pydantic_core-2.46.3-cp314-cp314-win32.whl", hash = "sha256:07bc6d2a28c3adb4f7c6ae46aa4f2d2929af127f587ed44057af50bf1ce0f505", size = 1959661, upload-time = "2026-04-20T14:41:00.042Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/eb/59980e5f1ae54a3b86372bd9f0fa373ea2d402e8cdcd3459334430f91e91/pydantic_core-2.46.3-cp314-cp314-win_amd64.whl", hash = "sha256:8940562319bc621da30714617e6a7eaa6b98c84e8c685bcdc02d7ed5e7c7c44e", size = 2071686, upload-time = "2026-04-20T14:43:16.471Z" }, + { url = "https://files.pythonhosted.org/packages/8c/db/1cf77e5247047dfee34bc01fa9bca134854f528c8eb053e144298893d370/pydantic_core-2.46.3-cp314-cp314-win_arm64.whl", hash = "sha256:5dcbbcf4d22210ced8f837c96db941bdb078f419543472aca5d9a0bb7cddc7df", size = 2026907, upload-time = "2026-04-20T14:43:31.732Z" }, + { url = "https://files.pythonhosted.org/packages/57/c0/b3df9f6a543276eadba0a48487b082ca1f201745329d97dbfa287034a230/pydantic_core-2.46.3-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:d0fe3dce1e836e418f912c1ad91c73357d03e556a4d286f441bf34fed2dbeecf", size = 2095047, upload-time = "2026-04-20T14:42:37.982Z" }, + { url = "https://files.pythonhosted.org/packages/66/57/886a938073b97556c168fd99e1a7305bb363cd30a6d2c76086bf0587b32a/pydantic_core-2.46.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9ce92e58abc722dac1bf835a6798a60b294e48eb0e625ec9fd994b932ac5feee", size = 1934329, upload-time = "2026-04-20T14:43:49.655Z" }, + { url = "https://files.pythonhosted.org/packages/0b/7c/b42eaa5c34b13b07ecb51da21761297a9b8eb43044c864a035999998f328/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a03e6467f0f5ab796a486146d1b887b2dc5e5f9b3288898c1b1c3ad974e53e4a", size = 1974847, upload-time = "2026-04-20T14:42:10.737Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9b/92b42db6543e7de4f99ae977101a2967b63122d4b6cf7773812da2d7d5b5/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2798b6ba041b9d70acfb9071a2ea13c8456dd1e6a5555798e41ba7b0790e329c", size = 2041742, upload-time = "2026-04-20T14:40:44.262Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/19/46fbe1efabb5aa2834b43b9454e70f9a83ad9c338c1291e48bdc4fecf167/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9be3e221bdc6d69abf294dcf7aff6af19c31a5cdcc8f0aa3b14be29df4bd03b1", size = 2236235, upload-time = "2026-04-20T14:41:27.307Z" }, + { url = "https://files.pythonhosted.org/packages/77/da/b3f95bc009ad60ec53120f5d16c6faa8cabdbe8a20d83849a1f2b8728148/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f13936129ce841f2a5ddf6f126fea3c43cd128807b5a59588c37cf10178c2e64", size = 2282633, upload-time = "2026-04-20T14:44:33.271Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6e/401336117722e28f32fb8220df676769d28ebdf08f2f4469646d404c43a3/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28b5f2ef03416facccb1c6ef744c69793175fd27e44ef15669201601cf423acb", size = 2109679, upload-time = "2026-04-20T14:44:41.065Z" }, + { url = "https://files.pythonhosted.org/packages/fc/53/b289f9bc8756a32fe718c46f55afaeaf8d489ee18d1a1e7be1db73f42cc4/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:830d1247d77ad23852314f069e9d7ddafeec5f684baf9d7e7065ed46a049c4e6", size = 2108342, upload-time = "2026-04-20T14:42:50.144Z" }, + { url = "https://files.pythonhosted.org/packages/10/5b/8292fc7c1f9111f1b2b7c1b0dcf1179edcd014fc3ea4517499f50b829d71/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0793c90c1a3c74966e7975eaef3ed30ebdff3260a0f815a62a22adc17e4c01c", size = 2157208, upload-time = "2026-04-20T14:42:08.133Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9e/f80044e9ec07580f057a89fc131f78dda7a58751ddf52bbe05eaf31db50f/pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:d2d0aead851b66f5245ec0c4fb2612ef457f8bbafefdf65a2bf9d6bac6140f47", size = 2167237, upload-time = "2026-04-20T14:42:25.412Z" }, 
+ { url = "https://files.pythonhosted.org/packages/f8/84/6781a1b037f3b96be9227edbd1101f6d3946746056231bf4ac48cdff1a8d/pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:2f40e4246676beb31c5ce77c38a55ca4e465c6b38d11ea1bd935420568e0b1ab", size = 2312540, upload-time = "2026-04-20T14:40:40.313Z" }, + { url = "https://files.pythonhosted.org/packages/3e/db/19c0839feeb728e7df03255581f198dfdf1c2aeb1e174a8420b63c5252e5/pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:cf489cf8986c543939aeee17a09c04d6ffb43bfef8ca16fcbcc5cfdcbed24dba", size = 2369556, upload-time = "2026-04-20T14:41:09.427Z" }, + { url = "https://files.pythonhosted.org/packages/e0/15/3228774cb7cd45f5f721ddf1b2242747f4eb834d0c491f0c02d606f09fed/pydantic_core-2.46.3-cp314-cp314t-win32.whl", hash = "sha256:ffe0883b56cfc05798bf994164d2b2ff03efe2d22022a2bb080f3b626176dd56", size = 1949756, upload-time = "2026-04-20T14:41:25.717Z" }, + { url = "https://files.pythonhosted.org/packages/b8/2a/c79cf53fd91e5a87e30d481809f52f9a60dd221e39de66455cf04deaad37/pydantic_core-2.46.3-cp314-cp314t-win_amd64.whl", hash = "sha256:706d9d0ce9cf4593d07270d8e9f53b161f90c57d315aeec4fb4fd7a8b10240d8", size = 2051305, upload-time = "2026-04-20T14:43:18.627Z" }, + { url = "https://files.pythonhosted.org/packages/0b/db/d8182a7f1d9343a032265aae186eb063fe26ca4c40f256b21e8da4498e89/pydantic_core-2.46.3-cp314-cp314t-win_arm64.whl", hash = "sha256:77706aeb41df6a76568434701e0917da10692da28cb69d5fb6919ce5fdb07374", size = 2026310, upload-time = "2026-04-20T14:41:01.778Z" }, + { url = "https://files.pythonhosted.org/packages/34/42/f426db557e8ab2791bc7562052299944a118655496fbff99914e564c0a94/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:b12dd51f1187c2eb489af8e20f880362db98e954b54ab792fa5d92e8bcc6b803", size = 2091877, upload-time = "2026-04-20T14:43:27.091Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/4f/86a832a9d14df58e663bfdf4627dc00d3317c2bd583c4fb23390b0f04b8e/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:f00a0961b125f1a47af7bcc17f00782e12f4cd056f83416006b30111d941dfa3", size = 1932428, upload-time = "2026-04-20T14:40:45.781Z" }, + { url = "https://files.pythonhosted.org/packages/11/1a/fe857968954d93fb78e0d4b6df5c988c74c4aaa67181c60be7cfe327c0ca/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57697d7c056aca4bbb680200f96563e841a6386ac1129370a0102592f4dddff5", size = 1997550, upload-time = "2026-04-20T14:44:02.425Z" }, + { url = "https://files.pythonhosted.org/packages/17/eb/9d89ad2d9b0ba8cd65393d434471621b98912abb10fbe1df08e480ba57b5/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd35aa21299def8db7ef4fe5c4ff862941a9a158ca7b63d61e66fe67d30416b4", size = 2137657, upload-time = "2026-04-20T14:42:45.149Z" }, +] + +[[package]] +name = "pydantic-extra-types" +version = "2.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fd/35/2fee58b1316a73e025728583d3b1447218a97e621933fc776fb8c0f2ebdd/pydantic_extra_types-2.11.0.tar.gz", hash = "sha256:4e9991959d045b75feb775683437a97991d02c138e00b59176571db9ce634f0e", size = 157226, upload-time = "2025-12-31T16:18:27.944Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/17/fabd56da47096d240dd45ba627bead0333b0cf0ee8ada9bec579287dadf3/pydantic_extra_types-2.11.0-py3-none-any.whl", hash = "sha256:84b864d250a0fc62535b7ec591e36f2c5b4d1325fa0017eb8cda9aeb63b374a6", size = 74296, upload-time = "2025-12-31T16:18:26.38Z" }, +] + +[[package]] +name = "pydantic-settings" +version = "2.14.0" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/98/c8345dccdc31de4228c039a98f6467a941e39558da41c1744fbe29fa5666/pydantic_settings-2.14.0.tar.gz", hash = "sha256:24285fd4b0e0c06507dd9fdfd331ee23794305352aaec8fc4eb92d4047aeb67d", size = 235709, upload-time = "2026-04-20T13:37:40.293Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/dd/bebff3040138f00ae8a102d426b27349b9a49acc310fcae7f92112d867e3/pydantic_settings-2.14.0-py3-none-any.whl", hash = "sha256:fc8d5d692eb7092e43c8647c1c35a3ecd00e040fcf02ed86f4cb5458ca62182e", size = 60940, upload-time = "2026-04-20T13:37:38.586Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/0d/549bd94f1a0a402dc8cf64563a117c0f3765662e2e668477624baeec44d5/pytest-9.0.3.tar.gz", hash = "sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c", size = 1572165, upload-time = 
"2026-04-07T17:16:18.027Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d4/24/a372aaf5c9b7208e7112038812994107bc65a84cd00e0354a88c2c77a617/pytest-9.0.3-py3-none-any.whl", hash = "sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9", size = 375249, upload-time = "2026-04-07T17:16:16.13Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage" }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/51/a849f96e117386044471c8ec2bd6cfebacda285da9525c9106aeb28da671/pytest_cov-7.1.0.tar.gz", hash = "sha256:30674f2b5f6351aa09702a9c8c364f6a01c27aae0c1366ae8016160d1efc56b2", size = 55592, upload-time = "2026-03-21T20:11:16.284Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9d/7a/d968e294073affff457b041c2be9868a40c1c71f4a35fcc1e45e5493067b/pytest_cov-7.1.0-py3-none-any.whl", hash = "sha256:a0461110b7865f9a271aa1b51e516c9a95de9d696734a2f71e3e78f46e1d4678", size = 22876, upload-time = "2026-03-21T20:11:14.438Z" }, +] + +[[package]] +name = 
"pytest-xdist" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "execnet" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" }, +] + +[[package]] +name = "python-multipart" +version = "0.0.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/01/979e98d542a70714b0cb2b6728ed0b7c46792b695e3eaec3e20711271ca3/python_multipart-0.0.22.tar.gz", hash = "sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58", size = 37612, upload-time = "2026-01-25T10:15:56.219Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" }, +] + +[[package]] +name = "python-telegram-bot" +version = "22.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpcore", marker = "python_full_version >= '3.14'" }, + { name = "httpx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e4/25/2258161b1069e66d6c39c0a602dbe57461d4767dc0012539970ea40bc9d6/python_telegram_bot-22.7.tar.gz", hash = "sha256:784b59ea3852fe4616ad63b4a0264c755637f5d725e87755ecdee28300febf61", size = 1516454, upload-time = "2026-03-16T09:36:03.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/f7/0e2f89dd62f45d46d4ea0d8aec5893ce5b37389638db010c117f46f11450/python_telegram_bot-22.7-py3-none-any.whl", hash = "sha256:d72eed532cf763758cd9331b57a6d790aff0bb4d37d8f4e92149436fe21c6475", size = 745365, upload-time = "2026-03-16T09:36:01.498Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "redis" +version = "7.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/7f/3759b1d0d72b7c92f0d70ffd9dc962b7b7b5ee74e135f9d7d8ab06b8a318/redis-7.4.0.tar.gz", hash = "sha256:64a6ea7bf567ad43c964d2c30d82853f8df927c5c9017766c55a1d1ed95d18ad", size = 4943913, upload-time = "2026-03-24T09:14:37.53Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/74/3a/95deec7db1eb53979973ebd156f3369a72732208d1391cd2e5d127062a32/redis-7.4.0-py3-none-any.whl", hash = "sha256:a9c74a5c893a5ef8455a5adb793a31bb70feb821c86eccb62eebef5a19c429ec", size = 409772, upload-time = "2026-03-24T09:14:35.968Z" }, +] + +[[package]] +name = "regex" +version = "2026.1.15" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/86/07d5056945f9ec4590b518171c4254a5925832eb727b56d3c38a7476f316/regex-2026.1.15.tar.gz", hash = "sha256:164759aa25575cbc0651bef59a0b18353e54300d79ace8084c818ad8ac72b7d5", size = 414811, upload-time = "2026-01-14T23:18:02.775Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/92/81/10d8cf43c807d0326efe874c1b79f22bfb0fb226027b0b19ebc26d301408/regex-2026.1.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:4c8fcc5793dde01641a35905d6731ee1548f02b956815f8f1cab89e515a5bdf1", size = 489398, upload-time = "2026-01-14T23:14:43.741Z" }, + { url = "https://files.pythonhosted.org/packages/90/b0/7c2a74e74ef2a7c32de724658a69a862880e3e4155cba992ba04d1c70400/regex-2026.1.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bfd876041a956e6a90ad7cdb3f6a630c07d491280bfeed4544053cd434901681", size = 291339, upload-time = 
"2026-01-14T23:14:45.183Z" }, + { url = "https://files.pythonhosted.org/packages/19/4d/16d0773d0c818417f4cc20aa0da90064b966d22cd62a8c46765b5bd2d643/regex-2026.1.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9250d087bc92b7d4899ccd5539a1b2334e44eee85d848c4c1aef8e221d3f8c8f", size = 289003, upload-time = "2026-01-14T23:14:47.25Z" }, + { url = "https://files.pythonhosted.org/packages/c6/e4/1fc4599450c9f0863d9406e944592d968b8d6dfd0d552a7d569e43bceada/regex-2026.1.15-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8a154cf6537ebbc110e24dabe53095e714245c272da9c1be05734bdad4a61aa", size = 798656, upload-time = "2026-01-14T23:14:48.77Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e6/59650d73a73fa8a60b3a590545bfcf1172b4384a7df2e7fe7b9aab4e2da9/regex-2026.1.15-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8050ba2e3ea1d8731a549e83c18d2f0999fbc99a5f6bd06b4c91449f55291804", size = 864252, upload-time = "2026-01-14T23:14:50.528Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ab/1d0f4d50a1638849a97d731364c9a80fa304fec46325e48330c170ee8e80/regex-2026.1.15-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf065240704cb8951cc04972cf107063917022511273e0969bdb34fc173456c", size = 912268, upload-time = "2026-01-14T23:14:52.952Z" }, + { url = "https://files.pythonhosted.org/packages/dd/df/0d722c030c82faa1d331d1921ee268a4e8fb55ca8b9042c9341c352f17fa/regex-2026.1.15-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c32bef3e7aeee75746748643667668ef941d28b003bfc89994ecf09a10f7a1b5", size = 803589, upload-time = "2026-01-14T23:14:55.182Z" }, + { url = "https://files.pythonhosted.org/packages/66/23/33289beba7ccb8b805c6610a8913d0131f834928afc555b241caabd422a9/regex-2026.1.15-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:d5eaa4a4c5b1906bd0d2508d68927f15b81821f85092e06f1a34a4254b0e1af3", size = 775700, upload-time = "2026-01-14T23:14:56.707Z" }, + { url = "https://files.pythonhosted.org/packages/e7/65/bf3a42fa6897a0d3afa81acb25c42f4b71c274f698ceabd75523259f6688/regex-2026.1.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:86c1077a3cc60d453d4084d5b9649065f3bf1184e22992bd322e1f081d3117fb", size = 787928, upload-time = "2026-01-14T23:14:58.312Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f5/13bf65864fc314f68cdd6d8ca94adcab064d4d39dbd0b10fef29a9da48fc/regex-2026.1.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:2b091aefc05c78d286657cd4db95f2e6313375ff65dcf085e42e4c04d9c8d410", size = 858607, upload-time = "2026-01-14T23:15:00.657Z" }, + { url = "https://files.pythonhosted.org/packages/a3/31/040e589834d7a439ee43fb0e1e902bc81bd58a5ba81acffe586bb3321d35/regex-2026.1.15-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:57e7d17f59f9ebfa9667e6e5a1c0127b96b87cb9cede8335482451ed00788ba4", size = 763729, upload-time = "2026-01-14T23:15:02.248Z" }, + { url = "https://files.pythonhosted.org/packages/9b/84/6921e8129687a427edf25a34a5594b588b6d88f491320b9de5b6339a4fcb/regex-2026.1.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:c6c4dcdfff2c08509faa15d36ba7e5ef5fcfab25f1e8f85a0c8f45bc3a30725d", size = 850697, upload-time = "2026-01-14T23:15:03.878Z" }, + { url = "https://files.pythonhosted.org/packages/8a/87/3d06143d4b128f4229158f2de5de6c8f2485170c7221e61bf381313314b2/regex-2026.1.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf8ff04c642716a7f2048713ddc6278c5fd41faa3b9cab12607c7abecd012c22", size = 789849, upload-time = "2026-01-14T23:15:06.102Z" }, + { url = "https://files.pythonhosted.org/packages/77/69/c50a63842b6bd48850ebc7ab22d46e7a2a32d824ad6c605b218441814639/regex-2026.1.15-cp312-cp312-win32.whl", hash = "sha256:82345326b1d8d56afbe41d881fdf62f1926d7264b2fc1537f99ae5da9aad7913", size = 266279, upload-time = 
"2026-01-14T23:15:07.678Z" }, + { url = "https://files.pythonhosted.org/packages/f2/36/39d0b29d087e2b11fd8191e15e81cce1b635fcc845297c67f11d0d19274d/regex-2026.1.15-cp312-cp312-win_amd64.whl", hash = "sha256:4def140aa6156bc64ee9912383d4038f3fdd18fee03a6f222abd4de6357ce42a", size = 277166, upload-time = "2026-01-14T23:15:09.257Z" }, + { url = "https://files.pythonhosted.org/packages/28/32/5b8e476a12262748851fa8ab1b0be540360692325975b094e594dfebbb52/regex-2026.1.15-cp312-cp312-win_arm64.whl", hash = "sha256:c6c565d9a6e1a8d783c1948937ffc377dd5771e83bd56de8317c450a954d2056", size = 270415, upload-time = "2026-01-14T23:15:10.743Z" }, + { url = "https://files.pythonhosted.org/packages/f8/2e/6870bb16e982669b674cce3ee9ff2d1d46ab80528ee6bcc20fb2292efb60/regex-2026.1.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e69d0deeb977ffe7ed3d2e4439360089f9c3f217ada608f0f88ebd67afb6385e", size = 489164, upload-time = "2026-01-14T23:15:13.962Z" }, + { url = "https://files.pythonhosted.org/packages/dc/67/9774542e203849b0286badf67199970a44ebdb0cc5fb739f06e47ada72f8/regex-2026.1.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3601ffb5375de85a16f407854d11cca8fe3f5febbe3ac78fb2866bb220c74d10", size = 291218, upload-time = "2026-01-14T23:15:15.647Z" }, + { url = "https://files.pythonhosted.org/packages/b2/87/b0cda79f22b8dee05f774922a214da109f9a4c0eca5da2c9d72d77ea062c/regex-2026.1.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4c5ef43b5c2d4114eb8ea424bb8c9cec01d5d17f242af88b2448f5ee81caadbc", size = 288895, upload-time = "2026-01-14T23:15:17.788Z" }, + { url = "https://files.pythonhosted.org/packages/3b/6a/0041f0a2170d32be01ab981d6346c83a8934277d82c780d60b127331f264/regex-2026.1.15-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:968c14d4f03e10b2fd960f1d5168c1f0ac969381d3c1fcc973bc45fb06346599", size = 798680, upload-time = "2026-01-14T23:15:19.342Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/de/30e1cfcdbe3e891324aa7568b7c968771f82190df5524fabc1138cb2d45a/regex-2026.1.15-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:56a5595d0f892f214609c9f76b41b7428bed439d98dc961efafdd1354d42baae", size = 864210, upload-time = "2026-01-14T23:15:22.005Z" }, + { url = "https://files.pythonhosted.org/packages/64/44/4db2f5c5ca0ccd40ff052ae7b1e9731352fcdad946c2b812285a7505ca75/regex-2026.1.15-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf650f26087363434c4e560011f8e4e738f6f3e029b85d4904c50135b86cfa5", size = 912358, upload-time = "2026-01-14T23:15:24.569Z" }, + { url = "https://files.pythonhosted.org/packages/79/b6/e6a5665d43a7c42467138c8a2549be432bad22cbd206f5ec87162de74bd7/regex-2026.1.15-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18388a62989c72ac24de75f1449d0fb0b04dfccd0a1a7c1c43af5eb503d890f6", size = 803583, upload-time = "2026-01-14T23:15:26.526Z" }, + { url = "https://files.pythonhosted.org/packages/e7/53/7cd478222169d85d74d7437e74750005e993f52f335f7c04ff7adfda3310/regex-2026.1.15-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6d220a2517f5893f55daac983bfa9fe998a7dbcaee4f5d27a88500f8b7873788", size = 775782, upload-time = "2026-01-14T23:15:29.352Z" }, + { url = "https://files.pythonhosted.org/packages/ca/b5/75f9a9ee4b03a7c009fe60500fe550b45df94f0955ca29af16333ef557c5/regex-2026.1.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9c08c2fbc6120e70abff5d7f28ffb4d969e14294fb2143b4b5c7d20e46d1714", size = 787978, upload-time = "2026-01-14T23:15:31.295Z" }, + { url = "https://files.pythonhosted.org/packages/72/b3/79821c826245bbe9ccbb54f6eadb7879c722fd3e0248c17bfc90bf54e123/regex-2026.1.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7ef7d5d4bd49ec7364315167a4134a015f61e8266c6d446fc116a9ac4456e10d", size = 858550, 
upload-time = "2026-01-14T23:15:33.558Z" }, + { url = "https://files.pythonhosted.org/packages/4a/85/2ab5f77a1c465745bfbfcb3ad63178a58337ae8d5274315e2cc623a822fa/regex-2026.1.15-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:6e42844ad64194fa08d5ccb75fe6a459b9b08e6d7296bd704460168d58a388f3", size = 763747, upload-time = "2026-01-14T23:15:35.206Z" }, + { url = "https://files.pythonhosted.org/packages/6d/84/c27df502d4bfe2873a3e3a7cf1bdb2b9cc10284d1a44797cf38bed790470/regex-2026.1.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cfecdaa4b19f9ca534746eb3b55a5195d5c95b88cac32a205e981ec0a22b7d31", size = 850615, upload-time = "2026-01-14T23:15:37.523Z" }, + { url = "https://files.pythonhosted.org/packages/7d/b7/658a9782fb253680aa8ecb5ccbb51f69e088ed48142c46d9f0c99b46c575/regex-2026.1.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:08df9722d9b87834a3d701f3fca570b2be115654dbfd30179f30ab2f39d606d3", size = 789951, upload-time = "2026-01-14T23:15:39.582Z" }, + { url = "https://files.pythonhosted.org/packages/fc/2a/5928af114441e059f15b2f63e188bd00c6529b3051c974ade7444b85fcda/regex-2026.1.15-cp313-cp313-win32.whl", hash = "sha256:d426616dae0967ca225ab12c22274eb816558f2f99ccb4a1d52ca92e8baf180f", size = 266275, upload-time = "2026-01-14T23:15:42.108Z" }, + { url = "https://files.pythonhosted.org/packages/4f/16/5bfbb89e435897bff28cf0352a992ca719d9e55ebf8b629203c96b6ce4f7/regex-2026.1.15-cp313-cp313-win_amd64.whl", hash = "sha256:febd38857b09867d3ed3f4f1af7d241c5c50362e25ef43034995b77a50df494e", size = 277145, upload-time = "2026-01-14T23:15:44.244Z" }, + { url = "https://files.pythonhosted.org/packages/56/c1/a09ff7392ef4233296e821aec5f78c51be5e91ffde0d163059e50fd75835/regex-2026.1.15-cp313-cp313-win_arm64.whl", hash = "sha256:8e32f7896f83774f91499d239e24cebfadbc07639c1494bb7213983842348337", size = 270411, upload-time = "2026-01-14T23:15:45.858Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/38/0cfd5a78e5c6db00e6782fdae70458f89850ce95baa5e8694ab91d89744f/regex-2026.1.15-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ec94c04149b6a7b8120f9f44565722c7ae31b7a6d2275569d2eefa76b83da3be", size = 492068, upload-time = "2026-01-14T23:15:47.616Z" }, + { url = "https://files.pythonhosted.org/packages/50/72/6c86acff16cb7c959c4355826bbf06aad670682d07c8f3998d9ef4fee7cd/regex-2026.1.15-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40c86d8046915bb9aeb15d3f3f15b6fd500b8ea4485b30e1bbc799dab3fe29f8", size = 292756, upload-time = "2026-01-14T23:15:49.307Z" }, + { url = "https://files.pythonhosted.org/packages/4e/58/df7fb69eadfe76526ddfce28abdc0af09ffe65f20c2c90932e89d705153f/regex-2026.1.15-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:726ea4e727aba21643205edad8f2187ec682d3305d790f73b7a51c7587b64bdd", size = 291114, upload-time = "2026-01-14T23:15:51.484Z" }, + { url = "https://files.pythonhosted.org/packages/ed/6c/a4011cd1cf96b90d2cdc7e156f91efbd26531e822a7fbb82a43c1016678e/regex-2026.1.15-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1cb740d044aff31898804e7bf1181cc72c03d11dfd19932b9911ffc19a79070a", size = 807524, upload-time = "2026-01-14T23:15:53.102Z" }, + { url = "https://files.pythonhosted.org/packages/1d/25/a53ffb73183f69c3e9f4355c4922b76d2840aee160af6af5fac229b6201d/regex-2026.1.15-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05d75a668e9ea16f832390d22131fe1e8acc8389a694c8febc3e340b0f810b93", size = 873455, upload-time = "2026-01-14T23:15:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/66/0b/8b47fc2e8f97d9b4a851736f3890a5f786443aa8901061c55f24c955f45b/regex-2026.1.15-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d991483606f3dbec93287b9f35596f41aa2e92b7c2ebbb935b63f409e243c9af", size = 915007, upload-time = 
"2026-01-14T23:15:57.041Z" }, + { url = "https://files.pythonhosted.org/packages/c2/fa/97de0d681e6d26fabe71968dbee06dd52819e9a22fdce5dac7256c31ed84/regex-2026.1.15-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:194312a14819d3e44628a44ed6fea6898fdbecb0550089d84c403475138d0a09", size = 812794, upload-time = "2026-01-14T23:15:58.916Z" }, + { url = "https://files.pythonhosted.org/packages/22/38/e752f94e860d429654aa2b1c51880bff8dfe8f084268258adf9151cf1f53/regex-2026.1.15-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe2fda4110a3d0bc163c2e0664be44657431440722c5c5315c65155cab92f9e5", size = 781159, upload-time = "2026-01-14T23:16:00.817Z" }, + { url = "https://files.pythonhosted.org/packages/e9/a7/d739ffaef33c378fc888302a018d7f81080393d96c476b058b8c64fd2b0d/regex-2026.1.15-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:124dc36c85d34ef2d9164da41a53c1c8c122cfb1f6e1ec377a1f27ee81deb794", size = 795558, upload-time = "2026-01-14T23:16:03.267Z" }, + { url = "https://files.pythonhosted.org/packages/3e/c4/542876f9a0ac576100fc73e9c75b779f5c31e3527576cfc9cb3009dcc58a/regex-2026.1.15-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1774cd1981cd212506a23a14dba7fdeaee259f5deba2df6229966d9911e767a", size = 868427, upload-time = "2026-01-14T23:16:05.646Z" }, + { url = "https://files.pythonhosted.org/packages/fc/0f/d5655bea5b22069e32ae85a947aa564912f23758e112cdb74212848a1a1b/regex-2026.1.15-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:b5f7d8d2867152cdb625e72a530d2ccb48a3d199159144cbdd63870882fb6f80", size = 769939, upload-time = "2026-01-14T23:16:07.542Z" }, + { url = "https://files.pythonhosted.org/packages/20/06/7e18a4fa9d326daeda46d471a44ef94201c46eaa26dbbb780b5d92cbfdda/regex-2026.1.15-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:492534a0ab925d1db998defc3c302dae3616a2fc3fe2e08db1472348f096ddf2", size = 854753, upload-time = "2026-01-14T23:16:10.395Z" }, + { url 
= "https://files.pythonhosted.org/packages/3b/67/dc8946ef3965e166f558ef3b47f492bc364e96a265eb4a2bb3ca765c8e46/regex-2026.1.15-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c661fc820cfb33e166bf2450d3dadbda47c8d8981898adb9b6fe24e5e582ba60", size = 799559, upload-time = "2026-01-14T23:16:12.347Z" }, + { url = "https://files.pythonhosted.org/packages/a5/61/1bba81ff6d50c86c65d9fd84ce9699dd106438ee4cdb105bf60374ee8412/regex-2026.1.15-cp313-cp313t-win32.whl", hash = "sha256:99ad739c3686085e614bf77a508e26954ff1b8f14da0e3765ff7abbf7799f952", size = 268879, upload-time = "2026-01-14T23:16:14.049Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/cef7d4c5fb0ea3ac5c775fd37db5747f7378b29526cc83f572198924ff47/regex-2026.1.15-cp313-cp313t-win_amd64.whl", hash = "sha256:32655d17905e7ff8ba5c764c43cb124e34a9245e45b83c22e81041e1071aee10", size = 280317, upload-time = "2026-01-14T23:16:15.718Z" }, + { url = "https://files.pythonhosted.org/packages/b4/52/4317f7a5988544e34ab57b4bde0f04944c4786128c933fb09825924d3e82/regex-2026.1.15-cp313-cp313t-win_arm64.whl", hash = "sha256:b2a13dd6a95e95a489ca242319d18fc02e07ceb28fa9ad146385194d95b3c829", size = 271551, upload-time = "2026-01-14T23:16:17.533Z" }, + { url = "https://files.pythonhosted.org/packages/52/0a/47fa888ec7cbbc7d62c5f2a6a888878e76169170ead271a35239edd8f0e8/regex-2026.1.15-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:d920392a6b1f353f4aa54328c867fec3320fa50657e25f64abf17af054fc97ac", size = 489170, upload-time = "2026-01-14T23:16:19.835Z" }, + { url = "https://files.pythonhosted.org/packages/ac/c4/d000e9b7296c15737c9301708e9e7fbdea009f8e93541b6b43bdb8219646/regex-2026.1.15-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b5a28980a926fa810dbbed059547b02783952e2efd9c636412345232ddb87ff6", size = 291146, upload-time = "2026-01-14T23:16:21.541Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/b6/921cc61982e538682bdf3bdf5b2c6ab6b34368da1f8e98a6c1ddc503c9cf/regex-2026.1.15-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:621f73a07595d83f28952d7bd1e91e9d1ed7625fb7af0064d3516674ec93a2a2", size = 288986, upload-time = "2026-01-14T23:16:23.381Z" }, + { url = "https://files.pythonhosted.org/packages/ca/33/eb7383dde0bbc93f4fb9d03453aab97e18ad4024ac7e26cef8d1f0a2cff0/regex-2026.1.15-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d7d92495f47567a9b1669c51fc8d6d809821849063d168121ef801bbc213846", size = 799098, upload-time = "2026-01-14T23:16:25.088Z" }, + { url = "https://files.pythonhosted.org/packages/27/56/b664dccae898fc8d8b4c23accd853f723bde0f026c747b6f6262b688029c/regex-2026.1.15-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8dd16fba2758db7a3780a051f245539c4451ca20910f5a5e6ea1c08d06d4a76b", size = 864980, upload-time = "2026-01-14T23:16:27.297Z" }, + { url = "https://files.pythonhosted.org/packages/16/40/0999e064a170eddd237bae9ccfcd8f28b3aa98a38bf727a086425542a4fc/regex-2026.1.15-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1e1808471fbe44c1a63e5f577a1d5f02fe5d66031dcbdf12f093ffc1305a858e", size = 911607, upload-time = "2026-01-14T23:16:29.235Z" }, + { url = "https://files.pythonhosted.org/packages/07/78/c77f644b68ab054e5a674fb4da40ff7bffb2c88df58afa82dbf86573092d/regex-2026.1.15-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0751a26ad39d4f2ade8fe16c59b2bf5cb19eb3d2cd543e709e583d559bd9efde", size = 803358, upload-time = "2026-01-14T23:16:31.369Z" }, + { url = "https://files.pythonhosted.org/packages/27/31/d4292ea8566eaa551fafc07797961c5963cf5235c797cc2ae19b85dfd04d/regex-2026.1.15-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:0f0c7684c7f9ca241344ff95a1de964f257a5251968484270e91c25a755532c5", size = 775833, upload-time = "2026-01-14T23:16:33.141Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b2/cff3bf2fea4133aa6fb0d1e370b37544d18c8350a2fa118c7e11d1db0e14/regex-2026.1.15-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:74f45d170a21df41508cb67165456538425185baaf686281fa210d7e729abc34", size = 788045, upload-time = "2026-01-14T23:16:35.005Z" }, + { url = "https://files.pythonhosted.org/packages/8d/99/2cb9b69045372ec877b6f5124bda4eb4253bc58b8fe5848c973f752bc52c/regex-2026.1.15-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:f1862739a1ffb50615c0fde6bae6569b5efbe08d98e59ce009f68a336f64da75", size = 859374, upload-time = "2026-01-14T23:16:36.919Z" }, + { url = "https://files.pythonhosted.org/packages/09/16/710b0a5abe8e077b1729a562d2f297224ad079f3a66dce46844c193416c8/regex-2026.1.15-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:453078802f1b9e2b7303fb79222c054cb18e76f7bdc220f7530fdc85d319f99e", size = 763940, upload-time = "2026-01-14T23:16:38.685Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d1/7585c8e744e40eb3d32f119191969b91de04c073fca98ec14299041f6e7e/regex-2026.1.15-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:a30a68e89e5a218b8b23a52292924c1f4b245cb0c68d1cce9aec9bbda6e2c160", size = 850112, upload-time = "2026-01-14T23:16:40.646Z" }, + { url = "https://files.pythonhosted.org/packages/af/d6/43e1dd85df86c49a347aa57c1f69d12c652c7b60e37ec162e3096194a278/regex-2026.1.15-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9479cae874c81bf610d72b85bb681a94c95722c127b55445285fb0e2c82db8e1", size = 789586, upload-time = "2026-01-14T23:16:42.799Z" }, + { url = "https://files.pythonhosted.org/packages/93/38/77142422f631e013f316aaae83234c629555729a9fbc952b8a63ac91462a/regex-2026.1.15-cp314-cp314-win32.whl", hash = "sha256:d639a750223132afbfb8f429c60d9d318aeba03281a5f1ab49f877456448dcf1", size = 271691, upload-time = 
"2026-01-14T23:16:44.671Z" }, + { url = "https://files.pythonhosted.org/packages/4a/a9/ab16b4649524ca9e05213c1cdbb7faa85cc2aa90a0230d2f796cbaf22736/regex-2026.1.15-cp314-cp314-win_amd64.whl", hash = "sha256:4161d87f85fa831e31469bfd82c186923070fc970b9de75339b68f0c75b51903", size = 280422, upload-time = "2026-01-14T23:16:46.607Z" }, + { url = "https://files.pythonhosted.org/packages/be/2a/20fd057bf3521cb4791f69f869635f73e0aaf2b9ad2d260f728144f9047c/regex-2026.1.15-cp314-cp314-win_arm64.whl", hash = "sha256:91c5036ebb62663a6b3999bdd2e559fd8456d17e2b485bf509784cd31a8b1705", size = 273467, upload-time = "2026-01-14T23:16:48.967Z" }, + { url = "https://files.pythonhosted.org/packages/ad/77/0b1e81857060b92b9cad239104c46507dd481b3ff1fa79f8e7f865aae38a/regex-2026.1.15-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ee6854c9000a10938c79238de2379bea30c82e4925a371711af45387df35cab8", size = 492073, upload-time = "2026-01-14T23:16:51.154Z" }, + { url = "https://files.pythonhosted.org/packages/70/f3/f8302b0c208b22c1e4f423147e1913fd475ddd6230565b299925353de644/regex-2026.1.15-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2c2b80399a422348ce5de4fe40c418d6299a0fa2803dd61dc0b1a2f28e280fcf", size = 292757, upload-time = "2026-01-14T23:16:53.08Z" }, + { url = "https://files.pythonhosted.org/packages/bf/f0/ef55de2460f3b4a6da9d9e7daacd0cb79d4ef75c64a2af316e68447f0df0/regex-2026.1.15-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:dca3582bca82596609959ac39e12b7dad98385b4fefccb1151b937383cec547d", size = 291122, upload-time = "2026-01-14T23:16:55.383Z" }, + { url = "https://files.pythonhosted.org/packages/cf/55/bb8ccbacabbc3a11d863ee62a9f18b160a83084ea95cdfc5d207bfc3dd75/regex-2026.1.15-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef71d476caa6692eea743ae5ea23cde3260677f70122c4d258ca952e5c2d4e84", size = 807761, upload-time = "2026-01-14T23:16:57.251Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/84/f75d937f17f81e55679a0509e86176e29caa7298c38bd1db7ce9c0bf6075/regex-2026.1.15-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c243da3436354f4af6c3058a3f81a97d47ea52c9bd874b52fd30274853a1d5df", size = 873538, upload-time = "2026-01-14T23:16:59.349Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d9/0da86327df70349aa8d86390da91171bd3ca4f0e7c1d1d453a9c10344da3/regex-2026.1.15-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8355ad842a7c7e9e5e55653eade3b7d1885ba86f124dd8ab1f722f9be6627434", size = 915066, upload-time = "2026-01-14T23:17:01.607Z" }, + { url = "https://files.pythonhosted.org/packages/2a/5e/f660fb23fc77baa2a61aa1f1fe3a4eea2bbb8a286ddec148030672e18834/regex-2026.1.15-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f192a831d9575271a22d804ff1a5355355723f94f31d9eef25f0d45a152fdc1a", size = 812938, upload-time = "2026-01-14T23:17:04.366Z" }, + { url = "https://files.pythonhosted.org/packages/69/33/a47a29bfecebbbfd1e5cd3f26b28020a97e4820f1c5148e66e3b7d4b4992/regex-2026.1.15-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:166551807ec20d47ceaeec380081f843e88c8949780cd42c40f18d16168bed10", size = 781314, upload-time = "2026-01-14T23:17:06.378Z" }, + { url = "https://files.pythonhosted.org/packages/65/ec/7ec2bbfd4c3f4e494a24dec4c6943a668e2030426b1b8b949a6462d2c17b/regex-2026.1.15-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9ca1cbdc0fbfe5e6e6f8221ef2309988db5bcede52443aeaee9a4ad555e0dac", size = 795652, upload-time = "2026-01-14T23:17:08.521Z" }, + { url = "https://files.pythonhosted.org/packages/46/79/a5d8651ae131fe27d7c521ad300aa7f1c7be1dbeee4d446498af5411b8a9/regex-2026.1.15-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b30bcbd1e1221783c721483953d9e4f3ab9c5d165aa709693d3f3946747b1aea", size = 868550, 
upload-time = "2026-01-14T23:17:10.573Z" }, + { url = "https://files.pythonhosted.org/packages/06/b7/25635d2809664b79f183070786a5552dd4e627e5aedb0065f4e3cf8ee37d/regex-2026.1.15-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2a8d7b50c34578d0d3bf7ad58cde9652b7d683691876f83aedc002862a35dc5e", size = 769981, upload-time = "2026-01-14T23:17:12.871Z" }, + { url = "https://files.pythonhosted.org/packages/16/8b/fc3fcbb2393dcfa4a6c5ffad92dc498e842df4581ea9d14309fcd3c55fb9/regex-2026.1.15-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9d787e3310c6a6425eb346be4ff2ccf6eece63017916fd77fe8328c57be83521", size = 854780, upload-time = "2026-01-14T23:17:14.837Z" }, + { url = "https://files.pythonhosted.org/packages/d0/38/dde117c76c624713c8a2842530be9c93ca8b606c0f6102d86e8cd1ce8bea/regex-2026.1.15-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:619843841e220adca114118533a574a9cd183ed8a28b85627d2844c500a2b0db", size = 799778, upload-time = "2026-01-14T23:17:17.369Z" }, + { url = "https://files.pythonhosted.org/packages/e3/0d/3a6cfa9ae99606afb612d8fb7a66b245a9d5ff0f29bb347c8a30b6ad561b/regex-2026.1.15-cp314-cp314t-win32.whl", hash = "sha256:e90b8db97f6f2c97eb045b51a6b2c5ed69cedd8392459e0642d4199b94fabd7e", size = 274667, upload-time = "2026-01-14T23:17:19.301Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b2/297293bb0742fd06b8d8e2572db41a855cdf1cae0bf009b1cb74fe07e196/regex-2026.1.15-cp314-cp314t-win_amd64.whl", hash = "sha256:5ef19071f4ac9f0834793af85bd04a920b4407715624e40cb7a0631a11137cdf", size = 284386, upload-time = "2026-01-14T23:17:21.231Z" }, + { url = "https://files.pythonhosted.org/packages/95/e4/a3b9480c78cf8ee86626cb06f8d931d74d775897d44201ccb813097ae697/regex-2026.1.15-cp314-cp314t-win_arm64.whl", hash = "sha256:ca89c5e596fc05b015f27561b3793dc2fa0917ea0d7507eebb448efd35274a70", size = 274837, upload-time = "2026-01-14T23:17:23.146Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "rich" +version = "14.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/99/a4cab2acbb884f80e558b0771e97e21e939c5dfb460f488d19df485e8298/rich-14.3.2.tar.gz", hash = "sha256:e712f11c1a562a11843306f5ed999475f09ac31ffb64281f73ab29ffdda8b3b8", size = 230143, upload-time = "2026-02-01T16:20:47.908Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/45/615f5babd880b4bd7d405cc0dc348234c5ffb6ed1ea33e152ede08b2072d/rich-14.3.2-py3-none-any.whl", hash = "sha256:08e67c3e90884651da3239ea668222d19bea7b589149d8014a21c633420dbb69", size = 309963, upload-time = "2026-02-01T16:20:46.078Z" }, +] + +[[package]] +name = "rich-toolkit" +version = "0.19.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d0/c9/4bbf4bfee195ed1b7d7a6733cc523ca61dbfb4a3e3c12ea090aaffd97597/rich_toolkit-0.19.4.tar.gz", hash = "sha256:52e23d56f9dc30d1343eb3b3f6f18764c313fbfea24e52e6a1d6069bec9c18eb", size = 193951, upload-time = 
"2026-02-12T10:08:15.814Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/28/31/97d39719def09c134385bfcfbedfed255168b571e7beb3ad7765aae660ca/rich_toolkit-0.19.4-py3-none-any.whl", hash = "sha256:34ac344de8862801644be8b703e26becf44b047e687f208d7829e8f7cfc311d6", size = 32757, upload-time = "2026-02-12T10:08:15.037Z" }, +] + +[[package]] +name = "rignore" +version = "0.7.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/f5/8bed2310abe4ae04b67a38374a4d311dd85220f5d8da56f47ae9361be0b0/rignore-0.7.6.tar.gz", hash = "sha256:00d3546cd793c30cb17921ce674d2c8f3a4b00501cb0e3dd0e82217dbeba2671", size = 57140, upload-time = "2025-11-05T21:41:21.968Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/0e/012556ef3047a2628842b44e753bb15f4dc46806780ff090f1e8fe4bf1eb/rignore-0.7.6-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:03e82348cb7234f8d9b2834f854400ddbbd04c0f8f35495119e66adbd37827a8", size = 883488, upload-time = "2025-11-05T20:42:41.359Z" }, + { url = "https://files.pythonhosted.org/packages/93/b0/d4f1f3fe9eb3f8e382d45ce5b0547ea01c4b7e0b4b4eb87bcd66a1d2b888/rignore-0.7.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9e624f6be6116ea682e76c5feb71ea91255c67c86cb75befe774365b2931961", size = 820411, upload-time = "2025-11-05T20:42:24.782Z" }, + { url = "https://files.pythonhosted.org/packages/4a/c8/dea564b36dedac8de21c18e1851789545bc52a0c22ece9843444d5608a6a/rignore-0.7.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bda49950d405aa8d0ebe26af807c4e662dd281d926530f03f29690a2e07d649a", size = 897821, upload-time = "2025-11-05T20:40:52.613Z" }, + { url = "https://files.pythonhosted.org/packages/b3/2b/ee96db17ac1835e024c5d0742eefb7e46de60020385ac883dd3d1cde2c1f/rignore-0.7.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5fd5ab3840b8c16851d327ed06e9b8be6459702a53e5ab1fc4073b684b3789e", size = 873963, 
upload-time = "2025-11-05T20:41:07.49Z" }, + { url = "https://files.pythonhosted.org/packages/a5/8c/ad5a57bbb9d14d5c7e5960f712a8a0b902472ea3f4a2138cbf70d1777b75/rignore-0.7.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ced2a248352636a5c77504cb755dc02c2eef9a820a44d3f33061ce1bb8a7f2d2", size = 1169216, upload-time = "2025-11-05T20:41:23.73Z" }, + { url = "https://files.pythonhosted.org/packages/80/e6/5b00bc2a6bc1701e6878fca798cf5d9125eb3113193e33078b6fc0d99123/rignore-0.7.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a04a3b73b75ddc12c9c9b21efcdaab33ca3832941d6f1d67bffd860941cd448a", size = 942942, upload-time = "2025-11-05T20:41:39.393Z" }, + { url = "https://files.pythonhosted.org/packages/85/e5/7f99bd0cc9818a91d0e8b9acc65b792e35750e3bdccd15a7ee75e64efca4/rignore-0.7.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d24321efac92140b7ec910ac7c53ab0f0c86a41133d2bb4b0e6a7c94967f44dd", size = 959787, upload-time = "2025-11-05T20:42:09.765Z" }, + { url = "https://files.pythonhosted.org/packages/55/54/2ffea79a7c1eabcede1926347ebc2a81bc6b81f447d05b52af9af14948b9/rignore-0.7.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:73c7aa109d41e593785c55fdaa89ad80b10330affa9f9d3e3a51fa695f739b20", size = 984245, upload-time = "2025-11-05T20:41:54.062Z" }, + { url = "https://files.pythonhosted.org/packages/41/f7/e80f55dfe0f35787fa482aa18689b9c8251e045076c35477deb0007b3277/rignore-0.7.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1734dc49d1e9501b07852ef44421f84d9f378da9fbeda729e77db71f49cac28b", size = 1078647, upload-time = "2025-11-05T21:40:13.463Z" }, + { url = "https://files.pythonhosted.org/packages/d4/cf/2c64f0b6725149f7c6e7e5a909d14354889b4beaadddaa5fff023ec71084/rignore-0.7.6-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5719ea14ea2b652c0c0894be5dfde954e1853a80dea27dd2fbaa749618d837f5", size = 1139186, upload-time = "2025-11-05T21:40:31.27Z" }, + 
{ url = "https://files.pythonhosted.org/packages/75/95/a86c84909ccc24af0d094b50d54697951e576c252a4d9f21b47b52af9598/rignore-0.7.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8e23424fc7ce35726854f639cb7968151a792c0c3d9d082f7f67e0c362cfecca", size = 1117604, upload-time = "2025-11-05T21:40:48.07Z" }, + { url = "https://files.pythonhosted.org/packages/7f/5e/13b249613fd5d18d58662490ab910a9f0be758981d1797789913adb4e918/rignore-0.7.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3efdcf1dd84d45f3e2bd2f93303d9be103888f56dfa7c3349b5bf4f0657ec696", size = 1127725, upload-time = "2025-11-05T21:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/c7/28/fa5dcd1e2e16982c359128664e3785f202d3eca9b22dd0b2f91c4b3d242f/rignore-0.7.6-cp312-cp312-win32.whl", hash = "sha256:ccca9d1a8b5234c76b71546fc3c134533b013f40495f394a65614a81f7387046", size = 646145, upload-time = "2025-11-05T21:41:51.096Z" }, + { url = "https://files.pythonhosted.org/packages/26/87/69387fb5dd81a0f771936381431780b8cf66fcd2cfe9495e1aaf41548931/rignore-0.7.6-cp312-cp312-win_amd64.whl", hash = "sha256:c96a285e4a8bfec0652e0bfcf42b1aabcdda1e7625f5006d188e3b1c87fdb543", size = 726090, upload-time = "2025-11-05T21:41:36.485Z" }, + { url = "https://files.pythonhosted.org/packages/24/5f/e8418108dcda8087fb198a6f81caadbcda9fd115d61154bf0df4d6d3619b/rignore-0.7.6-cp312-cp312-win_arm64.whl", hash = "sha256:a64a750e7a8277a323f01ca50b7784a764845f6cce2fe38831cb93f0508d0051", size = 656317, upload-time = "2025-11-05T21:41:25.305Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8a/a4078f6e14932ac7edb171149c481de29969d96ddee3ece5dc4c26f9e0c3/rignore-0.7.6-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:2bdab1d31ec9b4fb1331980ee49ea051c0d7f7bb6baa28b3125ef03cdc48fdaf", size = 883057, upload-time = "2025-11-05T20:42:42.741Z" }, + { url = "https://files.pythonhosted.org/packages/f9/8f/f8daacd177db4bf7c2223bab41e630c52711f8af9ed279be2058d2fe4982/rignore-0.7.6-cp313-cp313-macosx_11_0_arm64.whl", 
hash = "sha256:90f0a00ce0c866c275bf888271f1dc0d2140f29b82fcf33cdbda1e1a6af01010", size = 820150, upload-time = "2025-11-05T20:42:26.545Z" }, + { url = "https://files.pythonhosted.org/packages/36/31/b65b837e39c3f7064c426754714ac633b66b8c2290978af9d7f513e14aa9/rignore-0.7.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1ad295537041dc2ed4b540fb1a3906bd9ede6ccdad3fe79770cd89e04e3c73c", size = 897406, upload-time = "2025-11-05T20:40:53.854Z" }, + { url = "https://files.pythonhosted.org/packages/ca/58/1970ce006c427e202ac7c081435719a076c478f07b3a23f469227788dc23/rignore-0.7.6-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f782dbd3a65a5ac85adfff69e5c6b101285ef3f845c3a3cae56a54bebf9fe116", size = 874050, upload-time = "2025-11-05T20:41:08.922Z" }, + { url = "https://files.pythonhosted.org/packages/d4/00/eb45db9f90137329072a732273be0d383cb7d7f50ddc8e0bceea34c1dfdf/rignore-0.7.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65cece3b36e5b0826d946494734c0e6aaf5a0337e18ff55b071438efe13d559e", size = 1167835, upload-time = "2025-11-05T20:41:24.997Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f1/6f1d72ddca41a64eed569680587a1236633587cc9f78136477ae69e2c88a/rignore-0.7.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d7e4bb66c13cd7602dc8931822c02dfbbd5252015c750ac5d6152b186f0a8be0", size = 941945, upload-time = "2025-11-05T20:41:40.628Z" }, + { url = "https://files.pythonhosted.org/packages/48/6f/2f178af1c1a276a065f563ec1e11e7a9e23d4996fd0465516afce4b5c636/rignore-0.7.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297e500c15766e196f68aaaa70e8b6db85fa23fdc075b880d8231fdfba738cd7", size = 959067, upload-time = "2025-11-05T20:42:11.09Z" }, + { url = "https://files.pythonhosted.org/packages/5b/db/423a81c4c1e173877c7f9b5767dcaf1ab50484a94f60a0b2ed78be3fa765/rignore-0.7.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", 
hash = "sha256:a07084211a8d35e1a5b1d32b9661a5ed20669970b369df0cf77da3adea3405de", size = 984438, upload-time = "2025-11-05T20:41:55.443Z" }, + { url = "https://files.pythonhosted.org/packages/31/eb/c4f92cc3f2825d501d3c46a244a671eb737fc1bcf7b05a3ecd34abb3e0d7/rignore-0.7.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:181eb2a975a22256a1441a9d2f15eb1292839ea3f05606620bd9e1938302cf79", size = 1078365, upload-time = "2025-11-05T21:40:15.148Z" }, + { url = "https://files.pythonhosted.org/packages/26/09/99442f02794bd7441bfc8ed1c7319e890449b816a7493b2db0e30af39095/rignore-0.7.6-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:7bbcdc52b5bf9f054b34ce4af5269df5d863d9c2456243338bc193c28022bd7b", size = 1139066, upload-time = "2025-11-05T21:40:32.771Z" }, + { url = "https://files.pythonhosted.org/packages/2c/88/bcfc21e520bba975410e9419450f4b90a2ac8236b9a80fd8130e87d098af/rignore-0.7.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f2e027a6da21a7c8c0d87553c24ca5cc4364def18d146057862c23a96546238e", size = 1118036, upload-time = "2025-11-05T21:40:49.646Z" }, + { url = "https://files.pythonhosted.org/packages/e2/25/d37215e4562cda5c13312636393aea0bafe38d54d4e0517520a4cc0753ec/rignore-0.7.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee4a18b82cbbc648e4aac1510066682fe62beb5dc88e2c67c53a83954e541360", size = 1127550, upload-time = "2025-11-05T21:41:07.648Z" }, + { url = "https://files.pythonhosted.org/packages/dc/76/a264ab38bfa1620ec12a8ff1c07778da89e16d8c0f3450b0333020d3d6dc/rignore-0.7.6-cp313-cp313-win32.whl", hash = "sha256:a7d7148b6e5e95035d4390396895adc384d37ff4e06781a36fe573bba7c283e5", size = 646097, upload-time = "2025-11-05T21:41:53.201Z" }, + { url = "https://files.pythonhosted.org/packages/62/44/3c31b8983c29ea8832b6082ddb1d07b90379c2d993bd20fce4487b71b4f4/rignore-0.7.6-cp313-cp313-win_amd64.whl", hash = "sha256:b037c4b15a64dced08fc12310ee844ec2284c4c5c1ca77bc37d0a04f7bff386e", size = 726170, upload-time = "2025-11-05T21:41:38.131Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/41/e26a075cab83debe41a42661262f606166157df84e0e02e2d904d134c0d8/rignore-0.7.6-cp313-cp313-win_arm64.whl", hash = "sha256:e47443de9b12fe569889bdbe020abe0e0b667516ee2ab435443f6d0869bd2804", size = 656184, upload-time = "2025-11-05T21:41:27.396Z" }, + { url = "https://files.pythonhosted.org/packages/9a/b9/1f5bd82b87e5550cd843ceb3768b4a8ef274eb63f29333cf2f29644b3d75/rignore-0.7.6-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:8e41be9fa8f2f47239ded8920cc283699a052ac4c371f77f5ac017ebeed75732", size = 882632, upload-time = "2025-11-05T20:42:44.063Z" }, + { url = "https://files.pythonhosted.org/packages/e9/6b/07714a3efe4a8048864e8a5b7db311ba51b921e15268b17defaebf56d3db/rignore-0.7.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6dc1e171e52cefa6c20e60c05394a71165663b48bca6c7666dee4f778f2a7d90", size = 820760, upload-time = "2025-11-05T20:42:27.885Z" }, + { url = "https://files.pythonhosted.org/packages/ac/0f/348c829ea2d8d596e856371b14b9092f8a5dfbb62674ec9b3f67e4939a9d/rignore-0.7.6-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ce2268837c3600f82ab8db58f5834009dc638ee17103582960da668963bebc5", size = 899044, upload-time = "2025-11-05T20:40:55.336Z" }, + { url = "https://files.pythonhosted.org/packages/f0/30/2e1841a19b4dd23878d73edd5d82e998a83d5ed9570a89675f140ca8b2ad/rignore-0.7.6-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:690a3e1b54bfe77e89c4bacb13f046e642f8baadafc61d68f5a726f324a76ab6", size = 874144, upload-time = "2025-11-05T20:41:10.195Z" }, + { url = "https://files.pythonhosted.org/packages/c2/bf/0ce9beb2e5f64c30e3580bef09f5829236889f01511a125f98b83169b993/rignore-0.7.6-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09d12ac7a0b6210c07bcd145007117ebd8abe99c8eeb383e9e4673910c2754b2", size = 1168062, upload-time = "2025-11-05T20:41:26.511Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/8b/571c178414eb4014969865317da8a02ce4cf5241a41676ef91a59aab24de/rignore-0.7.6-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a2b2b74a8c60203b08452479b90e5ce3dbe96a916214bc9eb2e5af0b6a9beb0", size = 942542, upload-time = "2025-11-05T20:41:41.838Z" }, + { url = "https://files.pythonhosted.org/packages/19/62/7a3cf601d5a45137a7e2b89d10c05b5b86499190c4b7ca5c3c47d79ee519/rignore-0.7.6-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fc5a531ef02131e44359419a366bfac57f773ea58f5278c2cdd915f7d10ea94", size = 958739, upload-time = "2025-11-05T20:42:12.463Z" }, + { url = "https://files.pythonhosted.org/packages/5f/1f/4261f6a0d7caf2058a5cde2f5045f565ab91aa7badc972b57d19ce58b14e/rignore-0.7.6-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7a1f77d9c4cd7e76229e252614d963442686bfe12c787a49f4fe481df49e7a9", size = 984138, upload-time = "2025-11-05T20:41:56.775Z" }, + { url = "https://files.pythonhosted.org/packages/2b/bf/628dfe19c75e8ce1f45f7c248f5148b17dfa89a817f8e3552ab74c3ae812/rignore-0.7.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ead81f728682ba72b5b1c3d5846b011d3e0174da978de87c61645f2ed36659a7", size = 1079299, upload-time = "2025-11-05T21:40:16.639Z" }, + { url = "https://files.pythonhosted.org/packages/af/a5/be29c50f5c0c25c637ed32db8758fdf5b901a99e08b608971cda8afb293b/rignore-0.7.6-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:12ffd50f520c22ffdabed8cd8bfb567d9ac165b2b854d3e679f4bcaef11a9441", size = 1139618, upload-time = "2025-11-05T21:40:34.507Z" }, + { url = "https://files.pythonhosted.org/packages/2a/40/3c46cd7ce4fa05c20b525fd60f599165e820af66e66f2c371cd50644558f/rignore-0.7.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:e5a16890fbe3c894f8ca34b0fcacc2c200398d4d46ae654e03bc9b3dbf2a0a72", size = 1117626, upload-time = "2025-11-05T21:40:51.494Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/b9/aea926f263b8a29a23c75c2e0d8447965eb1879d3feb53cfcf84db67ed58/rignore-0.7.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3abab3bf99e8a77488ef6c7c9a799fac22224c28fe9f25cc21aa7cc2b72bfc0b", size = 1128144, upload-time = "2025-11-05T21:41:09.169Z" }, + { url = "https://files.pythonhosted.org/packages/a4/f6/0d6242f8d0df7f2ecbe91679fefc1f75e7cd2072cb4f497abaab3f0f8523/rignore-0.7.6-cp314-cp314-win32.whl", hash = "sha256:eeef421c1782953c4375aa32f06ecae470c1285c6381eee2a30d2e02a5633001", size = 646385, upload-time = "2025-11-05T21:41:55.105Z" }, + { url = "https://files.pythonhosted.org/packages/d5/38/c0dcd7b10064f084343d6af26fe9414e46e9619c5f3224b5272e8e5d9956/rignore-0.7.6-cp314-cp314-win_amd64.whl", hash = "sha256:6aeed503b3b3d5af939b21d72a82521701a4bd3b89cd761da1e7dc78621af304", size = 725738, upload-time = "2025-11-05T21:41:39.736Z" }, + { url = "https://files.pythonhosted.org/packages/d9/7a/290f868296c1ece914d565757ab363b04730a728b544beb567ceb3b2d96f/rignore-0.7.6-cp314-cp314-win_arm64.whl", hash = "sha256:104f215b60b3c984c386c3e747d6ab4376d5656478694e22c7bd2f788ddd8304", size = 656008, upload-time = "2025-11-05T21:41:29.028Z" }, + { url = "https://files.pythonhosted.org/packages/ca/d2/3c74e3cd81fe8ea08a8dcd2d755c09ac2e8ad8fe409508904557b58383d3/rignore-0.7.6-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:bb24a5b947656dd94cb9e41c4bc8b23cec0c435b58be0d74a874f63c259549e8", size = 882835, upload-time = "2025-11-05T20:42:45.443Z" }, + { url = "https://files.pythonhosted.org/packages/77/61/a772a34b6b63154877433ac2d048364815b24c2dd308f76b212c408101a2/rignore-0.7.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5b1e33c9501cefe24b70a1eafd9821acfd0ebf0b35c3a379430a14df089993e3", size = 820301, upload-time = "2025-11-05T20:42:29.226Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/30/054880b09c0b1b61d17eeb15279d8bf729c0ba52b36c3ada52fb827cbb3c/rignore-0.7.6-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bec3994665a44454df86deb762061e05cd4b61e3772f5b07d1882a8a0d2748d5", size = 897611, upload-time = "2025-11-05T20:40:56.475Z" }, + { url = "https://files.pythonhosted.org/packages/1e/40/b2d1c169f833d69931bf232600eaa3c7998ba4f9a402e43a822dad2ea9f2/rignore-0.7.6-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26cba2edfe3cff1dfa72bddf65d316ddebf182f011f2f61538705d6dbaf54986", size = 873875, upload-time = "2025-11-05T20:41:11.561Z" }, + { url = "https://files.pythonhosted.org/packages/55/59/ca5ae93d83a1a60e44b21d87deb48b177a8db1b85e82fc8a9abb24a8986d/rignore-0.7.6-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ffa86694fec604c613696cb91e43892aa22e1fec5f9870e48f111c603e5ec4e9", size = 1167245, upload-time = "2025-11-05T20:41:28.29Z" }, + { url = "https://files.pythonhosted.org/packages/a5/52/cf3dce392ba2af806cba265aad6bcd9c48bb2a6cb5eee448d3319f6e505b/rignore-0.7.6-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48efe2ed95aa8104145004afb15cdfa02bea5cdde8b0344afeb0434f0d989aa2", size = 941750, upload-time = "2025-11-05T20:41:43.111Z" }, + { url = "https://files.pythonhosted.org/packages/ec/be/3f344c6218d779395e785091d05396dfd8b625f6aafbe502746fcd880af2/rignore-0.7.6-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dcae43eb44b7f2457fef7cc87f103f9a0013017a6f4e62182c565e924948f21", size = 958896, upload-time = "2025-11-05T20:42:13.784Z" }, + { url = "https://files.pythonhosted.org/packages/c9/34/d3fa71938aed7d00dcad87f0f9bcb02ad66c85d6ffc83ba31078ce53646a/rignore-0.7.6-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2cd649a7091c0dad2f11ef65630d30c698d505cbe8660dd395268e7c099cc99f", size = 983992, upload-time = "2025-11-05T20:41:58.022Z" }, + { 
url = "https://files.pythonhosted.org/packages/24/a4/52a697158e9920705bdbd0748d59fa63e0f3233fb92e9df9a71afbead6ca/rignore-0.7.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42de84b0289d478d30ceb7ae59023f7b0527786a9a5b490830e080f0e4ea5aeb", size = 1078181, upload-time = "2025-11-05T21:40:18.151Z" }, + { url = "https://files.pythonhosted.org/packages/ac/65/aa76dbcdabf3787a6f0fd61b5cc8ed1e88580590556d6c0207960d2384bb/rignore-0.7.6-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:875a617e57b53b4acbc5a91de418233849711c02e29cc1f4f9febb2f928af013", size = 1139232, upload-time = "2025-11-05T21:40:35.966Z" }, + { url = "https://files.pythonhosted.org/packages/08/44/31b31a49b3233c6842acc1c0731aa1e7fb322a7170612acf30327f700b44/rignore-0.7.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8703998902771e96e49968105207719f22926e4431b108450f3f430b4e268b7c", size = 1117349, upload-time = "2025-11-05T21:40:53.013Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ae/1b199a2302c19c658cf74e5ee1427605234e8c91787cfba0015f2ace145b/rignore-0.7.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:602ef33f3e1b04c1e9a10a3c03f8bc3cef2d2383dcc250d309be42b49923cabc", size = 1127702, upload-time = "2025-11-05T21:41:10.881Z" }, + { url = "https://files.pythonhosted.org/packages/fc/d3/18210222b37e87e36357f7b300b7d98c6dd62b133771e71ae27acba83a4f/rignore-0.7.6-cp314-cp314t-win32.whl", hash = "sha256:c1d8f117f7da0a4a96a8daef3da75bc090e3792d30b8b12cfadc240c631353f9", size = 647033, upload-time = "2025-11-05T21:42:00.095Z" }, + { url = "https://files.pythonhosted.org/packages/3e/87/033eebfbee3ec7d92b3bb1717d8f68c88e6fc7de54537040f3b3a405726f/rignore-0.7.6-cp314-cp314t-win_amd64.whl", hash = "sha256:ca36e59408bec81de75d307c568c2d0d410fb880b1769be43611472c61e85c96", size = 725647, upload-time = "2025-11-05T21:41:44.449Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/62/b88e5879512c55b8ee979c666ee6902adc4ed05007226de266410ae27965/rignore-0.7.6-cp314-cp314t-win_arm64.whl", hash = "sha256:b83adabeb3e8cf662cabe1931b83e165b88c526fa6af6b3aa90429686e474896", size = 656035, upload-time = "2025-11-05T21:41:31.13Z" }, +] + +[[package]] +name = "ruff" +version = "0.15.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/99/43/3291f1cc9106f4c63bdce7a8d0df5047fe8422a75b091c16b5e9355e0b11/ruff-0.15.12.tar.gz", hash = "sha256:ecea26adb26b4232c0c2ca19ccbc0083a68344180bba2a600605538ce51a40a6", size = 4643852, upload-time = "2026-04-24T18:17:14.305Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/6e/e78ffb61d4686f3d96ba3df2c801161843746dcbcbb17a1e927d4829312b/ruff-0.15.12-py3-none-linux_armv6l.whl", hash = "sha256:f86f176e188e94d6bdbc09f09bfd9dc729059ad93d0e7390b5a73efe19f8861c", size = 10640713, upload-time = "2026-04-24T18:17:22.841Z" }, + { url = "https://files.pythonhosted.org/packages/ae/08/a317bc231fb9e7b93e4ef3089501e51922ff88d6936ce5cf870c4fe55419/ruff-0.15.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e3bcd123364c3770b8e1b7baaf343cc99a35f197c5c6e8af79015c666c423a6c", size = 11069267, upload-time = "2026-04-24T18:17:30.105Z" }, + { url = "https://files.pythonhosted.org/packages/aa/a4/f828e9718d3dce1f5f11c39c4f65afd32783c8b2aebb2e3d259e492c47bd/ruff-0.15.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fe87510d000220aa1ed530d4448a7c696a0cae1213e5ec30e5874287b66557b5", size = 10397182, upload-time = "2026-04-24T18:17:07.177Z" }, + { url = "https://files.pythonhosted.org/packages/71/e0/3310fc6d1b5e1fdea22bf3b1b807c7e187b581021b0d7d4514cccdb5fb71/ruff-0.15.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84a1630093121375a3e2a95b4a6dc7b59e2b4ee76216e32d81aae550a832d002", size = 10758012, upload-time = "2026-04-24T18:16:55.759Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/c1/a606911aee04c324ddaa883ae418f3569792fd3c4a10c50e0dd0a2311e1e/ruff-0.15.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fb129f40f114f089ebe0ca56c0d251cf2061b17651d464bb6478dc01e69f11f5", size = 10447479, upload-time = "2026-04-24T18:16:51.677Z" }, + { url = "https://files.pythonhosted.org/packages/9d/68/4201e8444f0894f21ab4aeeaee68aa4f10b51613514a20d80bd628d57e88/ruff-0.15.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0c862b172d695db7598426b8af465e7e9ac00a3ea2a3630ee67eb82e366aaa6", size = 11234040, upload-time = "2026-04-24T18:17:16.529Z" }, + { url = "https://files.pythonhosted.org/packages/34/ff/8a6d6cf4ccc23fd67060874e832c18919d1557a0611ebef03fdb01fff11e/ruff-0.15.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2849ea9f3484c3aca43a82f484210370319e7170df4dfe4843395ddf6c57bc33", size = 12087377, upload-time = "2026-04-24T18:17:04.944Z" }, + { url = "https://files.pythonhosted.org/packages/85/f6/c669cf73f5152f623d34e69866a46d5e6185816b19fcd5b6dd8a2d299922/ruff-0.15.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e77c7e51c07fe396826d5969a5b846d9cd4c402535835fb6e21ce8b28fef847", size = 11367784, upload-time = "2026-04-24T18:17:25.409Z" }, + { url = "https://files.pythonhosted.org/packages/e8/39/c61d193b8a1daaa8977f7dea9e8d8ba866e02ea7b65d32f6861693aa4c12/ruff-0.15.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b2f4f2f3b1026b5fb449b467d9264bf22067b600f7b6f41fc5958909f449d0", size = 11344088, upload-time = "2026-04-24T18:17:12.258Z" }, + { url = "https://files.pythonhosted.org/packages/c2/8d/49afab3645e31e12c590acb6d3b5b69d7aab5b81926dbaf7461f9441f37a/ruff-0.15.12-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9ba3b8f1afd7e2e43d8943e55f249e13f9682fde09711644a6e7290eb4f3e339", size = 11271770, upload-time = "2026-04-24T18:17:02.457Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/06/33f41fe94403e2b755481cdfb9b7ef3e4e0ed031c4581124658d935d52b4/ruff-0.15.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e852ba9fdc890655e1d78f2df1499efbe0e54126bd405362154a75e2bde159c5", size = 10719355, upload-time = "2026-04-24T18:17:27.648Z" }, + { url = "https://files.pythonhosted.org/packages/0d/59/18aa4e014debbf559670e4048e39260a85c7fcee84acfd761ac01e7b8d35/ruff-0.15.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dd8aed930da53780d22fc70bdf84452c843cf64f8cb4eb38984319c24c5cd5fd", size = 10462758, upload-time = "2026-04-24T18:17:32.347Z" }, + { url = "https://files.pythonhosted.org/packages/25/e7/cc9f16fd0f3b5fddcbd7ec3d6ae30c8f3fde1047f32a4093a98d633c6570/ruff-0.15.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:01da3988d225628b709493d7dc67c3b9b12c0210016b08690ef9bd27970b262b", size = 10953498, upload-time = "2026-04-24T18:17:20.674Z" }, + { url = "https://files.pythonhosted.org/packages/72/7a/a9ba7f98c7a575978698f4230c5e8cc54bbc761af34f560818f933dafa0c/ruff-0.15.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:9cae0f92bd5700d1213188b31cd3bdd2b315361296d10b96b8e2337d3d11f53e", size = 11447765, upload-time = "2026-04-24T18:17:09.755Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f9/0ae446942c846b8266059ad8a30702a35afae55f5cdc54c5adf8d7afdc27/ruff-0.15.12-py3-none-win32.whl", hash = "sha256:d0185894e038d7043ba8fd6aee7499ece6462dc0ea9f1e260c7451807c714c20", size = 10657277, upload-time = "2026-04-24T18:17:18.591Z" }, + { url = "https://files.pythonhosted.org/packages/33/f1/9614e03e1cdcbf9437570b5400ced8a720b5db22b28d8e0f1bda429f660d/ruff-0.15.12-py3-none-win_amd64.whl", hash = "sha256:c87a162d61ab3adca47c03f7f717c68672edec7d1b5499e652331780fe74950d", size = 11837758, upload-time = "2026-04-24T18:17:00.113Z" }, + { url = "https://files.pythonhosted.org/packages/c0/98/6beb4b351e472e5f4c4613f7c35a5290b8be2497e183825310c4c3a3984b/ruff-0.15.12-py3-none-win_arm64.whl", hash = 
"sha256:a538f7a82d061cee7be55542aca1d86d1393d55d81d4fcc314370f4340930d4f", size = 11120821, upload-time = "2026-04-24T18:16:57.979Z" }, +] + +[[package]] +name = "safetensors" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/29/9c/6e74567782559a63bd040a236edca26fd71bc7ba88de2ef35d75df3bca5e/safetensors-0.7.0.tar.gz", hash = "sha256:07663963b67e8bd9f0b8ad15bb9163606cd27cc5a1b96235a50d8369803b96b0", size = 200878, upload-time = "2025-11-19T15:18:43.199Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/47/aef6c06649039accf914afef490268e1067ed82be62bcfa5b7e886ad15e8/safetensors-0.7.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c82f4d474cf725255d9e6acf17252991c3c8aac038d6ef363a4bf8be2f6db517", size = 467781, upload-time = "2025-11-19T15:18:35.84Z" }, + { url = "https://files.pythonhosted.org/packages/e8/00/374c0c068e30cd31f1e1b46b4b5738168ec79e7689ca82ee93ddfea05109/safetensors-0.7.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:94fd4858284736bb67a897a41608b5b0c2496c9bdb3bf2af1fa3409127f20d57", size = 447058, upload-time = "2025-11-19T15:18:34.416Z" }, + { url = "https://files.pythonhosted.org/packages/f1/06/578ffed52c2296f93d7fd2d844cabfa92be51a587c38c8afbb8ae449ca89/safetensors-0.7.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e07d91d0c92a31200f25351f4acb2bc6aff7f48094e13ebb1d0fb995b54b6542", size = 491748, upload-time = "2025-11-19T15:18:09.79Z" }, + { url = "https://files.pythonhosted.org/packages/ae/33/1debbbb70e4791dde185edb9413d1fe01619255abb64b300157d7f15dddd/safetensors-0.7.0-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8469155f4cb518bafb4acf4865e8bb9d6804110d2d9bdcaa78564b9fd841e104", size = 503881, upload-time = "2025-11-19T15:18:16.145Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/1c/40c2ca924d60792c3be509833df711b553c60effbd91da6f5284a83f7122/safetensors-0.7.0-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54bef08bf00a2bff599982f6b08e8770e09cc012d7bba00783fc7ea38f1fb37d", size = 623463, upload-time = "2025-11-19T15:18:21.11Z" }, + { url = "https://files.pythonhosted.org/packages/9b/3a/13784a9364bd43b0d61eef4bea2845039bc2030458b16594a1bd787ae26e/safetensors-0.7.0-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:42cb091236206bb2016d245c377ed383aa7f78691748f3bb6ee1bfa51ae2ce6a", size = 532855, upload-time = "2025-11-19T15:18:25.719Z" }, + { url = "https://files.pythonhosted.org/packages/a0/60/429e9b1cb3fc651937727befe258ea24122d9663e4d5709a48c9cbfceecb/safetensors-0.7.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac7252938f0696ddea46f5e855dd3138444e82236e3be475f54929f0c510d48", size = 507152, upload-time = "2025-11-19T15:18:33.023Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a8/4b45e4e059270d17af60359713ffd83f97900d45a6afa73aaa0d737d48b6/safetensors-0.7.0-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1d060c70284127fa805085d8f10fbd0962792aed71879d00864acda69dbab981", size = 541856, upload-time = "2025-11-19T15:18:31.075Z" }, + { url = "https://files.pythonhosted.org/packages/06/87/d26d8407c44175d8ae164a95b5a62707fcc445f3c0c56108e37d98070a3d/safetensors-0.7.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cdab83a366799fa730f90a4ebb563e494f28e9e92c4819e556152ad55e43591b", size = 674060, upload-time = "2025-11-19T15:18:37.211Z" }, + { url = "https://files.pythonhosted.org/packages/11/f5/57644a2ff08dc6325816ba7217e5095f17269dada2554b658442c66aed51/safetensors-0.7.0-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:672132907fcad9f2aedcb705b2d7b3b93354a2aec1b2f706c4db852abe338f85", size = 771715, upload-time = "2025-11-19T15:18:38.689Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/31/17883e13a814bd278ae6e266b13282a01049b0c81341da7fd0e3e71a80a3/safetensors-0.7.0-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:5d72abdb8a4d56d4020713724ba81dac065fedb7f3667151c4a637f1d3fb26c0", size = 714377, upload-time = "2025-11-19T15:18:40.162Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d8/0c8a7dc9b41dcac53c4cbf9df2b9c83e0e0097203de8b37a712b345c0be5/safetensors-0.7.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b0f6d66c1c538d5a94a73aa9ddca8ccc4227e6c9ff555322ea40bdd142391dd4", size = 677368, upload-time = "2025-11-19T15:18:41.627Z" }, + { url = "https://files.pythonhosted.org/packages/05/e5/cb4b713c8a93469e3c5be7c3f8d77d307e65fe89673e731f5c2bfd0a9237/safetensors-0.7.0-cp38-abi3-win32.whl", hash = "sha256:c74af94bf3ac15ac4d0f2a7c7b4663a15f8c2ab15ed0fc7531ca61d0835eccba", size = 326423, upload-time = "2025-11-19T15:18:45.74Z" }, + { url = "https://files.pythonhosted.org/packages/5d/e6/ec8471c8072382cb91233ba7267fd931219753bb43814cbc71757bfd4dab/safetensors-0.7.0-cp38-abi3-win_amd64.whl", hash = "sha256:d1239932053f56f3456f32eb9625590cc7582e905021f94636202a864d470755", size = 341380, upload-time = "2025-11-19T15:18:44.427Z" }, +] + +[[package]] +name = "scikit-learn" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "joblib" }, + { name = "numpy" }, + { name = "scipy" }, + { name = "threadpoolctl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0e/d4/40988bf3b8e34feec1d0e6a051446b1f66225f8529b9309becaeef62b6c4/scikit_learn-1.8.0.tar.gz", hash = "sha256:9bccbb3b40e3de10351f8f5068e105d0f4083b1a65fa07b6634fbc401a6287fd", size = 7335585, upload-time = "2025-12-10T07:08:53.618Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/74/e6a7cc4b820e95cc38cf36cd74d5aa2b42e8ffc2d21fe5a9a9c45c1c7630/scikit_learn-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:5fb63362b5a7ddab88e52b6dbb47dac3fd7dafeee740dc6c8d8a446ddedade8e", size = 8548242, upload-time = "2025-12-10T07:07:51.568Z" }, + { url = "https://files.pythonhosted.org/packages/49/d8/9be608c6024d021041c7f0b3928d4749a706f4e2c3832bbede4fb4f58c95/scikit_learn-1.8.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:5025ce924beccb28298246e589c691fe1b8c1c96507e6d27d12c5fadd85bfd76", size = 8079075, upload-time = "2025-12-10T07:07:53.697Z" }, + { url = "https://files.pythonhosted.org/packages/dd/47/f187b4636ff80cc63f21cd40b7b2d177134acaa10f6bb73746130ee8c2e5/scikit_learn-1.8.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4496bb2cf7a43ce1a2d7524a79e40bc5da45cf598dbf9545b7e8316ccba47bb4", size = 8660492, upload-time = "2025-12-10T07:07:55.574Z" }, + { url = "https://files.pythonhosted.org/packages/97/74/b7a304feb2b49df9fafa9382d4d09061a96ee9a9449a7cbea7988dda0828/scikit_learn-1.8.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0bcfe4d0d14aec44921545fd2af2338c7471de9cb701f1da4c9d85906ab847a", size = 8931904, upload-time = "2025-12-10T07:07:57.666Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c4/0ab22726a04ede56f689476b760f98f8f46607caecff993017ac1b64aa5d/scikit_learn-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:35c007dedb2ffe38fe3ee7d201ebac4a2deccd2408e8621d53067733e3c74809", size = 8019359, upload-time = "2025-12-10T07:07:59.838Z" }, + { url = "https://files.pythonhosted.org/packages/24/90/344a67811cfd561d7335c1b96ca21455e7e472d281c3c279c4d3f2300236/scikit_learn-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:8c497fff237d7b4e07e9ef1a640887fa4fb765647f86fbe00f969ff6280ce2bb", size = 7641898, upload-time = "2025-12-10T07:08:01.36Z" }, + { url = "https://files.pythonhosted.org/packages/03/aa/e22e0768512ce9255eba34775be2e85c2048da73da1193e841707f8f039c/scikit_learn-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:0d6ae97234d5d7079dc0040990a6f7aeb97cb7fa7e8945f1999a429b23569e0a", size = 8513770, upload-time = "2025-12-10T07:08:03.251Z" }, + { url = "https://files.pythonhosted.org/packages/58/37/31b83b2594105f61a381fc74ca19e8780ee923be2d496fcd8d2e1147bd99/scikit_learn-1.8.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:edec98c5e7c128328124a029bceb09eda2d526997780fef8d65e9a69eead963e", size = 8044458, upload-time = "2025-12-10T07:08:05.336Z" }, + { url = "https://files.pythonhosted.org/packages/2d/5a/3f1caed8765f33eabb723596666da4ebbf43d11e96550fb18bdec42b467b/scikit_learn-1.8.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:74b66d8689d52ed04c271e1329f0c61635bcaf5b926db9b12d58914cdc01fe57", size = 8610341, upload-time = "2025-12-10T07:08:07.732Z" }, + { url = "https://files.pythonhosted.org/packages/38/cf/06896db3f71c75902a8e9943b444a56e727418f6b4b4a90c98c934f51ed4/scikit_learn-1.8.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8fdf95767f989b0cfedb85f7ed8ca215d4be728031f56ff5a519ee1e3276dc2e", size = 8900022, upload-time = "2025-12-10T07:08:09.862Z" }, + { url = "https://files.pythonhosted.org/packages/1c/f9/9b7563caf3ec8873e17a31401858efab6b39a882daf6c1bfa88879c0aa11/scikit_learn-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:2de443b9373b3b615aec1bb57f9baa6bb3a9bd093f1269ba95c17d870422b271", size = 7989409, upload-time = "2025-12-10T07:08:12.028Z" }, + { url = "https://files.pythonhosted.org/packages/49/bd/1f4001503650e72c4f6009ac0c4413cb17d2d601cef6f71c0453da2732fc/scikit_learn-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:eddde82a035681427cbedded4e6eff5e57fa59216c2e3e90b10b19ab1d0a65c3", size = 7619760, upload-time = "2025-12-10T07:08:13.688Z" }, + { url = "https://files.pythonhosted.org/packages/d2/7d/a630359fc9dcc95496588c8d8e3245cc8fd81980251079bc09c70d41d951/scikit_learn-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:7cc267b6108f0a1499a734167282c00c4ebf61328566b55ef262d48e9849c735", size = 8826045, upload-time = "2025-12-10T07:08:15.215Z" }, + { url = "https://files.pythonhosted.org/packages/cc/56/a0c86f6930cfcd1c7054a2bc417e26960bb88d32444fe7f71d5c2cfae891/scikit_learn-1.8.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:fe1c011a640a9f0791146011dfd3c7d9669785f9fed2b2a5f9e207536cf5c2fd", size = 8420324, upload-time = "2025-12-10T07:08:17.561Z" }, + { url = "https://files.pythonhosted.org/packages/46/1e/05962ea1cebc1cf3876667ecb14c283ef755bf409993c5946ade3b77e303/scikit_learn-1.8.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72358cce49465d140cc4e7792015bb1f0296a9742d5622c67e31399b75468b9e", size = 8680651, upload-time = "2025-12-10T07:08:19.952Z" }, + { url = "https://files.pythonhosted.org/packages/fe/56/a85473cd75f200c9759e3a5f0bcab2d116c92a8a02ee08ccd73b870f8bb4/scikit_learn-1.8.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:80832434a6cc114f5219211eec13dcbc16c2bac0e31ef64c6d346cde3cf054cb", size = 8925045, upload-time = "2025-12-10T07:08:22.11Z" }, + { url = "https://files.pythonhosted.org/packages/cc/b7/64d8cfa896c64435ae57f4917a548d7ac7a44762ff9802f75a79b77cb633/scikit_learn-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ee787491dbfe082d9c3013f01f5991658b0f38aa8177e4cd4bf434c58f551702", size = 8507994, upload-time = "2025-12-10T07:08:23.943Z" }, + { url = "https://files.pythonhosted.org/packages/5e/37/e192ea709551799379958b4c4771ec507347027bb7c942662c7fbeba31cb/scikit_learn-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf97c10a3f5a7543f9b88cbf488d33d175e9146115a451ae34568597ba33dcde", size = 7869518, upload-time = "2025-12-10T07:08:25.71Z" }, + { url = "https://files.pythonhosted.org/packages/24/05/1af2c186174cc92dcab2233f327336058c077d38f6fe2aceb08e6ab4d509/scikit_learn-1.8.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = 
"sha256:c22a2da7a198c28dd1a6e1136f19c830beab7fdca5b3e5c8bba8394f8a5c45b3", size = 8528667, upload-time = "2025-12-10T07:08:27.541Z" }, + { url = "https://files.pythonhosted.org/packages/a8/25/01c0af38fe969473fb292bba9dc2b8f9b451f3112ff242c647fee3d0dfe7/scikit_learn-1.8.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:6b595b07a03069a2b1740dc08c2299993850ea81cce4fe19b2421e0c970de6b7", size = 8066524, upload-time = "2025-12-10T07:08:29.822Z" }, + { url = "https://files.pythonhosted.org/packages/be/ce/a0623350aa0b68647333940ee46fe45086c6060ec604874e38e9ab7d8e6c/scikit_learn-1.8.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:29ffc74089f3d5e87dfca4c2c8450f88bdc61b0fc6ed5d267f3988f19a1309f6", size = 8657133, upload-time = "2025-12-10T07:08:31.865Z" }, + { url = "https://files.pythonhosted.org/packages/b8/cb/861b41341d6f1245e6ca80b1c1a8c4dfce43255b03df034429089ca2a2c5/scikit_learn-1.8.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fb65db5d7531bccf3a4f6bec3462223bea71384e2cda41da0f10b7c292b9e7c4", size = 8923223, upload-time = "2025-12-10T07:08:34.166Z" }, + { url = "https://files.pythonhosted.org/packages/76/18/a8def8f91b18cd1ba6e05dbe02540168cb24d47e8dcf69e8d00b7da42a08/scikit_learn-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:56079a99c20d230e873ea40753102102734c5953366972a71d5cb39a32bc40c6", size = 8096518, upload-time = "2025-12-10T07:08:36.339Z" }, + { url = "https://files.pythonhosted.org/packages/d1/77/482076a678458307f0deb44e29891d6022617b2a64c840c725495bee343f/scikit_learn-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:3bad7565bc9cf37ce19a7c0d107742b320c1285df7aab1a6e2d28780df167242", size = 7754546, upload-time = "2025-12-10T07:08:38.128Z" }, + { url = "https://files.pythonhosted.org/packages/2d/d1/ef294ca754826daa043b2a104e59960abfab4cf653891037d19dd5b6f3cf/scikit_learn-1.8.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = 
"sha256:4511be56637e46c25721e83d1a9cea9614e7badc7040c4d573d75fbe257d6fd7", size = 8848305, upload-time = "2025-12-10T07:08:41.013Z" }, + { url = "https://files.pythonhosted.org/packages/5b/e2/b1f8b05138ee813b8e1a4149f2f0d289547e60851fd1bb268886915adbda/scikit_learn-1.8.0-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:a69525355a641bf8ef136a7fa447672fb54fe8d60cab5538d9eb7c6438543fb9", size = 8432257, upload-time = "2025-12-10T07:08:42.873Z" }, + { url = "https://files.pythonhosted.org/packages/26/11/c32b2138a85dcb0c99f6afd13a70a951bfdff8a6ab42d8160522542fb647/scikit_learn-1.8.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c2656924ec73e5939c76ac4c8b026fc203b83d8900362eb2599d8aee80e4880f", size = 8678673, upload-time = "2025-12-10T07:08:45.362Z" }, + { url = "https://files.pythonhosted.org/packages/c7/57/51f2384575bdec454f4fe4e7a919d696c9ebce914590abf3e52d47607ab8/scikit_learn-1.8.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15fc3b5d19cc2be65404786857f2e13c70c83dd4782676dd6814e3b89dc8f5b9", size = 8922467, upload-time = "2025-12-10T07:08:47.408Z" }, + { url = "https://files.pythonhosted.org/packages/35/4d/748c9e2872637a57981a04adc038dacaa16ba8ca887b23e34953f0b3f742/scikit_learn-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:00d6f1d66fbcf4eba6e356e1420d33cc06c70a45bb1363cd6f6a8e4ebbbdece2", size = 8774395, upload-time = "2025-12-10T07:08:49.337Z" }, + { url = "https://files.pythonhosted.org/packages/60/22/d7b2ebe4704a5e50790ba089d5c2ae308ab6bb852719e6c3bd4f04c3a363/scikit_learn-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:f28dd15c6bb0b66ba09728cf09fd8736c304be29409bd8445a080c1280619e8c", size = 8002647, upload-time = "2025-12-10T07:08:51.601Z" }, +] + +[[package]] +name = "scipy" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/56/3e/9cca699f3486ce6bc12ff46dc2031f1ec8eb9ccc9a320fdaf925f1417426/scipy-1.17.0.tar.gz", hash = "sha256:2591060c8e648d8b96439e111ac41fd8342fdeff1876be2e19dea3fe8930454e", size = 30396830, upload-time = "2026-01-10T21:34:23.009Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/11/7241a63e73ba5a516f1930ac8d5b44cbbfabd35ac73a2d08ca206df007c4/scipy-1.17.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:0d5018a57c24cb1dd828bcf51d7b10e65986d549f52ef5adb6b4d1ded3e32a57", size = 31364580, upload-time = "2026-01-10T21:25:25.717Z" }, + { url = "https://files.pythonhosted.org/packages/ed/1d/5057f812d4f6adc91a20a2d6f2ebcdb517fdbc87ae3acc5633c9b97c8ba5/scipy-1.17.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:88c22af9e5d5a4f9e027e26772cc7b5922fab8bcc839edb3ae33de404feebd9e", size = 27969012, upload-time = "2026-01-10T21:25:30.921Z" }, + { url = "https://files.pythonhosted.org/packages/e3/21/f6ec556c1e3b6ec4e088da667d9987bb77cc3ab3026511f427dc8451187d/scipy-1.17.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f3cd947f20fe17013d401b64e857c6b2da83cae567adbb75b9dcba865abc66d8", size = 20140691, upload-time = "2026-01-10T21:25:34.802Z" }, + { url = "https://files.pythonhosted.org/packages/7a/fe/5e5ad04784964ba964a96f16c8d4676aa1b51357199014dce58ab7ec5670/scipy-1.17.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e8c0b331c2c1f531eb51f1b4fc9ba709521a712cce58f1aa627bc007421a5306", size = 22463015, upload-time = "2026-01-10T21:25:39.277Z" }, + { url = "https://files.pythonhosted.org/packages/4a/69/7c347e857224fcaf32a34a05183b9d8a7aca25f8f2d10b8a698b8388561a/scipy-1.17.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5194c445d0a1c7a6c1a4a4681b6b7c71baad98ff66d96b949097e7513c9d6742", size = 32724197, upload-time = "2026-01-10T21:25:44.084Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/fe/66d73b76d378ba8cc2fe605920c0c75092e3a65ae746e1e767d9d020a75a/scipy-1.17.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9eeb9b5f5997f75507814ed9d298ab23f62cf79f5a3ef90031b1ee2506abdb5b", size = 35009148, upload-time = "2026-01-10T21:25:50.591Z" }, + { url = "https://files.pythonhosted.org/packages/af/07/07dec27d9dc41c18d8c43c69e9e413431d20c53a0339c388bcf72f353c4b/scipy-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:40052543f7bbe921df4408f46003d6f01c6af109b9e2c8a66dd1cf6cf57f7d5d", size = 34798766, upload-time = "2026-01-10T21:25:59.41Z" }, + { url = "https://files.pythonhosted.org/packages/81/61/0470810c8a093cdacd4ba7504b8a218fd49ca070d79eca23a615f5d9a0b0/scipy-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0cf46c8013fec9d3694dc572f0b54100c28405d55d3e2cb15e2895b25057996e", size = 37405953, upload-time = "2026-01-10T21:26:07.75Z" }, + { url = "https://files.pythonhosted.org/packages/92/ce/672ed546f96d5d41ae78c4b9b02006cedd0b3d6f2bf5bb76ea455c320c28/scipy-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:0937a0b0d8d593a198cededd4c439a0ea216a3f36653901ea1f3e4be949056f8", size = 36328121, upload-time = "2026-01-10T21:26:16.509Z" }, + { url = "https://files.pythonhosted.org/packages/9d/21/38165845392cae67b61843a52c6455d47d0cc2a40dd495c89f4362944654/scipy-1.17.0-cp312-cp312-win_arm64.whl", hash = "sha256:f603d8a5518c7426414d1d8f82e253e454471de682ce5e39c29adb0df1efb86b", size = 24314368, upload-time = "2026-01-10T21:26:23.087Z" }, + { url = "https://files.pythonhosted.org/packages/0c/51/3468fdfd49387ddefee1636f5cf6d03ce603b75205bf439bbf0e62069bfd/scipy-1.17.0-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:65ec32f3d32dfc48c72df4291345dae4f048749bc8d5203ee0a3f347f96c5ce6", size = 31344101, upload-time = "2026-01-10T21:26:30.25Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/9a/9406aec58268d437636069419e6977af953d1e246df941d42d3720b7277b/scipy-1.17.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:1f9586a58039d7229ce77b52f8472c972448cded5736eaf102d5658bbac4c269", size = 27950385, upload-time = "2026-01-10T21:26:36.801Z" }, + { url = "https://files.pythonhosted.org/packages/4f/98/e7342709e17afdfd1b26b56ae499ef4939b45a23a00e471dfb5375eea205/scipy-1.17.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9fad7d3578c877d606b1150135c2639e9de9cecd3705caa37b66862977cc3e72", size = 20122115, upload-time = "2026-01-10T21:26:42.107Z" }, + { url = "https://files.pythonhosted.org/packages/fd/0e/9eeeb5357a64fd157cbe0302c213517c541cc16b8486d82de251f3c68ede/scipy-1.17.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:423ca1f6584fc03936972b5f7c06961670dbba9f234e71676a7c7ccf938a0d61", size = 22442402, upload-time = "2026-01-10T21:26:48.029Z" }, + { url = "https://files.pythonhosted.org/packages/c9/10/be13397a0e434f98e0c79552b2b584ae5bb1c8b2be95db421533bbca5369/scipy-1.17.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fe508b5690e9eaaa9467fc047f833af58f1152ae51a0d0aed67aa5801f4dd7d6", size = 32696338, upload-time = "2026-01-10T21:26:55.521Z" }, + { url = "https://files.pythonhosted.org/packages/63/1e/12fbf2a3bb240161651c94bb5cdd0eae5d4e8cc6eaeceb74ab07b12a753d/scipy-1.17.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6680f2dfd4f6182e7d6db161344537da644d1cf85cf293f015c60a17ecf08752", size = 34977201, upload-time = "2026-01-10T21:27:03.501Z" }, + { url = "https://files.pythonhosted.org/packages/19/5b/1a63923e23ccd20bd32156d7dd708af5bbde410daa993aa2500c847ab2d2/scipy-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eec3842ec9ac9de5917899b277428886042a93db0b227ebbe3a333b64ec7643d", size = 34777384, upload-time = "2026-01-10T21:27:11.423Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/22/b5da95d74edcf81e540e467202a988c50fef41bd2011f46e05f72ba07df6/scipy-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d7425fcafbc09a03731e1bc05581f5fad988e48c6a861f441b7ab729a49a55ea", size = 37379586, upload-time = "2026-01-10T21:27:20.171Z" }, + { url = "https://files.pythonhosted.org/packages/b9/b6/8ac583d6da79e7b9e520579f03007cb006f063642afd6b2eeb16b890bf93/scipy-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:87b411e42b425b84777718cc41516b8a7e0795abfa8e8e1d573bf0ef014f0812", size = 36287211, upload-time = "2026-01-10T21:28:43.122Z" }, + { url = "https://files.pythonhosted.org/packages/55/fb/7db19e0b3e52f882b420417644ec81dd57eeef1bd1705b6f689d8ff93541/scipy-1.17.0-cp313-cp313-win_arm64.whl", hash = "sha256:357ca001c6e37601066092e7c89cca2f1ce74e2a520ca78d063a6d2201101df2", size = 24312646, upload-time = "2026-01-10T21:28:49.893Z" }, + { url = "https://files.pythonhosted.org/packages/20/b6/7feaa252c21cc7aff335c6c55e1b90ab3e3306da3f048109b8b639b94648/scipy-1.17.0-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:ec0827aa4d36cb79ff1b81de898e948a51ac0b9b1c43e4a372c0508c38c0f9a3", size = 31693194, upload-time = "2026-01-10T21:27:27.454Z" }, + { url = "https://files.pythonhosted.org/packages/76/bb/bbb392005abce039fb7e672cb78ac7d158700e826b0515cab6b5b60c26fb/scipy-1.17.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:819fc26862b4b3c73a60d486dbb919202f3d6d98c87cf20c223511429f2d1a97", size = 28365415, upload-time = "2026-01-10T21:27:34.26Z" }, + { url = "https://files.pythonhosted.org/packages/37/da/9d33196ecc99fba16a409c691ed464a3a283ac454a34a13a3a57c0d66f3a/scipy-1.17.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:363ad4ae2853d88ebcde3ae6ec46ccca903ea9835ee8ba543f12f575e7b07e4e", size = 20537232, upload-time = "2026-01-10T21:27:40.306Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/9d/f4b184f6ddb28e9a5caea36a6f98e8ecd2a524f9127354087ce780885d83/scipy-1.17.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:979c3a0ff8e5ba254d45d59ebd38cde48fce4f10b5125c680c7a4bfe177aab07", size = 22791051, upload-time = "2026-01-10T21:27:46.539Z" }, + { url = "https://files.pythonhosted.org/packages/9b/9d/025cccdd738a72140efc582b1641d0dd4caf2e86c3fb127568dc80444e6e/scipy-1.17.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:130d12926ae34399d157de777472bf82e9061c60cc081372b3118edacafe1d00", size = 32815098, upload-time = "2026-01-10T21:27:54.389Z" }, + { url = "https://files.pythonhosted.org/packages/48/5f/09b879619f8bca15ce392bfc1894bd9c54377e01d1b3f2f3b595a1b4d945/scipy-1.17.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e886000eb4919eae3a44f035e63f0fd8b651234117e8f6f29bad1cd26e7bc45", size = 35031342, upload-time = "2026-01-10T21:28:03.012Z" }, + { url = "https://files.pythonhosted.org/packages/f2/9a/f0f0a9f0aa079d2f106555b984ff0fbb11a837df280f04f71f056ea9c6e4/scipy-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:13c4096ac6bc31d706018f06a49abe0485f96499deb82066b94d19b02f664209", size = 34893199, upload-time = "2026-01-10T21:28:10.832Z" }, + { url = "https://files.pythonhosted.org/packages/90/b8/4f0f5cf0c5ea4d7548424e6533e6b17d164f34a6e2fb2e43ffebb6697b06/scipy-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cacbaddd91fcffde703934897c5cd2c7cb0371fac195d383f4e1f1c5d3f3bd04", size = 37438061, upload-time = "2026-01-10T21:28:19.684Z" }, + { url = "https://files.pythonhosted.org/packages/f9/cc/2bd59140ed3b2fa2882fb15da0a9cb1b5a6443d67cfd0d98d4cec83a57ec/scipy-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:edce1a1cf66298cccdc48a1bdf8fb10a3bf58e8b58d6c3883dd1530e103f87c0", size = 36328593, upload-time = "2026-01-10T21:28:28.007Z" }, + { url = 
"https://files.pythonhosted.org/packages/13/1b/c87cc44a0d2c7aaf0f003aef2904c3d097b422a96c7e7c07f5efd9073c1b/scipy-1.17.0-cp313-cp313t-win_arm64.whl", hash = "sha256:30509da9dbec1c2ed8f168b8d8aa853bc6723fede1dbc23c7d43a56f5ab72a67", size = 24625083, upload-time = "2026-01-10T21:28:35.188Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2d/51006cd369b8e7879e1c630999a19d1fbf6f8b5ed3e33374f29dc87e53b3/scipy-1.17.0-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:c17514d11b78be8f7e6331b983a65a7f5ca1fd037b95e27b280921fe5606286a", size = 31346803, upload-time = "2026-01-10T21:28:57.24Z" }, + { url = "https://files.pythonhosted.org/packages/d6/2e/2349458c3ce445f53a6c93d4386b1c4c5c0c540917304c01222ff95ff317/scipy-1.17.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:4e00562e519c09da34c31685f6acc3aa384d4d50604db0f245c14e1b4488bfa2", size = 27967182, upload-time = "2026-01-10T21:29:04.107Z" }, + { url = "https://files.pythonhosted.org/packages/5e/7c/df525fbfa77b878d1cfe625249529514dc02f4fd5f45f0f6295676a76528/scipy-1.17.0-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:f7df7941d71314e60a481e02d5ebcb3f0185b8d799c70d03d8258f6c80f3d467", size = 20139125, upload-time = "2026-01-10T21:29:10.179Z" }, + { url = "https://files.pythonhosted.org/packages/33/11/fcf9d43a7ed1234d31765ec643b0515a85a30b58eddccc5d5a4d12b5f194/scipy-1.17.0-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:aabf057c632798832f071a8dde013c2e26284043934f53b00489f1773b33527e", size = 22443554, upload-time = "2026-01-10T21:29:15.888Z" }, + { url = "https://files.pythonhosted.org/packages/80/5c/ea5d239cda2dd3d31399424967a24d556cf409fbea7b5b21412b0fd0a44f/scipy-1.17.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a38c3337e00be6fd8a95b4ed66b5d988bac4ec888fd922c2ea9fe5fb1603dd67", size = 32757834, upload-time = "2026-01-10T21:29:23.406Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/7e/8c917cc573310e5dc91cbeead76f1b600d3fb17cf0969db02c9cf92e3cfa/scipy-1.17.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00fb5f8ec8398ad90215008d8b6009c9db9fa924fd4c7d6be307c6f945f9cd73", size = 34995775, upload-time = "2026-01-10T21:29:31.915Z" }, + { url = "https://files.pythonhosted.org/packages/c5/43/176c0c3c07b3f7df324e7cdd933d3e2c4898ca202b090bd5ba122f9fe270/scipy-1.17.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f2a4942b0f5f7c23c7cd641a0ca1955e2ae83dedcff537e3a0259096635e186b", size = 34841240, upload-time = "2026-01-10T21:29:39.995Z" }, + { url = "https://files.pythonhosted.org/packages/44/8c/d1f5f4b491160592e7f084d997de53a8e896a3ac01cd07e59f43ca222744/scipy-1.17.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:dbf133ced83889583156566d2bdf7a07ff89228fe0c0cb727f777de92092ec6b", size = 37394463, upload-time = "2026-01-10T21:29:48.723Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ec/42a6657f8d2d087e750e9a5dde0b481fd135657f09eaf1cf5688bb23c338/scipy-1.17.0-cp314-cp314-win_amd64.whl", hash = "sha256:3625c631a7acd7cfd929e4e31d2582cf00f42fcf06011f59281271746d77e061", size = 37053015, upload-time = "2026-01-10T21:30:51.418Z" }, + { url = "https://files.pythonhosted.org/packages/27/58/6b89a6afd132787d89a362d443a7bddd511b8f41336a1ae47f9e4f000dc4/scipy-1.17.0-cp314-cp314-win_arm64.whl", hash = "sha256:9244608d27eafe02b20558523ba57f15c689357c85bdcfe920b1828750aa26eb", size = 24951312, upload-time = "2026-01-10T21:30:56.771Z" }, + { url = "https://files.pythonhosted.org/packages/e9/01/f58916b9d9ae0112b86d7c3b10b9e685625ce6e8248df139d0fcb17f7397/scipy-1.17.0-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:2b531f57e09c946f56ad0b4a3b2abee778789097871fc541e267d2eca081cff1", size = 31706502, upload-time = "2026-01-10T21:29:56.326Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/8e/2912a87f94a7d1f8b38aabc0faf74b82d3b6c9e22be991c49979f0eceed8/scipy-1.17.0-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:13e861634a2c480bd237deb69333ac79ea1941b94568d4b0efa5db5e263d4fd1", size = 28380854, upload-time = "2026-01-10T21:30:01.554Z" }, + { url = "https://files.pythonhosted.org/packages/bd/1c/874137a52dddab7d5d595c1887089a2125d27d0601fce8c0026a24a92a0b/scipy-1.17.0-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:eb2651271135154aa24f6481cbae5cc8af1f0dd46e6533fb7b56aa9727b6a232", size = 20552752, upload-time = "2026-01-10T21:30:05.93Z" }, + { url = "https://files.pythonhosted.org/packages/3f/f0/7518d171cb735f6400f4576cf70f756d5b419a07fe1867da34e2c2c9c11b/scipy-1.17.0-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:c5e8647f60679790c2f5c76be17e2e9247dc6b98ad0d3b065861e082c56e078d", size = 22803972, upload-time = "2026-01-10T21:30:10.651Z" }, + { url = "https://files.pythonhosted.org/packages/7c/74/3498563a2c619e8a3ebb4d75457486c249b19b5b04a30600dfd9af06bea5/scipy-1.17.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5fb10d17e649e1446410895639f3385fd2bf4c3c7dfc9bea937bddcbc3d7b9ba", size = 32829770, upload-time = "2026-01-10T21:30:16.359Z" }, + { url = "https://files.pythonhosted.org/packages/48/d1/7b50cedd8c6c9d6f706b4b36fa8544d829c712a75e370f763b318e9638c1/scipy-1.17.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8547e7c57f932e7354a2319fab613981cde910631979f74c9b542bb167a8b9db", size = 35051093, upload-time = "2026-01-10T21:30:22.987Z" }, + { url = "https://files.pythonhosted.org/packages/e2/82/a2d684dfddb87ba1b3ea325df7c3293496ee9accb3a19abe9429bce94755/scipy-1.17.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33af70d040e8af9d5e7a38b5ed3b772adddd281e3062ff23fec49e49681c38cf", size = 34909905, upload-time = "2026-01-10T21:30:28.704Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/5e/e565bd73991d42023eb82bb99e51c5b3d9e2c588ca9d4b3e2cc1d3ca62a6/scipy-1.17.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb55bb97d00f8b7ab95cb64f873eb0bf54d9446264d9f3609130381233483f", size = 37457743, upload-time = "2026-01-10T21:30:34.819Z" }, + { url = "https://files.pythonhosted.org/packages/58/a8/a66a75c3d8f1fb2b83f66007d6455a06a6f6cf5618c3dc35bc9b69dd096e/scipy-1.17.0-cp314-cp314t-win_amd64.whl", hash = "sha256:1ff269abf702f6c7e67a4b7aad981d42871a11b9dd83c58d2d2ea624efbd1088", size = 37098574, upload-time = "2026-01-10T21:30:40.782Z" }, + { url = "https://files.pythonhosted.org/packages/56/a5/df8f46ef7da168f1bc52cd86e09a9de5c6f19cc1da04454d51b7d4f43408/scipy-1.17.0-cp314-cp314t-win_arm64.whl", hash = "sha256:031121914e295d9791319a1875444d55079885bbae5bdc9c5e0f2ee5f09d34ff", size = 25246266, upload-time = "2026-01-10T21:30:45.923Z" }, +] + +[[package]] +name = "sentry-sdk" +version = "2.52.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/59/eb/1b497650eb564701f9a7b8a95c51b2abe9347ed2c0b290ba78f027ebe4ea/sentry_sdk-2.52.0.tar.gz", hash = "sha256:fa0bec872cfec0302970b2996825723d67390cdd5f0229fb9efed93bd5384899", size = 410273, upload-time = "2026-02-04T15:03:54.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/63/2c6daf59d86b1c30600bff679d039f57fd1932af82c43c0bde1cbc55e8d4/sentry_sdk-2.52.0-py2.py3-none-any.whl", hash = "sha256:931c8f86169fc6f2752cb5c4e6480f0d516112e78750c312e081ababecbaf2ed", size = 435547, upload-time = "2026-02-04T15:03:51.567Z" }, +] + +[[package]] +name = "setuptools" +version = "78.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/81/9c/42314ee079a3e9c24b27515f9fbc7a3c1d29992c33451779011c74488375/setuptools-78.1.1.tar.gz", hash = 
"sha256:fcc17fd9cd898242f6b4adfaca46137a9edef687f43e6f78469692a5e70d851d", size = 1368163, upload-time = "2025-04-19T18:23:36.68Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/99/158ad0609729111163fc1f674a5a42f2605371a4cf036d0441070e2f7455/setuptools-78.1.1-py3-none-any.whl", hash = "sha256:c3a9c4211ff4c309edb8b8c4f1cbfa7ae324c4ba9f91ff254e3d305b9fd54561", size = 1256462, upload-time = "2025-04-19T18:23:34.525Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "socksio" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/f8/5c/48a7d9495be3d1c651198fd99dbb6ce190e2274d0f28b9051307bdec6b85/socksio-1.0.0.tar.gz", hash = "sha256:f88beb3da5b5c38b9890469de67d0cb0f9d494b78b106ca1845f96c10b91c4ac", size = 19055, upload-time = "2020-04-17T15:50:34.664Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/c3/6eeb6034408dac0fa653d126c9204ade96b819c936e136c5e8a6897eee9c/socksio-1.0.0-py3-none-any.whl", hash = "sha256:95dc1f15f9b34e8d7b16f06d74b8ccf48f609af32ab33c608d08761c5dcbb1f3", size = 12763, upload-time = "2020-04-17T15:50:31.878Z" }, +] + +[[package]] +name = "soundfile" +version = "0.13.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi" }, + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e1/41/9b873a8c055582859b239be17902a85339bec6a30ad162f98c9b0288a2cc/soundfile-0.13.1.tar.gz", hash = "sha256:b2c68dab1e30297317080a5b43df57e302584c49e2942defdde0acccc53f0e5b", size = 46156, upload-time = "2025-01-25T09:17:04.831Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/28/e2a36573ccbcf3d57c00626a21fe51989380636e821b341d36ccca0c1c3a/soundfile-0.13.1-py2.py3-none-any.whl", hash = "sha256:a23c717560da2cf4c7b5ae1142514e0fd82d6bbd9dfc93a50423447142f2c445", size = 25751, upload-time = "2025-01-25T09:16:44.235Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ab/73e97a5b3cc46bba7ff8650a1504348fa1863a6f9d57d7001c6b67c5f20e/soundfile-0.13.1-py2.py3-none-macosx_10_9_x86_64.whl", hash = "sha256:82dc664d19831933fe59adad199bf3945ad06d84bc111a5b4c0d3089a5b9ec33", size = 1142250, upload-time = "2025-01-25T09:16:47.583Z" }, + { url = "https://files.pythonhosted.org/packages/a0/e5/58fd1a8d7b26fc113af244f966ee3aecf03cb9293cb935daaddc1e455e18/soundfile-0.13.1-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:743f12c12c4054921e15736c6be09ac26b3b3d603aef6fd69f9dde68748f2593", size = 1101406, upload-time = "2025-01-25T09:16:49.662Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/ae/c0e4a53d77cf6e9a04179535766b3321b0b9ced5f70522e4caf9329f0046/soundfile-0.13.1-py2.py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:9c9e855f5a4d06ce4213f31918653ab7de0c5a8d8107cd2427e44b42df547deb", size = 1235729, upload-time = "2025-01-25T09:16:53.018Z" }, + { url = "https://files.pythonhosted.org/packages/57/5e/70bdd9579b35003a489fc850b5047beeda26328053ebadc1fb60f320f7db/soundfile-0.13.1-py2.py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:03267c4e493315294834a0870f31dbb3b28a95561b80b134f0bd3cf2d5f0e618", size = 1313646, upload-time = "2025-01-25T09:16:54.872Z" }, + { url = "https://files.pythonhosted.org/packages/fe/df/8c11dc4dfceda14e3003bb81a0d0edcaaf0796dd7b4f826ea3e532146bba/soundfile-0.13.1-py2.py3-none-win32.whl", hash = "sha256:c734564fab7c5ddf8e9be5bf70bab68042cd17e9c214c06e365e20d64f9a69d5", size = 899881, upload-time = "2025-01-25T09:16:56.663Z" }, + { url = "https://files.pythonhosted.org/packages/14/e9/6b761de83277f2f02ded7e7ea6f07828ec78e4b229b80e4ca55dd205b9dc/soundfile-0.13.1-py2.py3-none-win_amd64.whl", hash = "sha256:1e70a05a0626524a69e9f0f4dd2ec174b4e9567f4d8b6c11d38b5c289be36ee9", size = 1019162, upload-time = "2025-01-25T09:16:59.573Z" }, +] + +[[package]] +name = "soxr" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/7e/f4b461944662ad75036df65277d6130f9411002bfb79e9df7dff40a31db9/soxr-1.0.0.tar.gz", hash = "sha256:e07ee6c1d659bc6957034f4800c60cb8b98de798823e34d2a2bba1caa85a4509", size = 171415, upload-time = "2025-09-07T13:22:21.317Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/c7/f92b81f1a151c13afb114f57799b86da9330bec844ea5a0d3fe6a8732678/soxr-1.0.0-cp312-abi3-macosx_10_14_x86_64.whl", hash = "sha256:abecf4e39017f3fadb5e051637c272ae5778d838e5c3926a35db36a53e3a607f", size = 205508, upload-time = "2025-09-07T13:22:01.252Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/1d/c945fea9d83ea1f2be9d116b3674dbaef26ed090374a77c394b31e3b083b/soxr-1.0.0-cp312-abi3-macosx_11_0_arm64.whl", hash = "sha256:e973d487ee46aa8023ca00a139db6e09af053a37a032fe22f9ff0cc2e19c94b4", size = 163568, upload-time = "2025-09-07T13:22:03.558Z" }, + { url = "https://files.pythonhosted.org/packages/b5/80/10640970998a1d2199bef6c4d92205f36968cddaf3e4d0e9fe35ddd405bd/soxr-1.0.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e8ce273cca101aff3d8c387db5a5a41001ba76ef1837883438d3c652507a9ccc", size = 204707, upload-time = "2025-09-07T13:22:05.125Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/2726603c13c2126cb8ded9e57381b7377f4f0df6ba4408e1af5ddbfdc3dd/soxr-1.0.0-cp312-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8f2a69686f2856d37823bbb7b78c3d44904f311fe70ba49b893af11d6b6047b", size = 238032, upload-time = "2025-09-07T13:22:06.428Z" }, + { url = "https://files.pythonhosted.org/packages/ce/04/530252227f4d0721a5524a936336485dfb429bb206a66baf8e470384f4a2/soxr-1.0.0-cp312-abi3-win_amd64.whl", hash = "sha256:2a3b77b115ae7c478eecdbd060ed4f61beda542dfb70639177ac263aceda42a2", size = 172070, upload-time = "2025-09-07T13:22:07.62Z" }, + { url = "https://files.pythonhosted.org/packages/99/77/d3b3c25b4f1b1aa4a73f669355edcaee7a52179d0c50407697200a0e55b9/soxr-1.0.0-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:392a5c70c04eb939c9c176bd6f654dec9a0eaa9ba33d8f1024ed63cf68cdba0a", size = 209509, upload-time = "2025-09-07T13:22:08.773Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ee/3ca73e18781bb2aff92b809f1c17c356dfb9a1870652004bd432e79afbfa/soxr-1.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fdc41a1027ba46777186f26a8fba7893be913383414135577522da2fcc684490", size = 167690, upload-time = "2025-09-07T13:22:10.259Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/f0/eea8b5f587a2531657dc5081d2543a5a845f271a3bea1c0fdee5cebde021/soxr-1.0.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:449acd1dfaf10f0ce6dfd75c7e2ef984890df94008765a6742dafb42061c1a24", size = 209541, upload-time = "2025-09-07T13:22:11.739Z" }, + { url = "https://files.pythonhosted.org/packages/64/59/2430a48c705565eb09e78346950b586f253a11bd5313426ced3ecd9b0feb/soxr-1.0.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:38b35c99e408b8f440c9376a5e1dd48014857cd977c117bdaa4304865ae0edd0", size = 243025, upload-time = "2025-09-07T13:22:12.877Z" }, + { url = "https://files.pythonhosted.org/packages/3c/1b/f84a2570a74094e921bbad5450b2a22a85d58585916e131d9b98029c3e69/soxr-1.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:a39b519acca2364aa726b24a6fd55acf29e4c8909102e0b858c23013c38328e5", size = 184850, upload-time = "2025-09-07T13:22:14.068Z" }, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.49" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/45/461788f35e0364a8da7bda51a1fe1b09762d0c32f12f63727998d85a873b/sqlalchemy-2.0.49.tar.gz", hash = "sha256:d15950a57a210e36dd4cec1aac22787e2a4d57ba9318233e2ef8b2daf9ff2d5f", size = 9898221, upload-time = "2026-04-03T16:38:11.704Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/b3/2de412451330756aaaa72d27131db6dde23995efe62c941184e15242a5fa/sqlalchemy-2.0.49-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4bbccb45260e4ff1b7db0be80a9025bb1e6698bdb808b83fff0000f7a90b2c0b", size = 2157681, upload-time = "2026-04-03T16:53:07.132Z" }, + 
{ url = "https://files.pythonhosted.org/packages/50/84/b2a56e2105bd11ebf9f0b93abddd748e1a78d592819099359aa98134a8bf/sqlalchemy-2.0.49-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb37f15714ec2652d574f021d479e78cd4eb9d04396dca36568fdfffb3487982", size = 3338976, upload-time = "2026-04-03T17:07:40Z" }, + { url = "https://files.pythonhosted.org/packages/2c/fa/65fcae2ed62f84ab72cf89536c7c3217a156e71a2c111b1305ab6f0690e2/sqlalchemy-2.0.49-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb9ec6436a820a4c006aad1ac351f12de2f2dbdaad171692ee457a02429b672", size = 3351937, upload-time = "2026-04-03T17:12:23.374Z" }, + { url = "https://files.pythonhosted.org/packages/f8/2f/6fd118563572a7fe475925742eb6b3443b2250e346a0cc27d8d408e73773/sqlalchemy-2.0.49-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8d6efc136f44a7e8bc8088507eaabbb8c2b55b3dbb63fe102c690da0ddebe55e", size = 3281646, upload-time = "2026-04-03T17:07:41.949Z" }, + { url = "https://files.pythonhosted.org/packages/c5/d7/410f4a007c65275b9cf82354adb4bb8ba587b176d0a6ee99caa16fe638f8/sqlalchemy-2.0.49-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e06e617e3d4fd9e51d385dfe45b077a41e9d1b033a7702551e3278ac597dc750", size = 3316695, upload-time = "2026-04-03T17:12:25.642Z" }, + { url = "https://files.pythonhosted.org/packages/d9/95/81f594aa60ded13273a844539041ccf1e66c5a7bed0a8e27810a3b52d522/sqlalchemy-2.0.49-cp312-cp312-win32.whl", hash = "sha256:83101a6930332b87653886c01d1ee7e294b1fe46a07dd9a2d2b4f91bcc88eec0", size = 2117483, upload-time = "2026-04-03T17:05:40.896Z" }, + { url = "https://files.pythonhosted.org/packages/47/9e/fd90114059175cac64e4fafa9bf3ac20584384d66de40793ae2e2f26f3bb/sqlalchemy-2.0.49-cp312-cp312-win_amd64.whl", hash = "sha256:618a308215b6cececb6240b9abde545e3acdabac7ae3e1d4e666896bf5ba44b4", size = 2144494, upload-time = "2026-04-03T17:05:42.282Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/81/81755f50eb2478eaf2049728491d4ea4f416c1eb013338682173259efa09/sqlalchemy-2.0.49-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df2d441bacf97022e81ad047e1597552eb3f83ca8a8f1a1fdd43cd7fe3898120", size = 2154547, upload-time = "2026-04-03T16:53:08.64Z" }, + { url = "https://files.pythonhosted.org/packages/a2/bc/3494270da80811d08bcfa247404292428c4fe16294932bce5593f215cad9/sqlalchemy-2.0.49-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8e20e511dc15265fb433571391ba313e10dd8ea7e509d51686a51313b4ac01a2", size = 3280782, upload-time = "2026-04-03T17:07:43.508Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f5/038741f5e747a5f6ea3e72487211579d8cbea5eb9827a9cbd61d0108c4bd/sqlalchemy-2.0.49-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47604cb2159f8bbd5a1ab48a714557156320f20871ee64d550d8bf2683d980d3", size = 3297156, upload-time = "2026-04-03T17:12:27.697Z" }, + { url = "https://files.pythonhosted.org/packages/88/50/a6af0ff9dc954b43a65ca9b5367334e45d99684c90a3d3413fc19a02d43c/sqlalchemy-2.0.49-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:22d8798819f86720bc646ab015baff5ea4c971d68121cb36e2ebc2ee43ead2b7", size = 3228832, upload-time = "2026-04-03T17:07:45.38Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d1/5f6bdad8de0bf546fc74370939621396515e0cdb9067402d6ba1b8afbe9a/sqlalchemy-2.0.49-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9b1c058c171b739e7c330760044803099c7fff11511e3ab3573e5327116a9c33", size = 3267000, upload-time = "2026-04-03T17:12:29.657Z" }, + { url = "https://files.pythonhosted.org/packages/f7/30/ad62227b4a9819a5e1c6abff77c0f614fa7c9326e5a3bdbee90f7139382b/sqlalchemy-2.0.49-cp313-cp313-win32.whl", hash = "sha256:a143af2ea6672f2af3f44ed8f9cd020e9cc34c56f0e8db12019d5d9ecf41cb3b", size = 2115641, upload-time = "2026-04-03T17:05:43.989Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/3a/7215b1b7d6d49dc9a87211be44562077f5f04f9bb5a59552c1c8e2d98173/sqlalchemy-2.0.49-cp313-cp313-win_amd64.whl", hash = "sha256:12b04d1db2663b421fe072d638a138460a51d5a862403295671c4f3987fb9148", size = 2141498, upload-time = "2026-04-03T17:05:45.7Z" }, + { url = "https://files.pythonhosted.org/packages/28/4b/52a0cb2687a9cd1648252bb257be5a1ba2c2ded20ba695c65756a55a15a4/sqlalchemy-2.0.49-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24bd94bb301ec672d8f0623eba9226cc90d775d25a0c92b5f8e4965d7f3a1518", size = 3560807, upload-time = "2026-04-03T16:58:31.666Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d8/fda95459204877eed0458550d6c7c64c98cc50c2d8d618026737de9ed41a/sqlalchemy-2.0.49-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a51d3db74ba489266ef55c7a4534eb0b8db9a326553df481c11e5d7660c8364d", size = 3527481, upload-time = "2026-04-03T17:06:00.155Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0a/2aac8b78ac6487240cf7afef8f203ca783e8796002dc0cf65c4ee99ff8bb/sqlalchemy-2.0.49-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:55250fe61d6ebfd6934a272ee16ef1244e0f16b7af6cd18ab5b1fc9f08631db0", size = 3468565, upload-time = "2026-04-03T16:58:33.414Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3d/ce71cfa82c50a373fd2148b3c870be05027155ce791dc9a5dcf439790b8b/sqlalchemy-2.0.49-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:46796877b47034b559a593d7e4b549aba151dae73f9e78212a3478161c12ab08", size = 3477769, upload-time = "2026-04-03T17:06:02.787Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e8/0a9f5c1f7c6f9ca480319bf57c2d7423f08d31445974167a27d14483c948/sqlalchemy-2.0.49-cp313-cp313t-win32.whl", hash = "sha256:9c4969a86e41454f2858256c39bdfb966a20961e9b58bf8749b65abf447e9a8d", size = 2143319, upload-time = "2026-04-03T17:02:04.328Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/51/fb5240729fbec73006e137c4f7a7918ffd583ab08921e6ff81a999d6517a/sqlalchemy-2.0.49-cp313-cp313t-win_amd64.whl", hash = "sha256:b9870d15ef00e4d0559ae10ee5bc71b654d1f20076dbe8bc7ed19b4c0625ceba", size = 2175104, upload-time = "2026-04-03T17:02:05.989Z" }, + { url = "https://files.pythonhosted.org/packages/55/33/bf28f618c0a9597d14e0b9ee7d1e0622faff738d44fe986ee287cdf1b8d0/sqlalchemy-2.0.49-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:233088b4b99ebcbc5258c755a097aa52fbf90727a03a5a80781c4b9c54347a2e", size = 2156356, upload-time = "2026-04-03T16:53:09.914Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a7/5f476227576cb8644650eff68cc35fa837d3802b997465c96b8340ced1e2/sqlalchemy-2.0.49-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57ca426a48eb2c682dae8204cd89ea8ab7031e2675120a47924fabc7caacbc2a", size = 3276486, upload-time = "2026-04-03T17:07:46.9Z" }, + { url = "https://files.pythonhosted.org/packages/2e/84/efc7c0bf3a1c5eef81d397f6fddac855becdbb11cb38ff957888603014a7/sqlalchemy-2.0.49-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:685e93e9c8f399b0c96a624799820176312f5ceef958c0f88215af4013d29066", size = 3281479, upload-time = "2026-04-03T17:12:32.226Z" }, + { url = "https://files.pythonhosted.org/packages/91/68/bb406fa4257099c67bd75f3f2261b129c63204b9155de0d450b37f004698/sqlalchemy-2.0.49-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e0400fa22f79acc334d9a6b185dc00a44a8e6578aa7e12d0ddcd8434152b187", size = 3226269, upload-time = "2026-04-03T17:07:48.678Z" }, + { url = "https://files.pythonhosted.org/packages/67/84/acb56c00cca9f251f437cb49e718e14f7687505749ea9255d7bd8158a6df/sqlalchemy-2.0.49-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a05977bffe9bffd2229f477fa75eabe3192b1b05f408961d1bebff8d1cd4d401", size = 3248260, upload-time = "2026-04-03T17:12:34.381Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/19/6a20ea25606d1efd7bd1862149bb2a22d1451c3f851d23d887969201633f/sqlalchemy-2.0.49-cp314-cp314-win32.whl", hash = "sha256:0f2fa354ba106eafff2c14b0cc51f22801d1e8b2e4149342023bd6f0955de5f5", size = 2118463, upload-time = "2026-04-03T17:05:47.093Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4f/8297e4ed88e80baa1f5aa3c484a0ee29ef3c69c7582f206c916973b75057/sqlalchemy-2.0.49-cp314-cp314-win_amd64.whl", hash = "sha256:77641d299179c37b89cf2343ca9972c88bb6eef0d5fc504a2f86afd15cd5adf5", size = 2144204, upload-time = "2026-04-03T17:05:48.694Z" }, + { url = "https://files.pythonhosted.org/packages/1f/33/95e7216df810c706e0cd3655a778604bbd319ed4f43333127d465a46862d/sqlalchemy-2.0.49-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c1dc3368794d522f43914e03312202523cc89692f5389c32bea0233924f8d977", size = 3565474, upload-time = "2026-04-03T16:58:35.128Z" }, + { url = "https://files.pythonhosted.org/packages/0c/a4/ed7b18d8ccf7f954a83af6bb73866f5bc6f5636f44c7731fbb741f72cc4f/sqlalchemy-2.0.49-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c821c47ecfe05cc32140dcf8dc6fd5d21971c86dbd56eabfe5ba07a64910c01", size = 3530567, upload-time = "2026-04-03T17:06:04.587Z" }, + { url = "https://files.pythonhosted.org/packages/73/a3/20faa869c7e21a827c4a2a42b41353a54b0f9f5e96df5087629c306df71e/sqlalchemy-2.0.49-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9c04bff9a5335eb95c6ecf1c117576a0aa560def274876fd156cfe5510fccc61", size = 3474282, upload-time = "2026-04-03T16:58:37.131Z" }, + { url = "https://files.pythonhosted.org/packages/b7/50/276b9a007aa0764304ad467eceb70b04822dc32092492ee5f322d559a4dc/sqlalchemy-2.0.49-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7f605a456948c35260e7b2a39f8952a26f077fd25653c37740ed186b90aaa68a", size = 3480406, upload-time = "2026-04-03T17:06:07.176Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/c3/c80fcdb41905a2df650c2a3e0337198b6848876e63d66fe9188ef9003d24/sqlalchemy-2.0.49-cp314-cp314t-win32.whl", hash = "sha256:6270d717b11c5476b0cbb21eedc8d4dbb7d1a956fd6c15a23e96f197a6193158", size = 2149151, upload-time = "2026-04-03T17:02:07.281Z" }, + { url = "https://files.pythonhosted.org/packages/05/52/9f1a62feab6ed368aff068524ff414f26a6daebc7361861035ae00b05530/sqlalchemy-2.0.49-cp314-cp314t-win_amd64.whl", hash = "sha256:275424295f4256fd301744b8f335cff367825d270f155d522b30c7bf49903ee7", size = 2184178, upload-time = "2026-04-03T17:02:08.623Z" }, + { url = "https://files.pythonhosted.org/packages/e5/30/8519fdde58a7bdf155b714359791ad1dc018b47d60269d5d160d311fdc36/sqlalchemy-2.0.49-py3-none-any.whl", hash = "sha256:ec44cfa7ef1a728e88ad41674de50f6db8cfdb3e2af84af86e0041aaf02d43d0", size = 1942158, upload-time = "2026-04-03T16:53:44.135Z" }, +] + +[[package]] +name = "standard-aifc" +version = "3.13.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "audioop-lts", marker = "python_full_version >= '3.13'" }, + { name = "standard-chunk", marker = "python_full_version >= '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/53/6050dc3dde1671eb3db592c13b55a8005e5040131f7509cef0215212cb84/standard_aifc-3.13.0.tar.gz", hash = "sha256:64e249c7cb4b3daf2fdba4e95721f811bde8bdfc43ad9f936589b7bb2fae2e43", size = 15240, upload-time = "2024-10-30T16:01:31.772Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/52/5fbb203394cc852334d1575cc020f6bcec768d2265355984dfd361968f36/standard_aifc-3.13.0-py3-none-any.whl", hash = "sha256:f7ae09cc57de1224a0dd8e3eb8f73830be7c3d0bc485de4c1f82b4a7f645ac66", size = 10492, upload-time = "2024-10-30T16:01:07.071Z" }, +] + +[[package]] +name = "standard-chunk" +version = "3.13.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/43/06/ce1bb165c1f111c7d23a1ad17204d67224baa69725bb6857a264db61beaf/standard_chunk-3.13.0.tar.gz", hash = "sha256:4ac345d37d7e686d2755e01836b8d98eda0d1a3ee90375e597ae43aaf064d654", size = 4672, upload-time = "2024-10-30T16:18:28.326Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/90/a5c1084d87767d787a6caba615aa50dc587229646308d9420c960cb5e4c0/standard_chunk-3.13.0-py3-none-any.whl", hash = "sha256:17880a26c285189c644bd5bd8f8ed2bdb795d216e3293e6dbe55bbd848e2982c", size = 4944, upload-time = "2024-10-30T16:18:26.694Z" }, +] + +[[package]] +name = "standard-sunau" +version = "3.13.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "audioop-lts", marker = "python_full_version >= '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/e3/ce8d38cb2d70e05ffeddc28bb09bad77cfef979eb0a299c9117f7ed4e6a9/standard_sunau-3.13.0.tar.gz", hash = "sha256:b319a1ac95a09a2378a8442f403c66f4fd4b36616d6df6ae82b8e536ee790908", size = 9368, upload-time = "2024-10-30T16:01:41.626Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/ae/e3707f6c1bc6f7aa0df600ba8075bfb8a19252140cd595335be60e25f9ee/standard_sunau-3.13.0-py3-none-any.whl", hash = "sha256:53af624a9529c41062f4c2fd33837f297f3baa196b0cfceffea6555654602622", size = 7364, upload-time = "2024-10-30T16:01:28.003Z" }, +] + +[[package]] +name = "starlette" +version = "0.52.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/68/79977123bb7be889ad680d79a40f339082c1978b5cfcf62c2d8d196873ac/starlette-0.52.1.tar.gz", hash = "sha256:834edd1b0a23167694292e94f597773bc3f89f362be6effee198165a35d62933", size = 2653702, upload-time = "2026-01-18T13:34:11.062Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74", size = 74272, upload-time = "2026-01-18T13:34:09.188Z" }, +] + +[[package]] +name = "sympy" +version = "1.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mpmath" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921, upload-time = "2025-04-27T18:05:01.611Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353, upload-time = "2025-04-27T18:04:59.103Z" }, +] + +[[package]] +name = "threadpoolctl" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/4d/08c89e34946fce2aec4fbb45c9016efd5f4d7f24af8e5d93296e935631d8/threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e", size = 21274, upload-time = "2025-03-13T13:49:23.031Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/d5/f9a850d79b0851d1d4ef6456097579a9005b31fea68726a4ae5f2d82ddd9/threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb", size = 18638, upload-time = "2025-03-13T13:49:21.846Z" }, +] + +[[package]] +name = "tiktoken" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "regex" }, + { name = "requests" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, + { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" }, + { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, + { url = "https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665, upload-time = "2025-10-06T20:21:56.129Z" }, + { url = "https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230, upload-time = "2025-10-06T20:21:57.546Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688, upload-time = "2025-10-06T20:21:58.619Z" }, + { url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694, upload-time = "2025-10-06T20:21:59.876Z" }, + { url = "https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3", size = 1050802, upload-time = "2025-10-06T20:22:00.96Z" }, + { url = "https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160", size = 993995, upload-time = "2025-10-06T20:22:02.788Z" }, + { url = "https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa", size = 1128948, upload-time = "2025-10-06T20:22:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be", size = 1151986, upload-time = "2025-10-06T20:22:05.173Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a", size = 1194222, upload-time = "2025-10-06T20:22:06.265Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3", size = 1255097, upload-time = "2025-10-06T20:22:07.403Z" }, + { url = "https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697", size = 879117, upload-time = "2025-10-06T20:22:08.418Z" }, + { url = "https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16", size = 1050309, upload-time = "2025-10-06T20:22:10.939Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a", size = 993712, upload-time = "2025-10-06T20:22:12.115Z" }, + { url = "https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27", size = 1128725, upload-time = "2025-10-06T20:22:13.541Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb", size = 1151875, upload-time = "2025-10-06T20:22:14.559Z" }, + { url = "https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e", size = 1194451, upload-time = "2025-10-06T20:22:15.545Z" }, + { url = "https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25", size = 1253794, upload-time = "2025-10-06T20:22:16.624Z" }, + { url = "https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f", size = 878777, upload-time = "2025-10-06T20:22:18.036Z" }, + { url = "https://files.pythonhosted.org/packages/72/05/3abc1db5d2c9aadc4d2c76fa5640134e475e58d9fbb82b5c535dc0de9b01/tiktoken-0.12.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a90388128df3b3abeb2bfd1895b0681412a8d7dc644142519e6f0a97c2111646", size = 1050188, upload-time = "2025-10-06T20:22:19.563Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7b/50c2f060412202d6c95f32b20755c7a6273543b125c0985d6fa9465105af/tiktoken-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:da900aa0ad52247d8794e307d6446bd3cdea8e192769b56276695d34d2c9aa88", size = 993978, upload-time = "2025-10-06T20:22:20.702Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/27/bf795595a2b897e271771cd31cb847d479073497344c637966bdf2853da1/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:285ba9d73ea0d6171e7f9407039a290ca77efcdb026be7769dccc01d2c8d7fff", size = 1129271, upload-time = "2025-10-06T20:22:22.06Z" }, + { url = "https://files.pythonhosted.org/packages/f5/de/9341a6d7a8f1b448573bbf3425fa57669ac58258a667eb48a25dfe916d70/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:d186a5c60c6a0213f04a7a802264083dea1bbde92a2d4c7069e1a56630aef830", size = 1151216, upload-time = "2025-10-06T20:22:23.085Z" }, + { url = "https://files.pythonhosted.org/packages/75/0d/881866647b8d1be4d67cb24e50d0c26f9f807f994aa1510cb9ba2fe5f612/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:604831189bd05480f2b885ecd2d1986dc7686f609de48208ebbbddeea071fc0b", size = 1194860, upload-time = "2025-10-06T20:22:24.602Z" }, + { url = "https://files.pythonhosted.org/packages/b3/1e/b651ec3059474dab649b8d5b69f5c65cd8fcd8918568c1935bd4136c9392/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8f317e8530bb3a222547b85a58583238c8f74fd7a7408305f9f63246d1a0958b", size = 1254567, upload-time = "2025-10-06T20:22:25.671Z" }, + { url = "https://files.pythonhosted.org/packages/80/57/ce64fd16ac390fafde001268c364d559447ba09b509181b2808622420eec/tiktoken-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:399c3dd672a6406719d84442299a490420b458c44d3ae65516302a99675888f3", size = 921067, upload-time = "2025-10-06T20:22:26.753Z" }, + { url = "https://files.pythonhosted.org/packages/ac/a4/72eed53e8976a099539cdd5eb36f241987212c29629d0a52c305173e0a68/tiktoken-0.12.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2c714c72bc00a38ca969dae79e8266ddec999c7ceccd603cc4f0d04ccd76365", size = 1050473, upload-time = "2025-10-06T20:22:27.775Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/d7/0110b8f54c008466b19672c615f2168896b83706a6611ba6e47313dbc6e9/tiktoken-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cbb9a3ba275165a2cb0f9a83f5d7025afe6b9d0ab01a22b50f0e74fee2ad253e", size = 993855, upload-time = "2025-10-06T20:22:28.799Z" }, + { url = "https://files.pythonhosted.org/packages/5f/77/4f268c41a3957c418b084dd576ea2fad2e95da0d8e1ab705372892c2ca22/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:dfdfaa5ffff8993a3af94d1125870b1d27aed7cb97aa7eb8c1cefdbc87dbee63", size = 1129022, upload-time = "2025-10-06T20:22:29.981Z" }, + { url = "https://files.pythonhosted.org/packages/4e/2b/fc46c90fe5028bd094cd6ee25a7db321cb91d45dc87531e2bdbb26b4867a/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:584c3ad3d0c74f5269906eb8a659c8bfc6144a52895d9261cdaf90a0ae5f4de0", size = 1150736, upload-time = "2025-10-06T20:22:30.996Z" }, + { url = "https://files.pythonhosted.org/packages/28/c0/3c7a39ff68022ddfd7d93f3337ad90389a342f761c4d71de99a3ccc57857/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:54c891b416a0e36b8e2045b12b33dd66fb34a4fe7965565f1b482da50da3e86a", size = 1194908, upload-time = "2025-10-06T20:22:32.073Z" }, + { url = "https://files.pythonhosted.org/packages/ab/0d/c1ad6f4016a3968c048545f5d9b8ffebf577774b2ede3e2e352553b685fe/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5edb8743b88d5be814b1a8a8854494719080c28faaa1ccbef02e87354fe71ef0", size = 1253706, upload-time = "2025-10-06T20:22:33.385Z" }, + { url = "https://files.pythonhosted.org/packages/af/df/c7891ef9d2712ad774777271d39fdef63941ffba0a9d59b7ad1fd2765e57/tiktoken-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f61c0aea5565ac82e2ec50a05e02a6c44734e91b51c10510b084ea1b8e633a71", size = 920667, upload-time = "2025-10-06T20:22:34.444Z" }, +] + +[[package]] +name = "tokenizers" +version = "0.22.2" +source = { registry = "https://pypi.org/simple" } +dependencies = 
[ + { name = "huggingface-hub" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/73/6f/f80cfef4a312e1fb34baf7d85c72d4411afde10978d4657f8cdd811d3ccc/tokenizers-0.22.2.tar.gz", hash = "sha256:473b83b915e547aa366d1eee11806deaf419e17be16310ac0a14077f1e28f917", size = 372115, upload-time = "2026-01-05T10:45:15.988Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/92/97/5dbfabf04c7e348e655e907ed27913e03db0923abb5dfdd120d7b25630e1/tokenizers-0.22.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:544dd704ae7238755d790de45ba8da072e9af3eea688f698b137915ae959281c", size = 3100275, upload-time = "2026-01-05T10:41:02.158Z" }, + { url = "https://files.pythonhosted.org/packages/2e/47/174dca0502ef88b28f1c9e06b73ce33500eedfac7a7692108aec220464e7/tokenizers-0.22.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:1e418a55456beedca4621dbab65a318981467a2b188e982a23e117f115ce5001", size = 2981472, upload-time = "2026-01-05T10:41:00.276Z" }, + { url = "https://files.pythonhosted.org/packages/d6/84/7990e799f1309a8b87af6b948f31edaa12a3ed22d11b352eaf4f4b2e5753/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2249487018adec45d6e3554c71d46eb39fa8ea67156c640f7513eb26f318cec7", size = 3290736, upload-time = "2026-01-05T10:40:32.165Z" }, + { url = "https://files.pythonhosted.org/packages/78/59/09d0d9ba94dcd5f4f1368d4858d24546b4bdc0231c2354aa31d6199f0399/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25b85325d0815e86e0bac263506dd114578953b7b53d7de09a6485e4a160a7dd", size = 3168835, upload-time = "2026-01-05T10:40:38.847Z" }, + { url = "https://files.pythonhosted.org/packages/47/50/b3ebb4243e7160bda8d34b731e54dd8ab8b133e50775872e7a434e524c28/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfb88f22a209ff7b40a576d5324bf8286b519d7358663db21d6246fb17eea2d5", size = 3521673, upload-time = "2026-01-05T10:40:56.614Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/fa/89f4cb9e08df770b57adb96f8cbb7e22695a4cb6c2bd5f0c4f0ebcf33b66/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c774b1276f71e1ef716e5486f21e76333464f47bece56bbd554485982a9e03e", size = 3724818, upload-time = "2026-01-05T10:40:44.507Z" }, + { url = "https://files.pythonhosted.org/packages/64/04/ca2363f0bfbe3b3d36e95bf67e56a4c88c8e3362b658e616d1ac185d47f2/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df6c4265b289083bf710dff49bc51ef252f9d5be33a45ee2bed151114a56207b", size = 3379195, upload-time = "2026-01-05T10:40:51.139Z" }, + { url = "https://files.pythonhosted.org/packages/2e/76/932be4b50ef6ccedf9d3c6639b056a967a86258c6d9200643f01269211ca/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:369cc9fc8cc10cb24143873a0d95438bb8ee257bb80c71989e3ee290e8d72c67", size = 3274982, upload-time = "2026-01-05T10:40:58.331Z" }, + { url = "https://files.pythonhosted.org/packages/1d/28/5f9f5a4cc211b69e89420980e483831bcc29dade307955cc9dc858a40f01/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:29c30b83d8dcd061078b05ae0cb94d3c710555fbb44861139f9f83dcca3dc3e4", size = 9478245, upload-time = "2026-01-05T10:41:04.053Z" }, + { url = "https://files.pythonhosted.org/packages/6c/fb/66e2da4704d6aadebf8cb39f1d6d1957df667ab24cff2326b77cda0dcb85/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:37ae80a28c1d3265bb1f22464c856bd23c02a05bb211e56d0c5301a435be6c1a", size = 9560069, upload-time = "2026-01-05T10:45:10.673Z" }, + { url = "https://files.pythonhosted.org/packages/16/04/fed398b05caa87ce9b1a1bb5166645e38196081b225059a6edaff6440fac/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:791135ee325f2336f498590eb2f11dc5c295232f288e75c99a36c5dbce63088a", size = 9899263, upload-time = "2026-01-05T10:45:12.559Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/a1/d62dfe7376beaaf1394917e0f8e93ee5f67fea8fcf4107501db35996586b/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38337540fbbddff8e999d59970f3c6f35a82de10053206a7562f1ea02d046fa5", size = 10033429, upload-time = "2026-01-05T10:45:14.333Z" }, + { url = "https://files.pythonhosted.org/packages/fd/18/a545c4ea42af3df6effd7d13d250ba77a0a86fb20393143bbb9a92e434d4/tokenizers-0.22.2-cp39-abi3-win32.whl", hash = "sha256:a6bf3f88c554a2b653af81f3204491c818ae2ac6fbc09e76ef4773351292bc92", size = 2502363, upload-time = "2026-01-05T10:45:20.593Z" }, + { url = "https://files.pythonhosted.org/packages/65/71/0670843133a43d43070abeb1949abfdef12a86d490bea9cd9e18e37c5ff7/tokenizers-0.22.2-cp39-abi3-win_amd64.whl", hash = "sha256:c9ea31edff2968b44a88f97d784c2f16dc0729b8b143ed004699ebca91f05c48", size = 2747786, upload-time = "2026-01-05T10:45:18.411Z" }, + { url = "https://files.pythonhosted.org/packages/72/f4/0de46cfa12cdcbcd464cc59fde36912af405696f687e53a091fb432f694c/tokenizers-0.22.2-cp39-abi3-win_arm64.whl", hash = "sha256:9ce725d22864a1e965217204946f830c37876eee3b2ba6fc6255e8e903d5fcbc", size = 2612133, upload-time = "2026-01-05T10:45:17.232Z" }, +] + +[[package]] +name = "torch" +version = "2.11.0+cu130" +source = { registry = "https://download.pytorch.org/whl/cu130" } +dependencies = [ + { name = "cuda-bindings", marker = "sys_platform == 'linux'" }, + { name = "cuda-toolkit", extra = ["cublas", "cudart", "cufft", "cufile", "cupti", "curand", "cusolver", "cusparse", "nvjitlink", "nvrtc", "nvtx"], marker = "sys_platform == 'linux'" }, + { name = "filelock" }, + { name = "fsspec" }, + { name = "jinja2" }, + { name = "networkx" }, + { name = "nvidia-cudnn-cu13", marker = "sys_platform == 'linux'" }, + { name = "nvidia-cusparselt-cu13", marker = "sys_platform == 'linux'" }, + { name = "nvidia-nccl-cu13", marker = "sys_platform == 'linux'" }, + { name = "nvidia-nvshmem-cu13", marker = "sys_platform == 'linux'" }, + { 
name = "setuptools" }, + { name = "sympy" }, + { name = "triton", marker = "sys_platform == 'linux'" }, + { name = "typing-extensions" }, +] +wheels = [ + { url = "https://download-r2.pytorch.org/whl/cu130/torch-2.11.0%2Bcu130-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:252f237d417fac3ba59b1635815c1f035a8241f2af038f2c076ed430932d89f1", upload-time = "2026-04-27T20:01:46Z" }, + { url = "https://download-r2.pytorch.org/whl/cu130/torch-2.11.0%2Bcu130-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:96911323dcfcd42028c7e8edde7bdf25bb187753234e8775f0f3f112e86a22db", upload-time = "2026-04-27T20:02:14Z" }, + { url = "https://download-r2.pytorch.org/whl/cu130/torch-2.11.0%2Bcu130-cp312-cp312-win_amd64.whl", hash = "sha256:ef8beae16d781c3244ef28dc7bee6d8871c26bbde65d5bf66e902cb61972c4ab", upload-time = "2026-04-27T20:03:28Z" }, + { url = "https://download-r2.pytorch.org/whl/cu130/torch-2.11.0%2Bcu130-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c3d60f79666b9101e3914a2e5dec2e81eac834e13cae0bcf59e94dc1a465f756", upload-time = "2026-04-27T20:04:49Z" }, + { url = "https://download-r2.pytorch.org/whl/cu130/torch-2.11.0%2Bcu130-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:554461b76f21211927c776056bcb0b00fb42972364794b686d768ebb0b586366", upload-time = "2026-04-27T20:05:21Z" }, + { url = "https://download-r2.pytorch.org/whl/cu130/torch-2.11.0%2Bcu130-cp313-cp313-win_amd64.whl", hash = "sha256:339801f2163698a53c7fb3c91883e7f44331d22c34d45acfbce4eff71f2332fa", upload-time = "2026-04-27T20:06:44Z" }, + { url = "https://download-r2.pytorch.org/whl/cu130/torch-2.11.0%2Bcu130-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:a33905bc3e093b25d2b019181cf834f7f7d4c562739e13dd36a798ecb2e411b0", upload-time = "2026-04-27T20:08:23Z" }, + { url = "https://download-r2.pytorch.org/whl/cu130/torch-2.11.0%2Bcu130-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:6fd10ed484eb695312ae829719888bb9f6c7f5e8503528e3e8ad1b98a45296c2", upload-time = 
"2026-04-27T20:08:56Z" }, + { url = "https://download-r2.pytorch.org/whl/cu130/torch-2.11.0%2Bcu130-cp313-cp313t-win_amd64.whl", hash = "sha256:21d2734fd02af45d19bb88c0ff2e86b238ce73f7bde6003ade7f1454ae299198", upload-time = "2026-04-27T20:10:20Z" }, + { url = "https://download-r2.pytorch.org/whl/cu130/torch-2.11.0%2Bcu130-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:efcdfe08ec2c9db28b50cc7329fed0c90bb74fa6fbce0f7eb12e20db2279a40f", upload-time = "2026-04-27T20:11:48Z" }, + { url = "https://download-r2.pytorch.org/whl/cu130/torch-2.11.0%2Bcu130-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:6ccc36928fd17c86011b46fb81bd2c85475f1fbf967dde758672d6a8d83a212a", upload-time = "2026-04-27T20:12:18Z" }, + { url = "https://download-r2.pytorch.org/whl/cu130/torch-2.11.0%2Bcu130-cp314-cp314-win_amd64.whl", hash = "sha256:d886f1c2f4406d7ad0c59f254ceb0a9c47a03e97a7c704b778a2066d752dde29", upload-time = "2026-04-27T20:13:41Z" }, + { url = "https://download-r2.pytorch.org/whl/cu130/torch-2.11.0%2Bcu130-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:bdb20f8b04e9fcaba2f354c3026667bebb74de8a92526b706aa735e2df334c24", upload-time = "2026-04-27T20:15:02Z" }, + { url = "https://download-r2.pytorch.org/whl/cu130/torch-2.11.0%2Bcu130-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:28f952cd4a927616ad9d77644a93237d1ca50bf30d0cf26962b9162d8a00ffa0", upload-time = "2026-04-27T20:15:30Z" }, + { url = "https://download-r2.pytorch.org/whl/cu130/torch-2.11.0%2Bcu130-cp314-cp314t-win_amd64.whl", hash = "sha256:d0a857adc487f275bfc9e7cdc51d12940613ba18b6362da214e20e9e3871f817", upload-time = "2026-04-27T20:16:48Z" }, +] + +[[package]] +name = "tqdm" +version = "4.67.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/a9/6ba95a270c6f1fbcd8dac228323f2777d886cb206987444e4bce66338dd4/tqdm-4.67.3.tar.gz", hash = 
"sha256:7d825f03f89244ef73f1d4ce193cb1774a8179fd96f31d7e1dcde62092b960bb", size = 169598, upload-time = "2026-02-03T17:35:53.048Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl", hash = "sha256:ee1e4c0e59148062281c49d80b25b67771a127c85fc9676d3be5f243206826bf", size = 78374, upload-time = "2026-02-03T17:35:50.982Z" }, +] + +[[package]] +name = "transformers" +version = "5.6.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "pyyaml" }, + { name = "regex" }, + { name = "safetensors" }, + { name = "tokenizers" }, + { name = "tqdm" }, + { name = "typer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a4/e9/c6c80a07690142a7d05444271f47b9f3c8aac7dea01d52e1137ee480ad78/transformers-5.6.2.tar.gz", hash = "sha256:e657134c3e5a6bc00a3c35f4e2674bb51adfcd89898495b788a18552bac2b91a", size = 8311867, upload-time = "2026-04-23T18:33:29.332Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/95/0b0218149b0d6f14df35f5b8f676fa83df4f19ed253c3cc447107ef86eca/transformers-5.6.2-py3-none-any.whl", hash = "sha256:f8d3a1bb96778fed9b8aabfd0dd6e19843e4b0f2bb6b59f32b8a92051b0f348f", size = 10364898, upload-time = "2026-04-23T18:33:26.081Z" }, +] + +[[package]] +name = "triton" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/5d/08201db32823bdf77a0e2b9039540080b2e5c23a20706ddba942924ebcd6/triton-3.6.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:374f52c11a711fd062b4bfbb201fd9ac0a5febd28a96fb41b4a0f51dde3157f4", size = 176128243, upload-time = "2026-01-20T16:16:07.857Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/a8/cdf8b3e4c98132f965f88c2313a4b493266832ad47fb52f23d14d4f86bb5/triton-3.6.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:74caf5e34b66d9f3a429af689c1c7128daba1d8208df60e81106b115c00d6fca", size = 188266850, upload-time = "2026-01-20T16:00:43.041Z" }, + { url = "https://files.pythonhosted.org/packages/3c/12/34d71b350e89a204c2c7777a9bba0dcf2f19a5bfdd70b57c4dbc5ffd7154/triton-3.6.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448e02fe6dc898e9e5aa89cf0ee5c371e99df5aa5e8ad976a80b93334f3494fd", size = 176133521, upload-time = "2026-01-20T16:16:13.321Z" }, + { url = "https://files.pythonhosted.org/packages/f9/0b/37d991d8c130ce81a8728ae3c25b6e60935838e9be1b58791f5997b24a54/triton-3.6.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10c7f76c6e72d2ef08df639e3d0d30729112f47a56b0c81672edc05ee5116ac9", size = 188289450, upload-time = "2026-01-20T16:00:49.136Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4e/41b0c8033b503fd3cfcd12392cdd256945026a91ff02452bef40ec34bee7/triton-3.6.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1722e172d34e32abc3eb7711d0025bb69d7959ebea84e3b7f7a341cd7ed694d6", size = 176276087, upload-time = "2026-01-20T16:16:18.989Z" }, + { url = "https://files.pythonhosted.org/packages/35/f8/9c66bfc55361ec6d0e4040a0337fb5924ceb23de4648b8a81ae9d33b2b38/triton-3.6.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d002e07d7180fd65e622134fbd980c9a3d4211fb85224b56a0a0efbd422ab72f", size = 188400296, upload-time = "2026-01-20T16:00:56.042Z" }, + { url = "https://files.pythonhosted.org/packages/49/55/5ecf0dcaa0f2fbbd4420f7ef227ee3cb172e91e5fede9d0ecaddc43363b4/triton-3.6.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef5523241e7d1abca00f1d240949eebdd7c673b005edbbce0aca95b8191f1d43", size = 176138577, upload-time = 
"2026-01-20T16:16:25.426Z" }, + { url = "https://files.pythonhosted.org/packages/df/3d/9e7eee57b37c80cec63322c0231bb6da3cfe535a91d7a4d64896fcb89357/triton-3.6.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a17a5d5985f0ac494ed8a8e54568f092f7057ef60e1b0fa09d3fd1512064e803", size = 188273063, upload-time = "2026-01-20T16:01:07.278Z" }, + { url = "https://files.pythonhosted.org/packages/48/db/56ee649cab5eaff4757541325aca81f52d02d4a7cd3506776cad2451e060/triton-3.6.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b3a97e8ed304dfa9bd23bb41ca04cdf6b2e617d5e782a8653d616037a5d537d", size = 176274804, upload-time = "2026-01-20T16:16:31.528Z" }, + { url = "https://files.pythonhosted.org/packages/f6/56/6113c23ff46c00aae423333eb58b3e60bdfe9179d542781955a5e1514cb3/triton-3.6.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:46bd1c1af4b6704e554cad2eeb3b0a6513a980d470ccfa63189737340c7746a7", size = 188397994, upload-time = "2026-01-20T16:01:14.236Z" }, +] + +[[package]] +name = "ty" +version = "0.0.32" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/85/7e/2aa791c9ae7b8cd5024cd4122e92267f664ca954cea3def3211919fa3c1f/ty-0.0.32.tar.gz", hash = "sha256:8743174c5f920f6700a4a0c9de140109189192ba16226884cd50095b43b8a45c", size = 5522294, upload-time = "2026-04-20T19:29:01.626Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/eb/1075dc6a49d7acbe2584ae4d5b410c41b1f177a5adcc567e09eca4c69000/ty-0.0.32-py3-none-linux_armv6l.whl", hash = "sha256:dacbc2f6cd698d488ae7436838ff929570455bf94bfa4d9fe57a630c552aff83", size = 10902959, upload-time = "2026-04-20T19:28:31.907Z" }, + { url = "https://files.pythonhosted.org/packages/33/d2/c35fc8bc66e98d1ee9b0f8ed319bf743e450e1f1e997574b178fab75670f/ty-0.0.32-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:914bbc4f605ce2a9e2a78982e28fae1d3359a169d141f9dc3b4c7749cd5eca81", size = 
10726172, upload-time = "2026-04-20T19:28:44.765Z" }, + { url = "https://files.pythonhosted.org/packages/96/32/c827da3ca480456fb02d8cea68a2609273b6c220fea0be9a4c8d8470b86e/ty-0.0.32-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4787ac9fe1f86b1f3133f5c6732adbe2df5668b50c679ac6e2d98cd284da812f", size = 10163701, upload-time = "2026-04-20T19:28:27.005Z" }, + { url = "https://files.pythonhosted.org/packages/ba/9e/2734478fbdb90c160cb2813a3916a16a2af5c1e231f87d635f6131d781fb/ty-0.0.32-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ea0a728af99fe40dd744cba6441a2404f80b7f4bde17aa6da393810af5ea57", size = 10656220, upload-time = "2026-04-20T19:29:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/44/9f/0007da2d35e424debe7e9f86ffbc1ab7f60983cfbc5f0411324ab2de5292/ty-0.0.32-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2850561f9b018ae33d7e5bbfa0ac414d3c518513edcffe43877dc9801446b9c5", size = 10696086, upload-time = "2026-04-20T19:28:46.829Z" }, + { url = "https://files.pythonhosted.org/packages/3b/5e/ce5fd4ec803222ae3e69a76d2a2db2eed55e19f5b131702b9789ef45f93d/ty-0.0.32-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5fa2fb3c614349ee211d36476b49d88c5ef79a687cdb91b2872ad023b94d2f8", size = 11184800, upload-time = "2026-04-20T19:28:42.57Z" }, + { url = "https://files.pythonhosted.org/packages/6c/46/ebcf67a5999421331214aac51a7464db42de2be15bbe929c612a3ed0b039/ty-0.0.32-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b89969307ab2417d41c9be8059dd79feea577234e1e10d35132f5495e0d42c6", size = 11718718, upload-time = "2026-04-20T19:28:36.433Z" }, + { url = "https://files.pythonhosted.org/packages/18/2c/2141c86ed0ce0962b45cefb658a95e734f59759d47f20afdcd9c732910a1/ty-0.0.32-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b59868ede9b1d69a088f0d695df52a0061f95fa7baa1d5e0dc6fc9cf06e1334", size = 11346369, upload-time = "2026-04-20T19:28:48.967Z" }, 
+ { url = "https://files.pythonhosted.org/packages/7a/da/ed6f772339cf29bd9a46def9d6db5084689eb574ee4d150ff704224c1ed8/ty-0.0.32-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8300caf35345498e9b9b03e550bba03cee8f5f5f8ab4c83c3b1ff1b7403b7d3a", size = 11280714, upload-time = "2026-04-20T19:28:51.516Z" }, + { url = "https://files.pythonhosted.org/packages/da/9b/c6813987edf4816a40e0c8e408b555f97d3f267c7b3a1688c8bbdf65609c/ty-0.0.32-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:583c7094f4574b02f724db924f98b804d1387a0bd9405ecb5e078cc0f47fbcfb", size = 10638806, upload-time = "2026-04-20T19:28:29.651Z" }, + { url = "https://files.pythonhosted.org/packages/4e/d4/0cefcbd2ad0f3d51762ccf58e652ec7da146eb6ae34f87228f6254bbb8be/ty-0.0.32-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e44ebe1bb4143a5628bc4db67ac0dfebe14594af671e4ee66f6f2e983da56501", size = 10726106, upload-time = "2026-04-20T19:29:06.3Z" }, + { url = "https://files.pythonhosted.org/packages/32/ad/2c8a97f91f06311f4367400f7d13534bbda2522c73c99a3e4c0757dff9b8/ty-0.0.32-py3-none-musllinux_1_2_i686.whl", hash = "sha256:06f17ada3e069cba6148342ef88e9929156beca8473e8d4f101b68f66c75643e", size = 10872951, upload-time = "2026-04-20T19:28:34.077Z" }, + { url = "https://files.pythonhosted.org/packages/ba/68/42293f9248106dd51875120971a5cc6ea315c2c4dcfb8e59aa063aa0af26/ty-0.0.32-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e96e60fa556cec04f15d7ea62d2ceee5982bd389233e961ab9fd42304e278175", size = 11363334, upload-time = "2026-04-20T19:28:54.036Z" }, + { url = "https://files.pythonhosted.org/packages/df/92/be9abf4d3e589ad5023e2ea965b93e204ec856420d46adf73c5c36c04678/ty-0.0.32-py3-none-win32.whl", hash = "sha256:2ff2ebb4986b24aebcf1444db7db5ca41b36086040e95eea9f8fb851c11e805c", size = 10260689, upload-time = "2026-04-20T19:28:56.541Z" }, + { url = "https://files.pythonhosted.org/packages/14/61/dc86acea899349d2579cb8419aecedd83dc504d7d6a10df65eef546c8300/ty-0.0.32-py3-none-win_amd64.whl", 
hash = "sha256:ba7284a4a954b598c1b31500352b3ec1f89bff533825592b5958848226fdc7ee", size = 11255371, upload-time = "2026-04-20T19:28:39.917Z" }, + { url = "https://files.pythonhosted.org/packages/43/01/beffec56d71ca25b343ede63adb076456b5b3e211f1c066452a44cd120b3/ty-0.0.32-py3-none-win_arm64.whl", hash = "sha256:7e10aadbdbda989a7d567ee6a37f8b98d4d542e31e3b190a2879fd581f75d658", size = 10658087, upload-time = "2026-04-20T19:28:59.286Z" }, +] + +[[package]] +name = "typer" +version = "0.23.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-doc" }, + { name = "click" }, + { name = "rich" }, + { name = "shellingham" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fd/07/b822e1b307d40e263e8253d2384cf98c51aa2368cc7ba9a07e523a1d964b/typer-0.23.1.tar.gz", hash = "sha256:2070374e4d31c83e7b61362fd859aa683576432fd5b026b060ad6b4cd3b86134", size = 120047, upload-time = "2026-02-13T10:04:30.984Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/91/9b286ab899c008c2cb05e8be99814807e7fbbd33f0c0c960470826e5ac82/typer-0.23.1-py3-none-any.whl", hash = "sha256:3291ad0d3c701cbf522012faccfbb29352ff16ad262db2139e6b01f15781f14e", size = 56813, upload-time = "2026-02-13T10:04:32.008Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + 
+[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.46.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1f/93/041fca8274050e40e6791f267d82e0e2e27dd165627bd640d3e0e378d877/uvicorn-0.46.0.tar.gz", hash = "sha256:fb9da0926999cc6cb22dc7cd71a94a632f078e6ae47ff683c5c420750fb7413d", size = 88758, upload-time = "2026-04-23T07:16:00.151Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/31/a3/5b1562db76a5a488274b2332a97199b32d0442aca0ed193697fd47786316/uvicorn-0.46.0-py3-none-any.whl", hash = "sha256:bbebbcbed972d162afca128605223022bedd345b7bc7855ce66deb31487a9048", size = 70926, upload-time = "2026-04-23T07:15:58.355Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "httptools" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, + { name = "watchfiles" }, + { name = "websockets" }, +] + +[[package]] +name = "uvloop" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/06/f0/18d39dbd1971d6d62c4629cc7fa67f74821b0dc1f5a77af43719de7936a7/uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f", size = 2443250, upload-time = "2025-10-16T22:17:19.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/ff/7f72e8170be527b4977b033239a83a68d5c881cc4775fca255c677f7ac5d/uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42", size = 1359936, upload-time = "2025-10-16T22:16:29.436Z" }, + { url = "https://files.pythonhosted.org/packages/c3/c6/e5d433f88fd54d81ef4be58b2b7b0cea13c442454a1db703a1eea0db1a59/uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6", size = 752769, upload-time = "2025-10-16T22:16:30.493Z" }, + { url = "https://files.pythonhosted.org/packages/24/68/a6ac446820273e71aa762fa21cdcc09861edd3536ff47c5cd3b7afb10eeb/uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370", size 
= 4317413, upload-time = "2025-10-16T22:16:31.644Z" }, + { url = "https://files.pythonhosted.org/packages/5f/6f/e62b4dfc7ad6518e7eff2516f680d02a0f6eb62c0c212e152ca708a0085e/uvloop-0.22.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4", size = 4426307, upload-time = "2025-10-16T22:16:32.917Z" }, + { url = "https://files.pythonhosted.org/packages/90/60/97362554ac21e20e81bcef1150cb2a7e4ffdaf8ea1e5b2e8bf7a053caa18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2", size = 4131970, upload-time = "2025-10-16T22:16:34.015Z" }, + { url = "https://files.pythonhosted.org/packages/99/39/6b3f7d234ba3964c428a6e40006340f53ba37993f46ed6e111c6e9141d18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0", size = 4296343, upload-time = "2025-10-16T22:16:35.149Z" }, + { url = "https://files.pythonhosted.org/packages/89/8c/182a2a593195bfd39842ea68ebc084e20c850806117213f5a299dfc513d9/uvloop-0.22.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:561577354eb94200d75aca23fbde86ee11be36b00e52a4eaf8f50fb0c86b7705", size = 1358611, upload-time = "2025-10-16T22:16:36.833Z" }, + { url = "https://files.pythonhosted.org/packages/d2/14/e301ee96a6dc95224b6f1162cd3312f6d1217be3907b79173b06785f2fe7/uvloop-0.22.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cdf5192ab3e674ca26da2eada35b288d2fa49fdd0f357a19f0e7c4e7d5077c8", size = 751811, upload-time = "2025-10-16T22:16:38.275Z" }, + { url = "https://files.pythonhosted.org/packages/b7/02/654426ce265ac19e2980bfd9ea6590ca96a56f10c76e63801a2df01c0486/uvloop-0.22.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e2ea3d6190a2968f4a14a23019d3b16870dd2190cd69c8180f7c632d21de68d", size = 4288562, upload-time = 
"2025-10-16T22:16:39.375Z" }, + { url = "https://files.pythonhosted.org/packages/15/c0/0be24758891ef825f2065cd5db8741aaddabe3e248ee6acc5e8a80f04005/uvloop-0.22.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0530a5fbad9c9e4ee3f2b33b148c6a64d47bbad8000ea63704fa8260f4cf728e", size = 4366890, upload-time = "2025-10-16T22:16:40.547Z" }, + { url = "https://files.pythonhosted.org/packages/d2/53/8369e5219a5855869bcee5f4d317f6da0e2c669aecf0ef7d371e3d084449/uvloop-0.22.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bc5ef13bbc10b5335792360623cc378d52d7e62c2de64660616478c32cd0598e", size = 4119472, upload-time = "2025-10-16T22:16:41.694Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ba/d69adbe699b768f6b29a5eec7b47dd610bd17a69de51b251126a801369ea/uvloop-0.22.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1f38ec5e3f18c8a10ded09742f7fb8de0108796eb673f30ce7762ce1b8550cad", size = 4239051, upload-time = "2025-10-16T22:16:43.224Z" }, + { url = "https://files.pythonhosted.org/packages/90/cd/b62bdeaa429758aee8de8b00ac0dd26593a9de93d302bff3d21439e9791d/uvloop-0.22.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3879b88423ec7e97cd4eba2a443aa26ed4e59b45e6b76aabf13fe2f27023a142", size = 1362067, upload-time = "2025-10-16T22:16:44.503Z" }, + { url = "https://files.pythonhosted.org/packages/0d/f8/a132124dfda0777e489ca86732e85e69afcd1ff7686647000050ba670689/uvloop-0.22.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4baa86acedf1d62115c1dc6ad1e17134476688f08c6efd8a2ab076e815665c74", size = 752423, upload-time = "2025-10-16T22:16:45.968Z" }, + { url = "https://files.pythonhosted.org/packages/a3/94/94af78c156f88da4b3a733773ad5ba0b164393e357cc4bd0ab2e2677a7d6/uvloop-0.22.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:297c27d8003520596236bdb2335e6b3f649480bd09e00d1e3a99144b691d2a35", size = 4272437, upload-time = "2025-10-16T22:16:47.451Z" 
}, + { url = "https://files.pythonhosted.org/packages/b5/35/60249e9fd07b32c665192cec7af29e06c7cd96fa1d08b84f012a56a0b38e/uvloop-0.22.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1955d5a1dd43198244d47664a5858082a3239766a839b2102a269aaff7a4e25", size = 4292101, upload-time = "2025-10-16T22:16:49.318Z" }, + { url = "https://files.pythonhosted.org/packages/02/62/67d382dfcb25d0a98ce73c11ed1a6fba5037a1a1d533dcbb7cab033a2636/uvloop-0.22.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b31dc2fccbd42adc73bc4e7cdbae4fc5086cf378979e53ca5d0301838c5682c6", size = 4114158, upload-time = "2025-10-16T22:16:50.517Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/f1171b4a882a5d13c8b7576f348acfe6074d72eaf52cccef752f748d4a9f/uvloop-0.22.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:93f617675b2d03af4e72a5333ef89450dfaa5321303ede6e67ba9c9d26878079", size = 4177360, upload-time = "2025-10-16T22:16:52.646Z" }, + { url = "https://files.pythonhosted.org/packages/79/7b/b01414f31546caf0919da80ad57cbfe24c56b151d12af68cee1b04922ca8/uvloop-0.22.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:37554f70528f60cad66945b885eb01f1bb514f132d92b6eeed1c90fd54ed6289", size = 1454790, upload-time = "2025-10-16T22:16:54.355Z" }, + { url = "https://files.pythonhosted.org/packages/d4/31/0bb232318dd838cad3fa8fb0c68c8b40e1145b32025581975e18b11fab40/uvloop-0.22.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b76324e2dc033a0b2f435f33eb88ff9913c156ef78e153fb210e03c13da746b3", size = 796783, upload-time = "2025-10-16T22:16:55.906Z" }, + { url = "https://files.pythonhosted.org/packages/42/38/c9b09f3271a7a723a5de69f8e237ab8e7803183131bc57c890db0b6bb872/uvloop-0.22.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:badb4d8e58ee08dad957002027830d5c3b06aea446a6a3744483c2b3b745345c", size = 4647548, upload-time = "2025-10-16T22:16:57.008Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/37/945b4ca0ac27e3dc4952642d4c900edd030b3da6c9634875af6e13ae80e5/uvloop-0.22.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b91328c72635f6f9e0282e4a57da7470c7350ab1c9f48546c0f2866205349d21", size = 4467065, upload-time = "2025-10-16T22:16:58.206Z" }, + { url = "https://files.pythonhosted.org/packages/97/cc/48d232f33d60e2e2e0b42f4e73455b146b76ebe216487e862700457fbf3c/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:daf620c2995d193449393d6c62131b3fbd40a63bf7b307a1527856ace637fe88", size = 4328384, upload-time = "2025-10-16T22:16:59.36Z" }, + { url = "https://files.pythonhosted.org/packages/e4/16/c1fd27e9549f3c4baf1dc9c20c456cd2f822dbf8de9f463824b0c0357e06/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6cde23eeda1a25c75b2e07d39970f3374105d5eafbaab2a4482be82f272d5a5e", size = 4296730, upload-time = "2025-10-16T22:17:00.744Z" }, +] + +[[package]] +name = "watchfiles" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" }, + { url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", 
size = 391769, upload-time = "2025-10-14T15:04:48.003Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" }, + { url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, upload-time = "2025-10-14T15:04:50.155Z" }, + { url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" }, + { url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = "2025-10-14T15:04:55.174Z" }, + { url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196, upload-time = "2025-10-14T15:04:56.22Z" }, + { url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = "2025-10-14T15:04:57.521Z" }, + { url = "https://files.pythonhosted.org/packages/0a/bf/95895e78dd75efe9a7f31733607f384b42eb5feb54bd2eb6ed57cc2e94f4/watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9", size = 272042, upload-time = "2025-10-14T15:04:59.046Z" }, + { url = "https://files.pythonhosted.org/packages/87/0a/90eb755f568de2688cb220171c4191df932232c20946966c27a59c400850/watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9", size = 288410, upload-time = "2025-10-14T15:05:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/36/76/f322701530586922fbd6723c4f91ace21364924822a8772c549483abed13/watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404", size = 278209, upload-time = "2025-10-14T15:05:01.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/f750b29225fe77139f7ae5de89d4949f5a99f934c65a1f1c0b248f26f747/watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18", size = 404321, upload-time = "2025-10-14T15:05:02.063Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/f9/f07a295cde762644aa4c4bb0f88921d2d141af45e735b965fb2e87858328/watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a", size = 391783, upload-time = "2025-10-14T15:05:03.052Z" }, + { url = "https://files.pythonhosted.org/packages/bc/11/fc2502457e0bea39a5c958d86d2cb69e407a4d00b85735ca724bfa6e0d1a/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219", size = 449279, upload-time = "2025-10-14T15:05:04.004Z" }, + { url = "https://files.pythonhosted.org/packages/e3/1f/d66bc15ea0b728df3ed96a539c777acfcad0eb78555ad9efcaa1274688f0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428", size = 459405, upload-time = "2025-10-14T15:05:04.942Z" }, + { url = "https://files.pythonhosted.org/packages/be/90/9f4a65c0aec3ccf032703e6db02d89a157462fbb2cf20dd415128251cac0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0", size = 488976, upload-time = "2025-10-14T15:05:05.905Z" }, + { url = "https://files.pythonhosted.org/packages/37/57/ee347af605d867f712be7029bb94c8c071732a4b44792e3176fa3c612d39/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150", size = 595506, upload-time = "2025-10-14T15:05:06.906Z" }, + { url = "https://files.pythonhosted.org/packages/a8/78/cc5ab0b86c122047f75e8fc471c67a04dee395daf847d3e59381996c8707/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae", size = 474936, upload-time = "2025-10-14T15:05:07.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/da/def65b170a3815af7bd40a3e7010bf6ab53089ef1b75d05dd5385b87cf08/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d", size = 456147, upload-time = "2025-10-14T15:05:09.138Z" }, + { url = "https://files.pythonhosted.org/packages/57/99/da6573ba71166e82d288d4df0839128004c67d2778d3b566c138695f5c0b/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b", size = 630007, upload-time = "2025-10-14T15:05:10.117Z" }, + { url = "https://files.pythonhosted.org/packages/a8/51/7439c4dd39511368849eb1e53279cd3454b4a4dbace80bab88feeb83c6b5/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374", size = 622280, upload-time = "2025-10-14T15:05:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/95/9c/8ed97d4bba5db6fdcdb2b298d3898f2dd5c20f6b73aee04eabe56c59677e/watchfiles-1.1.1-cp313-cp313-win32.whl", hash = "sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0", size = 272056, upload-time = "2025-10-14T15:05:12.156Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/c14e28429f744a260d8ceae18bf58c1d5fa56b50d006a7a9f80e1882cb0d/watchfiles-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42", size = 288162, upload-time = "2025-10-14T15:05:13.208Z" }, + { url = "https://files.pythonhosted.org/packages/dc/61/fe0e56c40d5cd29523e398d31153218718c5786b5e636d9ae8ae79453d27/watchfiles-1.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18", size = 277909, upload-time = "2025-10-14T15:05:14.49Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/42/e0a7d749626f1e28c7108a99fb9bf524b501bbbeb9b261ceecde644d5a07/watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da", size = 403389, upload-time = "2025-10-14T15:05:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/15/49/08732f90ce0fbbc13913f9f215c689cfc9ced345fb1bcd8829a50007cc8d/watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051", size = 389964, upload-time = "2025-10-14T15:05:16.85Z" }, + { url = "https://files.pythonhosted.org/packages/27/0d/7c315d4bd5f2538910491a0393c56bf70d333d51bc5b34bee8e68e8cea19/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e", size = 448114, upload-time = "2025-10-14T15:05:17.876Z" }, + { url = "https://files.pythonhosted.org/packages/c3/24/9e096de47a4d11bc4df41e9d1e61776393eac4cb6eb11b3e23315b78b2cc/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70", size = 460264, upload-time = "2025-10-14T15:05:18.962Z" }, + { url = "https://files.pythonhosted.org/packages/cc/0f/e8dea6375f1d3ba5fcb0b3583e2b493e77379834c74fd5a22d66d85d6540/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261", size = 487877, upload-time = "2025-10-14T15:05:20.094Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/df24cfc6424a12deb41503b64d42fbea6b8cb357ec62ca84a5a3476f654a/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620", size = 595176, upload-time = "2025-10-14T15:05:21.134Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/b5/853b6757f7347de4e9b37e8cc3289283fb983cba1ab4d2d7144694871d9c/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04", size = 473577, upload-time = "2025-10-14T15:05:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = "2025-10-14T15:05:23.348Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5", size = 404315, upload-time = "2025-10-14T15:05:26.501Z" }, + { url = "https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd", size = 390869, upload-time = "2025-10-14T15:05:27.649Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/7e/5643bfff5acb6539b18483128fdc0ef2cccc94a5b8fbda130c823e8ed636/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb", size = 449919, upload-time = "2025-10-14T15:05:28.701Z" }, + { url = "https://files.pythonhosted.org/packages/51/2e/c410993ba5025a9f9357c376f48976ef0e1b1aefb73b97a5ae01a5972755/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5", size = 460845, upload-time = "2025-10-14T15:05:30.064Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a4/2df3b404469122e8680f0fcd06079317e48db58a2da2950fb45020947734/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3", size = 489027, upload-time = "2025-10-14T15:05:31.064Z" }, + { url = "https://files.pythonhosted.org/packages/ea/84/4587ba5b1f267167ee715b7f66e6382cca6938e0a4b870adad93e44747e6/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33", size = 595615, upload-time = "2025-10-14T15:05:32.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/0f/c6988c91d06e93cd0bb3d4a808bcf32375ca1904609835c3031799e3ecae/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510", size = 474836, upload-time = "2025-10-14T15:05:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05", size = 455099, upload-time = 
"2025-10-14T15:05:34.189Z" }, + { url = "https://files.pythonhosted.org/packages/98/e0/8c9bdba88af756a2fce230dd365fab2baf927ba42cd47521ee7498fd5211/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6", size = 630626, upload-time = "2025-10-14T15:05:35.216Z" }, + { url = "https://files.pythonhosted.org/packages/2a/84/a95db05354bf2d19e438520d92a8ca475e578c647f78f53197f5a2f17aaf/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81", size = 622519, upload-time = "2025-10-14T15:05:36.259Z" }, + { url = "https://files.pythonhosted.org/packages/1d/ce/d8acdc8de545de995c339be67711e474c77d643555a9bb74a9334252bd55/watchfiles-1.1.1-cp314-cp314-win32.whl", hash = "sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b", size = 272078, upload-time = "2025-10-14T15:05:37.63Z" }, + { url = "https://files.pythonhosted.org/packages/c4/c9/a74487f72d0451524be827e8edec251da0cc1fcf111646a511ae752e1a3d/watchfiles-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a", size = 287664, upload-time = "2025-10-14T15:05:38.95Z" }, + { url = "https://files.pythonhosted.org/packages/df/b8/8ac000702cdd496cdce998c6f4ee0ca1f15977bba51bdf07d872ebdfc34c/watchfiles-1.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02", size = 277154, upload-time = "2025-10-14T15:05:39.954Z" }, + { url = "https://files.pythonhosted.org/packages/47/a8/e3af2184707c29f0f14b1963c0aace6529f9d1b8582d5b99f31bbf42f59e/watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21", size = 403820, upload-time = "2025-10-14T15:05:40.932Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/ec/e47e307c2f4bd75f9f9e8afbe3876679b18e1bcec449beca132a1c5ffb2d/watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5", size = 390510, upload-time = "2025-10-14T15:05:41.945Z" }, + { url = "https://files.pythonhosted.org/packages/d5/a0/ad235642118090f66e7b2f18fd5c42082418404a79205cdfca50b6309c13/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7", size = 448408, upload-time = "2025-10-14T15:05:43.385Z" }, + { url = "https://files.pythonhosted.org/packages/df/85/97fa10fd5ff3332ae17e7e40e20784e419e28521549780869f1413742e9d/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101", size = 458968, upload-time = "2025-10-14T15:05:44.404Z" }, + { url = "https://files.pythonhosted.org/packages/47/c2/9059c2e8966ea5ce678166617a7f75ecba6164375f3b288e50a40dc6d489/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44", size = 488096, upload-time = "2025-10-14T15:05:45.398Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/d90a9ec8ac309bc26db808a13e7bfc0e4e78b6fc051078a554e132e80160/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c", size = 596040, upload-time = "2025-10-14T15:05:46.502Z" }, + { url = "https://files.pythonhosted.org/packages/95/68/4e3479b20ca305cfc561db3ed207a8a1c745ee32bf24f2026a129d0ddb6e/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc", size = 473847, upload-time = "2025-10-14T15:05:47.484Z" }, + { url 
= "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" }, + { url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" }, +] + +[[package]] +name = "websockets" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, + { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = 
"2025-03-05T20:02:18.832Z" }, + { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, + { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, + { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", 
size = 182165, upload-time = "2025-03-05T20:02:30.291Z" }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, + { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, + { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" }, + { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" }, + { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" }, + { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = 
"2025-03-05T20:02:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, + { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, +] + +[[package]] +name = "win32-setctime" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867, upload-time = "2024-12-07T15:28:28.314Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" }, +] + +[[package]] +name = "yarl" +version = "1.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = 
"sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" }, + { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" }, + { url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940, upload-time = "2025-10-06T14:09:50.089Z" }, + { url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825, upload-time = "2025-10-06T14:09:52.142Z" }, + { url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", 
hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705, upload-time = "2025-10-06T14:09:54.128Z" }, + { url = "https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518, upload-time = "2025-10-06T14:09:55.762Z" }, + { url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267, upload-time = "2025-10-06T14:09:57.958Z" }, + { url = "https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797, upload-time = "2025-10-06T14:09:59.527Z" }, + { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535, upload-time = "2025-10-06T14:10:01.139Z" }, + { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324, upload-time = "2025-10-06T14:10:02.756Z" }, + { url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803, upload-time = "2025-10-06T14:10:04.552Z" }, + { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220, upload-time = "2025-10-06T14:10:06.489Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589, upload-time = "2025-10-06T14:10:09.254Z" }, + { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213, upload-time = "2025-10-06T14:10:11.369Z" }, + { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330, upload-time = "2025-10-06T14:10:13.112Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980, upload-time = "2025-10-06T14:10:14.601Z" }, + { url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424, upload-time = "2025-10-06T14:10:16.115Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821, upload-time = "2025-10-06T14:10:17.993Z" }, + { url = "https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601", size = 373243, upload-time = "2025-10-06T14:10:19.44Z" }, + { url = "https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a", size = 342361, upload-time = "2025-10-06T14:10:21.124Z" }, + { url = "https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df", size = 387036, upload-time = "2025-10-06T14:10:22.902Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2", size = 397671, upload-time = "2025-10-06T14:10:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b", size = 377059, upload-time = "2025-10-06T14:10:26.406Z" }, + { url = "https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273", size = 365356, upload-time = "2025-10-06T14:10:28.461Z" }, + { url = "https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a", size = 361331, upload-time = "2025-10-06T14:10:30.541Z" }, + { url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590, upload-time = "2025-10-06T14:10:33.352Z" }, + { url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316, upload-time = "2025-10-06T14:10:35.034Z" }, + { url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431, upload-time = "2025-10-06T14:10:37.76Z" }, + { url = "https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555, upload-time = "2025-10-06T14:10:39.649Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965, upload-time = "2025-10-06T14:10:41.313Z" }, + { url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205, upload-time = "2025-10-06T14:10:43.167Z" }, + { url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209, upload-time = "2025-10-06T14:10:44.643Z" }, + { url = "https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966, upload-time = "2025-10-06T14:10:46.554Z" }, + { url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312, upload-time = "2025-10-06T14:10:48.007Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f", size = 361967, upload-time = "2025-10-06T14:10:49.997Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0", size = 323949, upload-time = "2025-10-06T14:10:52.004Z" }, + { url = "https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e", size = 361818, upload-time = "2025-10-06T14:10:54.078Z" }, + { url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708", size = 372626, upload-time = "2025-10-06T14:10:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f", size = 341129, upload-time = "2025-10-06T14:10:57.985Z" }, + { url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d", size = 346776, upload-time = "2025-10-06T14:10:59.633Z" }, + { url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8", size = 334879, 
upload-time = "2025-10-06T14:11:01.454Z" }, + { url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996, upload-time = "2025-10-06T14:11:03.452Z" }, + { url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047, upload-time = "2025-10-06T14:11:05.115Z" }, + { url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947, upload-time = "2025-10-06T14:11:08.137Z" }, + { url = "https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943, upload-time = "2025-10-06T14:11:10.284Z" }, + { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715, upload-time = "2025-10-06T14:11:11.739Z" }, + { url = "https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857, upload-time = "2025-10-06T14:11:13.586Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/b3/e20ef504049f1a1c54a814b4b9bed96d1ac0e0610c3b4da178f87209db05/yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4", size = 140520, upload-time = "2025-10-06T14:11:15.465Z" }, + { url = "https://files.pythonhosted.org/packages/e4/04/3532d990fdbab02e5ede063676b5c4260e7f3abea2151099c2aa745acc4c/yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683", size = 93504, upload-time = "2025-10-06T14:11:17.106Z" }, + { url = "https://files.pythonhosted.org/packages/11/63/ff458113c5c2dac9a9719ac68ee7c947cb621432bcf28c9972b1c0e83938/yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b", size = 94282, upload-time = "2025-10-06T14:11:19.064Z" }, + { url = "https://files.pythonhosted.org/packages/a7/bc/315a56aca762d44a6aaaf7ad253f04d996cb6b27bad34410f82d76ea8038/yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e", size = 372080, upload-time = "2025-10-06T14:11:20.996Z" }, + { url = "https://files.pythonhosted.org/packages/3f/3f/08e9b826ec2e099ea6e7c69a61272f4f6da62cb5b1b63590bb80ca2e4a40/yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590", size = 338696, upload-time = "2025-10-06T14:11:22.847Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9f/90360108e3b32bd76789088e99538febfea24a102380ae73827f62073543/yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2", size = 387121, upload-time = "2025-10-06T14:11:24.889Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/92/ab8d4657bd5b46a38094cfaea498f18bb70ce6b63508fd7e909bd1f93066/yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da", size = 394080, upload-time = "2025-10-06T14:11:27.307Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e7/d8c5a7752fef68205296201f8ec2bf718f5c805a7a7e9880576c67600658/yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784", size = 372661, upload-time = "2025-10-06T14:11:29.387Z" }, + { url = "https://files.pythonhosted.org/packages/b6/2e/f4d26183c8db0bb82d491b072f3127fb8c381a6206a3a56332714b79b751/yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b", size = 364645, upload-time = "2025-10-06T14:11:31.423Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/428e5812e6b87cd00ee8e898328a62c95825bf37c7fa87f0b6bb2ad31304/yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694", size = 355361, upload-time = "2025-10-06T14:11:33.055Z" }, + { url = "https://files.pythonhosted.org/packages/ec/2a/249405fd26776f8b13c067378ef4d7dd49c9098d1b6457cdd152a99e96a9/yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d", size = 381451, upload-time = "2025-10-06T14:11:35.136Z" }, + { url = "https://files.pythonhosted.org/packages/67/a8/fb6b1adbe98cf1e2dd9fad71003d3a63a1bc22459c6e15f5714eb9323b93/yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd", size = 383814, upload-time = "2025-10-06T14:11:37.094Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/f9/3aa2c0e480fb73e872ae2814c43bc1e734740bb0d54e8cb2a95925f98131/yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da", size = 370799, upload-time = "2025-10-06T14:11:38.83Z" }, + { url = "https://files.pythonhosted.org/packages/50/3c/af9dba3b8b5eeb302f36f16f92791f3ea62e3f47763406abf6d5a4a3333b/yarl-1.22.0-cp314-cp314-win32.whl", hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2", size = 82990, upload-time = "2025-10-06T14:11:40.624Z" }, + { url = "https://files.pythonhosted.org/packages/ac/30/ac3a0c5bdc1d6efd1b41fa24d4897a4329b3b1e98de9449679dd327af4f0/yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79", size = 88292, upload-time = "2025-10-06T14:11:42.578Z" }, + { url = "https://files.pythonhosted.org/packages/df/0a/227ab4ff5b998a1b7410abc7b46c9b7a26b0ca9e86c34ba4b8d8bc7c63d5/yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33", size = 82888, upload-time = "2025-10-06T14:11:44.863Z" }, + { url = "https://files.pythonhosted.org/packages/06/5e/a15eb13db90abd87dfbefb9760c0f3f257ac42a5cac7e75dbc23bed97a9f/yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1", size = 146223, upload-time = "2025-10-06T14:11:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/18/82/9665c61910d4d84f41a5bf6837597c89e665fa88aa4941080704645932a9/yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca", size = 95981, upload-time = "2025-10-06T14:11:48.845Z" }, + { url = "https://files.pythonhosted.org/packages/5d/9a/2f65743589809af4d0a6d3aa749343c4b5f4c380cc24a8e94a3c6625a808/yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = 
"sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53", size = 97303, upload-time = "2025-10-06T14:11:50.897Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ab/5b13d3e157505c43c3b43b5a776cbf7b24a02bc4cccc40314771197e3508/yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c", size = 361820, upload-time = "2025-10-06T14:11:52.549Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/242a5ef4677615cf95330cfc1b4610e78184400699bdda0acb897ef5e49a/yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf", size = 323203, upload-time = "2025-10-06T14:11:54.225Z" }, + { url = "https://files.pythonhosted.org/packages/8c/96/475509110d3f0153b43d06164cf4195c64d16999e0c7e2d8a099adcd6907/yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face", size = 363173, upload-time = "2025-10-06T14:11:56.069Z" }, + { url = "https://files.pythonhosted.org/packages/c9/66/59db471aecfbd559a1fd48aedd954435558cd98c7d0da8b03cc6c140a32c/yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b", size = 373562, upload-time = "2025-10-06T14:11:58.783Z" }, + { url = "https://files.pythonhosted.org/packages/03/1f/c5d94abc91557384719da10ff166b916107c1b45e4d0423a88457071dd88/yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486", size = 339828, upload-time = "2025-10-06T14:12:00.686Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/97/aa6a143d3afba17b6465733681c70cf175af89f76ec8d9286e08437a7454/yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138", size = 347551, upload-time = "2025-10-06T14:12:02.628Z" }, + { url = "https://files.pythonhosted.org/packages/43/3c/45a2b6d80195959239a7b2a8810506d4eea5487dce61c2a3393e7fc3c52e/yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a", size = 334512, upload-time = "2025-10-06T14:12:04.871Z" }, + { url = "https://files.pythonhosted.org/packages/86/a0/c2ab48d74599c7c84cb104ebd799c5813de252bea0f360ffc29d270c2caa/yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529", size = 352400, upload-time = "2025-10-06T14:12:06.624Z" }, + { url = "https://files.pythonhosted.org/packages/32/75/f8919b2eafc929567d3d8411f72bdb1a2109c01caaab4ebfa5f8ffadc15b/yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093", size = 357140, upload-time = "2025-10-06T14:12:08.362Z" }, + { url = "https://files.pythonhosted.org/packages/cf/72/6a85bba382f22cf78add705d8c3731748397d986e197e53ecc7835e76de7/yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c", size = 341473, upload-time = "2025-10-06T14:12:10.994Z" }, + { url = "https://files.pythonhosted.org/packages/35/18/55e6011f7c044dc80b98893060773cefcfdbf60dfefb8cb2f58b9bacbd83/yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e", size = 89056, upload-time = "2025-10-06T14:12:13.317Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/0f0dccb6e59a9e7f122c5afd43568b1d31b8ab7dda5f1b01fb5c7025c9a9/yarl-1.22.0-cp314-cp314t-win_amd64.whl", 
hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27", size = 96292, upload-time = "2025-10-06T14:12:15.398Z" }, + { url = "https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1", size = 85171, upload-time = "2025-10-06T14:12:16.935Z" }, + { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" }, +]