diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..dab9a4e17afd2ef39d90ccb0b40ef2786fe77422 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,35 +1,35 @@ -*.7z filter=lfs diff=lfs merge=lfs -text -*.arrow filter=lfs diff=lfs merge=lfs -text -*.bin filter=lfs diff=lfs merge=lfs -text -*.bz2 filter=lfs diff=lfs merge=lfs -text -*.ckpt filter=lfs diff=lfs merge=lfs -text -*.ftz filter=lfs diff=lfs merge=lfs -text -*.gz filter=lfs diff=lfs merge=lfs -text -*.h5 filter=lfs diff=lfs merge=lfs -text -*.joblib filter=lfs diff=lfs merge=lfs -text -*.lfs.* filter=lfs diff=lfs merge=lfs -text -*.mlmodel filter=lfs diff=lfs merge=lfs -text -*.model filter=lfs diff=lfs merge=lfs -text -*.msgpack filter=lfs diff=lfs merge=lfs -text -*.npy filter=lfs diff=lfs merge=lfs -text -*.npz filter=lfs diff=lfs merge=lfs -text -*.onnx filter=lfs diff=lfs merge=lfs -text -*.ot filter=lfs diff=lfs merge=lfs -text -*.parquet filter=lfs diff=lfs merge=lfs -text -*.pb filter=lfs diff=lfs merge=lfs -text -*.pickle filter=lfs diff=lfs merge=lfs -text -*.pkl filter=lfs diff=lfs merge=lfs -text -*.pt filter=lfs diff=lfs merge=lfs -text -*.pth filter=lfs diff=lfs merge=lfs -text -*.rar filter=lfs diff=lfs merge=lfs -text -*.safetensors filter=lfs diff=lfs merge=lfs -text -saved_model/**/* filter=lfs diff=lfs merge=lfs -text -*.tar.* filter=lfs diff=lfs merge=lfs -text -*.tar filter=lfs diff=lfs merge=lfs -text -*.tflite filter=lfs diff=lfs merge=lfs -text -*.tgz filter=lfs diff=lfs merge=lfs -text -*.wasm filter=lfs diff=lfs merge=lfs -text -*.xz filter=lfs diff=lfs merge=lfs -text -*.zip filter=lfs diff=lfs merge=lfs -text -*.zst filter=lfs diff=lfs merge=lfs -text -*tfevents* filter=lfs diff=lfs merge=lfs -text +*.7z filter=lfs diff=lfs merge=lfs -text +*.arrow filter=lfs diff=lfs merge=lfs -text +*.bin filter=lfs diff=lfs merge=lfs -text +*.bz2 filter=lfs diff=lfs merge=lfs -text +*.ckpt filter=lfs diff=lfs merge=lfs -text +*.ftz filter=lfs diff=lfs merge=lfs -text +*.gz filter=lfs diff=lfs merge=lfs -text +*.h5 filter=lfs diff=lfs merge=lfs -text +*.joblib filter=lfs diff=lfs merge=lfs -text +*.lfs.* filter=lfs diff=lfs merge=lfs -text +*.mlmodel filter=lfs diff=lfs merge=lfs -text +*.model filter=lfs diff=lfs merge=lfs -text +*.msgpack filter=lfs diff=lfs merge=lfs -text +*.npy filter=lfs diff=lfs merge=lfs -text +*.npz filter=lfs diff=lfs merge=lfs -text +*.onnx filter=lfs diff=lfs merge=lfs -text +*.ot filter=lfs diff=lfs merge=lfs -text +*.parquet filter=lfs diff=lfs merge=lfs -text +*.pb filter=lfs diff=lfs merge=lfs -text +*.pickle filter=lfs diff=lfs merge=lfs -text +*.pkl filter=lfs diff=lfs merge=lfs -text +*.pt filter=lfs diff=lfs merge=lfs -text +*.pth filter=lfs diff=lfs merge=lfs -text +*.rar filter=lfs diff=lfs merge=lfs -text +*.safetensors filter=lfs diff=lfs merge=lfs -text +saved_model/**/* filter=lfs diff=lfs merge=lfs -text +*.tar.* filter=lfs diff=lfs merge=lfs -text +*.tar filter=lfs diff=lfs merge=lfs -text +*.tflite filter=lfs diff=lfs merge=lfs -text +*.tgz filter=lfs diff=lfs merge=lfs -text +*.wasm filter=lfs diff=lfs merge=lfs -text +*.xz filter=lfs diff=lfs merge=lfs -text +*.zip filter=lfs diff=lfs merge=lfs -text +*.zst filter=lfs diff=lfs merge=lfs -text +*tfevents* filter=lfs diff=lfs merge=lfs -text diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..208fd5be90dc8eca65f4b6c498630d38f845ec49 --- /dev/null +++ b/.gitignore @@ -0,0 +1,14 @@ +# Python-generated files +__pycache__/ +*.py[oc] 
+build/ +dist/ +wheels/ +*.egg-info + +# Virtual environments +.venv +.env + +logs/ +packets_docs/ \ No newline at end of file diff --git a/.python-version b/.python-version new file mode 100644 index 0000000000000000000000000000000000000000..10587343b8ac7872997947fe365be6db94781c2f --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.13 diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..2d48aa0faa593c930167f4543215e38aeab615f3 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,30 @@ +# Use the official Python 3.13 image +FROM python:3.13-slim + +# Set the working directory +WORKDIR /app + +# Set environment variables +ENV PYTHONUNBUFFERED=1 +ENV PYTHONDONTWRITEBYTECODE=1 + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + g++ \ + && rm -rf /var/lib/apt/lists/* + +# Copy project files +COPY . . + +# Install uv (a faster Python package manager) +RUN pip install uv + +# Install dependencies with uv +RUN uv pip install --system -r pyproject.toml + +# Expose the port +EXPOSE 8000 + +# Startup command +CMD ["python", "server.py"] \ No newline at end of file diff --git a/README.md b/README.md index ac8177b38035afd43f5afab58e0c6c7e3e14de54..f9599feee2b240464eebdd15ecaccccdfb3441c2 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,11 @@ ---- -title: Warp -emoji: 🔥 -colorFrom: yellow -colorTo: yellow -sdk: docker -pinned: false ---- - -Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference +--- +title: VkjlwnvbioWBV +emoji: 📚 +colorFrom: indigo +colorTo: gray +sdk: docker +pinned: false +app_port: 8000 +--- + +Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference diff --git a/proto/attachment.proto b/proto/attachment.proto new file mode 100644 index 0000000000000000000000000000000000000000..a8ffecd352b690bf2753e70718b0d47a699bdf71 --- /dev/null +++ b/proto/attachment.proto @@ -0,0 +1,57 @@ +syntax = "proto3"; + +package warp.multi_agent.v1; + +import "options.proto"; + +option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1"; + +message Attachment { + oneof value { + string plain_text = 1; + ExecutedShellCommand executed_shell_command = 2; + RunningShellCommand running_shell_command = 3; + DriveObject drive_object = 4; + } +} + +message ExecutedShellCommand { + string command = 1; + string output = 2; + int32 exit_code = 3; +} + +message RunningShellCommand { + string command = 1; + LongRunningShellCommandSnapshot snapshot = 2; +} + +message LongRunningShellCommandSnapshot { + string output = 1; +} + +message DriveObject { + string uid = 1; + + oneof object_payload { + Workflow workflow = 2; + Notebook notebook = 3; + GenericStringObject generic_string_object = 4; + } +} + +message Workflow { + string name = 1; + string description = 2; + string command = 3; +} + +message Notebook { + string title = 1; + string content = 2; +} + +message GenericStringObject { + string payload = 1; + string object_type = 2; +} diff --git a/proto/citations.proto b/proto/citations.proto new file mode 100644 index 0000000000000000000000000000000000000000..3a16614737a4b2dbfb3a2623bc3ff89665d05d54 --- /dev/null +++ b/proto/citations.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package warp.multi_agent.v1; + +option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1"; + +message Citation { + string document_id = 1; + DocumentType document_type = 2; +} + +enum DocumentType { + WARP_DRIVE_WORKFLOW = 0; + WARP_DRIVE_NOTEBOOK = 1; + WARP_DRIVE_ENV_VAR = 2; + RULE = 3; + WARP_DOCUMENTATION = 4; + WEB_PAGE = 5; + UNKNOWN = 6; +} diff --git a/proto/debug.proto b/proto/debug.proto new
file mode 100644 index 0000000000000000000000000000000000000000..ad53fd355c830d462cf285b424b08271761d9082 --- /dev/null +++ b/proto/debug.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package warp.multi_agent.v1; + +import "task.proto"; + +option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1"; + +message TaskList { + repeated Task tasks = 1; + repeated string ordered_message_ids = 2; +} diff --git a/proto/file_content.proto b/proto/file_content.proto new file mode 100644 index 0000000000000000000000000000000000000000..1c3e86f2cd066fbb2e22952b2f3af1effd6036ab --- /dev/null +++ b/proto/file_content.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; + +package warp.multi_agent.v1; + +import "options.proto"; + +option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1"; + +message FileContentLineRange { + uint32 start = 1; + uint32 end = 2; +} + +message FileContent { + string file_path = 1; + string content = 2; + FileContentLineRange line_range = 3; +} diff --git a/proto/input_context.proto b/proto/input_context.proto new file mode 100644 index 0000000000000000000000000000000000000000..9c78d9fad7c96536f3d09456354291c97c775681 --- /dev/null +++ b/proto/input_context.proto @@ -0,0 +1,64 @@ +syntax = "proto3"; + +package warp.multi_agent.v1; + +import "google/protobuf/timestamp.proto"; +import "file_content.proto"; +import "attachment.proto"; +import "options.proto"; + +option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1"; + +message InputContext { + Directory directory = 1; + message Directory { + string pwd = 1; + string home = 2; + bool pwd_file_symbols_indexed = 3; + } + + OperatingSystem operating_system = 2; + message OperatingSystem { + string platform = 1; + string distribution = 2; + } + + Shell shell = 3; + message Shell { + string name = 1; + string version = 2; + } + + google.protobuf.Timestamp current_time = 4; + + repeated Codebase codebases = 8; + message Codebase { + string name = 1; + string path = 2; + } + + repeated ProjectRules project_rules = 10; + message ProjectRules { + string root_path = 1; + repeated FileContent active_rule_files = 2; + repeated string additional_rule_file_paths = 3; + } + + repeated ExecutedShellCommand executed_shell_commands = 5 [deprecated = true]; + + repeated SelectedText selected_text = 6; + message SelectedText { + string text = 1; + } + + repeated Image images = 7; + message Image { + bytes data = 1; + string mime_type = 2; + } + + repeated File files = 9; + message File { + FileContent content = 1; + } +} diff --git a/proto/options.proto b/proto/options.proto new file mode 100644 index 0000000000000000000000000000000000000000..9e6d66c412161b4c9515bddd87a56459103dc7cc --- /dev/null +++ b/proto/options.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package warp.multi_agent.v1; + +import "google/protobuf/descriptor.proto"; + +option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1"; + +extend google.protobuf.FieldOptions { + bool sensitive = 50000; + bool internal = 50001; +} diff --git a/proto/request.proto b/proto/request.proto new file mode 100644 index 0000000000000000000000000000000000000000..c6bc799f8f33690525b365bd50f84e22ec2cf8c8 --- /dev/null +++ b/proto/request.proto @@ -0,0 +1,173 @@ +syntax = "proto3"; + +package warp.multi_agent.v1; + +import "google/protobuf/struct.proto"; +import "input_context.proto"; +import "attachment.proto"; +import "options.proto"; +import "suggestions.proto"; +import "task.proto"; + +option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1"; + +message 
Request { + TaskContext task_context = 1; + message TaskContext { + repeated Task tasks = 1; + string active_task_id = 2; + } + + Input input = 2; + message Input { + InputContext context = 1; + + oneof type { + UserInputs user_inputs = 6; + QueryWithCannedResponse query_with_canned_response = 4; + AutoCodeDiffQuery auto_code_diff_query = 5; + ResumeConversation resume_conversation = 7; + InitProjectRules init_project_rules = 8; + UserQuery user_query = 2 [deprecated = true]; + ToolCallResult tool_call_result = 3 [deprecated = true]; + } + + message UserQuery { + string query = 1; + map referenced_attachments = 2; + } + + message UserInputs { + repeated UserInput inputs = 1; + message UserInput { + oneof input { + UserQuery user_query = 1; + ToolCallResult tool_call_result = 2; + } + } + } + + message ToolCallResult { + string tool_call_id = 1; + + oneof result { + RunShellCommandResult run_shell_command = 2; + ReadFilesResult read_files = 3; + SearchCodebaseResult search_codebase = 4; + ApplyFileDiffsResult apply_file_diffs = 5; + SuggestPlanResult suggest_plan = 6; + SuggestCreatePlanResult suggest_create_plan = 7; + GrepResult grep = 8; + FileGlobResult file_glob = 9; + RefineResult refine = 10; + ReadMCPResourceResult read_mcp_resource = 11; + CallMCPToolResult call_mcp_tool = 12; + WriteToLongRunningShellCommandResult write_to_long_running_shell_command = 13; + SuggestNewConversationResult suggest_new_conversation = 14; + FileGlobV2Result file_glob_v2 = 15; + } + + message RefineResult { + UserQuery user_query = 1; + } + } + + message QueryWithCannedResponse { + string query = 1; + + oneof type { + Install install = 2; + Code code = 3; + Deploy deploy = 4; + SomethingElse something_else = 5; + CustomOnboardingRequest custom_onboarding_request = 6; + AgenticOnboardingKickoff agentic_onboarding_kickoff = 7; + } + + message Install { + + } + + message Code { + + } + + message Deploy { + + } + + message SomethingElse { + + } + + message CustomOnboardingRequest { + + } + + message AgenticOnboardingKickoff { + + } + } + + message AutoCodeDiffQuery { + string query = 1; + } + + message ResumeConversation { + + } + + message InitProjectRules { + + } + } + + Settings settings = 3; + message Settings { + ModelConfig model_config = 1; + message ModelConfig { + string base = 1; + string planning = 2; + string coding = 3; + } + + bool rules_enabled = 2; + bool web_context_retrieval_enabled = 3; + bool supports_parallel_tool_calls = 4; + bool use_anthropic_text_editor_tools = 5; + bool planning_enabled = 6; + bool warp_drive_context_enabled = 7; + bool supports_create_files = 8; + repeated ToolType supported_tools = 9; + bool supports_long_running_commands = 10; + bool should_preserve_file_content_in_history = 11; + bool supports_todos_ui = 12; + bool supports_linked_code_blocks = 13; + } + + Metadata metadata = 4; + message Metadata { + string conversation_id = 1; + map logging = 2; + } + + Suggestions existing_suggestions = 5; + + MCPContext mcp_context = 6; + message MCPContext { + repeated MCPResource resources = 1; + message MCPResource { + string uri = 1; + string name = 2; + string description = 3; + string mime_type = 4; + } + + repeated MCPTool tools = 2; + message MCPTool { + string name = 1; + string description = 2; + google.protobuf.Struct input_schema = 3; + } + } +} diff --git a/proto/response.proto b/proto/response.proto new file mode 100644 index 0000000000000000000000000000000000000000..dcfb7a51a3e064fdbecb9d1cd6b5f554978c792e --- /dev/null +++ b/proto/response.proto @@ -0,0 
+1,159 @@ +syntax = "proto3"; + +package warp.multi_agent.v1; + +import "google/protobuf/field_mask.proto"; +import "options.proto"; +import "suggestions.proto"; +import "task.proto"; + +option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1"; + +message ResponseEvent { + oneof type { + StreamInit init = 1; + ClientActions client_actions = 2; + StreamFinished finished = 3; + } + + message StreamInit { + string conversation_id = 1; + string request_id = 2; + } + + message ClientActions { + repeated ClientAction actions = 1; + } + + message StreamFinished { + repeated TokenUsage token_usage = 8; + message TokenUsage { + string model_id = 1; + uint32 total_input = 2; + uint32 output = 3; + uint32 input_cache_read = 4; + uint32 input_cache_write = 5; + float cost_in_cents = 6; + } + + bool should_refresh_model_config = 9; + + RequestCost request_cost = 10; + message RequestCost { + float exact = 1; + } + + ContextWindowInfo context_window_info = 11; + message ContextWindowInfo { + float context_window_usage = 1; + bool summarized = 2; + } + + oneof reason { + Other other = 1; + Done done = 2; + ReachedMaxTokenLimit max_token_limit = 3; + QuotaLimit quota_limit = 4; + ContextWindowExceeded context_window_exceeded = 5; + LLMUnavailable llm_unavailable = 6; + InternalError internal_error = 7; + } + + message Other { + + } + + message Done { + + } + + message ReachedMaxTokenLimit { + + } + + message QuotaLimit { + + } + + message ContextWindowExceeded { + + } + + message LLMUnavailable { + + } + + message InternalError { + string message = 1; + } + } +} + +message ClientAction { + oneof action { + CreateTask create_task = 1; + UpdateTaskStatus update_task_status = 2; + AddMessagesToTask add_messages_to_task = 3; + UpdateTaskMessage update_task_message = 4; + AppendToMessageContent append_to_message_content = 5; + Suggestions show_suggestions = 6; + UpdateTaskSummary update_task_summary = 7; + UpdateTaskDescription update_task_description = 8; + BeginTransaction begin_transaction = 9; + CommitTransaction commit_transaction = 10; + RollbackTransaction rollback_transaction = 11; + StartNewConversation start_new_conversation = 12; + } + + message CreateTask { + Task task = 1; + } + + message UpdateTaskStatus { + string task_id = 1; + TaskStatus task_status = 2; + } + + message UpdateTaskDescription { + string task_id = 1; + string description = 2; + } + + message AddMessagesToTask { + string task_id = 1; + repeated Message messages = 2; + } + + message UpdateTaskMessage { + string task_id = 3; + Message message = 1; + google.protobuf.FieldMask mask = 2; + } + + message AppendToMessageContent { + string task_id = 3; + Message message = 1; + google.protobuf.FieldMask mask = 2; + } + + message UpdateTaskSummary { + string task_id = 1; + string summary = 2; + } + + message BeginTransaction { + + } + + message CommitTransaction { + + } + + message RollbackTransaction { + + } + + message StartNewConversation { + string start_from_message_id = 1; + } +} diff --git a/proto/suggestions.proto b/proto/suggestions.proto new file mode 100644 index 0000000000000000000000000000000000000000..d120159879ed59599eb21b80fdbff2a661aa12c7 --- /dev/null +++ b/proto/suggestions.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; + +package warp.multi_agent.v1; + +option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1"; + +message Suggestions { + repeated SuggestedRule rules = 1; + repeated SuggestedAgentModeWorkflow workflows = 2; +} + +message SuggestedRule { + string name = 1; + string content = 2; + string 
logging_id = 3; +} + +message SuggestedAgentModeWorkflow { + string name = 1; + string prompt = 2; + string logging_id = 3; +} diff --git a/proto/task.proto b/proto/task.proto new file mode 100644 index 0000000000000000000000000000000000000000..a95e0442ce78deec32838fecd50a9e0161d385b7 --- /dev/null +++ b/proto/task.proto @@ -0,0 +1,503 @@ +syntax = "proto3"; + +package warp.multi_agent.v1; + +import "google/protobuf/empty.proto"; +import "google/protobuf/descriptor.proto"; +import "google/protobuf/struct.proto"; +import "citations.proto"; +import "input_context.proto"; +import "attachment.proto"; +import "file_content.proto"; +import "options.proto"; +import "todo.proto"; + +option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1"; + +message Task { + string id = 1; + string description = 2; + + Dependencies dependencies = 3; + message Dependencies { + string parent_task_id = 1; + repeated string sibling_dependencies = 2; + } + + TaskStatus status = 4; + repeated Message messages = 5; + string summary = 6; +} + +message TaskStatus { + oneof status { + Pending pending = 1; + InProgress in_progress = 2; + Blocked blocked = 3; + Succeeded succeeded = 4; + Failed failed = 5; + Aborted aborted = 6; + } + + message Pending { + + } + + message InProgress { + + } + + message Blocked { + + } + + message Succeeded { + + } + + message Failed { + + } + + message Aborted { + + } +} + +message Message { + string id = 1; + string task_id = 11; + string server_message_data = 7; + repeated Citation citations = 8; + + oneof message { + UserQuery user_query = 2; + AgentOutput agent_output = 3; + ToolCall tool_call = 4; + ToolCallResult tool_call_result = 5; + ServerEvent server_event = 6; + SystemQuery system_query = 9; + UpdateTodos update_todos = 10; + } + + message UserQuery { + string query = 1; + InputContext context = 2; + map referenced_attachments = 3; + } + + message SystemQuery { + InputContext context = 2; + + oneof type { + AutoCodeDiff auto_code_diff = 1; + ResumeConversation resume_conversation = 3; + } + } + + message AutoCodeDiff { + string query = 1; + } + + message ResumeConversation { + + } + + message AgentOutput { + string text = 1; + string reasoning = 2; + } + + message ToolCall { + string tool_call_id = 1; + + oneof tool { + RunShellCommand run_shell_command = 2; + SearchCodebase search_codebase = 3; + Server server = 4; + ReadFiles read_files = 5; + ApplyFileDiffs apply_file_diffs = 6; + SuggestPlan suggest_plan = 7; + SuggestCreatePlan suggest_create_plan = 8; + Grep grep = 9; + FileGlob file_glob = 10 [deprecated = true]; + ReadMCPResource read_mcp_resource = 11; + CallMCPTool call_mcp_tool = 12; + WriteToLongRunningShellCommand write_to_long_running_shell_command = 13; + SuggestNewConversation suggest_new_conversation = 14; + FileGlobV2 file_glob_v2 = 15; + } + + message Server { + string payload = 1; + } + + message RunShellCommand { + string command = 1; + bool is_read_only = 2; + bool uses_pager = 3; + repeated Citation citations = 4; + bool is_risky = 5; + } + + message WriteToLongRunningShellCommand { + bytes input = 1; + } + + message SuggestNewConversation { + string message_id = 1; + } + + message ReadFiles { + repeated File files = 1; + message File { + string name = 1; + repeated FileContentLineRange line_ranges = 2; + } + } + + message SearchCodebase { + string query = 1; + repeated string path_filters = 2; + string codebase_path = 3; + } + + message ApplyFileDiffs { + string summary = 1; + + repeated FileDiff diffs = 2; + message FileDiff { + string 
file_path = 1; + string search = 2; + string replace = 3; + } + + repeated NewFile new_files = 3; + message NewFile { + string file_path = 1; + string content = 2; + } + } + + message SuggestPlan { + string summary = 1; + repeated Task proposed_tasks = 2; + } + + message SuggestCreatePlan { + + } + + message Grep { + repeated string queries = 1; + string path = 2; + } + + message FileGlob { + repeated string patterns = 1; + string path = 2; + } + + message FileGlobV2 { + repeated string patterns = 1; + string search_dir = 2; + int32 max_matches = 3; + int32 max_depth = 4; + int32 min_depth = 5; + } + + message ReadMCPResource { + string uri = 1; + } + + message CallMCPTool { + string name = 1; + google.protobuf.Struct args = 2; + } + } + + message ToolCallResult { + string tool_call_id = 1; + InputContext context = 11; + + oneof result { + RunShellCommandResult run_shell_command = 2; + SearchCodebaseResult search_codebase = 3; + ServerResult server = 4; + ReadFilesResult read_files = 5; + ApplyFileDiffsResult apply_file_diffs = 6; + SuggestPlanResult suggest_plan = 7; + SuggestCreatePlanResult suggest_create_plan = 8; + GrepResult grep = 9; + FileGlobResult file_glob = 10 [deprecated = true]; + RefineResult refine = 13; + google.protobuf.Empty cancel = 14; + ReadMCPResourceResult read_mcp_resource = 15; + CallMCPToolResult call_mcp_tool = 16; + WriteToLongRunningShellCommandResult write_to_long_running_shell_command = 17; + SuggestNewConversationResult suggest_new_conversation = 18; + FileGlobV2Result file_glob_v2 = 19; + } + + message ServerResult { + string serialized_result = 1; + } + + message RefineResult { + UserQuery user_query = 1; + } + } + + message ServerEvent { + string payload = 1; + } + + message UpdateTodos { + oneof operation { + CreateTodoList create_todo_list = 1; + UpdatePendingTodos update_pending_todos = 2; + MarkTodosCompleted mark_todos_completed = 3; + } + } +} + +message RunShellCommandResult { + string command = 3; + string output = 1 [deprecated = true]; + int32 exit_code = 2 [deprecated = true]; + + oneof result { + LongRunningShellCommandSnapshot long_running_command_snapshot = 4; + ShellCommandFinished command_finished = 5; + } +} + +message ReadFilesResult { + oneof result { + Success success = 1; + Error error = 2; + } + + message Success { + repeated FileContent files = 1; + } + + message Error { + string message = 1; + } +} + +message SearchCodebaseResult { + oneof result { + Success success = 1; + Error error = 2; + } + + message Success { + repeated FileContent files = 1; + } + + message Error { + string message = 1; + } +} + +message ApplyFileDiffsResult { + oneof result { + Success success = 1; + Error error = 2; + } + + message Success { + repeated FileContent updated_files = 1 [deprecated = true]; + + repeated UpdatedFileContent updated_files_v2 = 2; + message UpdatedFileContent { + FileContent file = 1; + bool was_edited_by_user = 2; + } + } + + message Error { + string message = 1; + } +} + +message SuggestCreatePlanResult { + bool accepted = 1; +} + +message SuggestPlanResult { + oneof result { + google.protobuf.Empty accepted = 1; + UserEditedPlan user_edited_plan = 2; + } + + message UserEditedPlan { + string plan_text = 1; + } +} + +message GrepResult { + oneof result { + Success success = 1; + Error error = 2; + } + + message Success { + repeated GrepFileMatch matched_files = 1; + message GrepFileMatch { + string file_path = 1; + + repeated GrepLineMatch matched_lines = 2; + message GrepLineMatch { + uint32 line_number = 1; + } + } + } + + 
message Error { + string message = 1; + } +} + +message FileGlobResult { + oneof result { + Success success = 1; + Error error = 2; + } + + message Success { + string matched_files = 1; + } + + message Error { + string message = 1; + } +} + +message FileGlobV2Result { + oneof result { + Success success = 1; + Error error = 2; + } + + message Success { + repeated FileGlobMatch matched_files = 1; + message FileGlobMatch { + string file_path = 1; + } + } + + message Error { + string message = 1; + } +} + +message MCPResourceContent { + string uri = 1; + + oneof content_type { + Text text = 2; + Binary binary = 3; + } + + message Text { + string content = 1; + string mime_type = 2; + } + + message Binary { + bytes data = 1; + string mime_type = 2; + } +} + +message ReadMCPResourceResult { + oneof result { + Success success = 1; + Error error = 2; + } + + message Success { + repeated MCPResourceContent contents = 1; + } + + message Error { + string message = 1; + } +} + +message WriteToLongRunningShellCommandResult { + oneof result { + LongRunningShellCommandSnapshot long_running_command_snapshot = 1; + ShellCommandFinished command_finished = 2; + } +} + +message SuggestNewConversationResult { + oneof result { + Accepted accepted = 1; + Rejected rejected = 2; + } + + message Accepted { + string message_id = 1; + } + + message Rejected { + + } +} + +message ShellCommandFinished { + string output = 1; + int32 exit_code = 2; +} + +message CallMCPToolResult { + oneof result { + Success success = 1; + Error error = 2; + } + + message Success { + repeated Result results = 1; + message Result { + oneof result { + Text text = 1; + Image image = 2; + MCPResourceContent resource = 3; + } + + message Text { + string text = 1; + } + + message Image { + bytes data = 1; + string mime_type = 2; + } + } + } + + message Error { + string message = 1; + } +} + +enum ToolType { + RUN_SHELL_COMMAND = 0; + SEARCH_CODEBASE = 1; + READ_FILES = 2; + APPLY_FILE_DIFFS = 3; + SUGGEST_PLAN = 4; + SUGGEST_CREATE_PLAN = 5; + GREP = 6; + FILE_GLOB = 7; + READ_MCP_RESOURCE = 8; + CALL_MCP_TOOL = 9; + WRITE_TO_LONG_RUNNING_SHELL_COMMAND = 10; + SUGGEST_NEW_CONVERSATION = 11; + FILE_GLOB_V2 = 12; +} diff --git a/proto/todo.proto b/proto/todo.proto new file mode 100644 index 0000000000000000000000000000000000000000..337ab8ea4ddb12c3416d05f249fbeef432995a6a --- /dev/null +++ b/proto/todo.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; + +package warp.multi_agent.v1; + +option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1"; + +message TodoItem { + string id = 1; + string title = 2; + string description = 3; +} + +message CreateTodoList { + repeated TodoItem initial_todos = 1; +} + +message UpdatePendingTodos { + repeated TodoItem updated_pending_todos = 1; +} + +message MarkTodosCompleted { + repeated string todo_ids = 1; +} diff --git a/protobuf2openai/__init__.py b/protobuf2openai/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..155ec4aa74862735b3895f116c1002833d91accb --- /dev/null +++ b/protobuf2openai/__init__.py @@ -0,0 +1,3 @@ +# Package for converting between Warp protobuf JSON and OpenAI Chat Completions API + +__all__ = [] \ No newline at end of file diff --git a/protobuf2openai/app.py b/protobuf2openai/app.py new file mode 100644 index
0000000000000000000000000000000000000000..decc498a7f19cb5e6656c7d2e31aa5fb6db3555a --- /dev/null +++ b/protobuf2openai/app.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +import asyncio +import json +from contextlib import asynccontextmanager + +from fastapi import FastAPI + +from .logging import logger + +from .config import WARMUP_INIT_RETRIES, WARMUP_INIT_DELAY_S +from .bridge import initialize_once +from .router import router + + +@asynccontextmanager +async def lifespan(app: FastAPI): + """Application lifespan management.""" + # Runs on startup + try: + logger.info("[OpenAI Compat] Server starting with direct module integration") + logger.info("[OpenAI Compat] Endpoints: GET /healthz, GET /v1/models, POST /v1/chat/completions") + except Exception: + pass + + # HTTP health check removed; direct module calls are used now + logger.info("[OpenAI Compat] Skipping HTTP health check; using direct module integration") + + try: + await initialize_once() + except Exception as e: + logger.warning(f"[OpenAI Compat] Warmup initialize_once on startup failed: {e}") + + yield + # Runs on shutdown (if anything is needed) + + +app = FastAPI( + title="OpenAI Chat Completions (Warp bridge) - Streaming", + lifespan=lifespan +) +app.include_router(router) \ No newline at end of file diff --git a/protobuf2openai/bridge.py b/protobuf2openai/bridge.py new file mode 100644 index 0000000000000000000000000000000000000000..95c542aef3162134a75bda8a02e7fe44642123c5 --- /dev/null +++ b/protobuf2openai/bridge.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +import json +import time +import uuid +from typing import Any, Dict, Optional + +from .logging import logger + +from .config import ( + WARMUP_INIT_RETRIES, + WARMUP_INIT_DELAY_S, + WARMUP_REQUEST_RETRIES, + WARMUP_REQUEST_DELAY_S, +) +from .packets import packet_template +from .state import STATE, ensure_tool_ids + +# Import functions from the warp2protobuf module instead of making HTTP calls +from warp2protobuf.core.protobuf_utils import dict_to_protobuf_bytes +from warp2protobuf.warp.api_client import send_protobuf_to_warp_api_parsed +from warp2protobuf.core.schema_sanitizer import sanitize_mcp_input_schema_in_packet + + +async def bridge_send_stream(packet: Dict[str, Any]) -> Dict[str, Any]: + """Call the warp2protobuf module directly instead of over HTTP.""" + try: + logger.info("[OpenAI Compat] Bridge request (direct call)") + logger.info("[OpenAI Compat] Bridge request payload: %s", json.dumps(packet, ensure_ascii=False)) + + # Apply schema sanitization + wrapped = {"json_data": packet} + wrapped = sanitize_mcp_input_schema_in_packet(wrapped) + actual_data = wrapped.get("json_data", packet) + + # Convert to protobuf + protobuf_bytes = dict_to_protobuf_bytes(actual_data, "warp.multi_agent.v1.Request") + logger.info(f"[OpenAI Compat] JSON encoded to protobuf successfully: {len(protobuf_bytes)} bytes") + + # Call the API client directly + response_text, conversation_id, task_id, parsed_events = await send_protobuf_to_warp_api_parsed(protobuf_bytes) + + # Build the response + result = { + "response": response_text, + "conversation_id": conversation_id, + "task_id": task_id, + "request_size": len(protobuf_bytes), + "response_size": len(response_text), + "message_type": "warp.multi_agent.v1.Request", + "parsed_events": parsed_events, + "events_count": len(parsed_events), + "events_summary": {} + } + + if parsed_events: + event_type_counts = {} + for event in parsed_events: + event_data = event.get("parsed_data", {}) + event_type = event.get("event_type", "UNKNOWN") + event_type_counts[event_type] = event_type_counts.get(event_type, 0) + 1 + result["events_summary"] = event_type_counts + + logger.info(f"[OpenAI Compat] Bridge call succeeded, response length: {len(response_text)} chars, event count: {len(parsed_events)}") + return result + + except Exception as e: +
import traceback + error_details = { + "error": str(e), + "error_type": type(e).__name__, + "traceback": traceback.format_exc(), + } + logger.error(f"[OpenAI Compat] Bridge call failed: {error_details}") + raise Exception(f"bridge_error: {e}") + + +async def initialize_once() -> None: + """Initialization; uses direct module calls now, no HTTP health check needed.""" + if STATE.conversation_id: + return + + ensure_tool_ids() + + first_task_id = STATE.baseline_task_id or str(uuid.uuid4()) + STATE.baseline_task_id = first_task_id + + # HTTP health check removed since module functions are now called directly + logger.info("[OpenAI Compat] Using direct module calls; skipping HTTP health check") + + pkt = packet_template() + pkt["task_context"]["active_task_id"] = first_task_id + pkt["input"]["user_inputs"]["inputs"].append({"user_query": {"query": "warmup"}}) + + last_exc: Optional[Exception] = None + for attempt in range(1, WARMUP_REQUEST_RETRIES + 1): + try: + resp = await bridge_send_stream(pkt) + break + except Exception as e: + last_exc = e + logger.warning(f"[OpenAI Compat] Warmup attempt {attempt}/{WARMUP_REQUEST_RETRIES} failed: {e}") + if attempt < WARMUP_REQUEST_RETRIES: + time.sleep(WARMUP_REQUEST_DELAY_S) + else: + raise + + STATE.conversation_id = resp.get("conversation_id") or STATE.conversation_id + ret_task_id = resp.get("task_id") + if isinstance(ret_task_id, str) and ret_task_id: + STATE.baseline_task_id = ret_task_id \ No newline at end of file diff --git a/protobuf2openai/config.py b/protobuf2openai/config.py new file mode 100644 index 0000000000000000000000000000000000000000..e01a3399da4db328747b8b1c1486d33d7651adb9 --- /dev/null +++ b/protobuf2openai/config.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +import os + +# HTTP-related config removed; direct module calls are used now +# BRIDGE_BASE_URL = os.getenv("WARP_BRIDGE_URL", "http://127.0.0.1:8000") +# FALLBACK_BRIDGE_URLS = [ +# BRIDGE_BASE_URL, +# "http://127.0.0.1:8000", +# ] + +# Retry settings kept for warmup +WARMUP_INIT_RETRIES = int(os.getenv("WARP_COMPAT_INIT_RETRIES", "10")) +WARMUP_INIT_DELAY_S = float(os.getenv("WARP_COMPAT_INIT_DELAY", "0.5")) +WARMUP_REQUEST_RETRIES = int(os.getenv("WARP_COMPAT_WARMUP_RETRIES", "3")) +WARMUP_REQUEST_DELAY_S = float(os.getenv("WARP_COMPAT_WARMUP_DELAY", "1.5")) \ No newline at end of file diff --git a/protobuf2openai/helpers.py b/protobuf2openai/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..080ce50b735f00129cca0a487156da870e2f2973 --- /dev/null +++ b/protobuf2openai/helpers.py @@ -0,0 +1,54 @@ +from __future__ import annotations + +from typing import Any, Dict, List + + +def _get(d: Dict[str, Any], *names: str) -> Any: + for n in names: + if isinstance(d, dict) and n in d: + return d[n] + return None + + +def normalize_content_to_list(content: Any) -> List[Dict[str, Any]]: + segments: List[Dict[str, Any]] = [] + try: + if isinstance(content, str): + return [{"type": "text", "text": content}] + if isinstance(content, list): + for item in content: + if isinstance(item, dict): + t = item.get("type") or ("text" if isinstance(item.get("text"), str) else None) + if t == "text" and isinstance(item.get("text"), str): + segments.append({"type": "text", "text": item.get("text")}) + else: + seg: Dict[str, Any] = {} + if t: + seg["type"] = t + if isinstance(item.get("text"), str): + seg["text"] = item.get("text") + if seg: + segments.append(seg) + return segments + if isinstance(content, dict): + if isinstance(content.get("text"), str): + return [{"type": "text", "text": content.get("text")}] + except Exception: + return [] + return [] + + +def segments_to_text(segments: List[Dict[str, Any]]) -> str: + parts: List[str] = [] + for seg in
segments: + if isinstance(seg, dict) and seg.get("type") == "text" and isinstance(seg.get("text"), str): + parts.append(seg.get("text") or "") + return "".join(parts) + + +def segments_to_warp_results(segments: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + results: List[Dict[str, Any]] = [] + for seg in segments: + if isinstance(seg, dict) and seg.get("type") == "text" and isinstance(seg.get("text"), str): + results.append({"text": {"text": seg.get("text")}}) + return results \ No newline at end of file diff --git a/protobuf2openai/logging.py b/protobuf2openai/logging.py new file mode 100644 index 0000000000000000000000000000000000000000..2690b511f54d742feb5009b4e46d602a7ac494d0 --- /dev/null +++ b/protobuf2openai/logging.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Local logging for protobuf2openai package to avoid cross-package dependencies. +""" +import logging + +_logger = logging.getLogger("protobuf2openai") +_logger.setLevel(logging.INFO) + +# Remove existing handlers to prevent duplication +for h in _logger.handlers[:]: + _logger.removeHandler(h) + +console_handler = logging.StreamHandler() +console_handler.setLevel(logging.INFO) + +fmt = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s') +console_handler.setFormatter(fmt) + +_logger.addHandler(console_handler) + +logger = _logger \ No newline at end of file diff --git a/protobuf2openai/models.py b/protobuf2openai/models.py new file mode 100644 index 0000000000000000000000000000000000000000..68215bf34e83158384570bbfe11b393dfb8b89df --- /dev/null +++ b/protobuf2openai/models.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +from typing import Any, Dict, List, Optional, Union +from pydantic import BaseModel, Field + + +class ChatMessage(BaseModel): + role: str + content: Optional[Union[str, List[Dict[str, Any]]]] = "" + tool_call_id: Optional[str] = None + tool_calls: Optional[List[Dict[str, Any]]] = None + name: Optional[str] = None + + +class OpenAIFunctionDef(BaseModel): + name: str + description: Optional[str] = None + parameters: Optional[Dict[str, Any]] = None + + +class OpenAITool(BaseModel): + type: str = Field("function", description="Only 'function' is supported") + function: OpenAIFunctionDef + + +class ChatCompletionsRequest(BaseModel): + model: Optional[str] = None + messages: List[ChatMessage] + stream: Optional[bool] = False + tools: Optional[List[OpenAITool]] = None + tool_choice: Optional[Any] = None \ No newline at end of file diff --git a/protobuf2openai/packets.py b/protobuf2openai/packets.py new file mode 100644 index 0000000000000000000000000000000000000000..724cedd9e6ad17ff8efdc3707b15eeee31dfefbd --- /dev/null +++ b/protobuf2openai/packets.py @@ -0,0 +1,137 @@ +from __future__ import annotations + +import uuid +from typing import Any, Dict, List, Optional +import json + +from .state import STATE, ensure_tool_ids +from .helpers import normalize_content_to_list, segments_to_text, segments_to_warp_results +from .models import ChatMessage + + +def packet_template() -> Dict[str, Any]: + return { + "task_context": {"active_task_id": ""}, + "input": {"context": {}, "user_inputs": {"inputs": []}}, + "settings": { + "model_config": { + "base": "claude-4.1-opus", + "planning": "gpt-5 (high reasoning)", + "coding": "auto", + }, + "rules_enabled": False, + "web_context_retrieval_enabled": False, + "supports_parallel_tool_calls": False, + "planning_enabled": False, + "warp_drive_context_enabled": False, + "supports_create_files": 
False, + "use_anthropic_text_editor_tools": False, + "supports_long_running_commands": False, + "should_preserve_file_content_in_history": False, + "supports_todos_ui": False, + "supports_linked_code_blocks": False, + "supported_tools": [9], + }, + "metadata": {"logging": {"is_autodetected_user_query": True, "entrypoint": "USER_INITIATED"}}, + } + + +def map_history_to_warp_messages(history: List[ChatMessage], task_id: str, system_prompt_for_last_user: Optional[str] = None, attach_to_history_last_user: bool = False) -> List[Dict[str, Any]]: + ensure_tool_ids() + msgs: List[Dict[str, Any]] = [] + # Insert server tool_call preamble as first message + msgs.append({ + "id": (STATE.tool_message_id or str(uuid.uuid4())), + "task_id": task_id, + "tool_call": { + "tool_call_id": (STATE.tool_call_id or str(uuid.uuid4())), + "server": {"payload": "IgIQAQ=="}, + }, + }) + + # Determine the last input message index (either last 'user' or last 'tool' with tool_call_id) + last_input_index: Optional[int] = None + for idx in range(len(history) - 1, -1, -1): + _m = history[idx] + if _m.role == "user": + last_input_index = idx + break + if _m.role == "tool" and _m.tool_call_id: + last_input_index = idx + break + + for i, m in enumerate(history): + mid = str(uuid.uuid4()) + # Skip the final input message; it will be placed into input.user_inputs + if (last_input_index is not None) and (i == last_input_index): + continue + if m.role == "user": + user_query_obj: Dict[str, Any] = {"query": segments_to_text(normalize_content_to_list(m.content))} + msgs.append({"id": mid, "task_id": task_id, "user_query": user_query_obj}) + elif m.role == "assistant": + _assistant_text = segments_to_text(normalize_content_to_list(m.content)) + if _assistant_text: + msgs.append({"id": mid, "task_id": task_id, "agent_output": {"text": _assistant_text}}) + for tc in (m.tool_calls or []): + msgs.append({ + "id": str(uuid.uuid4()), + "task_id": task_id, + "tool_call": { + "tool_call_id": tc.get("id") or str(uuid.uuid4()), + "call_mcp_tool": { + "name": (tc.get("function", {}) or {}).get("name", ""), + "args": (json.loads((tc.get("function", {}) or {}).get("arguments", "{}")) if isinstance((tc.get("function", {}) or {}).get("arguments"), str) else (tc.get("function", {}) or {}).get("arguments", {})) or {}, + }, + }, + }) + elif m.role == "tool": + # Preserve tool_result adjacency by placing it directly in task_context + if m.tool_call_id: + msgs.append({ + "id": str(uuid.uuid4()), + "task_id": task_id, + "tool_call_result": { + "tool_call_id": m.tool_call_id, + "call_mcp_tool": { + "success": { + "results": segments_to_warp_results(normalize_content_to_list(m.content)) + } + }, + }, + }) + return msgs + + +def attach_user_and_tools_to_inputs(packet: Dict[str, Any], history: List[ChatMessage], system_prompt_text: Optional[str]) -> None: + # Use the final post-reorder message as input (user or tool result) + if not history: + assert False, "post-reorder history must contain at least one message" + last = history[-1] + if last.role == "user": + user_query_payload: Dict[str, Any] = {"query": segments_to_text(normalize_content_to_list(last.content))} + if system_prompt_text: + user_query_payload["referenced_attachments"] = { + "SYSTEM_PROMPT": { + "plain_text": f"""you are not allowed to call the following tools: - `read_files` +- `write_files` +- `run_commands` +- `list_files` +- `str_replace_editor` +- `ask_followup_question` +- `attempt_completion`{system_prompt_text}""" + } + } + packet["input"]["user_inputs"]["inputs"].append({"user_query": user_query_payload}) + return
+ if last.role == "tool" and last.tool_call_id: + packet["input"]["user_inputs"]["inputs"].append({ + "tool_call_result": { + "tool_call_id": last.tool_call_id, + "call_mcp_tool": { + "success": {"results": segments_to_warp_results(normalize_content_to_list(last.content))} + }, + } + }) + return + # If neither, assert to catch protocol violations + assert False, "the last post-reorder message must be a user or tool result" \ No newline at end of file diff --git a/protobuf2openai/reorder.py b/protobuf2openai/reorder.py new file mode 100644 index 0000000000000000000000000000000000000000..27c9f577b75e58daf2f202e449a3f085fa37162c --- /dev/null +++ b/protobuf2openai/reorder.py @@ -0,0 +1,96 @@ +from __future__ import annotations + +from typing import Dict, List, Optional +from .models import ChatMessage +from .helpers import normalize_content_to_list, segments_to_text + + +def reorder_messages_for_anthropic(history: List[ChatMessage]) -> List[ChatMessage]: + if not history: + return [] + + expanded: List[ChatMessage] = [] + for m in history: + if m.role == "user": + items = normalize_content_to_list(m.content) + if isinstance(m.content, list) and len(items) > 1: + for seg in items: + if isinstance(seg, dict) and seg.get("type") == "text" and isinstance(seg.get("text"), str): + expanded.append(ChatMessage(role="user", content=seg.get("text"))) + else: + expanded.append(ChatMessage(role="user", content=[seg] if isinstance(seg, dict) else seg)) + else: + expanded.append(m) + elif m.role == "assistant" and m.tool_calls and len(m.tool_calls) > 1: + _assistant_text = segments_to_text(normalize_content_to_list(m.content)) + if _assistant_text: + expanded.append(ChatMessage(role="assistant", content=_assistant_text)) + for tc in (m.tool_calls or []): + expanded.append(ChatMessage(role="assistant", content=None, tool_calls=[tc])) + else: + expanded.append(m) + + last_input_tool_id: Optional[str] = None + last_input_is_tool = False + for m in reversed(expanded): + if m.role == "tool" and m.tool_call_id: + last_input_tool_id = m.tool_call_id + last_input_is_tool = True + break + if m.role == "user": + break + + tool_results_by_id: Dict[str, ChatMessage] = {} + assistant_tc_ids: set[str] = set() + for m in expanded: + if m.role == "tool" and m.tool_call_id and m.tool_call_id not in tool_results_by_id: + tool_results_by_id[m.tool_call_id] = m + if m.role == "assistant" and m.tool_calls: + try: + for tc in (m.tool_calls or []): + _id = (tc or {}).get("id") + if isinstance(_id, str) and _id: + assistant_tc_ids.add(_id) + except Exception: + pass + + result: List[ChatMessage] = [] + trailing_assistant_msg: Optional[ChatMessage] = None + for m in expanded: + if m.role == "tool": + # Preserve unmatched tool results inline + if not m.tool_call_id or m.tool_call_id not in assistant_tc_ids: + result.append(m) + if m.tool_call_id: + tool_results_by_id.pop(m.tool_call_id, None) + continue + if m.role == "assistant" and m.tool_calls: + ids: List[str] = [] + try: + for tc in (m.tool_calls or []): + _id = (tc or {}).get("id") + if isinstance(_id, str) and _id: + ids.append(_id) + except Exception: + pass + + if last_input_is_tool and last_input_tool_id and (last_input_tool_id in ids): + if trailing_assistant_msg is None: + trailing_assistant_msg = m + continue + + result.append(m) + for _id in ids: + tr = tool_results_by_id.pop(_id, None) + if tr is not None: + result.append(tr) + continue + result.append(m) + + if last_input_is_tool and last_input_tool_id and trailing_assistant_msg is not None: + result.append(trailing_assistant_msg) + tr =
tool_results_by_id.pop(last_input_tool_id, None) + if tr is not None: + result.append(tr) + + return result \ No newline at end of file diff --git a/protobuf2openai/router.py b/protobuf2openai/router.py new file mode 100644 index 0000000000000000000000000000000000000000..ea5d786637ea143bc96abeb099185b0053d59bcb --- /dev/null +++ b/protobuf2openai/router.py @@ -0,0 +1,207 @@ +from __future__ import annotations + +import asyncio +import json +import time +import uuid +from typing import Any, Dict, List, Optional + +from fastapi import APIRouter, HTTPException +from fastapi.responses import StreamingResponse + +from .logging import logger + +from .models import ChatCompletionsRequest, ChatMessage +from .reorder import reorder_messages_for_anthropic +from .helpers import normalize_content_to_list, segments_to_text +from .packets import packet_template, map_history_to_warp_messages, attach_user_and_tools_to_inputs +from .state import STATE +from .bridge import initialize_once, bridge_send_stream +from .sse_transform import stream_openai_sse + +# Import the warp2protobuf module instead of making HTTP calls +from warp2protobuf.config.models import get_all_unique_models +from warp2protobuf.core.auth import refresh_jwt_if_needed + + +router = APIRouter() + + +@router.get("/") +def root(): + return {"service": "OpenAI Chat Completions (Warp bridge) - Streaming", "status": "ok"} + + +@router.get("/healthz") +def health_check(): + return {"status": "ok", "service": "OpenAI Chat Completions (Warp bridge) - Streaming"} + + +@router.get("/v1/models") +def list_models(): + """OpenAI-compatible model listing. Direct call to get_all_unique_models.""" + try: + models = get_all_unique_models() + return {"object": "list", "data": models} + except Exception as e: + logger.error(f"❌ Failed to fetch model list: {e}") + raise HTTPException(500, f"Failed to fetch model list: {str(e)}") + + +@router.post("/v1/chat/completions") +async def chat_completions(req: ChatCompletionsRequest): + try: + await initialize_once() + except Exception as e: + logger.warning(f"[OpenAI Compat] initialize_once failed or skipped: {e}") + + if not req.messages: + raise HTTPException(400, "messages must not be empty") + + # 1) Log the raw Chat Completions request body as received + try: + logger.info("[OpenAI Compat] Received Chat Completions request body (raw): %s", json.dumps(req.dict(), ensure_ascii=False)) + except Exception: + logger.info("[OpenAI Compat] Failed to serialize received Chat Completions request body (raw)") + + # Reorder messages + history: List[ChatMessage] = reorder_messages_for_anthropic(list(req.messages)) + + # 2) Log the reordered request body (post-reorder) + try: + logger.info("[OpenAI Compat] Reordered request body (post-reorder): %s", json.dumps({ + **req.dict(), + "messages": [m.dict() for m in history] + }, ensure_ascii=False)) + except Exception: + logger.info("[OpenAI Compat] Failed to serialize reordered request body (post-reorder)") + + system_prompt_text: Optional[str] = None + try: + chunks: List[str] = [] + for _m in history: + if _m.role == "system": + _txt = segments_to_text(normalize_content_to_list(_m.content)) + if _txt.strip(): + chunks.append(_txt) + if chunks: + system_prompt_text = "\n\n".join(chunks) + except Exception: + system_prompt_text = None + + task_id = STATE.baseline_task_id or str(uuid.uuid4()) + packet = packet_template() + packet["task_context"] = { + "tasks": [{ + "id": task_id, + "description": "", + "status": {"in_progress": {}}, + "messages": map_history_to_warp_messages(history, task_id, None, False), + }], + "active_task_id": task_id, + } + + packet.setdefault("settings", {}).setdefault("model_config", {}) + packet["settings"]["model_config"]["base"] = req.model or
packet["settings"]["model_config"].get("base") or "claude-4.1-opus" + + if STATE.conversation_id: + packet.setdefault("metadata", {})["conversation_id"] = STATE.conversation_id + + attach_user_and_tools_to_inputs(packet, history, system_prompt_text) + + if req.tools: + mcp_tools: List[Dict[str, Any]] = [] + for t in req.tools: + if t.type != "function" or not t.function: + continue + mcp_tools.append({ + "name": t.function.name, + "description": t.function.description or "", + "input_schema": t.function.parameters or {}, + }) + if mcp_tools: + packet.setdefault("mcp_context", {}).setdefault("tools", []).extend(mcp_tools) + + # 3) 打印转换成 protobuf JSON 的请求体(发送到 bridge 的数据包) + try: + logger.info("[OpenAI Compat] 转换成 Protobuf JSON 的请求体: %s", json.dumps(packet, ensure_ascii=False)) + except Exception: + logger.info("[OpenAI Compat] 转换成 Protobuf JSON 的请求体 序列化失败") + + created_ts = int(time.time()) + completion_id = str(uuid.uuid4()) + model_id = req.model or "warp-default" + + if req.stream: + async def _agen(): + async for chunk in stream_openai_sse(packet, completion_id, created_ts, model_id): + yield chunk + return StreamingResponse(_agen(), media_type="text/event-stream", headers={"Cache-Control": "no-cache", "Connection": "keep-alive"}) + + try: + bridge_resp = await bridge_send_stream(packet) + except Exception as e: + # 如果是429错误(配额用尽),尝试刷新JWT + if "429" in str(e): + try: + await refresh_jwt_if_needed() + logger.warning("[OpenAI Compat] Tried JWT refresh after 429 error") + bridge_resp = await bridge_send_stream(packet) + except Exception as _e: + logger.warning("[OpenAI Compat] JWT refresh attempt failed after 429: %s", _e) + raise HTTPException(429, f"bridge_error: {e}") + else: + raise HTTPException(502, f"bridge_error: {e}") + + try: + STATE.conversation_id = bridge_resp.get("conversation_id") or STATE.conversation_id + ret_task_id = bridge_resp.get("task_id") + if isinstance(ret_task_id, str) and ret_task_id: + STATE.baseline_task_id = ret_task_id + except Exception: + pass + + tool_calls: List[Dict[str, Any]] = [] + try: + parsed_events = bridge_resp.get("parsed_events", []) or [] + for ev in parsed_events: + evd = ev.get("parsed_data") or ev.get("raw_data") or {} + client_actions = evd.get("client_actions") or evd.get("clientActions") or {} + actions = client_actions.get("actions") or client_actions.get("Actions") or [] + for action in actions: + add_msgs = action.get("add_messages_to_task") or action.get("addMessagesToTask") or {} + if not isinstance(add_msgs, dict): + continue + for message in add_msgs.get("messages", []) or []: + tc = message.get("tool_call") or message.get("toolCall") or {} + call_mcp = tc.get("call_mcp_tool") or tc.get("callMcpTool") or {} + if isinstance(call_mcp, dict) and call_mcp.get("name"): + try: + args_obj = call_mcp.get("args", {}) or {} + args_str = json.dumps(args_obj, ensure_ascii=False) + except Exception: + args_str = "{}" + tool_calls.append({ + "id": tc.get("tool_call_id") or str(uuid.uuid4()), + "type": "function", + "function": {"name": call_mcp.get("name"), "arguments": args_str}, + }) + except Exception: + pass + + if tool_calls: + msg_payload = {"role": "assistant", "content": "", "tool_calls": tool_calls} + finish_reason = "tool_calls" + else: + response_text = bridge_resp.get("response", "") + msg_payload = {"role": "assistant", "content": response_text} + finish_reason = "stop" + + final = { + "id": completion_id, + "object": "chat.completion", + "created": created_ts, + "model": model_id, + "choices": [{"index": 0, "message": msg_payload, 
"finish_reason": finish_reason}], + } + return final \ No newline at end of file diff --git a/protobuf2openai/sse_transform.py b/protobuf2openai/sse_transform.py new file mode 100644 index 0000000000000000000000000000000000000000..a8e34f8f4554fd8864ac5d973f33af82903504e2 --- /dev/null +++ b/protobuf2openai/sse_transform.py @@ -0,0 +1,297 @@ +from __future__ import annotations + +import json +import uuid +from typing import Any, AsyncGenerator, Dict + +from .logging import logger +from .helpers import _get + +# 导入内部模块,替代HTTP调用 +from warp2protobuf.core.protobuf_utils import dict_to_protobuf_bytes +from warp2protobuf.warp.api_client import send_protobuf_to_warp_api_parsed +import httpx +import os +from warp2protobuf.core.protobuf_utils import protobuf_to_dict +from warp2protobuf.core.auth import get_valid_jwt, acquire_anonymous_access_token, refresh_jwt_if_needed +from warp2protobuf.config.settings import WARP_URL as CONFIG_WARP_URL +from warp2protobuf.core.schema_sanitizer import sanitize_mcp_input_schema_in_packet + + +def _get_event_type(event_data: dict) -> str: + """获取事件类型""" + if "init" in event_data: + return "INIT" + elif "client_actions" in event_data or "clientActions" in event_data: + return "CLIENT_ACTIONS" + elif "finished" in event_data: + return "FINISHED" + else: + return "UNKNOWN_EVENT" + + +async def stream_openai_sse(packet: Dict[str, Any], completion_id: str, created_ts: int, model_id: str) -> AsyncGenerator[str, None]: + """使用直接模块调用实现的SSE流处理,替代HTTP调用""" + try: + # 发出首个OpenAI格式的SSE事件 + first = { + "id": completion_id, + "object": "chat.completion.chunk", + "created": created_ts, + "model": model_id, + "choices": [{"index": 0, "delta": {"role": "assistant"}}], + } + logger.info("[OpenAI Compat] 转换后的 SSE(emit): %s", json.dumps(first, ensure_ascii=False)) + yield f"data: {json.dumps(first, ensure_ascii=False)}\n\n" + + # 应用schema清理 + wrapped = {"json_data": packet} + wrapped = sanitize_mcp_input_schema_in_packet(wrapped) + actual_data = wrapped.get("json_data", packet) + + # 转换为protobuf + protobuf_bytes = dict_to_protobuf_bytes(actual_data, "warp.multi_agent.v1.Request") + logger.info(f"[OpenAI Compat] JSON编码为protobuf成功: {len(protobuf_bytes)} 字节") + + tool_calls_emitted = False + + try: + # 直接处理SSE流,实时返回事件 + warp_url = CONFIG_WARP_URL + + verify_opt = True + insecure_env = os.getenv("WARP_INSECURE_TLS", "").lower() + if insecure_env in ("1", "true", "yes"): + verify_opt = False + logger.warning("TLS verification disabled via WARP_INSECURE_TLS for OpenAI SSE streaming") + + async with httpx.AsyncClient(http2=True, timeout=httpx.Timeout(60.0), verify=verify_opt, trust_env=True) as client: + # 最多尝试两次:第一次失败且为401/429时尝试刷新token并重试一次 + for attempt in range(2): + jwt = await get_valid_jwt() if attempt == 0 else jwt + headers = { + "accept": "text/event-stream", + "content-type": "application/x-protobuf", + "x-warp-client-version": "v0.2025.08.06.08.12.stable_02", + "x-warp-os-category": "Windows", + "x-warp-os-name": "Windows", + "x-warp-os-version": "11 (26100)", + "authorization": f"Bearer {jwt}", + "content-length": str(len(protobuf_bytes)), + } + + async with client.stream("POST", warp_url, headers=headers, content=protobuf_bytes) as response: + if response.status_code != 200: + error_text = await response.aread() + error_content = error_text.decode('utf-8') if error_text else "No error content" + + # 检测JWT token无效错误并在第一次失败时尝试刷新token + if response.status_code == 401 and attempt == 0: + logger.warning("WARP API 返回 401 (token无效, OpenAI SSE)。尝试刷新JWT token并重试一次…") + try: + 
+
+
+async def stream_openai_sse(packet: Dict[str, Any], completion_id: str, created_ts: int, model_id: str) -> AsyncGenerator[str, None]:
+    """SSE stream handling implemented with direct module calls instead of HTTP round-trips."""
+    try:
+        # Emit the first OpenAI-format SSE event
+        first = {
+            "id": completion_id,
+            "object": "chat.completion.chunk",
+            "created": created_ts,
+            "model": model_id,
+            "choices": [{"index": 0, "delta": {"role": "assistant"}}],
+        }
+        logger.info("[OpenAI Compat] Converted SSE (emit): %s", json.dumps(first, ensure_ascii=False))
+        yield f"data: {json.dumps(first, ensure_ascii=False)}\n\n"
+
+        # Apply schema sanitization
+        wrapped = {"json_data": packet}
+        wrapped = sanitize_mcp_input_schema_in_packet(wrapped)
+        actual_data = wrapped.get("json_data", packet)
+
+        # Convert to protobuf
+        protobuf_bytes = dict_to_protobuf_bytes(actual_data, "warp.multi_agent.v1.Request")
+        logger.info(f"[OpenAI Compat] Encoded JSON to protobuf: {len(protobuf_bytes)} bytes")
+
+        tool_calls_emitted = False
+
+        try:
+            # Process the SSE stream directly, yielding events in real time
+            warp_url = CONFIG_WARP_URL
+
+            verify_opt = True
+            insecure_env = os.getenv("WARP_INSECURE_TLS", "").lower()
+            if insecure_env in ("1", "true", "yes"):
+                verify_opt = False
+                logger.warning("TLS verification disabled via WARP_INSECURE_TLS for OpenAI SSE streaming")
+
+            async with httpx.AsyncClient(http2=True, timeout=httpx.Timeout(60.0), verify=verify_opt, trust_env=True) as client:
+                # At most two attempts: on a first failure with 401/429, refresh the token and retry once
+                for attempt in range(2):
+                    if attempt == 0:
+                        jwt = await get_valid_jwt()
+                    headers = {
+                        "accept": "text/event-stream",
+                        "content-type": "application/x-protobuf",
+                        "x-warp-client-version": "v0.2025.08.06.08.12.stable_02",
+                        "x-warp-os-category": "Windows",
+                        "x-warp-os-name": "Windows",
+                        "x-warp-os-version": "11 (26100)",
+                        "authorization": f"Bearer {jwt}",
+                        "content-length": str(len(protobuf_bytes)),
+                    }
+
+                    async with client.stream("POST", warp_url, headers=headers, content=protobuf_bytes) as response:
+                        if response.status_code != 200:
+                            error_text = await response.aread()
+                            error_content = error_text.decode('utf-8') if error_text else "No error content"
+
+                            # On a first 401 (invalid JWT), try refreshing the token and retry
+                            if response.status_code == 401 and attempt == 0:
+                                logger.warning("WARP API returned 401 (invalid token, OpenAI SSE). Refreshing JWT and retrying once…")
+                                try:
+                                    refresh_success = await refresh_jwt_if_needed()
+                                    if refresh_success:
+                                        jwt = await get_valid_jwt()
+                                        logger.info("JWT refresh succeeded; retrying API call (OpenAI SSE)")
+                                        continue
+                                    else:
+                                        logger.warning("JWT refresh failed; requesting an anonymous token (OpenAI SSE)")
+                                        new_jwt = await acquire_anonymous_access_token()
+                                        if new_jwt:
+                                            jwt = new_jwt
+                                            continue
+                                except Exception as e:
+                                    logger.warning(f"JWT refresh raised (OpenAI SSE): {e}")
+
+                            # On a first 429 (quota exhausted), drop the current token and fetch a new one
+                            elif response.status_code == 429 and attempt == 0 and (
+                                ("No remaining quota" in error_content) or ("No AI requests remaining" in error_content)
+                            ):
+                                logger.warning("WARP API returned 429 (quota exhausted, OpenAI SSE). Removing current token and fetching a new one…")
+                                try:
+                                    from warp2protobuf.core.auth import remove_token_from_pool
+                                    remove_token_from_pool(jwt)
+                                    new_jwt = await get_valid_jwt()
+                                    if new_jwt and new_jwt != jwt:
+                                        jwt = new_jwt
+                                        continue
+                                except Exception as e:
+                                    logger.warning(f"Failed to obtain a new token (OpenAI SSE): {e}")
+
+                            # Any other error, or a second failure
+                            logger.error(f"WARP API HTTP ERROR (OpenAI SSE) {response.status_code}: {error_content}")
+                            raise Exception(f"Warp API Error (HTTP {response.status_code}): {error_content}")
+
+                        logger.info(f"✅ Received HTTP {response.status_code} response (OpenAI SSE)")
+                        logger.info("Processing SSE event stream in real time...")
+
+                        import re as _re
+                        import base64 as _b64
+                        def _parse_payload_bytes(data_str: str):
+                            # Payloads may arrive hex-encoded or base64url-encoded; try both
+                            s = _re.sub(r"\s+", "", data_str or "")
+                            if not s:
+                                return None
+                            if _re.fullmatch(r"[0-9a-fA-F]+", s or ""):
+                                try:
+                                    return bytes.fromhex(s)
+                                except Exception:
+                                    pass
+                            pad = "=" * ((4 - (len(s) % 4)) % 4)
+                            try:
+                                return _b64.urlsafe_b64decode(s + pad)
+                            except Exception:
+                                try:
+                                    return _b64.b64decode(s + pad)
+                                except Exception:
+                                    return None
+
+                        current_data = ""
+                        event_count = 0
+
+                        async for line in response.aiter_lines():
+                            if line.startswith("data:"):
+                                payload = line[5:].strip()
+                                if not payload:
+                                    continue
+                                if payload == "[DONE]":
+                                    logger.info("Received [DONE] marker; stopping")
+                                    break
+                                current_data += payload
+                                continue
+
+                            if (line.strip() == "") and current_data:
+                                raw_bytes = _parse_payload_bytes(current_data)
+                                current_data = ""
+                                if raw_bytes is None:
+                                    continue
+
+                                try:
+                                    event_data = protobuf_to_dict(raw_bytes, "warp.multi_agent.v1.ResponseEvent")
+                                    event_count += 1
+                                    event_type = _get_event_type(event_data)
+                                    logger.info(f"🔄 Processing Event #{event_count} in real time: {event_type}")
+
+                                    # Handle each event as it arrives
+                                    async for chunk in _process_single_event(event_data, completion_id, created_ts, model_id, tool_calls_emitted):
+                                        if chunk.get("tool_calls_emitted"):
+                                            tool_calls_emitted = True
+                                        if chunk.get("sse_data"):
+                                            yield chunk["sse_data"]
+
+                                except Exception as parse_err:
+                                    logger.debug(f"Failed to parse event, skipping: {str(parse_err)[:100]}")
+                                    continue
+
+                        logger.info(f"✅ Real-time stream processing finished; {event_count} events handled")
+                        break  # Success; exit the retry loop
+
+        except Exception as e:
+            logger.error(f"[OpenAI Compat] Stream processing failed: {e}")
+            raise e
+
+        # Emit the completion marker
+        logger.info("[OpenAI Compat] Converted SSE (emit): [DONE]")
+        yield "data: [DONE]\n\n"
+
+    except Exception as e:
+        logger.error(f"[OpenAI Compat] Stream processing failed: {e}")
+        error_chunk = {
+            "id": completion_id,
+            "object": "chat.completion.chunk",
+            "created": created_ts,
+            "model": model_id,
+            "choices": [{"index": 0, "delta": {}, "finish_reason": "error"}],
+            "error": {"message": str(e)},
+        }
+        logger.info("[OpenAI Compat] Converted SSE (emit error): %s", json.dumps(error_chunk, ensure_ascii=False))
+        yield f"data: {json.dumps(error_chunk, ensure_ascii=False)}\n\n"
+        yield "data: [DONE]\n\n"
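+
+
+# Shape sketch of the decoded ResponseEvent dicts handled below (inferred from
+# the field accesses in _process_single_event; the text is a placeholder):
+#   {"init": {...}}                                        -> ignored
+#   {"client_actions": {"actions": [{"append_to_message_content":
+#       {"message": {"agent_output": {"text": "Hi"}}}}]}}  -> content delta
+#   {"client_actions": {"actions": [{"add_messages_to_task":
+#       {"messages": [{"tool_call": {"call_mcp_tool":
+#           {"name": "...", "args": {}}}}]}}]}}            -> tool_calls delta
+#   {"finished": {}}                                       -> closing chunk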
+
+
+async def _process_single_event(event_data: dict, completion_id: str, created_ts: int, model_id: str, tool_calls_emitted: bool) -> AsyncGenerator[dict, None]:
+    """Process a single event and yield SSE payloads."""
+    if "init" in event_data:
+        return
+
+    # Handle the finished event
+    if "finished" in event_data:
+        done_chunk = {
+            "id": completion_id,
+            "object": "chat.completion.chunk",
+            "created": created_ts,
+            "model": model_id,
+            "choices": [{"index": 0, "delta": {}, "finish_reason": ("tool_calls" if tool_calls_emitted else "stop")}],
+        }
+        logger.info("[OpenAI Compat] Converted SSE (emit done): %s", json.dumps(done_chunk, ensure_ascii=False))
+        yield {"sse_data": f"data: {json.dumps(done_chunk, ensure_ascii=False)}\n\n"}
+        return
+
+    client_actions = _get(event_data, "client_actions", "clientActions")
+    if isinstance(client_actions, dict):
+        actions = _get(client_actions, "actions", "Actions") or []
+        for action in actions:
+            # Handle appended text
+            append_data = _get(action, "append_to_message_content", "appendToMessageContent")
+            if isinstance(append_data, dict):
+                message = append_data.get("message", {})
+                agent_output = _get(message, "agent_output", "agentOutput") or {}
+                text_content = agent_output.get("text", "")
+                if text_content:
+                    delta = {
+                        "id": completion_id,
+                        "object": "chat.completion.chunk",
+                        "created": created_ts,
+                        "model": model_id,
+                        "choices": [{"index": 0, "delta": {"content": text_content}}],
+                    }
+                    logger.info("[OpenAI Compat] Converted SSE (emit): %s", json.dumps(delta, ensure_ascii=False))
+                    yield {"sse_data": f"data: {json.dumps(delta, ensure_ascii=False)}\n\n"}
+
+            # Handle added messages
+            messages_data = _get(action, "add_messages_to_task", "addMessagesToTask")
+            if isinstance(messages_data, dict):
+                messages = messages_data.get("messages", [])
+                for message in messages:
+                    # Handle tool calls
+                    tool_call = _get(message, "tool_call", "toolCall") or {}
+                    call_mcp = _get(tool_call, "call_mcp_tool", "callMcpTool") or {}
+                    if isinstance(call_mcp, dict) and call_mcp.get("name"):
+                        try:
+                            args_obj = call_mcp.get("args", {}) or {}
+                            args_str = json.dumps(args_obj, ensure_ascii=False)
+                        except Exception:
+                            args_str = "{}"
+                        tool_call_id = tool_call.get("tool_call_id") or str(uuid.uuid4())
+                        delta = {
+                            "id": completion_id,
+                            "object": "chat.completion.chunk",
+                            "created": created_ts,
+                            "model": model_id,
+                            "choices": [{
+                                "index": 0,
+                                "delta": {
+                                    "tool_calls": [{
+                                        "index": 0,
+                                        "id": tool_call_id,
+                                        "type": "function",
+                                        "function": {"name": call_mcp.get("name"), "arguments": args_str},
+                                    }]
+                                }
+                            }],
+                        }
+                        logger.info("[OpenAI Compat] Converted SSE (emit tool_calls): %s", json.dumps(delta, ensure_ascii=False))
+                        yield {"sse_data": f"data: {json.dumps(delta, ensure_ascii=False)}\n\n", "tool_calls_emitted": True}
+                    else:
+                        # Handle text messages
+                        agent_output = _get(message, "agent_output", "agentOutput") or {}
+                        text_content = agent_output.get("text", "")
+                        if text_content:
+                            delta = {
+                                "id": completion_id,
+                                "object": "chat.completion.chunk",
+                                "created": created_ts,
+                                "model": model_id,
+                                "choices": [{"index": 0, "delta": {"content": text_content}}],
+                            }
+                            logger.info("[OpenAI Compat] Converted SSE (emit): %s", json.dumps(delta, ensure_ascii=False))
+                            yield {"sse_data": f"data: {json.dumps(delta, ensure_ascii=False)}\n\n"}
\ No newline at end of file
diff --git a/protobuf2openai/state.py b/protobuf2openai/state.py
new file mode 100644
index 0000000000000000000000000000000000000000..95b4c9ab0aef5abd2268374f6cddb6518fa1a3de
--- /dev/null
+++ b/protobuf2openai/state.py
@@ -0,0 +1,23 @@
+from __future__ import annotations
+
+import uuid
+from typing import Optional
+from pydantic import BaseModel
+
+
+class BridgeState(BaseModel):
+    conversation_id: Optional[str] = None
+    baseline_task_id: Optional[str] = None
+    tool_call_id: Optional[str] = None
+    tool_message_id: Optional[str] = None
+
+
+STATE = BridgeState()
+
+# Initialize tool ids lazily when needed
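+#
+# Usage sketch (illustrative; "task_packet" is a placeholder, not part of this
+# module's contract):
+#   ensure_tool_ids()
+#   task_packet["tool_call_id"] = STATE.tool_call_id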
+
+def ensure_tool_ids():
+    if not STATE.tool_call_id:
+        STATE.tool_call_id = str(uuid.uuid4())
+    if not STATE.tool_message_id:
+        STATE.tool_message_id = str(uuid.uuid4())
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000000000000000000000000000000000000..4b5e2d4ca0eb5fb28310e2da217e51a68e7564c9
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,24 @@
+[project]
+name = "warptestui"
+version = "0.1.0"
+description = "Test UI for Warp"
+readme = "README.md"
+requires-python = ">=3.13"
+dependencies = [
+    "fastapi[standard]",
+    "uvicorn[standard]",
+    "httpx[http2]",
+    "protobuf",
+    "grpcio-tools",
+    "python-dotenv",
+    "websockets>=15.0.1",
+    "requests>=2.32.5",
+    "openai>=1.106.0",
+]
+
+[project.scripts]
+warp-server = "server:main"
+
+[[tool.uv.index]]
+url = "https://mirrors.ustc.edu.cn/pypi/simple"
+default = true
diff --git a/server.py b/server.py
new file mode 100644
index 0000000000000000000000000000000000000000..f895cae3173f2ba7203761af6f294889973385f7
--- /dev/null
+++ b/server.py
@@ -0,0 +1,357 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Warp protobuf bridge server entry point
+
+Exposes the Warp API bridge endpoints (/api/warp/send_stream, /api/warp/send_stream_sse), plus /healthz and /v1/models. The GUI, static files, and WebSocket features have been removed.
+"""
+
+import os
+import asyncio
+import json
+from pathlib import Path
+
+import uvicorn
+from fastapi import FastAPI, HTTPException
+from contextlib import asynccontextmanager
+# Added: typing imports
+from typing import Any, Dict, List
+
+from warp2protobuf.api.protobuf_routes import app as protobuf_app
+from warp2protobuf.core.logging import logger
+
+from warp2protobuf.core.auth import acquire_anonymous_access_token
+from warp2protobuf.config.models import get_all_unique_models
+
+# Import the OpenAI-compatible router
+from protobuf2openai.router import router as openai_router
+
+
+# ============= JSON schema sanitization helpers moved to warp2protobuf.core.schema_sanitizer =============
+
+
+# ============= Application factories =============
+
+def create_app() -> FastAPI:
+    """Create the FastAPI application."""
+
+    # Use the protobuf routes app as the main application
+    app = protobuf_app
+
+    # Mount the OpenAI-compatible router
+    app.include_router(openai_router)
+
+    # Input-schema sanitization middleware (covering the Warp endpoints)
+
+    # ============= OpenAI compatibility: the model list endpoint is provided by the router =============
+
+    return app
+
+
+def create_app_with_lifespan() -> FastAPI:
+    """Create the FastAPI application with lifespan event handling."""
+    app = FastAPI(
+        title="Warp Protobuf Bridge Server",
+        description="Warp API bridge server providing protobuf encoding/decoding and an OpenAI-compatible interface",
+        version="1.0.0",
+        lifespan=lifespan
+    )
+
+    # Add CORS middleware
+    from fastapi.middleware.cors import CORSMiddleware
+    app.add_middleware(
+        CORSMiddleware,
+        allow_origins=["*"],
+        allow_credentials=True,
+        allow_methods=["*"],
+        allow_headers=["*"],
+    )
+
+    # Mount the sub-application
+    app.mount("/api", protobuf_app)
+
+    # Include the OpenAI-compatible router
+    app.include_router(openai_router)
+
+    # ============= Root path and health check =============
+    @app.get("/")
+    async def root():
+        return {
+            "service": "Warp Protobuf Bridge Server",
+            "status": "running",
+            "endpoints": {
+                "health": "/healthz",
+                "models": "/v1/models",
+                "protobuf_bridge": "/api/warp/send_stream",
+                "sse_bridge": "/api/warp/send_stream_sse",
+                "auth_refresh": "/api/auth/refresh"
+            }
+        }
+
+    @app.get("/healthz")
+    async def health_check():
+        return {"status": "ok", "service": "Warp Protobuf Bridge Server"}
+
+    return app
+
+
+############################################################
+# server_message_data deep encode/decode helpers
+############################################################
+
+# Notes:
+# Based on packet captures and analysis, server_message_data is a Base64URL-encoded proto3 message:
+# - field 1: string (usually a 36-byte UUID)
+# - field 3: google.protobuf.Timestamp (field 1 = seconds, field 2 = nanos)
+# It may carry only a Timestamp, only a UUID, or UUID + Timestamp.
+
+from typing import Dict, Optional, Tuple
+import base64
+from datetime import datetime, timezone
+try:
+    from zoneinfo import ZoneInfo  # Python 3.9+
+except Exception:
+    ZoneInfo = None  # type: ignore
+
+
+def _b64url_decode_padded(s: str) -> bytes:
+    t = s.replace("-", "+").replace("_", "/")
+    pad = (-len(t)) % 4
+    if pad:
+        t += "=" * pad
+    return base64.b64decode(t)
+
+
+def _b64url_encode_nopad(b: bytes) -> str:
+    return base64.urlsafe_b64encode(b).decode("ascii").rstrip("=")
+
+
+def _read_varint(buf: bytes, i: int) -> Tuple[int, int]:
+    shift = 0
+    val = 0
+    while i < len(buf):
+        b = buf[i]
+        i += 1
+        val |= (b & 0x7F) << shift
+        if not (b & 0x80):
+            return val, i
+        shift += 7
+        if shift > 63:
+            break
+    raise ValueError("invalid varint")
+
+
+def _write_varint(v: int) -> bytes:
+    out = bytearray()
+    vv = int(v)
+    while True:
+        to_write = vv & 0x7F
+        vv >>= 7
+        if vv:
+            out.append(to_write | 0x80)
+        else:
+            out.append(to_write)
+            break
+    return bytes(out)
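+
+
+# Worked example (illustrative): protobuf varints store 7 bits per byte,
+# least-significant group first, with the high bit as a continuation flag.
+# 300 = 0b10_0101100, so:
+#   _write_varint(300)            == b"\xac\x02"
+#   _read_varint(b"\xac\x02", 0)  == (300, 2)   # (value, next offset)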
+
+
+def _make_key(field_no: int, wire_type: int) -> bytes:
+    return _write_varint((field_no << 3) | wire_type)
+
+
+def _decode_timestamp(buf: bytes) -> Tuple[Optional[int], Optional[int]]:
+    # google.protobuf.Timestamp: field 1 = seconds (int64 varint), field 2 = nanos (int32 varint)
+    i = 0
+    seconds: Optional[int] = None
+    nanos: Optional[int] = None
+    while i < len(buf):
+        key, i = _read_varint(buf, i)
+        field_no = key >> 3
+        wt = key & 0x07
+        if wt == 0:  # varint
+            val, i = _read_varint(buf, i)
+            if field_no == 1:
+                seconds = int(val)
+            elif field_no == 2:
+                nanos = int(val)
+        elif wt == 2:  # length-delimited (not expected inside Timestamp)
+            ln, i2 = _read_varint(buf, i)
+            i = i2 + ln
+        elif wt == 1:
+            i += 8
+        elif wt == 5:
+            i += 4
+        else:
+            break
+    return seconds, nanos
+
+
+def _encode_timestamp(seconds: Optional[int], nanos: Optional[int]) -> bytes:
+    parts = bytearray()
+    if seconds is not None:
+        parts += _make_key(1, 0)  # field 1, varint
+        parts += _write_varint(int(seconds))
+    if nanos is not None:
+        parts += _make_key(2, 0)  # field 2, varint
+        parts += _write_varint(int(nanos))
+    return bytes(parts)
+
+
+def decode_server_message_data(b64url: str) -> Dict:
+    """Decode a Base64URL server_message_data blob into structured fields."""
+    try:
+        raw = _b64url_decode_padded(b64url)
+    except Exception as e:
+        return {"error": f"base64url decode failed: {e}", "raw_b64url": b64url}
+
+    i = 0
+    uuid: Optional[str] = None
+    seconds: Optional[int] = None
+    nanos: Optional[int] = None
+
+    while i < len(raw):
+        key, i = _read_varint(raw, i)
+        field_no = key >> 3
+        wt = key & 0x07
+        if wt == 2:  # length-delimited
+            ln, i2 = _read_varint(raw, i)
+            i = i2
+            data = raw[i:i+ln]
+            i += ln
+            if field_no == 1:  # uuid string
+                try:
+                    uuid = data.decode("utf-8")
+                except Exception:
+                    uuid = None
+            elif field_no == 3:  # google.protobuf.Timestamp
+                seconds, nanos = _decode_timestamp(data)
+        elif wt == 0:  # varint -> not expected, skip
+            _, i = _read_varint(raw, i)
+        elif wt == 1:
+            i += 8
+        elif wt == 5:
+            i += 4
+        else:
+            break
+
+    out: Dict[str, Any] = {}
+    if uuid is not None:
+        out["uuid"] = uuid
+    if seconds is not None:
+        out["seconds"] = seconds
+    if nanos is not None:
+        out["nanos"] = nanos
+    return out
+
+
+def encode_server_message_data(uuid: Optional[str] = None, seconds: Optional[int] = None, nanos: Optional[int] = None) -> str:
+    """Encode uuid/seconds/nanos into a Base64URL string."""
+    parts = bytearray()
+    if uuid:
+        b = uuid.encode("utf-8")
+        parts += _make_key(1, 2)  # field 1, length-delimited
+        parts += _write_varint(len(b))
+        parts += b
+
+    if seconds is not None or nanos is not None:
+        ts = _encode_timestamp(seconds, nanos)
+        parts += _make_key(3, 2)  # field 3, length-delimited
+        parts += _write_varint(len(ts))
+        parts += ts
+
+    return _b64url_encode_nopad(bytes(parts))
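+
+
+# Round-trip sketch (illustrative values, not captured data):
+#   blob = encode_server_message_data(
+#       uuid="123e4567-e89b-12d3-a456-426614174000", seconds=1700000000, nanos=0)
+#   decode_server_message_data(blob)
+#   # -> {"uuid": "123e4567-e89b-12d3-a456-426614174000",
+#   #     "seconds": 1700000000, "nanos": 0}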
+
+
+async def startup_tasks():
+    """Tasks executed at startup."""
+    logger.info("="*60)
+    logger.info("Warp protobuf codec server starting")
+    logger.info("="*60)
+
+    # Check the protobuf runtime
+    try:
+        from warp2protobuf.core.protobuf import ensure_proto_runtime
+        ensure_proto_runtime()
+        logger.info("✅ Protobuf runtime initialized")
+    except Exception as e:
+        logger.error(f"❌ Protobuf runtime initialization failed: {e}")
+        raise
+
+    # Check the JWT token
+    try:
+        from warp2protobuf.core.auth import get_jwt_token, is_token_expired, refresh_jwt_if_needed
+        token = get_jwt_token()
+        if token and not is_token_expired(token):
+            logger.info("✅ JWT token is valid")
+        elif not token:
+            logger.warning("⚠️ No JWT token found; requesting an anonymous access token for quota initialization…")
+            try:
+                new_token = await acquire_anonymous_access_token()
+                if new_token:
+                    logger.info("✅ Anonymous access token acquired")
+                else:
+                    logger.warning("⚠️ Failed to acquire an anonymous access token")
+            except Exception as e2:
+                logger.warning(f"⚠️ Anonymous access token request raised: {e2}")
+        else:
+            logger.warning("⚠️ JWT token invalid or expired; attempting automatic refresh…")
+            try:
+                refresh_success = await refresh_jwt_if_needed()
+                if refresh_success:
+                    logger.info("✅ JWT token refreshed automatically")
+                else:
+                    logger.warning("⚠️ Automatic JWT refresh failed; consider running: uv run refresh_jwt.py")
+            except Exception as e3:
+                logger.warning(f"⚠️ Automatic JWT refresh raised: {e3}; consider running: uv run refresh_jwt.py")
+    except Exception as e:
+        logger.warning(f"⚠️ JWT check failed: {e}")
+
+    # The OpenAI compatibility layer is integrated into this server
+
+    # List the available endpoints
+    logger.info("-"*40)
+    logger.info("Available API endpoints:")
+    logger.info("  GET  /                         - service info")
+    logger.info("  GET  /healthz                  - health check")
+    logger.info("  GET  /v1/models                - model list (OpenAI-compatible)")
+    logger.info("  POST /api/warp/send_stream     - Warp API forwarding (returns parsed events)")
+    logger.info("  POST /api/warp/send_stream_sse - Warp API forwarding (live SSE)")
+    logger.info("  POST /api/auth/refresh         - refresh the JWT token")
+    logger.info("="*60)
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    """Application lifecycle management."""
+    # Run at startup
+    await startup_tasks()
+    yield
+    # Run at shutdown (if needed)
+
+
+def main():
+    """Entry point."""
+    # Create the application (with lifespan)
+    app = create_app_with_lifespan()
+
+    # Start the server
+    try:
+        uvicorn.run(
+            app,
+            host="0.0.0.0",
+            port=8000,
+            log_level="info",
+            access_log=True
+        )
+    except KeyboardInterrupt:
+        logger.info("Server stopped by user")
+    except Exception as e:
+        logger.error(f"Server failed to start: {e}")
+        raise
+
+
+if __name__ == "__main__":
+    main()
diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000000000000000000000000000000000000..db6c24c72060b7d4b4a6aaccd1442d97047c219a --- /dev/null +++ b/uv.lock @@ -0,0 +1,863 @@ +version = 1 +revision = 1 +requires-python = ">=3.13" + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = 
"sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, +] + +[[package]] +name = "anyio" +version = "4.10.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "idna" }, + { name = "sniffio" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213 }, +] + +[[package]] +name = "certifi" +version = "2025.8.3" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.3" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350 }, + { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580 }, + { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175 }, +] + +[[package]] +name = "click" +version = "8.2.1" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = 
"sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277 }, +] + +[[package]] +name = "dnspython" +version = "2.7.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632 }, +] + +[[package]] +name = "email-validator" +version = "2.2.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "dnspython" }, + { name = "idna" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521 }, +] + +[[package]] +name = "fastapi" +version = "0.116.1" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/78/d7/6c8b3bfe33eeffa208183ec037fee0cce9f7f024089ab1c5d12ef04bd27c/fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143", size = 296485 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631 }, +] + +[package.optional-dependencies] +standard = [ + { name = "email-validator" }, + { name = "fastapi-cli", extra = ["standard"] }, + { name = "httpx" }, + { name = "jinja2" }, + { name = "python-multipart" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[[package]] +name = "fastapi-cli" +version = "0.0.8" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "rich-toolkit" }, + { name = "typer" }, + { name = "uvicorn", extra = ["standard"] }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/c6/94/3ef75d9c7c32936ecb539b9750ccbdc3d2568efd73b1cb913278375f4533/fastapi_cli-0.0.8.tar.gz", hash = "sha256:2360f2989b1ab4a3d7fc8b3a0b20e8288680d8af2e31de7c38309934d7f8a0ee", size = 16884 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/e0/3f/6ad3103c5f59208baf4c798526daea6a74085bb35d1c161c501863470476/fastapi_cli-0.0.8-py3-none-any.whl", hash = "sha256:0ea95d882c85b9219a75a65ab27e8da17dac02873e456850fa0a726e96e985eb", size = 10770 }, +] + +[package.optional-dependencies] +standard = [ + { name = "fastapi-cloud-cli" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[[package]] +name = "fastapi-cloud-cli" +version = "0.1.5" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "httpx" }, + { name = "pydantic", extra = 
["email"] }, + { name = "rich-toolkit" }, + { name = "rignore" }, + { name = "sentry-sdk" }, + { name = "typer" }, + { name = "uvicorn", extra = ["standard"] }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/a9/2e/3b6e5016affc310e5109bc580f760586eabecea0c8a7ab067611cd849ac0/fastapi_cloud_cli-0.1.5.tar.gz", hash = "sha256:341ee585eb731a6d3c3656cb91ad38e5f39809bf1a16d41de1333e38635a7937", size = 22710 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/e5/a6/5aa862489a2918a096166fd98d9fe86b7fd53c607678b3fa9d8c432d88d5/fastapi_cloud_cli-0.1.5-py3-none-any.whl", hash = "sha256:d80525fb9c0e8af122370891f9fa83cf5d496e4ad47a8dd26c0496a6c85a012a", size = 18992 }, +] + +[[package]] +name = "grpcio" +version = "1.74.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/38/b4/35feb8f7cab7239c5b94bd2db71abb3d6adb5f335ad8f131abb6060840b6/grpcio-1.74.0.tar.gz", hash = "sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1", size = 12756048 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/d4/d8/1004a5f468715221450e66b051c839c2ce9a985aa3ee427422061fcbb6aa/grpcio-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:2bc2d7d8d184e2362b53905cb1708c84cb16354771c04b490485fa07ce3a1d89", size = 5449488 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/94/0e/33731a03f63740d7743dced423846c831d8e6da808fcd02821a4416df7fa/grpcio-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:c14e803037e572c177ba54a3e090d6eb12efd795d49327c5ee2b3bddb836bf01", size = 10974059 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/0d/c6/3d2c14d87771a421205bdca991467cfe473ee4c6a1231c1ede5248c62ab8/grpcio-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f6ec94f0e50eb8fa1744a731088b966427575e40c2944a980049798b127a687e", size = 5945647 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/c5/83/5a354c8aaff58594eef7fffebae41a0f8995a6258bbc6809b800c33d4c13/grpcio-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:566b9395b90cc3d0d0c6404bc8572c7c18786ede549cdb540ae27b58afe0fb91", size = 6626101 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/3f/ca/4fdc7bf59bf6994aa45cbd4ef1055cd65e2884de6113dbd49f75498ddb08/grpcio-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1ea6176d7dfd5b941ea01c2ec34de9531ba494d541fe2057c904e601879f249", size = 6182562 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/fd/48/2869e5b2c1922583686f7ae674937986807c2f676d08be70d0a541316270/grpcio-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:64229c1e9cea079420527fa8ac45d80fc1e8d3f94deaa35643c381fa8d98f362", size = 6303425 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/a6/0e/bac93147b9a164f759497bc6913e74af1cb632c733c7af62c0336782bd38/grpcio-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:0f87bddd6e27fc776aacf7ebfec367b6d49cad0455123951e4488ea99d9b9b8f", size = 6996533 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/84/35/9f6b2503c1fd86d068b46818bbd7329db26a87cdd8c01e0d1a9abea1104c/grpcio-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3b03d8f2a07f0fea8c8f74deb59f8352b770e3900d143b3d1475effcb08eec20", size = 6491489 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/75/33/a04e99be2a82c4cbc4039eb3a76f6c3632932b9d5d295221389d10ac9ca7/grpcio-1.74.0-cp313-cp313-win32.whl", hash = "sha256:b6a73b2ba83e663b2480a90b82fdae6a7aa6427f62bf43b29912c0cfd1aa2bfa", size = 3805811 
}, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/34/80/de3eb55eb581815342d097214bed4c59e806b05f1b3110df03b2280d6dfd/grpcio-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd3c71aeee838299c5887230b8a1822795325ddfea635edd82954c1eaa831e24", size = 4489214 }, +] + +[[package]] +name = "grpcio-tools" +version = "1.74.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "grpcio" }, + { name = "protobuf" }, + { name = "setuptools" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/90/c8/bca79cb8c14bb63027831039919c801db9f593c7504c09433934f5dff6a4/grpcio_tools-1.74.0.tar.gz", hash = "sha256:88ab9eb18b6ac1b4872add6b394073bd8d44eee7c32e4dc60a022e25ffaffb95", size = 5390007 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/2f/65/307a72cf4bfa553a25e284bd1f27b94a53816ac01ddf432c398117b91b2a/grpcio_tools-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:e2e22460355adbd0f25fdd7ed8b9ae53afb3875b9d5f34cdf1cf12559418245e", size = 2545750 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/5b/8e/9b2217c15baadc7cfca3eba9f980e147452ca82f41767490f619edea3489/grpcio_tools-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:0cab5a2c6ae75b555fee8a1a9a9b575205171e1de392fe2d4139a29e67d8f5bb", size = 5838169 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/ea/42/a6a158b7e91c0a358cddf3f9088b004c2bfa42d1f96154b9b8eb17e16d73/grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:9b18afca48b55832402a716ea4634ef2b68927a8a17ddf4038f51812299255c9", size = 2517140 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/05/db/d4576a07b2d1211822a070f76a99a9f4f4cb63496a02964ce77c88df8a28/grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85f442a9e89e276bf89a0c9c76ea71647a927d967759333c1fa40300c27f7bd", size = 2905214 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/77/dc/3713e75751f862d8c84f823ba935d486c0aac0b6f789fa61fbde04ad5019/grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051ce925b0b99ae2daf61b3cba19962b8655cc2a72758ce4081b89272206f5a3", size = 2656245 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/bd/e4/01f9e8e0401d8e11a70ae8aff6899eb8c16536f69a0a9ffb25873588721c/grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:98c7b8eb0de6984cd7fa7335ce3383b3bb9a1559edc238c811df88008d5d3593", size = 3052327 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/28/c2/264b4e705375a834c9c7462847ae435c0be1644f03a705d3d7464af07bd5/grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f8f7d17b7573b9a2a6b4183fa4a56a2ab17370c8d0541e1424cf0c9c6f863434", size = 3500706 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/ee/c0/cc034cec5871a1918e7888e8ce700e06fab5bbb328f998a2f2750cd603b5/grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:db08b91ea0cd66dc4b1b929100e7aa84c9c10c51573c8282ec1ba05b41f887ef", size = 3125098 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/69/55/5792b681af82b3ff1e50ce0ccfbb6d52fc68a13932ed3da57e58d7dfb67b/grpcio_tools-1.74.0-cp313-cp313-win32.whl", hash = "sha256:4b6c5efb331ae9e5f614437f4a5938459a8a5a1ab3dfe133d2bbdeaba39b894d", size = 992431 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/94/9f/626f0fe6bfc1c6917785c6a5ee2eb8c07b5a30771e4bf4cff3c1ab5b431b/grpcio_tools-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8324cd67f61f7900d227b36913ee5f0302ba3ba8777c8bc705afa8174098d28", size = 1157064 }, 
+] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515 }, +] + +[[package]] +name = "h2" +version = "4.2.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "hpack" }, + { name = "hyperframe" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/1b/38/d7f80fd13e6582fb8e0df8c9a653dcc02b03ca34f4d72f34869298c5baf8/h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f", size = 2150682 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/d0/9e/984486f2d0a0bd2b024bf4bc1c62688fcafa9e61991f041fb0e2def4a982/h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0", size = 60957 }, +] + +[[package]] +name = "hpack" +version = "4.1.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357 }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784 }, +] + +[[package]] +name = "httptools" +version = "0.6.4" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660", size = 197214 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083", size = 102431 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3", size = 473121 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071", size = 473805 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5", size = 448858 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0", size = 452042 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682 }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, +] + +[package.optional-dependencies] +http2 = [ + { name = "h2" }, +] + +[[package]] +name = "hyperframe" +version = "6.1.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = 
"sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 }, +] + +[[package]] +name = "jiter" +version = "0.10.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/ee/9d/ae7ddb4b8ab3fb1b51faf4deb36cb48a4fbbd7cb36bad6a5fca4741306f7/jiter-0.10.0.tar.gz", hash = "sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500", size = 162759 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/2e/b0/279597e7a270e8d22623fea6c5d4eeac328e7d95c236ed51a2b884c54f70/jiter-0.10.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e0588107ec8e11b6f5ef0e0d656fb2803ac6cf94a96b2b9fc675c0e3ab5e8644", size = 311617 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/91/e3/0916334936f356d605f54cc164af4060e3e7094364add445a3bc79335d46/jiter-0.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cafc4628b616dc32530c20ee53d71589816cf385dd9449633e910d596b1f5c8a", size = 318947 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/6a/8e/fd94e8c02d0e94539b7d669a7ebbd2776e51f329bb2c84d4385e8063a2ad/jiter-0.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:520ef6d981172693786a49ff5b09eda72a42e539f14788124a07530f785c3ad6", size = 344618 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/6f/b0/f9f0a2ec42c6e9c2e61c327824687f1e2415b767e1089c1d9135f43816bd/jiter-0.10.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:554dedfd05937f8fc45d17ebdf298fe7e0c77458232bcb73d9fbbf4c6455f5b3", size = 368829 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/e8/57/5bbcd5331910595ad53b9fd0c610392ac68692176f05ae48d6ce5c852967/jiter-0.10.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bc299da7789deacf95f64052d97f75c16d4fc8c4c214a22bf8d859a4288a1c2", size = 491034 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/9b/be/c393df00e6e6e9e623a73551774449f2f23b6ec6a502a3297aeeece2c65a/jiter-0.10.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5161e201172de298a8a1baad95eb85db4fb90e902353b1f6a41d64ea64644e25", size = 388529 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/42/3e/df2235c54d365434c7f150b986a6e35f41ebdc2f95acea3036d99613025d/jiter-0.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2227db6ba93cb3e2bf67c87e594adde0609f146344e8207e8730364db27041", size = 350671 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/c6/77/71b0b24cbcc28f55ab4dbfe029f9a5b73aeadaba677843fc6dc9ed2b1d0a/jiter-0.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15acb267ea5e2c64515574b06a8bf393fbfee6a50eb1673614aa45f4613c0cca", size = 390864 }, + { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/6a/d3/ef774b6969b9b6178e1d1e7a89a3bd37d241f3d3ec5f8deb37bbd203714a/jiter-0.10.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:901b92f2e2947dc6dfcb52fd624453862e16665ea909a08398dde19c0731b7f4", size = 522989 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/0c/41/9becdb1d8dd5d854142f45a9d71949ed7e87a8e312b0bede2de849388cb9/jiter-0.10.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d0cb9a125d5a3ec971a094a845eadde2db0de85b33c9f13eb94a0c63d463879e", size = 513495 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/9c/36/3468e5a18238bdedae7c4d19461265b5e9b8e288d3f86cd89d00cbb48686/jiter-0.10.0-cp313-cp313-win32.whl", hash = "sha256:48a403277ad1ee208fb930bdf91745e4d2d6e47253eedc96e2559d1e6527006d", size = 211289 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/7e/07/1c96b623128bcb913706e294adb5f768fb7baf8db5e1338ce7b4ee8c78ef/jiter-0.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:75f9eb72ecb640619c29bf714e78c9c46c9c4eaafd644bf78577ede459f330d4", size = 205074 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/54/46/caa2c1342655f57d8f0f2519774c6d67132205909c65e9aa8255e1d7b4f4/jiter-0.10.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:28ed2a4c05a1f32ef0e1d24c2611330219fed727dae01789f4a335617634b1ca", size = 318225 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/43/84/c7d44c75767e18946219ba2d703a5a32ab37b0bc21886a97bc6062e4da42/jiter-0.10.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a4c418b1ec86a195f1ca69da8b23e8926c752b685af665ce30777233dfe070", size = 350235 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/01/16/f5a0135ccd968b480daad0e6ab34b0c7c5ba3bc447e5088152696140dcb3/jiter-0.10.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d7bfed2fe1fe0e4dda6ef682cee888ba444b21e7a6553e03252e4feb6cf0adca", size = 207278 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/1c/9b/1d646da42c3de6c2188fdaa15bce8ecb22b635904fc68be025e21249ba44/jiter-0.10.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:5e9251a5e83fab8d87799d3e1a46cb4b7f2919b895c6f4483629ed2446f66522", size = 310866 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/ad/0e/26538b158e8a7c7987e94e7aeb2999e2e82b1f9d2e1f6e9874ddf71ebda0/jiter-0.10.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:023aa0204126fe5b87ccbcd75c8a0d0261b9abdbbf46d55e7ae9f8e22424eeb8", size = 318772 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/7b/fb/d302893151caa1c2636d6574d213e4b34e31fd077af6050a9c5cbb42f6fb/jiter-0.10.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c189c4f1779c05f75fc17c0c1267594ed918996a231593a21a5ca5438445216", size = 344534 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/01/d8/5780b64a149d74e347c5128d82176eb1e3241b1391ac07935693466d6219/jiter-0.10.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:15720084d90d1098ca0229352607cd68256c76991f6b374af96f36920eae13c4", size = 369087 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/e8/5b/f235a1437445160e777544f3ade57544daf96ba7e96c1a5b24a6f7ac7004/jiter-0.10.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4f2fb68e5f1cfee30e2b2a09549a00683e0fde4c6a2ab88c94072fc33cb7426", size = 490694 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/85/a9/9c3d4617caa2ff89cf61b41e83820c27ebb3f7b5fae8a72901e8cd6ff9be/jiter-0.10.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:ce541693355fc6da424c08b7edf39a2895f58d6ea17d92cc2b168d20907dee12", size = 388992 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/68/b1/344fd14049ba5c94526540af7eb661871f9c54d5f5601ff41a959b9a0bbd/jiter-0.10.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31c50c40272e189d50006ad5c73883caabb73d4e9748a688b216e85a9a9ca3b9", size = 351723 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/41/89/4c0e345041186f82a31aee7b9d4219a910df672b9fef26f129f0cda07a29/jiter-0.10.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa3402a2ff9815960e0372a47b75c76979d74402448509ccd49a275fa983ef8a", size = 392215 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/55/58/ee607863e18d3f895feb802154a2177d7e823a7103f000df182e0f718b38/jiter-0.10.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:1956f934dca32d7bb647ea21d06d93ca40868b505c228556d3373cbd255ce853", size = 522762 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/15/d0/9123fb41825490d16929e73c212de9a42913d68324a8ce3c8476cae7ac9d/jiter-0.10.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:fcedb049bdfc555e261d6f65a6abe1d5ad68825b7202ccb9692636c70fcced86", size = 513427 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/d8/b3/2bd02071c5a2430d0b70403a34411fc519c2f227da7b03da9ba6a956f931/jiter-0.10.0-cp314-cp314-win32.whl", hash = "sha256:ac509f7eccca54b2a29daeb516fb95b6f0bd0d0d8084efaf8ed5dfc7b9f0b357", size = 210127 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/03/0c/5fe86614ea050c3ecd728ab4035534387cd41e7c1855ef6c031f1ca93e3f/jiter-0.10.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5ed975b83a2b8639356151cef5c0d597c68376fc4922b45d0eb384ac058cfa00", size = 318527 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/b3/4a/4175a563579e884192ba6e81725fc0448b042024419be8d83aa8a80a3f44/jiter-0.10.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa96f2abba33dc77f79b4cf791840230375f9534e5fac927ccceb58c5e604a5", size = 354213 }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321 }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, + { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, + { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, +] + +[[package]] +name = "openai" +version = "1.106.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/11/3a/ef6c6842ea4df48453f5ff7b624178273d9137acb318afba3872a5f3da49/openai-1.106.0.tar.gz", hash = "sha256:8c5ae2ae61a619cd8ba22aeda8fdff00428280041eff5be5555287634ea6f460", size = 561133 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/01/66/465e4e8095becd1cd8c0a32283d530e7866f434031eafdc93cc1f04869d7/openai-1.106.0-py3-none-any.whl", hash = "sha256:47bf9d07df203cd2b7f90ac2da84aea40340dbdebb2da2f4f70e3a133c605d57", size = 930767 }, +] + +[[package]] +name = "protobuf" +version = "6.32.0" +source = { registry = 
"https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/c0/df/fb4a8eeea482eca989b51cffd274aac2ee24e825f0bf3cbce5281fa1567b/protobuf-6.32.0.tar.gz", hash = "sha256:a81439049127067fc49ec1d36e25c6ee1d1a2b7be930675f919258d03c04e7d2", size = 440614 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/33/18/df8c87da2e47f4f1dcc5153a81cd6bca4e429803f4069a299e236e4dd510/protobuf-6.32.0-cp310-abi3-win32.whl", hash = "sha256:84f9e3c1ff6fb0308dbacb0950d8aa90694b0d0ee68e75719cb044b7078fe741", size = 424409 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/e1/59/0a820b7310f8139bd8d5a9388e6a38e1786d179d6f33998448609296c229/protobuf-6.32.0-cp310-abi3-win_amd64.whl", hash = "sha256:a8bdbb2f009cfc22a36d031f22a625a38b615b5e19e558a7b756b3279723e68e", size = 435735 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/cc/5b/0d421533c59c789e9c9894683efac582c06246bf24bb26b753b149bd88e4/protobuf-6.32.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d52691e5bee6c860fff9a1c86ad26a13afbeb4b168cd4445c922b7e2cf85aaf0", size = 426449 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/ec/7b/607764ebe6c7a23dcee06e054fd1de3d5841b7648a90fd6def9a3bb58c5e/protobuf-6.32.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:501fe6372fd1c8ea2a30b4d9be8f87955a64d6be9c88a973996cef5ef6f0abf1", size = 322869 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/40/01/2e730bd1c25392fc32e3268e02446f0d77cb51a2c3a8486b1798e34d5805/protobuf-6.32.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:75a2aab2bd1aeb1f5dc7c5f33bcb11d82ea8c055c9becbb41c26a8c43fd7092c", size = 322009 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/9c/f2/80ffc4677aac1bc3519b26bc7f7f5de7fce0ee2f7e36e59e27d8beb32dd1/protobuf-6.32.0-py3-none-any.whl", hash = "sha256:ba377e5b67b908c8f3072a57b63e2c6a4cbd18aea4ed98d2584350dbf46f2783", size = 169287 }, +] + +[[package]] +name = "pydantic" +version = "2.11.7" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782 }, +] + +[package.optional-dependencies] +email = [ + { name = "email-validator" }, +] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688 }, + { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269 }, + { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777 }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217 }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556 }, +] + +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546 }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } +wheels = [ + { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738 }, +] + +[[package]] +name = "rich" +version = "14.1.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368 }, +] + +[[package]] +name = "rich-toolkit" +version = "0.15.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/65/36/cdb3d51371ad0cccbf1541506304783bd72d55790709b8eb68c0d401a13a/rich_toolkit-0.15.0.tar.gz", hash = "sha256:3f5730e9f2d36d0bfe01cf723948b7ecf4cc355d2b71e2c00e094f7963128c09", size = 115118 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/75/e4/b0794eefb3cf78566b15e5bf576492c1d4a92ce5f6da55675bc11e9ef5d8/rich_toolkit-0.15.0-py3-none-any.whl", hash = "sha256:ddb91008283d4a7989fd8ff0324a48773a7a2276229c6a3070755645538ef1bb", size = 29062 }, +] + +[[package]] +name = "rignore" +version = "0.6.4" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/73/46/05a94dc55ac03cf931d18e43b86ecee5ee054cb88b7853fffd741e35009c/rignore-0.6.4.tar.gz", hash = "sha256:e893fdd2d7fdcfa9407d0b7600ef2c2e2df97f55e1c45d4a8f54364829ddb0ab", size = 11633 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/db/a3/edd7d0d5cc0720de132b6651cef95ee080ce5fca11c77d8a47db848e5f90/rignore-0.6.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:2b3b1e266ce45189240d14dfa1057f8013ea34b9bc8b3b44125ec8d25fdb3985", size = 885304 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/93/a1/d8d2fb97a6548307507d049b7e93885d4a0dfa1c907af5983fd9f9362a21/rignore-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45fe803628cc14714df10e8d6cdc23950a47eb9eb37dfea9a4779f4c672d2aa0", size = 818799 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/b1/cd/949981fcc180ad5ba7b31c52e78b74b2dea6b7bf744ad4c0c4b212f6da78/rignore-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e439f034277a947a4126e2da79dbb43e33d73d7c09d3d72a927e02f8a16f59aa", size = 892024 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/b0/d3/9042d701a8062d9c88f87760bbc2695ee2c23b3f002d34486b72a85f8efe/rignore-0.6.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:84b5121650ae24621154c7bdba8b8970b0739d8146505c9f38e0cda9385d1004", size = 871430 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/eb/50/3370249b984212b7355f3d9241aa6d02e706067c6d194a2614dfbc0f5b27/rignore-0.6.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52b0957b585ab48a445cf8ac1dbc33a272ab060835e583b4f95aa8c67c23fb2b", size = 1160559 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/6c/6f/2ad7f925838091d065524f30a8abda846d1813eee93328febf262b5cda21/rignore-0.6.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:50359e0d5287b5e2743bd2f2fbf05df619c8282fd3af12f6628ff97b9675551d", size = 939947 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/1f/01/626ec94d62475ae7ef8b00ef98cea61cbea52a389a666703c97c4673d406/rignore-0.6.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:efe18096dcb1596757dfe0b412aab6d32564473ae7ee58dea0a8b4be5b1a2e3b", size = 949471 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/e8/c3/699c4f03b3c46f4b5c02f17a0a339225da65aad547daa5b03001e7c6a382/rignore-0.6.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b79c212d9990a273ad91e8d9765e1766ef6ecedd3be65375d786a252762ba385", size = 974912 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/cd/35/04626c12f9f92a9fc789afc2be32838a5d9b23b6fa8b2ad4a8625638d15b/rignore-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c6ffa7f2a8894c65aa5dc4e8ac8bbdf39a326c0c6589efd27686cfbb48f0197d", size = 1067281 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/fe/9c/8f17baf3b984afea151cb9094716f6f1fb8e8737db97fc6eb6d494bd0780/rignore-0.6.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a63f5720dffc8d8fb0a4d02fafb8370a4031ebf3f99a4e79f334a91e905b7349", size = 1134414 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/10/88/ef84ffa916a96437c12cefcc39d474122da9626d75e3a2ebe09ec5d32f1b/rignore-0.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ce33982da47ac5dc09d19b04fa8d7c9aa6292fc0bd1ecf33076989faa8886094", size = 1109330 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/27/43/2ada5a2ec03b82e903610a1c483f516f78e47700ee6db9823f739e08b3af/rignore-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d899621867aa266824fbd9150e298f19d25b93903ef0133c09f70c65a3416eca", size = 1120381 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/3b/99/e7bcc643085131cb14dbea772def72bf1f6fe9037171ebe177c4f228abc8/rignore-0.6.4-cp313-cp313-win32.whl", hash = "sha256:d0615a6bf4890ec5a90b5fb83666822088fbd4e8fcd740c386fcce51e2f6feea", size = 641761 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/d9/25/7798908044f27dea1a8abdc75c14523e33770137651e5f775a15143f4218/rignore-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:145177f0e32716dc2f220b07b3cde2385b994b7ea28d5c96fbec32639e9eac6f", size = 719876 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/b4/e3/ae1e30b045bf004ad77bbd1679b9afff2be8edb166520921c6f29420516a/rignore-0.6.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e55bf8f9bbd186f58ab646b4a08718c77131d28a9004e477612b0cbbd5202db2", size = 891776 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/45/a9/1193e3bc23ca0e6eb4f17cf4b99971237f97cfa6f241d98366dff90a6d09/rignore-0.6.4-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2521f7bf3ee1f2ab22a100a3a4eed39a97b025804e5afe4323528e9ce8f084a5", size = 871442 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/20/83/4c52ae429a0b2e1ce667e35b480e9a6846f9468c443baeaed5d775af9485/rignore-0.6.4-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0cc35773a8a9c119359ef974d0856988d4601d4daa6f532c05f66b4587cf35bc", size = 1159844 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/c1/2f/c740f5751f464c937bfe252dc15a024ae081352cfe80d94aa16d6a617482/rignore-0.6.4-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b665b1ea14457d7b49e834baabc635a3b8c10cfb5cca5c21161fabdbfc2b850e", size = 939456 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/fc/dd/68dbb08ac0edabf44dd144ff546a3fb0253c5af708e066847df39fc9188f/rignore-0.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c7fd339f344a8548724f289495b835bed7b81174a0bc1c28c6497854bd8855db", size = 1067070 }, + { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/3b/3a/7e7ea6f0d31d3f5beb0f2cf2c4c362672f5f7f125714458673fc579e2bed/rignore-0.6.4-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:91dc94b1cc5af8d6d25ce6edd29e7351830f19b0a03b75cb3adf1f76d00f3007", size = 1134598 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/7e/06/1b3307f6437d29bede5a95738aa89e6d910ba68d4054175c9f60d8e2c6b1/rignore-0.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:4d1918221a249e5342b60fd5fa513bf3d6bf272a8738e66023799f0c82ecd788", size = 1108862 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/b0/d5/b37c82519f335f2c472a63fc6215c6f4c51063ecf3166e3acf508011afbd/rignore-0.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:240777332b859dc89dcba59ab6e3f1e062bc8e862ffa3e5f456e93f7fd5cb415", size = 1120002 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/ac/72/2f05559ed5e69bdfdb56ea3982b48e6c0017c59f7241f7e1c5cae992b347/rignore-0.6.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b0e548753e55cc648f1e7b02d9f74285fe48bb49cec93643d31e563773ab3f", size = 949454 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/0b/92/186693c8f838d670510ac1dfb35afbe964320fbffb343ba18f3d24441941/rignore-0.6.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6971ac9fdd5a0bd299a181096f091c4f3fd286643adceba98eccc03c688a6637", size = 974663 }, +] + +[[package]] +name = "sentry-sdk" +version = "2.35.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "certifi" }, + { name = "urllib3" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/31/83/055dc157b719651ef13db569bb8cf2103df11174478649735c1b2bf3f6bc/sentry_sdk-2.35.0.tar.gz", hash = "sha256:5ea58d352779ce45d17bc2fa71ec7185205295b83a9dbb5707273deb64720092", size = 343014 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/36/3d/742617a7c644deb0c1628dcf6bb2d2165ab7c6aab56fe5222758994007f8/sentry_sdk-2.35.0-py2.py3-none-any.whl", hash = "sha256:6e0c29b9a5d34de8575ffb04d289a987ff3053cf2c98ede445bea995e3830263", size = 363806 }, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486 }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, +] + +[[package]] +name = "starlette" +version = "0.47.2" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/04/57/d062573f391d062710d4088fa1369428c38d51460ab6fedff920efef932e/starlette-0.47.2.tar.gz", hash = "sha256:6ae9aa5db235e4846decc1e7b79c4f346adf41e9777aebeb49dfd09bbd7023d8", size = 2583948 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/f7/1f/b876b1f83aef204198a42dc101613fefccb32258e5428b5f9259677864b4/starlette-0.47.2-py3-none-any.whl", hash = "sha256:c5847e96134e5c5371ee9fac6fdf1a67336d5815e09eb2a01fdb57a351ef915b", size = 72984 }, +] + +[[package]] +name = "tqdm" +version = "4.67.1" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 }, +] + +[[package]] +name = "typer" +version = "0.16.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = "shellingham" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/c5/8c/7d682431efca5fd290017663ea4588bf6f2c6aad085c7f108c5dbc316e70/typer-0.16.0.tar.gz", hash = "sha256:af377ffaee1dbe37ae9440cb4e8f11686ea5ce4e9bae01b84ae7c63b87f1dd3b", size = 102625 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/76/42/3efaf858001d2c2913de7f354563e3a3a2f0decae3efe98427125a8f441e/typer-0.16.0-py3-none-any.whl", hash = "sha256:1f79bed11d4d02d4310e3c1b7ba594183bcedb0ac73b27a9e5f28f6fb5b98855", size = 46317 }, +] + +[[package]] +name = "typing-extensions" +version = "4.14.1" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906 }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552 }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795 }, +] + +[[package]] +name = "uvicorn" +version = "0.35.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406 }, +] + +[package.optional-dependencies] +standard = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "httptools" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, + { name = "watchfiles" }, + { name = "websockets" }, +] + +[[package]] +name = "uvloop" +version = "0.21.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806 }, + { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018 }, +] + +[[package]] +name = "warptestui" +version = "0.1.0" +source = { virtual = "." } +dependencies = [ + { name = "fastapi", extra = ["standard"] }, + { name = "grpcio-tools" }, + { name = "httpx", extra = ["http2"] }, + { name = "openai" }, + { name = "protobuf" }, + { name = "python-dotenv" }, + { name = "requests" }, + { name = "uvicorn", extra = ["standard"] }, + { name = "websockets" }, +] + +[package.metadata] +requires-dist = [ + { name = "fastapi", extras = ["standard"] }, + { name = "grpcio-tools" }, + { name = "httpx", extras = ["http2"] }, + { name = "openai", specifier = ">=1.106.0" }, + { name = "protobuf" }, + { name = "python-dotenv" }, + { name = "requests", specifier = ">=2.32.5" }, + { name = "uvicorn", extras = ["standard"] }, + { name = "websockets", specifier = ">=15.0.1" }, +] + +[[package]] +name = "watchfiles" +version = "1.1.0" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/2a/9a/d451fcc97d029f5812e898fd30a53fd8c15c7bbd058fd75cfc6beb9bd761/watchfiles-1.1.0.tar.gz", hash = "sha256:693ed7ec72cbfcee399e92c895362b6e66d63dac6b91e2c11ae03d10d503e575", size = 94406 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/d3/42/fae874df96595556a9089ade83be34a2e04f0f11eb53a8dbf8a8a5e562b4/watchfiles-1.1.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5007f860c7f1f8df471e4e04aaa8c43673429047d63205d1630880f7637bca30", size = 402004 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/fa/55/a77e533e59c3003d9803c09c44c3651224067cbe7fb5d574ddbaa31e11ca/watchfiles-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:20ecc8abbd957046f1fe9562757903f5eaf57c3bce70929fda6c7711bb58074a", size = 393671 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/05/68/b0afb3f79c8e832e6571022611adbdc36e35a44e14f129ba09709aa4bb7a/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2f0498b7d2a3c072766dba3274fe22a183dbea1f99d188f1c6c72209a1063dc", size = 449772 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/ff/05/46dd1f6879bc40e1e74c6c39a1b9ab9e790bf1f5a2fe6c08b463d9a807f4/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:239736577e848678e13b201bba14e89718f5c2133dfd6b1f7846fa1b58a8532b", size = 456789 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/8b/ca/0eeb2c06227ca7f12e50a47a3679df0cd1ba487ea19cf844a905920f8e95/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eff4b8d89f444f7e49136dc695599a591ff769300734446c0a86cba2eb2f9895", size = 482551 }, + { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/31/47/2cecbd8694095647406645f822781008cc524320466ea393f55fe70eed3b/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12b0a02a91762c08f7264e2e79542f76870c3040bbc847fb67410ab81474932a", size = 597420 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/d9/7e/82abc4240e0806846548559d70f0b1a6dfdca75c1b4f9fa62b504ae9b083/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29e7bc2eee15cbb339c68445959108803dc14ee0c7b4eea556400131a8de462b", size = 477950 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/25/0d/4d564798a49bf5482a4fa9416dea6b6c0733a3b5700cb8a5a503c4b15853/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9481174d3ed982e269c090f780122fb59cee6c3796f74efe74e70f7780ed94c", size = 451706 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/81/b5/5516cf46b033192d544102ea07c65b6f770f10ed1d0a6d388f5d3874f6e4/watchfiles-1.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:80f811146831c8c86ab17b640801c25dc0a88c630e855e2bef3568f30434d52b", size = 625814 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/0c/dd/7c1331f902f30669ac3e754680b6edb9a0dd06dea5438e61128111fadd2c/watchfiles-1.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:60022527e71d1d1fda67a33150ee42869042bce3d0fcc9cc49be009a9cded3fb", size = 622820 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/1b/14/36d7a8e27cd128d7b1009e7715a7c02f6c131be9d4ce1e5c3b73d0e342d8/watchfiles-1.1.0-cp313-cp313-win32.whl", hash = "sha256:32d6d4e583593cb8576e129879ea0991660b935177c0f93c6681359b3654bfa9", size = 279194 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/25/41/2dd88054b849aa546dbeef5696019c58f8e0774f4d1c42123273304cdb2e/watchfiles-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:f21af781a4a6fbad54f03c598ab620e3a77032c5878f3d780448421a6e1818c7", size = 292349 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/c8/cf/421d659de88285eb13941cf11a81f875c176f76a6d99342599be88e08d03/watchfiles-1.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:5366164391873ed76bfdf618818c82084c9db7fac82b64a20c44d335eec9ced5", size = 283836 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/45/10/6faf6858d527e3599cc50ec9fcae73590fbddc1420bd4fdccfebffeedbc6/watchfiles-1.1.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:17ab167cca6339c2b830b744eaf10803d2a5b6683be4d79d8475d88b4a8a4be1", size = 400343 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/03/20/5cb7d3966f5e8c718006d0e97dfe379a82f16fecd3caa7810f634412047a/watchfiles-1.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:328dbc9bff7205c215a7807da7c18dce37da7da718e798356212d22696404339", size = 392916 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/8c/07/d8f1176328fa9e9581b6f120b017e286d2a2d22ae3f554efd9515c8e1b49/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7208ab6e009c627b7557ce55c465c98967e8caa8b11833531fdf95799372633", size = 449582 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/66/e8/80a14a453cf6038e81d072a86c05276692a1826471fef91df7537dba8b46/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a8f6f72974a19efead54195bc9bed4d850fc047bb7aa971268fd9a8387c89011", size = 456752 }, + { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/5a/25/0853b3fe0e3c2f5af9ea60eb2e781eade939760239a72c2d38fc4cc335f6/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d181ef50923c29cf0450c3cd47e2f0557b62218c50b2ab8ce2ecaa02bd97e670", size = 481436 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/fe/9e/4af0056c258b861fbb29dcb36258de1e2b857be4a9509e6298abcf31e5c9/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:adb4167043d3a78280d5d05ce0ba22055c266cf8655ce942f2fb881262ff3cdf", size = 596016 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/c5/fa/95d604b58aa375e781daf350897aaaa089cff59d84147e9ccff2447c8294/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c5701dc474b041e2934a26d31d39f90fac8a3dee2322b39f7729867f932b1d4", size = 476727 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/65/95/fe479b2664f19be4cf5ceeb21be05afd491d95f142e72d26a42f41b7c4f8/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b067915e3c3936966a8607f6fe5487df0c9c4afb85226613b520890049deea20", size = 451864 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/d3/8a/3c4af14b93a15ce55901cd7a92e1a4701910f1768c78fb30f61d2b79785b/watchfiles-1.1.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:9c733cda03b6d636b4219625a4acb5c6ffb10803338e437fb614fef9516825ef", size = 625626 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/da/f5/cf6aa047d4d9e128f4b7cde615236a915673775ef171ff85971d698f3c2c/watchfiles-1.1.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:cc08ef8b90d78bfac66f0def80240b0197008e4852c9f285907377b2947ffdcb", size = 622744 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/2c/00/70f75c47f05dea6fd30df90f047765f6fc2d6eb8b5a3921379b0b04defa2/watchfiles-1.1.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:9974d2f7dc561cce3bb88dfa8eb309dab64c729de85fba32e98d75cf24b66297", size = 402114 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/53/03/acd69c48db4a1ed1de26b349d94077cca2238ff98fd64393f3e97484cae6/watchfiles-1.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c68e9f1fcb4d43798ad8814c4c1b61547b014b667216cb754e606bfade587018", size = 393879 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/2f/c8/a9a2a6f9c8baa4eceae5887fecd421e1b7ce86802bcfc8b6a942e2add834/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95ab1594377effac17110e1352989bdd7bdfca9ff0e5eeccd8c69c5389b826d0", size = 450026 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/fe/51/d572260d98388e6e2b967425c985e07d47ee6f62e6455cefb46a6e06eda5/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fba9b62da882c1be1280a7584ec4515d0a6006a94d6e5819730ec2eab60ffe12", size = 457917 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/c6/2d/4258e52917bf9f12909b6ec314ff9636276f3542f9d3807d143f27309104/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3434e401f3ce0ed6b42569128b3d1e3af773d7ec18751b918b89cd49c14eaafb", size = 483602 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/84/99/bee17a5f341a4345fe7b7972a475809af9e528deba056f8963d61ea49f75/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa257a4d0d21fcbca5b5fcba9dca5a78011cb93c0323fb8855c6d2dfbc76eb77", size = 596758 }, + { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/40/76/e4bec1d59b25b89d2b0716b41b461ed655a9a53c60dc78ad5771fda5b3e6/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7fd1b3879a578a8ec2076c7961076df540b9af317123f84569f5a9ddee64ce92", size = 477601 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/1f/fa/a514292956f4a9ce3c567ec0c13cce427c158e9f272062685a8a727d08fc/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62cc7a30eeb0e20ecc5f4bd113cd69dcdb745a07c68c0370cea919f373f65d9e", size = 451936 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/32/5d/c3bf927ec3bbeb4566984eba8dd7a8eb69569400f5509904545576741f88/watchfiles-1.1.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:891c69e027748b4a73847335d208e374ce54ca3c335907d381fde4e41661b13b", size = 626243 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/e6/65/6e12c042f1a68c556802a84d54bb06d35577c81e29fba14019562479159c/watchfiles-1.1.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:12fe8eaffaf0faa7906895b4f8bb88264035b3f0243275e0bf24af0436b27259", size = 623073 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/89/ab/7f79d9bf57329e7cbb0a6fd4c7bd7d0cee1e4a8ef0041459f5409da3506c/watchfiles-1.1.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:bfe3c517c283e484843cb2e357dd57ba009cff351edf45fb455b5fbd1f45b15f", size = 400872 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/df/d5/3f7bf9912798e9e6c516094db6b8932df53b223660c781ee37607030b6d3/watchfiles-1.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a9ccbf1f129480ed3044f540c0fdbc4ee556f7175e5ab40fe077ff6baf286d4e", size = 392877 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/0d/c5/54ec7601a2798604e01c75294770dbee8150e81c6e471445d7601610b495/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba0e3255b0396cac3cc7bbace76404dd72b5438bf0d8e7cefa2f79a7f3649caa", size = 449645 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/0a/04/c2f44afc3b2fce21ca0b7802cbd37ed90a29874f96069ed30a36dfe57c2b/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4281cd9fce9fc0a9dbf0fc1217f39bf9cf2b4d315d9626ef1d4e87b84699e7e8", size = 457424 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/9f/b0/eec32cb6c14d248095261a04f290636da3df3119d4040ef91a4a50b29fa5/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d2404af8db1329f9a3c9b79ff63e0ae7131986446901582067d9304ae8aaf7f", size = 481584 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/d1/e2/ca4bb71c68a937d7145aa25709e4f5d68eb7698a25ce266e84b55d591bbd/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e78b6ed8165996013165eeabd875c5dfc19d41b54f94b40e9fff0eb3193e5e8e", size = 596675 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/a1/dd/b0e4b7fb5acf783816bc950180a6cd7c6c1d2cf7e9372c0ea634e722712b/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:249590eb75ccc117f488e2fabd1bfa33c580e24b96f00658ad88e38844a040bb", size = 477363 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/69/c4/088825b75489cb5b6a761a4542645718893d395d8c530b38734f19da44d2/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d05686b5487cfa2e2c28ff1aa370ea3e6c5accfe6435944ddea1e10d93872147", size = 452240 }, + { url = 
"https://mirrors.ustc.edu.cn/pypi/packages/10/8c/22b074814970eeef43b7c44df98c3e9667c1f7bf5b83e0ff0201b0bd43f9/watchfiles-1.1.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:d0e10e6f8f6dc5762adee7dece33b722282e1f59aa6a55da5d493a97282fedd8", size = 625607 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/32/fa/a4f5c2046385492b2273213ef815bf71a0d4c1943b784fb904e184e30201/watchfiles-1.1.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:af06c863f152005c7592df1d6a7009c836a247c9d8adb78fef8575a5a98699db", size = 623315 }, +] + +[[package]] +name = "websockets" +version = "15.0.1" +source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" } +sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016 } +wheels = [ + { url = "https://mirrors.ustc.edu.cn/pypi/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195 }, + 
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837 }, + { url = "https://mirrors.ustc.edu.cn/pypi/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743 }, +] diff --git a/warp2protobuf/__init__.py b/warp2protobuf/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1992bb18b0699dc3debede12c0f74d5a1766f2e9 --- /dev/null +++ b/warp2protobuf/__init__.py @@ -0,0 +1,4 @@ +# Re-exported compatibility package for legacy src.* modules +# This package proxies to existing code under src to enable gradual migration. + +__all__ = [] \ No newline at end of file diff --git a/warp2protobuf/__pycache__/__init__.cpython-312.pyc b/warp2protobuf/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..af37550d1291906451a61a79c34d2a36d307caa8 Binary files /dev/null and b/warp2protobuf/__pycache__/__init__.cpython-312.pyc differ diff --git a/warp2protobuf/__pycache__/__init__.cpython-313.pyc b/warp2protobuf/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..91b95973e1b01ef994f0554f21f910a4d22b9134 Binary files /dev/null and b/warp2protobuf/__pycache__/__init__.cpython-313.pyc differ diff --git a/warp2protobuf/api/__init__.py b/warp2protobuf/api/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..859f102362b5e3985fa0a95450761475688ca1c0 --- /dev/null +++ b/warp2protobuf/api/__init__.py @@ -0,0 +1,3 @@ +# API subpackage for warp2protobuf + +__all__ = [] \ No newline at end of file diff --git a/warp2protobuf/api/__pycache__/__init__.cpython-312.pyc b/warp2protobuf/api/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..34ff90766bdfa8272975959cd520cfeb701bd3df Binary files /dev/null and b/warp2protobuf/api/__pycache__/__init__.cpython-312.pyc differ diff --git a/warp2protobuf/api/__pycache__/__init__.cpython-313.pyc b/warp2protobuf/api/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..42b9b3c8a4f3489044b4609c3cb3e9818385e7ad Binary files /dev/null and b/warp2protobuf/api/__pycache__/__init__.cpython-313.pyc differ diff --git a/warp2protobuf/api/__pycache__/protobuf_routes.cpython-312.pyc b/warp2protobuf/api/__pycache__/protobuf_routes.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dd90705db869f86a023ad17777a0187a96dc6438 Binary files /dev/null and b/warp2protobuf/api/__pycache__/protobuf_routes.cpython-312.pyc differ diff --git a/warp2protobuf/api/__pycache__/protobuf_routes.cpython-313.pyc b/warp2protobuf/api/__pycache__/protobuf_routes.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..912e93e2b9c5157fec1d62da350b22f982501fd6 Binary files /dev/null and b/warp2protobuf/api/__pycache__/protobuf_routes.cpython-313.pyc differ diff --git 
a/warp2protobuf/api/protobuf_routes.py b/warp2protobuf/api/protobuf_routes.py new file mode 100644 index 0000000000000000000000000000000000000000..081ba418f95875ade439371befb64bc02dd4cd55 --- /dev/null +++ b/warp2protobuf/api/protobuf_routes.py @@ -0,0 +1,409 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Warp API 桥接路由 + +提供 /healthz、/api/warp/send_stream、/api/warp/send_stream_sse 等最小桥接端点与 JWT 管理。 +""" +import json + +import asyncio +import httpx +from typing import Any, Dict, List, Optional +from datetime import datetime + +from fastapi import FastAPI, HTTPException + +from fastapi.middleware.cors import CORSMiddleware +from pydantic import BaseModel + +from ..core.logging import logger +from ..core.protobuf_utils import protobuf_to_dict, dict_to_protobuf_bytes +from ..core.auth import get_jwt_token, refresh_jwt_if_needed, is_token_expired, get_valid_jwt, acquire_anonymous_access_token + + +from ..config.settings import CLIENT_VERSION, OS_CATEGORY, OS_NAME, OS_VERSION, WARP_URL as CONFIG_WARP_URL +from ..core.server_message_data import decode_server_message_data, encode_server_message_data + + +def _encode_smd_inplace(obj: Any) -> Any: + if isinstance(obj, dict): + new_d = {} + for k, v in obj.items(): + if k in ("server_message_data", "serverMessageData") and isinstance(v, dict): + try: + b64 = encode_server_message_data( + uuid=v.get("uuid"), + seconds=v.get("seconds"), + nanos=v.get("nanos"), + ) + new_d[k] = b64 + except Exception: + new_d[k] = v + else: + new_d[k] = _encode_smd_inplace(v) + return new_d + elif isinstance(obj, list): + return [_encode_smd_inplace(x) for x in obj] + else: + return obj + + +def _decode_smd_inplace(obj: Any) -> Any: + if isinstance(obj, dict): + new_d = {} + for k, v in obj.items(): + if k in ("server_message_data", "serverMessageData") and isinstance(v, str): + try: + dec = decode_server_message_data(v) + new_d[k] = dec + except Exception: + new_d[k] = v + else: + new_d[k] = _decode_smd_inplace(v) + return new_d + elif isinstance(obj, list): + return [_decode_smd_inplace(x) for x in obj] + else: + return obj +from ..core.schema_sanitizer import sanitize_mcp_input_schema_in_packet + + +class EncodeRequest(BaseModel): + json_data: Optional[Dict[str, Any]] = None + message_type: str = "warp.multi_agent.v1.Request" + + task_context: Optional[Dict[str, Any]] = None + input: Optional[Dict[str, Any]] = None + settings: Optional[Dict[str, Any]] = None + metadata: Optional[Dict[str, Any]] = None + mcp_context: Optional[Dict[str, Any]] = None + existing_suggestions: Optional[Dict[str, Any]] = None + client_version: Optional[str] = None + os_category: Optional[str] = None + os_name: Optional[str] = None + os_version: Optional[str] = None + + class Config: + extra = "allow" + + def get_data(self) -> Dict[str, Any]: + if self.json_data is not None: + return self.json_data + else: + data: Dict[str, Any] = {} + if self.task_context is not None: + data["task_context"] = self.task_context + if self.input is not None: + data["input"] = self.input + if self.settings is not None: + data["settings"] = self.settings + if self.metadata is not None: + data["metadata"] = self.metadata + if self.mcp_context is not None: + data["mcp_context"] = self.mcp_context + if self.existing_suggestions is not None: + data["existing_suggestions"] = self.existing_suggestions + if self.client_version is not None: + data["client_version"] = self.client_version + if self.os_category is not None: + data["os_category"] = self.os_category + if self.os_name is not None: + data["os_name"] 
= self.os_name + if self.os_version is not None: + data["os_version"] = self.os_version + + skip_keys = { + "json_data", "message_type", "task_context", "input", "settings", "metadata", + "mcp_context", "existing_suggestions", "client_version", "os_category", "os_name", "os_version" + } + try: + for k, v in self.__dict__.items(): + if v is None: + continue + if k in skip_keys: + continue + if k not in data: + data[k] = v + except Exception: + pass + return data + + +app = FastAPI(title="Warp Protobuf Codec Server", version="1.0.0") +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + + +@app.get("/") +async def root(): + return {"message": "Warp Protobuf Codec Server", "version": "1.0.0"} + + +@app.get("/healthz") +async def health_check(): + return {"status": "ok", "timestamp": datetime.now().isoformat()} + + +@app.post("/api/auth/refresh") +async def refresh_auth_token(): + try: + success = await refresh_jwt_if_needed() + if success: + return {"success": True, "message": "JWT token refreshed successfully", "timestamp": datetime.now().isoformat()} + else: + return {"success": False, "message": "JWT token refresh failed", "suggestion": "Check network connectivity or run 'uv run refresh_jwt.py' manually"} + except Exception as e: + logger.error(f"❌ Failed to refresh JWT token: {e}") + raise HTTPException(500, f"Token refresh failed: {e}") + + +@app.post("/api/warp/send_stream") +async def send_to_warp_api_parsed(request: EncodeRequest): + try: + logger.info(f"Received Warp API parsed-send request, message type: {request.message_type}") + actual_data = request.get_data() + if not actual_data: + raise HTTPException(400, "Request packet must not be empty") + wrapped = {"json_data": actual_data} + wrapped = sanitize_mcp_input_schema_in_packet(wrapped) + actual_data = wrapped.get("json_data", actual_data) + actual_data = _encode_smd_inplace(actual_data) + protobuf_bytes = dict_to_protobuf_bytes(actual_data, request.message_type) + logger.info(f"✅ Encoded JSON to protobuf: {len(protobuf_bytes)} bytes") + from ..warp.api_client import send_protobuf_to_warp_api_parsed + response_text, conversation_id, task_id, parsed_events = await send_protobuf_to_warp_api_parsed(protobuf_bytes) + parsed_events = _decode_smd_inplace(parsed_events) + result = {"response": response_text, "conversation_id": conversation_id, "task_id": task_id, "request_size": len(protobuf_bytes), "response_size": len(response_text), "message_type": request.message_type, "parsed_events": parsed_events, "events_count": len(parsed_events), "events_summary": {}} + if parsed_events: + event_type_counts = {} + for event in parsed_events: + event_type = event.get("event_type", "UNKNOWN") + event_type_counts[event_type] = event_type_counts.get(event_type, 0) + 1 + result["events_summary"] = event_type_counts + logger.info(f"✅ Warp API parsed call succeeded, response length: {len(response_text)} chars, events: {len(parsed_events)}") + return result + except Exception as e: + import traceback + error_details = {"error": str(e), "error_type": type(e).__name__, "traceback": traceback.format_exc(), "request_info": {"message_type": request.message_type, "json_size": len(str(actual_data)) if 'actual_data' in locals() else 0, "has_tools": "mcp_context" in (actual_data or {}), "has_history": "task_context" in (actual_data or {})}} + logger.error(f"❌ Warp API parsed call failed: {e}") + logger.error(f"Error details: {error_details}") + raise HTTPException(500, detail=error_details)
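[Editor's note] A minimal client sketch for the endpoint above, for orientation only and not part of the diff. It assumes the module's own __main__ entry point (uvicorn on localhost:8000) and a packet shaped like the input.user_inputs.inputs[].user_query.query path that the proto runtime in warp2protobuf/core/protobuf.py targets; adjust host, port, and packet to your deployment.

import asyncio
import httpx

async def demo() -> None:
    # EncodeRequest accepts either a full packet under "json_data" or the
    # task_context/input/settings/... sections as top-level fields.
    payload = {
        "message_type": "warp.multi_agent.v1.Request",
        "json_data": {"input": {"user_inputs": {"inputs": [{"user_query": {"query": "hello"}}]}}},
    }
    async with httpx.AsyncClient(timeout=120.0) as client:
        resp = await client.post("http://127.0.0.1:8000/api/warp/send_stream", json=payload)
        resp.raise_for_status()
        body = resp.json()
        # The route responds with aggregated text plus per-event diagnostics.
        print(body["conversation_id"], body["events_count"])
        print(body["response"])

asyncio.run(demo())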
+ + @app.post("/api/warp/send_stream_sse") +async def send_to_warp_api_stream_sse(request: EncodeRequest): + from fastapi.responses import StreamingResponse + import os as _os + import re as _re + try: + actual_data = request.get_data() + if not actual_data: + raise HTTPException(400, "Request packet must not be empty") + wrapped = {"json_data": actual_data} + wrapped = sanitize_mcp_input_schema_in_packet(wrapped) + actual_data = wrapped.get("json_data", actual_data) + actual_data = _encode_smd_inplace(actual_data) + protobuf_bytes = dict_to_protobuf_bytes(actual_data, request.message_type) + async def _agen(): + warp_url = CONFIG_WARP_URL + def _parse_payload_bytes(data_str: str): + import base64 as _b64 + s = _re.sub(r"\s+", "", data_str or "") + if not s: + return None + if _re.fullmatch(r"[0-9a-fA-F]+", s): + try: + return bytes.fromhex(s) + except Exception: + pass + pad = "=" * ((4 - (len(s) % 4)) % 4) + try: + return _b64.urlsafe_b64decode(s + pad) + except Exception: + try: + return _b64.b64decode(s + pad) + except Exception: + return None + verify_opt = True + insecure_env = _os.getenv("WARP_INSECURE_TLS", "").lower() + if insecure_env in ("1", "true", "yes"): + verify_opt = False + logger.warning("TLS verification disabled via WARP_INSECURE_TLS for Warp API stream endpoint") + async with httpx.AsyncClient(http2=True, timeout=httpx.Timeout(60.0), verify=verify_opt, trust_env=True) as client: + # Try at most twice: if the first attempt fails with a quota 429, acquire an anonymous token and retry once + jwt = None + for attempt in range(2): + if attempt == 0 or jwt is None: + jwt = await get_valid_jwt() + headers = { + "accept": "text/event-stream", + "content-type": "application/x-protobuf", + "x-warp-client-version": CLIENT_VERSION, + "x-warp-os-category": OS_CATEGORY, + "x-warp-os-name": OS_NAME, + "x-warp-os-version": OS_VERSION, + "authorization": f"Bearer {jwt}", + "content-length": str(len(protobuf_bytes)), + } + async with client.stream("POST", warp_url, headers=headers, content=protobuf_bytes) as response: + if response.status_code != 200: + error_text = await response.aread() + error_content = error_text.decode("utf-8") if error_text else "" + + # On 401 (invalid token), refresh the JWT and retry once + if response.status_code == 401 and attempt == 0: + logger.warning("Warp API returned 401 (invalid token, SSE proxy). Refreshing JWT and retrying once…") + try: + refresh_success = await refresh_jwt_if_needed() + if refresh_success: + jwt = await get_valid_jwt() + logger.info("JWT refreshed, retrying API call (SSE proxy)") + continue + else: + logger.warning("JWT refresh failed, trying to acquire an anonymous token (SSE proxy)") + new_jwt = await acquire_anonymous_access_token() + if new_jwt: + jwt = new_jwt + continue + except Exception as e: + logger.warning(f"JWT refresh error (SSE proxy): {e}") + + # On 429 with quota information, drop the current token and acquire a new one + if response.status_code == 429 and attempt == 0 and ( + ("No remaining quota" in error_content) or ("No AI requests remaining" in error_content) + ): + logger.warning("Warp API returned 429 (quota exhausted, SSE proxy). Removing current token and re-acquiring…") + try: + from ..core.auth import remove_token_from_pool + remove_token_from_pool(jwt) + new_jwt = await get_valid_jwt() + except Exception: + new_jwt = None + if new_jwt and new_jwt != jwt: + jwt = new_jwt + # retry + continue + logger.error(f"Warp API HTTP error {response.status_code}: {error_content[:300]}") + yield f"data: {{\"error\": \"HTTP {response.status_code}\"}}\n\n" + yield "data: [DONE]\n\n" + return + try: + logger.info(f"✅ Warp API SSE connection established: {warp_url}") + logger.info(f"📦 Request bytes: {len(protobuf_bytes)}") + except Exception: + pass + current_data = "" + event_no = 0 + async for line in response.aiter_lines(): + if
line.startswith("data:"): + payload = line[5:].strip() + if not payload: + continue + if payload == "[DONE]": + break + current_data += payload + continue + if (line.strip() == "") and current_data: + raw_bytes = _parse_payload_bytes(current_data) + current_data = "" + if raw_bytes is None: + continue + try: + event_data = protobuf_to_dict(raw_bytes, "warp.multi_agent.v1.ResponseEvent") + except Exception: + continue + def _get(d: Dict[str, Any], *names: str) -> Any: + for n in names: + if isinstance(d, dict) and n in d: + return d[n] + return None + event_type = "UNKNOWN_EVENT" + if isinstance(event_data, dict): + if "init" in event_data: + event_type = "INITIALIZATION" + else: + client_actions = _get(event_data, "client_actions", "clientActions") + if isinstance(client_actions, dict): + actions = _get(client_actions, "actions", "Actions") or [] + event_type = f"CLIENT_ACTIONS({len(actions)})" if actions else "CLIENT_ACTIONS_EMPTY" + elif "finished" in event_data: + event_type = "FINISHED" + event_no += 1 + try: + logger.info(f"🔄 SSE Event #{event_no}: {event_type}") + except Exception: + pass + out = {"event_number": event_no, "event_type": event_type, "parsed_data": event_data} + try: + chunk = json.dumps(out, ensure_ascii=False) + except Exception: + continue + yield f"data: {chunk}\n\n" + try: + logger.info("="*60) + logger.info("📊 SSE STREAM SUMMARY (代理)") + logger.info("="*60) + logger.info(f"📈 Total Events Forwarded: {event_no}") + logger.info("="*60) + except Exception: + pass + yield "data: [DONE]\n\n" + return + return StreamingResponse(_agen(), media_type="text/event-stream", headers={"Cache-Control": "no-cache", "Connection": "keep-alive"}) + except HTTPException: + raise + except Exception as e: + import traceback + error_details = {"error": str(e), "error_type": type(e).__name__, "traceback": traceback.format_exc()} + logger.error(f"Warp SSE转发端点错误: {e}") + raise HTTPException(500, detail=error_details) + + + + + +if __name__ == "__main__": + import uvicorn + uvicorn.run(app, host="0.0.0.0", port=8000) \ No newline at end of file diff --git a/warp2protobuf/config/__init__.py b/warp2protobuf/config/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..12919d799e4bc6109ad8b400640b90dbd9fbda83 --- /dev/null +++ b/warp2protobuf/config/__init__.py @@ -0,0 +1,3 @@ +# Re-export common config modules +from .settings import * # noqa: F401,F403 +from .models import * # noqa: F401,F403 \ No newline at end of file diff --git a/warp2protobuf/config/__pycache__/__init__.cpython-312.pyc b/warp2protobuf/config/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..81a4d3a22e37abe779b99cc016dd37611e35d9f5 Binary files /dev/null and b/warp2protobuf/config/__pycache__/__init__.cpython-312.pyc differ diff --git a/warp2protobuf/config/__pycache__/__init__.cpython-313.pyc b/warp2protobuf/config/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..280e41b8c98eb4a61baf6731d6d0bae8ea9ae039 Binary files /dev/null and b/warp2protobuf/config/__pycache__/__init__.cpython-313.pyc differ diff --git a/warp2protobuf/config/__pycache__/models.cpython-312.pyc b/warp2protobuf/config/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bfac890a29e540980de13bc6c7a664c417087813 Binary files /dev/null and b/warp2protobuf/config/__pycache__/models.cpython-312.pyc differ diff --git a/warp2protobuf/config/__pycache__/models.cpython-313.pyc 
b/warp2protobuf/config/__pycache__/models.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bfbf482d04dabbe4b53ac43aab2cdfc2e79036dc Binary files /dev/null and b/warp2protobuf/config/__pycache__/models.cpython-313.pyc differ diff --git a/warp2protobuf/config/__pycache__/settings.cpython-312.pyc b/warp2protobuf/config/__pycache__/settings.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4835f96855595c81107e2af1128afc69a594c696 Binary files /dev/null and b/warp2protobuf/config/__pycache__/settings.cpython-312.pyc differ diff --git a/warp2protobuf/config/__pycache__/settings.cpython-313.pyc b/warp2protobuf/config/__pycache__/settings.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eb8fb684b91467ac0610d81465ab79fd7ecc08e2 Binary files /dev/null and b/warp2protobuf/config/__pycache__/settings.cpython-313.pyc differ diff --git a/warp2protobuf/config/models.py b/warp2protobuf/config/models.py new file mode 100644 index 0000000000000000000000000000000000000000..ae840d1d9479b53b969c3ec76f4ac290260ba2c9 --- /dev/null +++ b/warp2protobuf/config/models.py @@ -0,0 +1,328 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Model configuration and catalog for Warp API + +Contains model definitions, configurations, and OpenAI compatibility mappings. +""" +import time + + +def get_model_config(model_name: str) -> dict: + """ + Simple model configuration mapping. + All models use the same pattern: base model + o3 planning + auto coding + """ + # Known models that map directly + known_models = { + "claude-4-sonnet", "claude-4-opus", "claude-4.1-opus", + "gpt-5", "gpt-4o", "gpt-4.1", "o3", "o4-mini", + "gemini-2.5-pro", "warp-basic" + } + + model_name = model_name.lower().strip() + + # Use the model name directly if it's known, otherwise use "auto" + base_model = model_name if model_name in known_models else "auto" + + return { + "base": base_model, + "planning": "o3", + "coding": "auto" + } + + +def get_warp_models(): + """Get comprehensive list of Warp AI models from packet analysis""" + return { + "agent_mode": { + "default": "auto", + "models": [ + { + "id": "auto", + "display_name": "auto", + "description": "claude 4 sonnet", + "vision_supported": True, + "usage_multiplier": 1, + "category": "agent" + }, + { + "id": "warp-basic", + "display_name": "lite", + "description": "basic model", + "vision_supported": True, + "usage_multiplier": 1, + "category": "agent" + }, + { + "id": "gpt-5", + "display_name": "gpt-5", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "agent" + }, + { + "id": "claude-4-sonnet", + "display_name": "claude 4 sonnet", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "agent" + }, + { + "id": "claude-4-opus", + "display_name": "claude 4 opus", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "agent" + }, + { + "id": "claude-4.1-opus", + "display_name": "claude 4.1 opus", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "agent" + }, + { + "id": "gpt-4o", + "display_name": "gpt-4o", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "agent" + }, + { + "id": "gpt-4.1", + "display_name": "gpt-4.1", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "agent" + }, + { + "id": "o4-mini", + "display_name": "o4-mini", + "description": None, + 
"vision_supported": True, + "usage_multiplier": 1, + "category": "agent" + }, + { + "id": "o3", + "display_name": "o3", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "agent" + }, + { + "id": "gemini-2.5-pro", + "display_name": "gemini 2.5 pro", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "agent" + } + ] + }, + "planning": { + "default": "o3", + "models": [ + { + "id": "warp-basic", + "display_name": "lite", + "description": "basic model", + "vision_supported": True, + "usage_multiplier": 1, + "category": "planning" + }, + { + "id": "gpt-5 (high reasoning)", + "display_name": "gpt-5", + "description": "high reasoning", + "vision_supported": False, + "usage_multiplier": 1, + "category": "planning" + }, + { + "id": "claude-4-opus", + "display_name": "claude 4 opus", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "planning" + }, + { + "id": "claude-4.1-opus", + "display_name": "claude 4.1 opus", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "planning" + }, + { + "id": "gpt-4.1", + "display_name": "gpt-4.1", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "planning" + }, + { + "id": "o4-mini", + "display_name": "o4-mini", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "planning" + }, + { + "id": "o3", + "display_name": "o3", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "planning" + } + ] + }, + "coding": { + "default": "auto", + "models": [ + { + "id": "auto", + "display_name": "auto", + "description": "claude 4 sonnet", + "vision_supported": True, + "usage_multiplier": 1, + "category": "coding" + }, + { + "id": "warp-basic", + "display_name": "lite", + "description": "basic model", + "vision_supported": True, + "usage_multiplier": 1, + "category": "coding" + }, + { + "id": "gpt-5", + "display_name": "gpt-5", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "coding" + }, + { + "id": "claude-4-sonnet", + "display_name": "claude 4 sonnet", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "coding" + }, + { + "id": "claude-4-opus", + "display_name": "claude 4 opus", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "coding" + }, + { + "id": "claude-4.1-opus", + "display_name": "claude 4.1 opus", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "coding" + }, + { + "id": "gpt-4o", + "display_name": "gpt-4o", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "coding" + }, + { + "id": "gpt-4.1", + "display_name": "gpt-4.1", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "coding" + }, + { + "id": "o4-mini", + "display_name": "o4-mini", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "coding" + }, + { + "id": "o3", + "display_name": "o3", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "coding" + }, + { + "id": "gemini-2.5-pro", + "display_name": "gemini 2.5 pro", + "description": None, + "vision_supported": True, + "usage_multiplier": 1, + "category": "coding" + } + ] + } + } + + +def get_all_unique_models(): + """Get all unique models across all categories for OpenAI API 
compatibility""" + try: + models_data = get_warp_models() + unique_models = {} + + # Collect all unique models across categories + for category_data in models_data.values(): + for model in category_data["models"]: + model_id = model["id"] + if model_id not in unique_models: + # Create OpenAI-compatible model entry + unique_models[model_id] = { + "id": model_id, + "object": "model", + "created": int(time.time()), + "owned_by": "warp", + "display_name": model["display_name"], + "description": model["description"] or model["display_name"], + "vision_supported": model["vision_supported"], + "usage_multiplier": model["usage_multiplier"], + "categories": [model["category"]] + } + else: + # Add category if model appears in multiple categories + if model["category"] not in unique_models[model_id]["categories"]: + unique_models[model_id]["categories"].append(model["category"]) + + return list(unique_models.values()) + except Exception: + # Fallback to simple model list + return [ + { + "id": "auto", + "object": "model", + "created": int(time.time()), + "owned_by": "warp", + "display_name": "auto", + "description": "Auto-select best model" + } + ] \ No newline at end of file diff --git a/warp2protobuf/config/settings.py b/warp2protobuf/config/settings.py new file mode 100644 index 0000000000000000000000000000000000000000..33080a8ea52d20dec10fc1523376b31681b22050 --- /dev/null +++ b/warp2protobuf/config/settings.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Configuration settings for Warp API server + +Contains environment variables, paths, and constants. +""" +import os +import pathlib +from dotenv import load_dotenv + +# Load environment variables +load_dotenv() + +# Path configurations +SCRIPT_DIR = pathlib.Path(__file__).resolve().parent.parent.parent +PROTO_DIR = SCRIPT_DIR / "proto" +LOGS_DIR = SCRIPT_DIR / "logs" + +# API configuration +WARP_URL = "https://app.warp.dev/ai/multi-agent" + +# Environment variables with defaults +HOST = os.getenv("HOST", "0.0.0.0") +PORT = int(os.getenv("PORT", "8002")) +WARP_JWT = os.getenv("WARP_JWT") + +# Client headers configuration +CLIENT_VERSION = "v0.2025.08.06.08.12.stable_02" +OS_CATEGORY = "Windows" +OS_NAME = "Windows" +OS_VERSION = "11 (26100)" + +# Protobuf field names for text detection +TEXT_FIELD_NAMES = ("text", "prompt", "query", "content", "message", "input") +PATH_HINT_BONUS = ("conversation", "query", "input", "user", "request", "delta") + +# Response parsing configuration +SYSTEM_STR = {"agent_output.text", "server_message_data", "USER_INITIATED", "agent_output", "text"} + +# JWT refresh configuration +REFRESH_TOKEN_B64 = "Z3JhbnRfdHlwZT1yZWZyZXNoX3Rva2VuJnJlZnJlc2hfdG9rZW49QU1mLXZCeFNSbWRodmVHR0JZTTY5cDA1a0RoSW4xaTd3c2NBTEVtQzlmWURScEh6akVSOWRMN2trLWtIUFl3dlk5Uk9rbXk1MHFHVGNJaUpaNEFtODZoUFhrcFZQTDkwSEptQWY1Zlo3UGVqeXBkYmNLNHdzbzhLZjNheGlTV3RJUk9oT2NuOU56R2FTdmw3V3FSTU5PcEhHZ0JyWW40SThrclc1N1I4X3dzOHU3WGNTdzh1MERpTDlIcnBNbTBMdHdzQ2g4MWtfNmJiMkNXT0ViMWxJeDNIV1NCVGVQRldzUQ==" +REFRESH_URL = "https://app.warp.dev/proxy/token?key=AIzaSyBdy3O3S9hrdayLJxJ7mriBR4qgUaUygAs" \ No newline at end of file diff --git a/warp2protobuf/core/__init__.py b/warp2protobuf/core/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ece4018b81e5a6418aa86d33cb8ea0648e95caec --- /dev/null +++ b/warp2protobuf/core/__init__.py @@ -0,0 +1,3 @@ +# Core subpackage for warp2protobuf + +__all__ = [] \ No newline at end of file diff --git a/warp2protobuf/core/__pycache__/__init__.cpython-312.pyc 
b/warp2protobuf/core/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4f536558bcaa183f4b0df73cd8311da60f60f274 Binary files /dev/null and b/warp2protobuf/core/__pycache__/__init__.cpython-312.pyc differ diff --git a/warp2protobuf/core/__pycache__/__init__.cpython-313.pyc b/warp2protobuf/core/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..49d39c4bab30400a1973d0863c8a57e4a6e1c06f Binary files /dev/null and b/warp2protobuf/core/__pycache__/__init__.cpython-313.pyc differ diff --git a/warp2protobuf/core/__pycache__/auth.cpython-312.pyc b/warp2protobuf/core/__pycache__/auth.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d3338d3000786e4b1cce08742e26e0fbea1be3da Binary files /dev/null and b/warp2protobuf/core/__pycache__/auth.cpython-312.pyc differ diff --git a/warp2protobuf/core/__pycache__/auth.cpython-313.pyc b/warp2protobuf/core/__pycache__/auth.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8cb72c130517985543384bf4257ec4dcbe949f99 Binary files /dev/null and b/warp2protobuf/core/__pycache__/auth.cpython-313.pyc differ diff --git a/warp2protobuf/core/__pycache__/logging.cpython-312.pyc b/warp2protobuf/core/__pycache__/logging.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..97be18032c1d197f295c0317e7c99c086156bf2d Binary files /dev/null and b/warp2protobuf/core/__pycache__/logging.cpython-312.pyc differ diff --git a/warp2protobuf/core/__pycache__/logging.cpython-313.pyc b/warp2protobuf/core/__pycache__/logging.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ca6384df7d217e61f34864929b112bc309c374b2 Binary files /dev/null and b/warp2protobuf/core/__pycache__/logging.cpython-313.pyc differ diff --git a/warp2protobuf/core/__pycache__/protobuf.cpython-312.pyc b/warp2protobuf/core/__pycache__/protobuf.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ec9957292860f5f146118b9d84ab8b00aab88a62 Binary files /dev/null and b/warp2protobuf/core/__pycache__/protobuf.cpython-312.pyc differ diff --git a/warp2protobuf/core/__pycache__/protobuf.cpython-313.pyc b/warp2protobuf/core/__pycache__/protobuf.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cfc4f771fa150447e46af9dee7b66fa8136c41f0 Binary files /dev/null and b/warp2protobuf/core/__pycache__/protobuf.cpython-313.pyc differ diff --git a/warp2protobuf/core/__pycache__/protobuf_utils.cpython-312.pyc b/warp2protobuf/core/__pycache__/protobuf_utils.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..54ad07b089253c86e1e82675311bbb5854785f18 Binary files /dev/null and b/warp2protobuf/core/__pycache__/protobuf_utils.cpython-312.pyc differ diff --git a/warp2protobuf/core/__pycache__/protobuf_utils.cpython-313.pyc b/warp2protobuf/core/__pycache__/protobuf_utils.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f4910b447f55979f56338e26f02334341b40ef59 Binary files /dev/null and b/warp2protobuf/core/__pycache__/protobuf_utils.cpython-313.pyc differ diff --git a/warp2protobuf/core/__pycache__/schema_sanitizer.cpython-312.pyc b/warp2protobuf/core/__pycache__/schema_sanitizer.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8e692c82ff7d83ad08b4db1dbcbbd613b850bab0 Binary files /dev/null and 
b/warp2protobuf/core/__pycache__/schema_sanitizer.cpython-312.pyc differ diff --git a/warp2protobuf/core/__pycache__/schema_sanitizer.cpython-313.pyc b/warp2protobuf/core/__pycache__/schema_sanitizer.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5c5d7245c58dc98516e91878761fc4c7d1f8e5ab Binary files /dev/null and b/warp2protobuf/core/__pycache__/schema_sanitizer.cpython-313.pyc differ diff --git a/warp2protobuf/core/__pycache__/server_message_data.cpython-312.pyc b/warp2protobuf/core/__pycache__/server_message_data.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..66aa14a1edbfb95da4793606360ef7986bb54fc5 Binary files /dev/null and b/warp2protobuf/core/__pycache__/server_message_data.cpython-312.pyc differ diff --git a/warp2protobuf/core/__pycache__/server_message_data.cpython-313.pyc b/warp2protobuf/core/__pycache__/server_message_data.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..89d9ea9b344a2818df0091ecc7e04e596a00d25d Binary files /dev/null and b/warp2protobuf/core/__pycache__/server_message_data.cpython-313.pyc differ diff --git a/warp2protobuf/core/__pycache__/stream_processor.cpython-312.pyc b/warp2protobuf/core/__pycache__/stream_processor.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2c6f3eb57bde9452185001e7660e16d26d888d18 Binary files /dev/null and b/warp2protobuf/core/__pycache__/stream_processor.cpython-312.pyc differ diff --git a/warp2protobuf/core/auth.py b/warp2protobuf/core/auth.py new file mode 100644 index 0000000000000000000000000000000000000000..ecaec0ca6860e14951985e2917f3b7e83c8eb432 --- /dev/null +++ b/warp2protobuf/core/auth.py @@ -0,0 +1,526 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +JWT Authentication for Warp API + +Handles JWT token management, refresh, and validation. +Integrates functionality from refresh_jwt.py. +""" +import base64 +import json +import os +import time +from pathlib import Path +import httpx +import asyncio +from dotenv import load_dotenv, set_key +from typing import List, Dict, Optional +import random + +from ..config.settings import REFRESH_TOKEN_B64, REFRESH_URL, CLIENT_VERSION, OS_CATEGORY, OS_NAME, OS_VERSION +from .logging import logger, log + + +def decode_jwt_payload(token: str) -> dict: + """Decode JWT payload to check expiration""" + try: + parts = token.split('.') + if len(parts) != 3: + return {} + payload_b64 = parts[1] + padding = 4 - len(payload_b64) % 4 + if padding != 4: + payload_b64 += '=' * padding + payload_bytes = base64.urlsafe_b64decode(payload_b64) + payload = json.loads(payload_bytes.decode('utf-8')) + return payload + except Exception as e: + logger.debug(f"Error decoding JWT: {e}") + return {} + + +def is_token_expired(token: str, buffer_minutes: int = 5) -> bool: + payload = decode_jwt_payload(token) + if not payload or 'exp' not in payload: + return True + expiry_time = payload['exp'] + current_time = time.time() + buffer_time = buffer_minutes * 60 + return (expiry_time - current_time) <= buffer_time + + +async def refresh_jwt_token() -> dict: + """Refresh the JWT token using the refresh token. + + Prefers environment variable WARP_REFRESH_TOKEN when present; otherwise + falls back to the baked-in REFRESH_TOKEN_B64 payload. 
+ """ + logger.info("Refreshing JWT token...") + # Prefer dynamic refresh token from environment if present + env_refresh = os.getenv("WARP_REFRESH_TOKEN") + if env_refresh: + payload = f"grant_type=refresh_token&refresh_token={env_refresh}".encode("utf-8") + else: + payload = base64.b64decode(REFRESH_TOKEN_B64) + headers = { + "x-warp-client-version": CLIENT_VERSION, + "x-warp-os-category": OS_CATEGORY, + "x-warp-os-name": OS_NAME, + "x-warp-os-version": OS_VERSION, + "content-type": "application/x-www-form-urlencoded", + "accept": "*/*", + "accept-encoding": "gzip, br", + "content-length": str(len(payload)) + } + try: + async with httpx.AsyncClient(timeout=30.0) as client: + response = await client.post( + REFRESH_URL, + headers=headers, + content=payload + ) + if response.status_code == 200: + token_data = response.json() + logger.info("Token refresh successful") + return token_data + else: + logger.error(f"Token refresh failed: {response.status_code}") + logger.error(f"Response: {response.text}") + return {} + except Exception as e: + logger.error(f"Error refreshing token: {e}") + return {} + + +def update_env_file(new_jwt: str) -> bool: + """ + 更新.env文件中的WARP_JWT + 只有在账号更新时才调用此函数 + """ + env_path = Path(".env") + try: + set_key(str(env_path), "WARP_JWT", new_jwt) + logger.info("Updated .env file with new JWT token") + return True + except Exception as e: + logger.error(f"Error updating .env file: {e}") + return False + + +def update_env_refresh_token(refresh_token: str) -> bool: + """ + 更新.env文件中的WARP_REFRESH_TOKEN + 只有在账号更新时才调用此函数 + """ + env_path = Path(".env") + try: + set_key(str(env_path), "WARP_REFRESH_TOKEN", refresh_token) + logger.info("Updated .env with WARP_REFRESH_TOKEN") + return True + except Exception as e: + logger.error(f"Error updating .env WARP_REFRESH_TOKEN: {e}") + return False + + +async def check_and_refresh_token() -> bool: + current_jwt = os.getenv("WARP_JWT") + if not current_jwt: + logger.warning("No JWT token found in environment") + token_data = await refresh_jwt_token() + if token_data and "access_token" in token_data: + return update_env_file(token_data["access_token"]) + return False + logger.debug("Checking current JWT token expiration...") + if is_token_expired(current_jwt, buffer_minutes=15): + logger.info("JWT token is expired or expiring soon, refreshing...") + token_data = await refresh_jwt_token() + if token_data and "access_token" in token_data: + new_jwt = token_data["access_token"] + if not is_token_expired(new_jwt, buffer_minutes=0): + logger.info("New token is valid") + return update_env_file(new_jwt) + else: + logger.warning("New token appears to be invalid or expired") + return False + else: + logger.error("Failed to get new token from refresh") + return False + else: + payload = decode_jwt_payload(current_jwt) + if payload and 'exp' in payload: + expiry_time = payload['exp'] + time_left = expiry_time - time.time() + hours_left = time_left / 3600 + logger.debug(f"Current token is still valid ({hours_left:.1f} hours remaining)") + else: + logger.debug("Current token appears valid") + return True + + +async def get_valid_jwt() -> str: + # 优先从令牌池获取匿名token + pool_token = await _get_token_from_pool() + if pool_token: + logger.debug("Using token from anonymous pool") + return pool_token + + # 如果没有可用的匿名token,尝试使用.env中的JWT + from dotenv import load_dotenv as _load + _load(override=True) + jwt = os.getenv("WARP_JWT") + if not jwt: + logger.info("No JWT token found, attempting to refresh...") + if await check_and_refresh_token(): + 
_load(override=True) + jwt = os.getenv("WARP_JWT") + if not jwt: + raise RuntimeError("WARP_JWT is not set and refresh failed") + if is_token_expired(jwt, buffer_minutes=2): + logger.info("JWT token is expired or expiring soon, attempting to refresh...") + if await check_and_refresh_token(): + _load(override=True) + jwt = os.getenv("WARP_JWT") + if not jwt or is_token_expired(jwt, buffer_minutes=0): + logger.warning("Warning: New token has short expiry but proceeding anyway") + else: + logger.warning("Warning: JWT token refresh failed, trying to use existing token") + return jwt + + +def get_jwt_token() -> str: + from dotenv import load_dotenv as _load + _load() + return os.getenv("WARP_JWT", "") + + +async def refresh_jwt_if_needed() -> bool: + try: + return await check_and_refresh_token() + except Exception as e: + logger.error(f"JWT refresh failed: {e}") + return False + + +# ============ Anonymous token acquisition (quota refresh) ============ + +_ANON_GQL_URL = "https://app.warp.dev/graphql/v2?op=CreateAnonymousUser" +_IDENTITY_TOOLKIT_BASE = "https://identitytoolkit.googleapis.com/v1/accounts:signInWithCustomToken" + +# 内存缓存匿名token,避免写入.env +_anonymous_token_cache = None + +# 匿名令牌池配置 +PROXY_POOL_KEY = os.getenv("PROXY_POOL_KEY") +# 当没有代理池时,强制令牌数量为1 +TEMP_ACCOUNTS_NUMBER = int(os.getenv("TEMP_ACCOUNTS_NUMBER", "3")) if PROXY_POOL_KEY else 1 +PROXY_POOL_URL = "https://proxy.doudouzi.me/random/us" + +# 匿名令牌池 +_anonymous_token_pool: List[Dict] = [] +_pool_lock = asyncio.Lock() + + +async def _get_proxy() -> Optional[str]: + """ + 从代理池获取代理 + """ + if not PROXY_POOL_KEY: + return None + + try: + params = { + "api_key": PROXY_POOL_KEY, + "check": "true" + } + async with httpx.AsyncClient(timeout=httpx.Timeout(10.0)) as client: + response = await client.get(PROXY_POOL_URL, params=params) + if response.status_code == 200: + proxy_url = response.text.strip() + logger.debug(f"获取到代理: {proxy_url[:50]}...") + return proxy_url + else: + logger.warning(f"代理池请求失败: {response.status_code}") + return None + except Exception as e: + logger.warning(f"获取代理失败: {e}") + return None + + +def _extract_google_api_key_from_refresh_url() -> str: + try: + # REFRESH_URL like: https://app.warp.dev/proxy/token?key=API_KEY + from urllib.parse import urlparse, parse_qs + parsed = urlparse(REFRESH_URL) + qs = parse_qs(parsed.query) + key = qs.get("key", [""])[0] + return key + except Exception: + return "" + + +async def _create_anonymous_user(proxy: Optional[str] = None) -> dict: + headers = { + "accept-encoding": "gzip, br", + "content-type": "application/json", + "x-warp-client-version": CLIENT_VERSION, + "x-warp-os-category": OS_CATEGORY, + "x-warp-os-name": OS_NAME, + "x-warp-os-version": OS_VERSION, + } + # GraphQL payload per anonymous.MD + query = ( + "mutation CreateAnonymousUser($input: CreateAnonymousUserInput!, $requestContext: RequestContext!) {\n" + " createAnonymousUser(input: $input, requestContext: $requestContext) {\n" + " __typename\n" + " ... on CreateAnonymousUserOutput {\n" + " expiresAt\n" + " anonymousUserType\n" + " firebaseUid\n" + " idToken\n" + " isInviteValid\n" + " responseContext { serverVersion }\n" + " }\n" + " ... 
on UserFacingError {\n" + " error { __typename message }\n" + " responseContext { serverVersion }\n" + " }\n" + " }\n" + "}\n" + ) + variables = { + "input": { + "anonymousUserType": "NATIVE_CLIENT_ANONYMOUS_USER_FEATURE_GATED", + "expirationType": "NO_EXPIRATION", + "referralCode": None + }, + "requestContext": { + "clientContext": {"version": CLIENT_VERSION}, + "osContext": { + "category": OS_CATEGORY, + "linuxKernelVersion": None, + "name": OS_NAME, + "version": OS_VERSION, + } + } + } + body = {"query": query, "variables": variables, "operationName": "CreateAnonymousUser"} + + # 配置代理 + client_kwargs = {"timeout": httpx.Timeout(30.0)} + if proxy: + client_kwargs["proxy"] = proxy + + async with httpx.AsyncClient(**client_kwargs) as client: + resp = await client.post(_ANON_GQL_URL, headers=headers, json=body) + if resp.status_code != 200: + raise RuntimeError(f"CreateAnonymousUser failed: HTTP {resp.status_code} {resp.text[:200]}") + data = resp.json() + return data + + +async def _exchange_id_token_for_refresh_token(id_token: str, proxy: Optional[str] = None) -> dict: + key = _extract_google_api_key_from_refresh_url() + url = f"{_IDENTITY_TOOLKIT_BASE}?key={key}" if key else f"{_IDENTITY_TOOLKIT_BASE}?key=AIzaSyBdy3O3S9hrdayLJxJ7mriBR4qgUaUygAs" + headers = { + "accept-encoding": "gzip, br", + "content-type": "application/x-www-form-urlencoded", + "x-warp-client-version": CLIENT_VERSION, + "x-warp-os-category": OS_CATEGORY, + "x-warp-os-name": OS_NAME, + "x-warp-os-version": OS_VERSION, + } + form = { + "returnSecureToken": "true", + "token": id_token, + } + # 配置代理 + client_kwargs = {"timeout": httpx.Timeout(30.0)} + if proxy: + client_kwargs["proxy"] = proxy + + async with httpx.AsyncClient(**client_kwargs) as client: + resp = await client.post(url, headers=headers, data=form) + if resp.status_code != 200: + raise RuntimeError(f"signInWithCustomToken failed: HTTP {resp.status_code} {resp.text[:200]}") + return resp.json() + + +async def _create_single_anonymous_token(proxy: Optional[str] = None) -> Dict: + """ + 创建单个匿名令牌 + """ + logger.info(f"Creating anonymous token{' with proxy' if proxy else ''}...") + data = await _create_anonymous_user(proxy) + id_token = None + try: + id_token = data["data"]["createAnonymousUser"].get("idToken") + except Exception: + pass + if not id_token: + raise RuntimeError(f"CreateAnonymousUser did not return idToken: {data}") + + signin = await _exchange_id_token_for_refresh_token(id_token, proxy) + refresh_token = signin.get("refreshToken") + if not refresh_token: + raise RuntimeError(f"signInWithCustomToken did not return refreshToken: {signin}") + + # 获取access_token + payload = f"grant_type=refresh_token&refresh_token={refresh_token}".encode("utf-8") + headers = { + "x-warp-client-version": CLIENT_VERSION, + "x-warp-os-category": OS_CATEGORY, + "x-warp-os-name": OS_NAME, + "x-warp-os-version": OS_VERSION, + "content-type": "application/x-www-form-urlencoded", + "accept": "*/*", + "accept-encoding": "gzip, br", + "content-length": str(len(payload)) + } + + # 配置代理 + client_kwargs = {"timeout": httpx.Timeout(30.0)} + if proxy: + client_kwargs["proxy"] = proxy + + async with httpx.AsyncClient(**client_kwargs) as client: + resp = await client.post(REFRESH_URL, headers=headers, content=payload) + if resp.status_code != 200: + raise RuntimeError(f"Acquire access_token failed: HTTP {resp.status_code} {resp.text[:200]}") + token_data = resp.json() + access = token_data.get("access_token") + if not access: + raise RuntimeError(f"No access_token in response: 
{token_data}") + + return { + "access_token": access, + "refresh_token": refresh_token, + "created_at": time.time(), + "proxy": proxy + } + + +async def _maintain_token_pool(): + """ + 维护令牌池,确保池中有足够的有效令牌 + 注意:此函数应在已获取_pool_lock的情况下调用 + """ + # 移除过期的令牌 + valid_tokens = [] + for token_info in _anonymous_token_pool: + token = token_info["access_token"] + if is_token_expired(token, buffer_minutes=5): + logger.info("Removing expired token from pool") + else: + valid_tokens.append(token_info) + + _anonymous_token_pool.clear() + _anonymous_token_pool.extend(valid_tokens) + + # 补充令牌到目标数量 + current_count = len(_anonymous_token_pool) + needed_count = TEMP_ACCOUNTS_NUMBER - current_count + + if needed_count > 0: + logger.info(f"Creating {needed_count} new anonymous tokens for pool") + for i in range(needed_count): + try: + # 获取代理 + proxy = await _get_proxy() + token_info = await _create_single_anonymous_token(proxy) + _anonymous_token_pool.append(token_info) + logger.info(f"Added token {i+1}/{needed_count} to pool") + except Exception as e: + logger.error(f"Failed to create anonymous token {i+1}: {e}") + + +async def _get_token_from_pool() -> Optional[str]: + """ + 从令牌池获取一个有效的令牌(复用,不删除) + """ + async with _pool_lock: + # 先尝试维护令牌池 + await _maintain_token_pool() + + # 永远选择第一个有效令牌(队列方式:先进先用,但不删除) + for token_info in _anonymous_token_pool: + token = token_info["access_token"] + if not is_token_expired(token, buffer_minutes=2): + logger.debug(f"Reusing token from pool, pool size: {len(_anonymous_token_pool)}") + return token + + return None + + +async def _create_replacement_token(): + """ + 异步创建替换令牌 + """ + try: + proxy = await _get_proxy() + new_token_info = await _create_single_anonymous_token(proxy) + async with _pool_lock: + _anonymous_token_pool.append(new_token_info) + logger.debug("Added replacement token to pool") + except Exception as e: + logger.error(f"Failed to create replacement token: {e}") + + +async def acquire_anonymous_access_token() -> str: + """Acquire a new anonymous access token (quota refresh) and cache in memory. + + Returns the new access token string. Raises on failure. 
+ """ + # 优先从令牌池获取 + token = await _get_token_from_pool() + if token: + logger.info("Using token from pool") + return token + + # 如果池中没有可用令牌,创建新的 + logger.info("Creating new anonymous token as fallback") + proxy = await _get_proxy() + token_info = await _create_single_anonymous_token(proxy) + return token_info["access_token"] + + +def remove_token_from_pool(token: str): + """ + 从令牌池中删除指定的令牌 + """ + global _anonymous_token_pool + original_count = len(_anonymous_token_pool) + _anonymous_token_pool = [t for t in _anonymous_token_pool if t["access_token"] != token] + removed_count = original_count - len(_anonymous_token_pool) + if removed_count > 0: + logger.info(f"Removed {removed_count} token(s) from pool: {token[:20]}...") + # 异步触发池维护以补充新令牌 + asyncio.create_task(_trigger_pool_maintenance()) + else: + logger.debug(f"Token not found in pool: {token[:20]}...") + + +async def _trigger_pool_maintenance(): + """ + 异步触发池维护 + """ + try: + async with _pool_lock: + await _maintain_token_pool() + except Exception as e: + logger.error(f"Pool maintenance failed: {e}") + + +def print_token_info(): + current_jwt = os.getenv("WARP_JWT") + if not current_jwt: + logger.info("No JWT token found") + return + payload = decode_jwt_payload(current_jwt) + if not payload: + logger.info("Cannot decode JWT token") + return + logger.info("=== JWT Token Information ===") + if 'email' in payload: + logger.info(f"Email: {payload['email']}") + if 'user_id' in payload: + logger.info(f"User ID: {payload['user_id']}") \ No newline at end of file diff --git a/warp2protobuf/core/logging.py b/warp2protobuf/core/logging.py new file mode 100644 index 0000000000000000000000000000000000000000..37f83660b01586d0129eaf49766fc142539d113b --- /dev/null +++ b/warp2protobuf/core/logging.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Logging system for Warp API server + +Provides console-only logging. +""" +import logging + + +def setup_logging(): + """Configure console-only logging system""" + + logger = logging.getLogger('warp_api') + logger.setLevel(logging.DEBUG) + + for handler in logger.handlers[:]: + logger.removeHandler(handler) + + console_handler = logging.StreamHandler() + console_handler.setLevel(logging.INFO) + + formatter = logging.Formatter( + '%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s' + ) + console_handler.setFormatter(formatter) + + logger.addHandler(console_handler) + + return logger + + +# Initialize logger +logger = setup_logging() + + +def log(*a): + """Legacy log function for backward compatibility""" + logger.info(" ".join(str(x) for x in a)) \ No newline at end of file diff --git a/warp2protobuf/core/protobuf.py b/warp2protobuf/core/protobuf.py new file mode 100644 index 0000000000000000000000000000000000000000..3879965596717665253de78696900645709c41ac --- /dev/null +++ b/warp2protobuf/core/protobuf.py @@ -0,0 +1,288 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Protobuf runtime for Warp API + +Handles protobuf compilation, message creation, and request building. 
+""" +import os +import re +import json +import time +import uuid +import pathlib +import tempfile +from typing import Any, Dict, List, Optional, Tuple + +from google.protobuf import descriptor_pool, descriptor_pb2 +from google.protobuf.descriptor import FieldDescriptor as FD +from google.protobuf.message_factory import GetMessageClass +from google.protobuf import struct_pb2 + +from ..config.settings import PROTO_DIR, CLIENT_VERSION, OS_CATEGORY, OS_NAME, OS_VERSION, TEXT_FIELD_NAMES, PATH_HINT_BONUS +from .logging import logger, log + +# Global protobuf state +_pool: Optional[descriptor_pool.DescriptorPool] = None +ALL_MSGS: List[str] = [] + + +def _find_proto_files(root: pathlib.Path) -> List[str]: + """Find necessary .proto files in the given directory, excluding problematic test files""" + if not root.exists(): + return [] + + essential_files = [ + "options.proto", # 基础选项,被其他文件依赖 + "file_content.proto", # 文件内容定义 + "attachment.proto", # 附件定义 + "todo.proto", # TODO定义 + "suggestions.proto", # 建议定义 + "debug.proto", # 调试定义 + "input_context.proto", # 输入上下文 + "citations.proto", # 引用定义 + "task.proto", # 任务定义 + "request.proto", # 请求定义 + "response.proto" # 响应定义 + ] + + found_files = [] + for file_name in essential_files: + file_path = root / file_name + if file_path.exists(): + found_files.append(str(file_path)) + logger.debug(f"Found essential proto file: {file_name}") + + if not found_files: + logger.warning("Essential proto files not found, scanning all files...") + exclude_patterns = [ + "unittest", "test", "sample_messages", "java_features", + "legacy_features", "descriptor_test" + ] + + for proto_file in root.rglob("*.proto"): + file_name = proto_file.name.lower() + if not any(pattern in file_name for pattern in exclude_patterns): + found_files.append(str(proto_file)) + + logger.info(f"Selected {len(found_files)} proto files for compilation") + return found_files + + +def _build_descset(proto_files: List[str], includes: List[str]) -> bytes: + from grpc_tools import protoc + try: + from importlib.resources import files as pkg_files + tool_inc = str(pkg_files("grpc_tools").joinpath("_proto")) + except Exception: + tool_inc = None + + outdir = pathlib.Path(tempfile.mkdtemp(prefix="desc_")) + out = outdir / "bundle.pb" + args = ["protoc", f"--descriptor_set_out={out}", "--include_imports"] + for inc in includes: + args.append(f"-I{inc}") + if tool_inc: + args.append(f"-I{tool_inc}") + args.extend(proto_files) + rc = protoc.main(args) + if rc != 0 or not out.exists(): + raise RuntimeError("protoc failed to produce descriptor set") + return out.read_bytes() + + +def _load_pool_from_descset(descset: bytes): + global _pool, ALL_MSGS + fds = descriptor_pb2.FileDescriptorSet() + fds.ParseFromString(descset) + pool = descriptor_pool.DescriptorPool() + for fd in fds.file: + pool.Add(fd) + names: List[str] = [] + for fd in fds.file: + pkg = fd.package + def walk(m, prefix): + full = f"{prefix}.{m.name}" if prefix else m.name + names.append(full) + for nested in m.nested_type: + walk(nested, full) + for m in fd.message_type: + walk(m, pkg) + _pool, ALL_MSGS = pool, names + log(f"proto loaded: {len(ALL_MSGS)} message type(s)") + + +def ensure_proto_runtime(): + if _pool is not None: + return + files = _find_proto_files(PROTO_DIR) + if not files: + raise RuntimeError(f"No .proto found under {PROTO_DIR}") + desc = _build_descset(files, [str(PROTO_DIR)]) + _load_pool_from_descset(desc) + + +def msg_cls(full: str): + desc = _pool.FindMessageTypeByName(full) # type: ignore + return GetMessageClass(desc) + + 
+def _list_text_paths(desc, max_depth=6): + out: List[Tuple[List[FD], int]] = [] + def walk(cur_desc, cur_path: List[FD], depth: int): + if depth > max_depth: + return + for f in cur_desc.fields: + base = 0 + if f.name.lower() in TEXT_FIELD_NAMES: base += 10 + for hint in PATH_HINT_BONUS: + if hint in f.name.lower(): base += 2 + if f.type == FD.TYPE_STRING: + out.append((cur_path + [f], base + depth)) + elif f.type == FD.TYPE_MESSAGE: + walk(f.message_type, cur_path + [f], depth + 1) + walk(desc, [], 0) + return out + + +def _pick_best_request_schema() -> Tuple[str, List[FD]]: + ensure_proto_runtime() + try: + request_type = "warp.multi_agent.v1.Request" + d = _pool.FindMessageTypeByName(request_type) # type: ignore + path_names = ["input", "user_inputs", "inputs", "user_query", "query"] + path_fields = [] + current_desc = d + + for field_name in path_names: + field = current_desc.fields_by_name.get(field_name) + if not field: + raise RuntimeError(f"Field '{field_name}' not found") + path_fields.append(field) + if field.type == FD.TYPE_MESSAGE: + current_desc = field.message_type + + log("using modern request format:", request_type, " :: ", ".".join(path_names)) + return request_type, path_fields + + except Exception as e: + log(f"Failed to use modern format, falling back to auto-detection: {e}") + best: Optional[Tuple[str, List[FD], int]] = None + for full in ALL_MSGS: + try: + d = _pool.FindMessageTypeByName(full) # type: ignore + except Exception: + continue + name_bias = 0 + lname = full.lower() + for kw, w in (("request", 8), ("multi_agent", 6), ("multiagent", 6), + ("chat", 5), ("client", 2), ("message", 1), ("input", 1)): + if kw in lname: name_bias += w + for path, score in _list_text_paths(d): + total = score + name_bias + max(0, 6 - len(path)) + if best is None or total > best[2]: + best = (full, path, total) + if not best: + raise RuntimeError("Could not auto-detect request root & text field from proto/") + full, path, _ = best + log("auto-detected request:", full, " :: ", ".".join(f.name for f in path)) + return full, path + + +_REQ_CACHE: Optional[Tuple[str, List[FD]]] = None + +def get_request_schema() -> Tuple[str, List[FD]]: + global _REQ_CACHE + if _REQ_CACHE is None: + _REQ_CACHE = _pick_best_request_schema() + return _REQ_CACHE + + +def _set_text_at_path(msg, path_fields: List[FD], text: str): + cur = msg + for i, f in enumerate(path_fields): + last = (i == len(path_fields) - 1) + try: + is_repeated = f.is_repeated + except AttributeError: + is_repeated = (f.label == FD.LABEL_REPEATED) + + if is_repeated: + rep = getattr(cur, f.name) + if f.type == FD.TYPE_MESSAGE: + cur = rep.add() + elif f.type == FD.TYPE_STRING: + if not last: raise TypeError(f"path continues after repeated string field '{f.name}'") + rep.append(text); return + else: + raise TypeError(f"unsupported repeated scalar at '{f.name}'") + else: + if f.type == FD.TYPE_MESSAGE: + cur = getattr(cur, f.name) + if last: + raise TypeError(f"last field '{f.name}' is a message, not string") + elif f.type == FD.TYPE_STRING: + if not last: raise TypeError(f"path continues after string field '{f.name}'") + setattr(cur, f.name, text); return + else: + raise TypeError(f"unsupported scalar at '{f.name}'") + raise RuntimeError("failed to set text") + + +def build_request_bytes(user_text: str, model: str = "auto") -> bytes: + from ..config.models import get_model_config + + full, path = get_request_schema() + Cls = msg_cls(full) + msg = Cls() + _set_text_at_path(msg, path, user_text) + + if hasattr(msg, 'settings'): + 
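# Editor's note: the assignments below deliberately opt out of optional client + # capabilities (rules, web context, parallel tool calls, planning, file tools, todos UI). +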
settings = msg.settings + if hasattr(settings, 'model_config'): + model_config_dict = get_model_config(model) + model_config = settings.model_config + model_config.base = model_config_dict["base"] + model_config.planning = model_config_dict["planning"] + model_config.coding = model_config_dict["coding"] + logger.debug(f"Set model config: base={model_config.base}, planning={model_config.planning}, coding={model_config.coding}") + + settings.rules_enabled = False + settings.web_context_retrieval_enabled = False + settings.supports_parallel_tool_calls = False + settings.planning_enabled = False + settings.supports_create_files = False + settings.supports_long_running_commands = False + settings.supports_todos_ui = False + settings.supports_linked_code_blocks = False + + settings.use_anthropic_text_editor_tools = False + settings.warp_drive_context_enabled = False + settings.should_preserve_file_content_in_history = True + + try: + tool_types = [] + settings.supported_tools[:] = tool_types + logger.debug(f"Set supported_tools (legacy): {tool_types}") + except Exception as e: + logger.debug(f"Could not set supported_tools: {e}") + + logger.debug("Applied all valid Settings fields based on proto definition") + + if hasattr(msg, 'metadata'): + metadata = msg.metadata + metadata.conversation_id = f"rest-api-{uuid.uuid4().hex[:8]}" + + rootd = msg.DESCRIPTOR + for fn, val in ( + ("client_version", CLIENT_VERSION), + ("version", CLIENT_VERSION), + ("os_name", OS_NAME), + ("os_category", OS_CATEGORY), + ("os_version", OS_VERSION), + ): + f = rootd.fields_by_name.get(fn) + if f and f.type == FD.TYPE_STRING and f.label == FD.LABEL_OPTIONAL: + setattr(msg, fn, val) + + return msg.SerializeToString() \ No newline at end of file diff --git a/warp2protobuf/core/protobuf_utils.py b/warp2protobuf/core/protobuf_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..af398c0f910d0c516fbd098b15548801e23cadfa --- /dev/null +++ b/warp2protobuf/core/protobuf_utils.py @@ -0,0 +1,310 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Protobuf utility functions + +Shared functions for protobuf encoding/decoding across the application. 
+""" +from typing import Any, Dict +from fastapi import HTTPException +from .logging import logger +from .protobuf import ensure_proto_runtime, msg_cls +from google.protobuf.json_format import MessageToDict +from google.protobuf import struct_pb2 +from google.protobuf.descriptor import FieldDescriptor as _FD +from .server_message_data import decode_server_message_data, encode_server_message_data + + + + + +def protobuf_to_dict(protobuf_bytes: bytes, message_type: str) -> Dict: + """将protobuf字节转换为字典""" + ensure_proto_runtime() + + try: + MessageClass = msg_cls(message_type) + message = MessageClass() + message.ParseFromString(protobuf_bytes) + + data = MessageToDict(message, preserving_proto_field_name=True) + + # 在转换阶段自动解析 server_message_data(Base64URL -> 结构化对象) + data = _decode_smd_inplace(data) + return data + + except Exception as e: + logger.error(f"Protobuf解码失败: {e}") + raise HTTPException(500, f"Protobuf解码失败: {e}") + + + + + +def dict_to_protobuf_bytes(data_dict: Dict, message_type: str = "warp.multi_agent.v1.Request") -> bytes: + """字典转protobuf字节的包装函数""" + ensure_proto_runtime() + + try: + MessageClass = msg_cls(message_type) + message = MessageClass() + + # 在转换阶段自动处理 server_message_data(对象 -> Base64URL 字符串) + safe_dict = _encode_smd_inplace(data_dict) + + _populate_protobuf_from_dict(message, safe_dict, path="$") + + return message.SerializeToString() + + except Exception as e: + logger.error(f"Protobuf编码失败: {e}") + raise HTTPException(500, f"Protobuf编码失败: {e}") + + + + +def _fill_google_value_dynamic(value_msg: Any, py_value: Any) -> None: + """在动态 google.protobuf.Value 消息上填充 Python 值(不创建 struct_pb2.Value 实例)。""" + try: + if py_value is None: + setattr(value_msg, "null_value", 0) + return + if isinstance(py_value, bool): + setattr(value_msg, "bool_value", bool(py_value)) + return + if isinstance(py_value, (int, float)): + setattr(value_msg, "number_value", float(py_value)) + return + if isinstance(py_value, str): + setattr(value_msg, "string_value", py_value) + return + if isinstance(py_value, dict): + struct_value = getattr(value_msg, "struct_value") + _fill_google_struct_dynamic(struct_value, py_value) + return + if isinstance(py_value, list): + list_value = getattr(value_msg, "list_value") + values_rep = getattr(list_value, "values") + for item in py_value: + sub = values_rep.add() + _fill_google_value_dynamic(sub, item) + return + setattr(value_msg, "string_value", str(py_value)) + except Exception as e: + logger.warning(f"填充 google.protobuf.Value 失败: {e}") + + + + +def _fill_google_struct_dynamic(struct_msg: Any, py_dict: Dict[str, Any]) -> None: + """在动态 google.protobuf.Struct 上填充 Python dict(不使用 struct_pb2.Struct.update)。""" + try: + fields_map = getattr(struct_msg, "fields") + for k, v in py_dict.items(): + sub_val = fields_map[k] + _fill_google_value_dynamic(sub_val, v) + except Exception as e: + logger.warning(f"填充 google.protobuf.Struct 失败: {e}") + + + + +def _python_to_struct_value(py_value: Any) -> struct_pb2.Value: + v = struct_pb2.Value() + if py_value is None: + v.null_value = struct_pb2.NULL_VALUE + elif isinstance(py_value, bool): + v.bool_value = bool(py_value) + elif isinstance(py_value, (int, float)): + v.number_value = float(py_value) + elif isinstance(py_value, str): + v.string_value = py_value + elif isinstance(py_value, dict): + s = struct_pb2.Struct() + s.update(py_value) + v.struct_value.CopyFrom(s) + elif isinstance(py_value, list): + lv = struct_pb2.ListValue() + for item in py_value: + lv.values.append(_python_to_struct_value(item)) + 
v.list_value.CopyFrom(lv) + else: + v.string_value = str(py_value) + return v + + + + +def _populate_protobuf_from_dict(proto_msg, data_dict: Dict, path: str = "$"): + for key, value in data_dict.items(): + current_path = f"{path}.{key}" + if not hasattr(proto_msg, key): + logger.warning(f"忽略未知字段: {current_path}") + continue + + field = getattr(proto_msg, key) + fd = None + descriptor = getattr(proto_msg, "DESCRIPTOR", None) + if descriptor is not None: + fd = descriptor.fields_by_name.get(key) + + try: + if ( + fd is not None + and fd.type == _FD.TYPE_MESSAGE + and fd.message_type is not None + and fd.message_type.full_name == "google.protobuf.Struct" + and isinstance(value, dict) + ): + _fill_google_struct_dynamic(field, value) + continue + except Exception as e: + logger.warning(f"处理 Struct 字段 {current_path} 失败: {e}") + + if isinstance(field, struct_pb2.Struct) and isinstance(value, dict): + try: + field.update(value) + except Exception as e: + logger.warning(f"填充Struct失败: {current_path}: {e}") + continue + + try: + if ( + fd is not None + and fd.type == _FD.TYPE_MESSAGE + and fd.message_type is not None + and fd.message_type.GetOptions().map_entry + and isinstance(value, dict) + ): + value_desc = fd.message_type.fields_by_name.get("value") + for mk, mv in value.items(): + try: + if value_desc is not None and value_desc.type == _FD.TYPE_MESSAGE: + if value_desc.message_type is not None and value_desc.message_type.full_name == "google.protobuf.Value": + _fill_google_value_dynamic(field[mk], mv) + else: + sub_msg = field[mk] + if isinstance(mv, dict): + _populate_protobuf_from_dict(sub_msg, mv, path=f"{current_path}.{mk}") + else: + try: + logger.warning(f"map值类型不匹配,期望message: {current_path}.{mk}") + except Exception: + pass + else: + field[mk] = mv + except Exception as me: + logger.warning(f"设置 map 字段 {current_path}.{mk} 失败: {me}") + continue + except Exception as e: + logger.warning(f"处理 map 字段 {current_path} 失败: {e}") + + if isinstance(value, dict): + try: + _populate_protobuf_from_dict(field, value, path=current_path) + except Exception as e: + logger.error(f"填充子消息失败: {current_path}: {e}") + raise + elif isinstance(value, list): + # 处理 repeated enum:允许传入字符串名称或数字 + try: + if fd is not None and fd.type == _FD.TYPE_ENUM: + enum_desc = getattr(fd, "enum_type", None) + resolved_values = [] + for item in value: + if isinstance(item, str): + ev = enum_desc.values_by_name.get(item) if enum_desc is not None else None + if ev is not None: + resolved_values.append(ev.number) + else: + try: + resolved_values.append(int(item)) + except Exception: + logger.warning(f"无法解析枚举值 '{item}' 为 {current_path},已忽略") + else: + try: + resolved_values.append(int(item)) + except Exception: + logger.warning(f"无法转换枚举值 {item} 为整数: {current_path}") + field.extend(resolved_values) + continue + except Exception as e: + logger.warning(f"处理 repeated enum 字段 {current_path} 失败: {e}") + if value and isinstance(value[0], dict): + try: + for idx, item in enumerate(value): + new_item = field.add() # type: ignore[attr-defined] + _populate_protobuf_from_dict(new_item, item, path=f"{current_path}[{idx}]") + except Exception as e: + logger.warning(f"填充复合数组失败 {current_path}: {e}") + else: + try: + field.extend(value) + except Exception as e: + logger.warning(f"设置数组字段 {current_path} 失败: {e}") + else: + if key in ["in_progress", "resume_conversation"]: + field.SetInParent() + else: + try: + # 处理标量 enum:允许传入字符串名称或数字 + if fd is not None and fd.type == _FD.TYPE_ENUM: + enum_desc = getattr(fd, "enum_type", None) + if isinstance(value, 
str): + ev = enum_desc.values_by_name.get(value) if enum_desc is not None else None + if ev is not None: + setattr(proto_msg, key, ev.number) + continue + try: + setattr(proto_msg, key, int(value)) + continue + except Exception: + pass + # 其余情况直接赋值,若类型不匹配由底层抛错 + setattr(proto_msg, key, value) + except Exception as e: + logger.warning(f"设置字段 {current_path} 失败: {e}") + + +# ===== server_message_data 递归处理 ===== + +def _encode_smd_inplace(obj: Any) -> Any: + if isinstance(obj, dict): + new_d: Dict[str, Any] = {} + for k, v in obj.items(): + if k in ("server_message_data", "serverMessageData") and isinstance(v, dict): + try: + b64 = encode_server_message_data( + uuid=v.get("uuid"), + seconds=v.get("seconds"), + nanos=v.get("nanos"), + ) + new_d[k] = b64 + except Exception: + new_d[k] = v + else: + new_d[k] = _encode_smd_inplace(v) + return new_d + elif isinstance(obj, list): + return [_encode_smd_inplace(x) for x in obj] + else: + return obj + + +def _decode_smd_inplace(obj: Any) -> Any: + if isinstance(obj, dict): + new_d: Dict[str, Any] = {} + for k, v in obj.items(): + if k in ("server_message_data", "serverMessageData") and isinstance(v, str): + try: + dec = decode_server_message_data(v) + new_d[k] = dec + except Exception: + new_d[k] = v + else: + new_d[k] = _decode_smd_inplace(v) + return new_d + elif isinstance(obj, list): + return [_decode_smd_inplace(x) for x in obj] + else: + return obj \ No newline at end of file diff --git a/warp2protobuf/core/schema_sanitizer.py b/warp2protobuf/core/schema_sanitizer.py new file mode 100644 index 0000000000000000000000000000000000000000..cabc6b04a708d13de7de8510085ac20444c71a00 --- /dev/null +++ b/warp2protobuf/core/schema_sanitizer.py @@ -0,0 +1,175 @@ +# -*- coding: utf-8 -*- +""" +Shared utilities to validate and sanitize MCP tool input_schema in request packets. +Ensures JSON Schema correctness, removes empty values, and enforces non-empty +`type` and `description` for each property. Special handling for `headers`. 
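The two public helpers above form a JSON-friendly round trip. A minimal usage sketch, assuming the proto runtime can resolve warp.multi_agent.v1.Request; the payload fields below are illustrative, not the real schema:

    from warp2protobuf.core.protobuf_utils import dict_to_protobuf_bytes, protobuf_to_dict

    # Hypothetical payload; real field names come from the compiled Request schema.
    request = {"input": {"user_inputs": {}}}

    raw = dict_to_protobuf_bytes(request, "warp.multi_agent.v1.Request")  # dict -> bytes
    echoed = protobuf_to_dict(raw, "warp.multi_agent.v1.Request")         # bytes -> dict

    # Any nested server_message_data objects are Base64URL-encoded on the way in
    # by _encode_smd_inplace and decoded back on the way out by _decode_smd_inplace.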
diff --git a/warp2protobuf/core/schema_sanitizer.py b/warp2protobuf/core/schema_sanitizer.py
new file mode 100644
index 0000000000000000000000000000000000000000..cabc6b04a708d13de7de8510085ac20444c71a00
--- /dev/null
+++ b/warp2protobuf/core/schema_sanitizer.py
@@ -0,0 +1,175 @@
+# -*- coding: utf-8 -*-
+"""
+Shared utilities to validate and sanitize MCP tool input_schema in request packets.
+Ensures JSON Schema correctness, removes empty values, and enforces non-empty
+`type` and `description` for each property. Special handling for `headers`.
+"""
+from typing import Any, Dict, List
+
+
+def _is_empty_value(value: Any) -> bool:
+    if value is None:
+        return True
+    if isinstance(value, str) and value.strip() == "":
+        return True
+    if isinstance(value, (list, dict)) and len(value) == 0:
+        return True
+    return False
+
+
+def _deep_clean(value: Any) -> Any:
+    if isinstance(value, dict):
+        cleaned: Dict[str, Any] = {}
+        for k, v in value.items():
+            vv = _deep_clean(v)
+            if _is_empty_value(vv):
+                continue
+            cleaned[k] = vv
+        return cleaned
+    if isinstance(value, list):
+        cleaned_list = []
+        for item in value:
+            ii = _deep_clean(item)
+            if _is_empty_value(ii):
+                continue
+            cleaned_list.append(ii)
+        return cleaned_list
+    if isinstance(value, str):
+        return value.strip()
+    return value
+
+
+def _infer_type_for_property(prop_name: str) -> str:
+    name = prop_name.lower()
+    if name in ("url", "uri", "href", "link"):
+        return "string"
+    if name in ("headers", "options", "params", "payload", "data"):
+        return "object"
+    return "string"
+
+
+def _ensure_property_schema(name: str, schema: Dict[str, Any]) -> Dict[str, Any]:
+    prop = dict(schema) if isinstance(schema, dict) else {}
+    prop = _deep_clean(prop)
+
+    # Enforce type & description
+    if "type" not in prop or not isinstance(prop.get("type"), str) or not prop["type"].strip():
+        prop["type"] = _infer_type_for_property(name)
+    if "description" not in prop or not isinstance(prop.get("description"), str) or not prop["description"].strip():
+        prop["description"] = f"{name} parameter"
+
+    # Special handling for headers
+    if name.lower() == "headers":
+        prop["type"] = "object"
+        headers_props = prop.get("properties")
+        if not isinstance(headers_props, dict):
+            headers_props = {}
+        headers_props = _deep_clean(headers_props)
+        if not headers_props:
+            headers_props = {
+                "user-agent": {
+                    "type": "string",
+                    "description": "User-Agent header for the request",
+                }
+            }
+        else:
+            fixed_headers: Dict[str, Any] = {}
+            for hk, hv in headers_props.items():
+                sub = _deep_clean(hv if isinstance(hv, dict) else {})
+                if "type" not in sub or not isinstance(sub.get("type"), str) or not sub["type"].strip():
+                    sub["type"] = "string"
+                if "description" not in sub or not isinstance(sub.get("description"), str) or not sub["description"].strip():
+                    sub["description"] = f"{hk} header"
+                fixed_headers[hk] = sub
+            headers_props = fixed_headers
+        prop["properties"] = headers_props
+        if isinstance(prop.get("required"), list):
+            req = [r for r in prop["required"] if isinstance(r, str) and r in headers_props]
+            if req:
+                prop["required"] = req
+            else:
+                prop.pop("required", None)
+        if isinstance(prop.get("additionalProperties"), dict) and len(prop["additionalProperties"]) == 0:
+            prop.pop("additionalProperties", None)
+
+    return prop
+
+
+def _sanitize_json_schema(schema: Dict[str, Any]) -> Dict[str, Any]:
+    s = _deep_clean(schema if isinstance(schema, dict) else {})
+
+    # If properties exist, assume object type
+    if "properties" in s and not isinstance(s.get("type"), str):
+        s["type"] = "object"
+
+    # Normalize $schema
+    if "$schema" in s and not isinstance(s["$schema"], str):
+        s.pop("$schema", None)
+    if "$schema" not in s:
+        s["$schema"] = "http://json-schema.org/draft-07/schema#"
+
+    properties = s.get("properties")
+    if isinstance(properties, dict):
+        fixed_props: Dict[str, Any] = {}
+        for name, subschema in properties.items():
+            fixed_props[name] = _ensure_property_schema(name, subschema if isinstance(subschema, dict) else {})
+        s["properties"] = fixed_props
+
+    # Clean required list
+    if isinstance(s.get("required"), list):
+        if isinstance(properties, dict):
+            req = [r for r in s["required"] if isinstance(r, str) and r in properties]
+        else:
+            req = []
+        if req:
+            s["required"] = req
+        else:
+            s.pop("required", None)
+
+    # Remove empty additionalProperties object
+    if isinstance(s.get("additionalProperties"), dict) and len(s["additionalProperties"]) == 0:
+        s.pop("additionalProperties", None)
+
+    return s
+
+
+def sanitize_mcp_input_schema_in_packet(body: Dict[str, Any]) -> Dict[str, Any]:
+    """Validate and sanitize mcp_context.tools[*].input_schema in the given packet.
+
+    - Removes empty values (empty strings, lists, dicts)
+    - Ensures each property has non-empty `type` and `description`
+    - Special-cases `headers` to include at least `user-agent` when empty
+    - Fixes `required` lists and general JSON Schema shape
+    """
+    try:
+        body = _deep_clean(body)
+        candidate_roots: List[Dict[str, Any]] = []
+        if isinstance(body.get("json_data"), dict):
+            candidate_roots.append(body["json_data"])
+        candidate_roots.append(body)
+
+        for root in candidate_roots:
+            if not isinstance(root, dict):
+                continue
+            mcp_ctx = root.get("mcp_context")
+            if not isinstance(mcp_ctx, dict):
+                continue
+            tools = mcp_ctx.get("tools")
+            if not isinstance(tools, list):
+                continue
+            fixed_tools: List[Any] = []
+            for tool in tools:
+                if not isinstance(tool, dict):
+                    fixed_tools.append(tool)
+                    continue
+                tool_copy = dict(tool)
+                input_schema = tool_copy.get("input_schema") or tool_copy.get("inputSchema")
+                if isinstance(input_schema, dict):
+                    tool_copy["input_schema"] = _sanitize_json_schema(input_schema)
+                    if "inputSchema" in tool_copy:
+                        tool_copy["inputSchema"] = tool_copy["input_schema"]
+                fixed_tools.append(_deep_clean(tool_copy))
+            mcp_ctx["tools"] = fixed_tools
+        return body
+    except Exception:
+        return body
\ No newline at end of file
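A small usage sketch for the sanitizer; the tool and schema below are made up, but the annotated behavior follows the helpers above:

    from warp2protobuf.core.schema_sanitizer import sanitize_mcp_input_schema_in_packet

    packet = {
        "mcp_context": {
            "tools": [{
                "name": "fetch",
                "input_schema": {
                    "properties": {
                        # no type: one is inferred from the property name ("string")
                        "url": {"description": "Target URL"},
                        # typed but empty headers object: gains a default user-agent property
                        "headers": {"type": "object"},
                    },
                    # names that are not (or no longer) in properties are dropped
                    "required": ["url", "not_a_property"],
                },
            }],
        },
    }

    cleaned = sanitize_mcp_input_schema_in_packet(packet)
    schema = cleaned["mcp_context"]["tools"][0]["input_schema"]
    # schema["properties"]["url"]["type"] == "string"
    # "user-agent" in schema["properties"]["headers"]["properties"]
    # schema["required"] == ["url"]; a draft-07 "$schema" is also added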
diff --git a/warp2protobuf/core/server_message_data.py b/warp2protobuf/core/server_message_data.py
new file mode 100644
index 0000000000000000000000000000000000000000..418052b759ac4b1c8e4f776cf71a6bc101def0cb
--- /dev/null
+++ b/warp2protobuf/core/server_message_data.py
@@ -0,0 +1,189 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Helpers for encoding/decoding server_message_data values.
+
+These are Base64URL-encoded proto3 messages with shape:
+  - field 1: string UUID (36 chars)
+  - field 3: google.protobuf.Timestamp (1=seconds, 2=nanos)
+
+Supports UUID_ONLY, TIMESTAMP_ONLY, and UUID_AND_TIMESTAMP.
+"""
+from typing import Dict, Optional, Tuple
+import base64
+from datetime import datetime, timezone
+
+try:
+    from zoneinfo import ZoneInfo  # Python 3.9+
+except Exception:
+    ZoneInfo = None  # type: ignore
+
+
+def _b64url_decode_padded(s: str) -> bytes:
+    t = s.replace("-", "+").replace("_", "/")
+    pad = (-len(t)) % 4
+    if pad:
+        t += "=" * pad
+    return base64.b64decode(t)
+
+
+def _b64url_encode_nopad(b: bytes) -> str:
+    return base64.urlsafe_b64encode(b).decode("ascii").rstrip("=")
+
+
+def _read_varint(buf: bytes, i: int) -> Tuple[int, int]:
+    shift = 0
+    val = 0
+    while i < len(buf):
+        b = buf[i]
+        i += 1
+        val |= (b & 0x7F) << shift
+        if not (b & 0x80):
+            return val, i
+        shift += 7
+        if shift > 63:
+            break
+    raise ValueError("invalid varint")
+
+
+def _write_varint(v: int) -> bytes:
+    out = bytearray()
+    vv = int(v)
+    while True:
+        to_write = vv & 0x7F
+        vv >>= 7
+        if vv:
+            out.append(to_write | 0x80)
+        else:
+            out.append(to_write)
+            break
+    return bytes(out)
+
+
+def _make_key(field_no: int, wire_type: int) -> bytes:
+    return _write_varint((field_no << 3) | wire_type)
+
+
+def _decode_timestamp(buf: bytes) -> Tuple[Optional[int], Optional[int]]:
+    i = 0
+    seconds: Optional[int] = None
+    nanos: Optional[int] = None
+    while i < len(buf):
+        key, i = _read_varint(buf, i)
+        field_no = key >> 3
+        wt = key & 0x07
+        if wt == 0:
+            val, i = _read_varint(buf, i)
+            if field_no == 1:
+                seconds = int(val)
+            elif field_no == 2:
+                nanos = int(val)
+        elif wt == 2:
+            ln, i2 = _read_varint(buf, i)
+            i = i2 + ln
+        elif wt == 1:
+            i += 8
+        elif wt == 5:
+            i += 4
+        else:
+            break
+    return seconds, nanos
+
+
+def _encode_timestamp(seconds: Optional[int], nanos: Optional[int]) -> bytes:
+    parts = bytearray()
+    if seconds is not None:
+        parts += _make_key(1, 0)
+        parts += _write_varint(int(seconds))
+    if nanos is not None:
+        parts += _make_key(2, 0)
+        parts += _write_varint(int(nanos))
+    return bytes(parts)
+
+
+def decode_server_message_data(b64url: str) -> Dict:
+    try:
+        raw = _b64url_decode_padded(b64url)
+    except Exception as e:
+        return {"error": f"base64url decode failed: {e}"}
+
+    i = 0
+    uuid: Optional[str] = None
+    seconds: Optional[int] = None
+    nanos: Optional[int] = None
+
+    while i < len(raw):
+        key, i = _read_varint(raw, i)
+        field_no = key >> 3
+        wt = key & 0x07
+        if wt == 2:
+            ln, i2 = _read_varint(raw, i)
+            i = i2
+            data = raw[i:i+ln]
+            i += ln
+            if field_no == 1:
+                try:
+                    uuid = data.decode("utf-8")
+                except Exception:
+                    uuid = None
+            elif field_no == 3:
+                s, n = _decode_timestamp(data)
+                if s is not None:
+                    seconds = s
+                if n is not None:
+                    nanos = n
+        elif wt == 0:
+            _, i = _read_varint(raw, i)
+        elif wt == 1:
+            i += 8
+        elif wt == 5:
+            i += 4
+        else:
+            break
+
+    iso_utc: Optional[str] = None
+    iso_ny: Optional[str] = None
+    if seconds is not None:
+        micros = int((nanos or 0) / 1000)
+        dt = datetime.fromtimestamp(int(seconds), tz=timezone.utc).replace(microsecond=micros)
+        iso_utc = dt.isoformat().replace("+00:00", "Z")
+        if ZoneInfo is not None:
+            try:
+                iso_ny = dt.astimezone(ZoneInfo("America/New_York")).isoformat()
+            except Exception:
+                iso_ny = None
+
+    if uuid and (seconds is not None or nanos is not None):
+        t = "UUID_AND_TIMESTAMP"
+    elif uuid:
+        t = "UUID_ONLY"
+    elif seconds is not None or nanos is not None:
+        t = "TIMESTAMP_ONLY"
+    else:
+        t = "UNKNOWN"
+
+    return {
+        "uuid": uuid,
+        "seconds": seconds,
+        "nanos": nanos,
+        "iso_utc": iso_utc,
+        "iso_ny": iso_ny,
+        "type": t,
+    }
+
+
+def encode_server_message_data(uuid: Optional[str] = None,
+                               seconds: Optional[int] = None,
+                               nanos: Optional[int] = None) -> str:
+    parts = bytearray()
+    if uuid:
+        b = uuid.encode("utf-8")
+        parts += _make_key(1, 2)
+        parts += _write_varint(len(b))
+        parts += b
+    if seconds is not None or nanos is not None:
+        ts = _encode_timestamp(seconds, nanos)
+        parts += _make_key(3, 2)
+        parts += _write_varint(len(ts))
+        parts += ts
+    return _b64url_encode_nopad(bytes(parts))
\ No newline at end of file
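A round-trip sketch for these helpers (the values are arbitrary):

    from warp2protobuf.core.server_message_data import (
        decode_server_message_data,
        encode_server_message_data,
    )

    token = encode_server_message_data(
        uuid="123e4567-e89b-12d3-a456-426614174000",
        seconds=1700000000,
        nanos=500000000,
    )
    info = decode_server_message_data(token)
    # info["type"] == "UUID_AND_TIMESTAMP"
    # info["uuid"], info["seconds"] and info["nanos"] round-trip unchanged
    # info["iso_utc"] == "2023-11-14T22:13:20.500000Z"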
diff --git a/warp2protobuf/core/session.py b/warp2protobuf/core/session.py
new file mode 100644
index 0000000000000000000000000000000000000000..4728d569d476e6e95dd91f71e3860f1e44d1e8d5
--- /dev/null
+++ b/warp2protobuf/core/session.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Global session management for Warp API
+
+Manages fixed conversation_id and task context based on real packet analysis.
+"""
+import uuid
+import time
+import asyncio
+from typing import Dict, List, Optional, Any
+from dataclasses import dataclass, field
+from .logging import logger
+
+# Globally fixed conversation_id - every request reuses this ID
+FIXED_CONVERSATION_ID = "5b48d359-0715-479e-a158-0a00f2dfea36"
+
+
+@dataclass
+class SessionMessage:
+    """Represents a message in the session history"""
+    id: str
+    role: str  # "user", "assistant", "system", "tool"
+    content: str
+    tool_calls: Optional[List[Dict]] = None
+    tool_call_id: Optional[str] = None
+    timestamp: float = field(default_factory=time.time)
+
+
+@dataclass
+class SessionState:
+    """Global session state for the fixed conversation"""
+    conversation_id: str = FIXED_CONVERSATION_ID
+    active_task_id: Optional[str] = None
+    messages: List[SessionMessage] = field(default_factory=list)
+    initialized: bool = False
+    created_at: float = field(default_factory=time.time)
+    last_activity: float = field(default_factory=time.time)
+
+
+class GlobalSessionManager:
+    """
+    Manages the global fixed session for Warp API.
+    """
+
+    def __init__(self):
+        self._session = SessionState()
+        self._initialization_lock = asyncio.Lock()
+        logger.info(f"GlobalSessionManager initialized with fixed conversation_id: {FIXED_CONVERSATION_ID}")
+
+    def get_fixed_conversation_id(self) -> str:
+        return FIXED_CONVERSATION_ID
+
+    def add_message_from_openai(self, role: str, content: str, tool_calls: Optional[List[Dict]] = None, tool_call_id: Optional[str] = None) -> str:
+        message_id = f"msg-{uuid.uuid4().hex[:8]}"
+        message = SessionMessage(
+            id=message_id,
+            role=role,
+            content=content,
+            tool_calls=tool_calls,
+            tool_call_id=tool_call_id
+        )
+
+        self._session.messages.append(message)
+        self._session.last_activity = time.time()
+
+        logger.debug(f"Added {role} message to session: {content[:100]}...")
+        return message_id
+
+    def get_session_history(self) -> List[SessionMessage]:
+        return self._session.messages.copy()
+
+    def get_history_for_task_context(self) -> List[SessionMessage]:
+        return self._session.messages.copy()
+
+    def update_session_with_openai_messages(self, openai_messages: List[Dict[str, Any]]) -> None:
+        self._session.messages.clear()
+        for msg in openai_messages:
+            role = msg.get("role", "")
+            content = msg.get("content", "")
+            tool_calls = msg.get("tool_calls")
+            tool_call_id = msg.get("tool_call_id")
+            if not content and not tool_calls and role != "tool":
+                continue
+            self.add_message_from_openai(role, content, tool_calls, tool_call_id)
+        logger.debug(f"Updated session with {len(openai_messages)} OpenAI messages")
+
+    def extract_current_user_query(self, openai_messages: List[Dict[str, Any]]) -> Optional[str]:
+        for msg in reversed(openai_messages):
+            if msg.get("role") == "user":
+                query = msg.get("content", "")
+                logger.debug(f"Extracted current user query: {query[:100]}...")
+                return query
+        return None
+
+    def get_history_messages_excluding_current(self, current_user_query: str) -> List[SessionMessage]:
+        history = []
+        for msg in self._session.messages:
+            if msg.role == "user" and msg.content == current_user_query:
+                continue
+            history.append(msg)
+        logger.debug(f"Retrieved {len(history)} history messages (excluding current query)")
+        return history
+
+    def set_active_task_id(self, task_id: str) -> None:
+        self._session.active_task_id = task_id
+        logger.debug(f"Set active task_id: {task_id}")
+
+    def get_active_task_id(self) -> Optional[str]:
+        return self._session.active_task_id
+
+    def is_initialized(self) -> bool:
+        return self._session.initialized
+
+    def get_stats(self) -> Dict[str, Any]:
+        return {
+            "conversation_id": self._session.conversation_id,
+            "initialized": self._session.initialized,
+            "active_task_id": self._session.active_task_id,
+            "message_count": len(self._session.messages),
+            "created_at": self._session.created_at,
+            "last_activity": self._session.last_activity
+        }
+
+
+# Global session manager instance
+_global_session: Optional[GlobalSessionManager] = None
+
+def get_global_session() -> GlobalSessionManager:
+    global _global_session
+    if _global_session is None:
+        _global_session = GlobalSessionManager()
+    return _global_session
\ No newline at end of file
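A sketch of how an OpenAI-compatible request handler is expected to drive the manager (message content is arbitrary):

    from warp2protobuf.core.session import get_global_session

    session = get_global_session()
    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "List the files in my repo"},
    ]
    session.update_session_with_openai_messages(messages)

    query = session.extract_current_user_query(messages)  # "List the files in my repo"
    history = session.get_history_messages_excluding_current(query)
    stats = session.get_stats()  # includes the fixed conversation_id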
diff --git a/warp2protobuf/core/stream_processor.py b/warp2protobuf/core/stream_processor.py
new file mode 100644
index 0000000000000000000000000000000000000000..91af4066068417e857861893b10363d2dff08d8c
--- /dev/null
+++ b/warp2protobuf/core/stream_processor.py
@@ -0,0 +1,334 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Streaming packet processor
+
+Processes streaming protobuf packets, with real-time parsing and WebSocket push.
+"""
+import asyncio
+import json
+import base64
+from typing import AsyncGenerator, List, Dict, Any, Optional
+from datetime import datetime
+
+from .logging import logger
+from .protobuf_utils import protobuf_to_dict
+
+
+class StreamProcessor:
+    """Streaming packet processor"""
+
+    def __init__(self, websocket_manager=None):
+        self.websocket_manager = websocket_manager
+        self.active_streams: Dict[str, StreamSession] = {}
+
+    async def create_stream_session(self, stream_id: str, message_type: str = "warp.multi_agent.v1.Response") -> 'StreamSession':
+        """Create a streaming session"""
+        session = StreamSession(stream_id, message_type, self.websocket_manager)
+        self.active_streams[stream_id] = session
+
+        logger.info(f"Created streaming session: {stream_id}, message type: {message_type}")
+        return session
+
+    async def get_stream_session(self, stream_id: str) -> Optional['StreamSession']:
+        """Look up a streaming session"""
+        return self.active_streams.get(stream_id)
+
+    async def close_stream_session(self, stream_id: str):
+        """Close a streaming session"""
+        if stream_id in self.active_streams:
+            session = self.active_streams[stream_id]
+            await session.close()
+            del self.active_streams[stream_id]
+            logger.info(f"Closed streaming session: {stream_id}")
+
+    async def process_stream_chunk(self, stream_id: str, chunk_data: bytes) -> Dict[str, Any]:
+        """Process one streaming chunk"""
+        session = await self.get_stream_session(stream_id)
+        if not session:
+            raise ValueError(f"Streaming session does not exist: {stream_id}")
+
+        return await session.process_chunk(chunk_data)
+
+    async def finalize_stream(self, stream_id: str) -> Dict[str, Any]:
+        """Finish stream processing"""
+        session = await self.get_stream_session(stream_id)
+        if not session:
+            raise ValueError(f"Streaming session does not exist: {stream_id}")
+
+        result = await session.finalize()
+        await self.close_stream_session(stream_id)
+        return result
+
+
+class StreamSession:
+    """A single streaming session"""
+
+    def __init__(self, session_id: str, message_type: str, websocket_manager=None):
+        self.session_id = session_id
+        self.message_type = message_type
+        self.websocket_manager = websocket_manager
+
+        self.chunks: List[bytes] = []
+        self.chunk_count = 0
+        self.total_size = 0
+        self.start_time = datetime.now()
+
+        self.parsed_chunks: List[Dict] = []
+        self.complete_message: Optional[Dict] = None
+
+    async def process_chunk(self, chunk_data: bytes) -> Dict[str, Any]:
+        """Process a single chunk"""
+        self.chunk_count += 1
+        self.total_size += len(chunk_data)
+        self.chunks.append(chunk_data)
+
+        logger.debug(f"Streaming session {self.session_id}: processing chunk {self.chunk_count}, size {len(chunk_data)} bytes")
+
+        chunk_result = {
+            "chunk_index": self.chunk_count - 1,
+            "size": len(chunk_data),
+            "timestamp": datetime.now().isoformat()
+        }
+
+        try:
+            chunk_json = protobuf_to_dict(chunk_data, self.message_type)
+            chunk_result["json_data"] = chunk_json
+            chunk_result["parsed_successfully"] = True
+
+            self.parsed_chunks.append(chunk_json)
+
+            if self.websocket_manager:
+                await self.websocket_manager.broadcast({
+                    "event": "stream_chunk_parsed",
+                    "stream_id": self.session_id,
+                    "chunk": chunk_result
+                })
+
+        except Exception as e:
+            chunk_result["error"] = str(e)
+            chunk_result["parsed_successfully"] = False
+            logger.warning(f"Chunk parse failed: {e}")
+
+            if self.websocket_manager:
+                await self.websocket_manager.broadcast({
+                    "event": "stream_chunk_error",
+                    "stream_id": self.session_id,
+                    "chunk": chunk_result
+                })
+
+        return chunk_result
+
+    async def finalize(self) -> Dict[str, Any]:
+        """Finish the stream and try to assemble the complete message"""
+        duration = (datetime.now() - self.start_time).total_seconds()
+
+        logger.info(f"Streaming session {self.session_id} finished: {self.chunk_count} chunks, total size {self.total_size} bytes, took {duration:.2f}s")
+
+        result = {
+            "session_id": self.session_id,
+            "chunk_count": self.chunk_count,
+            "total_size": self.total_size,
+            "duration_seconds": duration,
+            "chunks": []
+        }
+
+        for i, chunk in enumerate(self.chunks):
+            chunk_info = {
+                "index": i,
+                "size": len(chunk),
+                "hex_preview": chunk[:32].hex() if len(chunk) >= 32 else chunk.hex()
+            }
+
+            if i < len(self.parsed_chunks):
+                chunk_info["parsed_data"] = self.parsed_chunks[i]
+
+            result["chunks"].append(chunk_info)
+
+        try:
+            complete_data = b''.join(self.chunks)
+            complete_json = protobuf_to_dict(complete_data, self.message_type)
+
+            result["complete_message"] = {
+                "size": len(complete_data),
+                "json_data": complete_json,
+                "assembly_successful": True
+            }
+
+            self.complete_message = complete_json
+
+            logger.info(f"Stream message assembled successfully: {len(complete_data)} bytes")
+
+        except Exception as e:
+            result["complete_message"] = {
+                "error": str(e),
+                "assembly_successful": False
+            }
+            logger.warning(f"Stream message assembly failed: {e}")
+
+        if self.websocket_manager:
+            await self.websocket_manager.broadcast({
+                "event": "stream_completed",
+                "stream_id": self.session_id,
+                "result": result
+            })
+
+        return result
+
+    async def close(self):
+        """Close the session"""
+        self.chunks.clear()
+        self.parsed_chunks.clear()
+        self.complete_message = None
+
+        logger.debug(f"Streaming session {self.session_id} closed")
+
+
+class StreamPacketAnalyzer:
+    """Streaming packet analyzer"""
+
+    @staticmethod
+    def analyze_chunk_patterns(chunks: List[bytes]) -> Dict[str, Any]:
+        if not chunks:
+            return {"error": "no chunks"}
+
+        analysis = {
+            "total_chunks": len(chunks),
+            "size_distribution": {},
+            "size_stats": {},
+            "pattern_analysis": {}
+        }
+
+        sizes = [len(chunk) for chunk in chunks]
+        analysis["size_stats"] = {
+            "min": min(sizes),
+            "max": max(sizes),
+            "avg": sum(sizes) / len(sizes),
+            "total": sum(sizes)
+        }
+
+        size_ranges = [(0, 100), (100, 500), (500, 1000), (1000, 5000), (5000, float('inf'))]
+        for start, end in size_ranges:
+            range_name = f"{start}-{end if end != float('inf') else '∞'}"
+            count = sum(1 for size in sizes if start <= size < end)
+            analysis["size_distribution"][range_name] = count
+
+        if len(chunks) >= 2:
+            first_bytes = [chunk[:4].hex() if len(chunk) >= 4 else chunk.hex() for chunk in chunks[:5]]
+            analysis["pattern_analysis"]["first_bytes_samples"] = first_bytes
+
+        if chunks:
+            common_prefix_len = 0
+            first_chunk = chunks[0]
+            for i in range(min(len(first_chunk), 10)):
+                if all(len(chunk) > i and chunk[i] == first_chunk[i] for chunk in chunks[1:]):
+                    common_prefix_len = i + 1
+                else:
+                    break
+
+            if common_prefix_len > 0:
+                analysis["pattern_analysis"]["common_prefix_length"] = common_prefix_len
+                analysis["pattern_analysis"]["common_prefix_hex"] = first_chunk[:common_prefix_len].hex()
+
+        return analysis
+
+    @staticmethod
+    def extract_streaming_deltas(parsed_chunks: List[Dict]) -> List[Dict]:
+        if not parsed_chunks:
+            return []
+
+        deltas = []
+        previous_content = ""
+
+        for i, chunk in enumerate(parsed_chunks):
+            delta = {
+                "chunk_index": i,
+                "timestamp": datetime.now().isoformat()
+            }
+
+            current_content = StreamPacketAnalyzer._extract_text_content(chunk)
+
+            if current_content and current_content != previous_content:
+                if previous_content and current_content.startswith(previous_content):
+                    delta["content_delta"] = current_content[len(previous_content):]
+                    delta["delta_type"] = "append"
+                else:
+                    delta["content_delta"] = current_content
+                    delta["delta_type"] = "replace"
+
+                delta["total_content_length"] = len(current_content)
+                previous_content = current_content
+            else:
+                delta["content_delta"] = ""
+                delta["delta_type"] = "no_change"
+
+            if i > 0:
+                delta["field_changes"] = StreamPacketAnalyzer._compare_dicts(parsed_chunks[i-1], chunk)
+
+            deltas.append(delta)
+
+        return deltas
+
+    @staticmethod
+    def _extract_text_content(data: Dict) -> str:
+        text_paths = [
+            ["content"],
+            ["text"],
+            ["message"],
+            ["agent_output", "text"],
+            ["choices", 0, "delta", "content"],
+            ["choices", 0, "message", "content"]
+        ]
+
+        for path in text_paths:
+            try:
+                current = data
+                for key in path:
+                    if isinstance(current, dict) and key in current:
+                        current = current[key]
+                    elif isinstance(current, list) and isinstance(key, int) and 0 <= key < len(current):
+                        current = current[key]
+                    else:
+                        break
+                else:
+                    if isinstance(current, str):
+                        return current
+            except Exception:
+                continue
+
+        return ""
+
+    @staticmethod
+    def _compare_dicts(dict1: Dict, dict2: Dict, prefix: str = "") -> List[str]:
+        changes = []
+
+        all_keys = set(dict1.keys()) | set(dict2.keys())
+
+        for key in all_keys:
+            current_path = f"{prefix}.{key}" if prefix else key
+
+            if key not in dict1:
+                changes.append(f"added: {current_path}")
+            elif key not in dict2:
+                changes.append(f"removed: {current_path}")
+            elif dict1[key] != dict2[key]:
+                if isinstance(dict1[key], dict) and isinstance(dict2[key], dict):
+                    changes.extend(StreamPacketAnalyzer._compare_dicts(dict1[key], dict2[key], current_path))
+                else:
+                    changes.append(f"changed: {current_path}")
+
+        return changes[:10]
+
+
+_global_processor: Optional[StreamProcessor] = None
+
+def get_stream_processor() -> StreamProcessor:
+    global _global_processor
+    if _global_processor is None:
+        _global_processor = StreamProcessor()
+    return _global_processor
+
+
+def set_websocket_manager(manager):
+    processor = get_stream_processor()
+    processor.websocket_manager = manager
\ No newline at end of file
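A minimal driver for the processor, assuming fragments is a list of protobuf byte chunks captured from a response stream (no WebSocket manager attached):

    import asyncio
    from warp2protobuf.core.stream_processor import get_stream_processor

    async def replay(fragments):
        processor = get_stream_processor()
        await processor.create_stream_session("debug-1", "warp.multi_agent.v1.Response")
        for fragment in fragments:
            chunk_info = await processor.process_stream_chunk("debug-1", fragment)
            print(chunk_info["chunk_index"], chunk_info["parsed_successfully"])
        summary = await processor.finalize_stream("debug-1")
        print(summary["chunk_count"], summary["total_size"])

    # asyncio.run(replay([...]))  # supply real protobuf fragments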
diff --git a/warp2protobuf/warp/__init__.py b/warp2protobuf/warp/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..249f56b87998e5854f25f9c5424a478f549cc599
--- /dev/null
+++ b/warp2protobuf/warp/__init__.py
@@ -0,0 +1,2 @@
+# Subpackage for Warp API client integrations
+__all__ = []
\ No newline at end of file
diff --git a/warp2protobuf/warp/__pycache__/__init__.cpython-312.pyc b/warp2protobuf/warp/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6f069b3eef2d8b0ad1ae23207cf731af9c89b745
Binary files /dev/null and b/warp2protobuf/warp/__pycache__/__init__.cpython-312.pyc differ
diff --git a/warp2protobuf/warp/__pycache__/__init__.cpython-313.pyc b/warp2protobuf/warp/__pycache__/__init__.cpython-313.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e86183926d4be731b295c6727b141b3b011708c4
Binary files /dev/null and b/warp2protobuf/warp/__pycache__/__init__.cpython-313.pyc differ
diff --git a/warp2protobuf/warp/__pycache__/api_client.cpython-312.pyc b/warp2protobuf/warp/__pycache__/api_client.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9a4038d61ec80165b81e59304522939ada6e96ba
Binary files /dev/null and b/warp2protobuf/warp/__pycache__/api_client.cpython-312.pyc differ
diff --git a/warp2protobuf/warp/__pycache__/api_client.cpython-313.pyc b/warp2protobuf/warp/__pycache__/api_client.cpython-313.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c3797b890437ea2d231c2cf3ee0e21b02b236ff8
Binary files /dev/null and b/warp2protobuf/warp/__pycache__/api_client.cpython-313.pyc differ
diff --git a/warp2protobuf/warp/api_client.py b/warp2protobuf/warp/api_client.py
new file mode 100644
index 0000000000000000000000000000000000000000..fbebf69785f317cab993f40a1ad88778b70d0a31
--- /dev/null
+++ b/warp2protobuf/warp/api_client.py
@@ -0,0 +1,474 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Warp API client module
+
+Handles communication with the Warp API, including protobuf sending and SSE response parsing.
+"""
+import httpx
+import os
+import base64
+import binascii
+from typing import Optional, Any, Dict
+from urllib.parse import urlparse
+import socket
+
+from ..core.logging import logger
+from ..core.protobuf_utils import protobuf_to_dict
+from ..core.auth import get_valid_jwt, acquire_anonymous_access_token, refresh_jwt_if_needed
+from ..config.settings import WARP_URL as CONFIG_WARP_URL
+
+
+def _get(d: Dict[str, Any], *names: str) -> Any:
+    """Return the first matching key value (camelCase/snake_case tolerant)."""
+    for name in names:
+        if name in d:
+            return d[name]
+    return None
+
+
+def _get_event_type(event_data: dict) -> str:
+    """Determine the type of SSE event for logging"""
+    if "init" in event_data:
+        return "INITIALIZATION"
+    client_actions = _get(event_data, "client_actions", "clientActions")
+    if isinstance(client_actions, dict):
+        actions = _get(client_actions, "actions", "Actions") or []
+        if not actions:
+            return "CLIENT_ACTIONS_EMPTY"
+
+        action_types = []
+        for action in actions:
+            if _get(action, "create_task", "createTask") is not None:
+                action_types.append("CREATE_TASK")
+            elif _get(action, "append_to_message_content", "appendToMessageContent") is not None:
+                action_types.append("APPEND_CONTENT")
+            elif _get(action, "add_messages_to_task", "addMessagesToTask") is not None:
+                action_types.append("ADD_MESSAGE")
+            elif _get(action, "tool_call", "toolCall") is not None:
+                action_types.append("TOOL_CALL")
+            elif _get(action, "tool_response", "toolResponse") is not None:
+                action_types.append("TOOL_RESPONSE")
+            else:
+                action_types.append("UNKNOWN_ACTION")
+
+        return f"CLIENT_ACTIONS({', '.join(action_types)})"
+    elif "finished" in event_data:
+        return "FINISHED"
+    else:
+        return "UNKNOWN_EVENT"
+
+
+async def send_protobuf_to_warp_api(
+    protobuf_bytes: bytes, show_all_events: bool = True
+) -> tuple[str, Optional[str], Optional[str]]:
+    """Send protobuf data to the Warp API and collect the response."""
+    try:
+        logger.info(f"Sending {len(protobuf_bytes)} bytes to Warp API")
+        logger.info(f"First 32 bytes of packet (hex): {protobuf_bytes[:32].hex()}")
+
+        warp_url = CONFIG_WARP_URL
+
+        logger.info(f"Sending request to: {warp_url}")
+
+        conversation_id = None
+        task_id = None
+        complete_response = []
+        all_events = []
+        event_count = 0
+
+        verify_opt = True
+        insecure_env = os.getenv("WARP_INSECURE_TLS", "").lower()
+        if insecure_env in ("1", "true", "yes"):
+            verify_opt = False
+            logger.warning("TLS verification disabled via WARP_INSECURE_TLS for Warp API client")
+
+        async with httpx.AsyncClient(http2=True, timeout=httpx.Timeout(60.0), verify=verify_opt, trust_env=True) as client:
+            # Try at most twice: on a first-attempt quota 429, acquire an anonymous token and retry once
+            for attempt in range(2):
+                jwt = await get_valid_jwt() if attempt == 0 else jwt  # keep existing unless refreshed explicitly
+                headers = {
+                    "accept": "text/event-stream",
+                    "content-type": "application/x-protobuf",
+                    "x-warp-client-version": "v0.2025.08.06.08.12.stable_02",
+                    "x-warp-os-category": "Windows",
+                    "x-warp-os-name": "Windows",
+                    "x-warp-os-version": "11 (26100)",
+                    "authorization": f"Bearer {jwt}",
+                    "content-length": str(len(protobuf_bytes)),
+                }
+                async with client.stream("POST", warp_url, headers=headers, content=protobuf_bytes) as response:
+                    if response.status_code != 200:
+                        error_text = await response.aread()
+                        error_content = error_text.decode('utf-8') if error_text else "No error content"
+
+                        # Detect an invalid JWT and try refreshing it on the first failure
+                        if response.status_code == 401 and attempt == 0:
+                            logger.warning("WARP API returned 401 (invalid token). Refreshing the JWT and retrying once…")
+                            try:
+                                refresh_success = await refresh_jwt_if_needed()
+                                if refresh_success:
+                                    jwt = await get_valid_jwt()
+                                    logger.info("JWT refreshed; retrying the API call")
+                                    continue
+                                else:
+                                    logger.warning("JWT refresh failed; trying to acquire an anonymous token")
+                                    new_jwt = await acquire_anonymous_access_token()
+                                    if new_jwt:
+                                        jwt = new_jwt
+                                        continue
+                            except Exception as e:
+                                logger.warning(f"JWT refresh raised an error: {e}")
+                            logger.error(f"WARP API HTTP ERROR {response.status_code}: {error_content}")
+                            return f"❌ Warp API Error (HTTP {response.status_code}): {error_content}", None, None
+
+                        # Detect quota exhaustion and try to obtain a new token on the first failure
+                        if response.status_code == 429 and attempt == 0 and (
+                            ("No remaining quota" in error_content) or ("No AI requests remaining" in error_content)
+                        ):
+                            logger.warning("WARP API returned 429 (quota exhausted). Removing the current token and acquiring a new one…")
+                            # Remove the current token and re-acquire
+                            from ..core.auth import remove_token_from_pool
+                            remove_token_from_pool(jwt)
+                            try:
+                                new_jwt = await get_valid_jwt()
+                            except Exception:
+                                new_jwt = None
+                            if new_jwt and new_jwt != jwt:
+                                jwt = new_jwt
+                                # Abandon this response and make the next attempt
+                                continue
+                            else:
+                                logger.error("Could not obtain a new valid token; retry failed.")
+                                logger.error(f"WARP API HTTP ERROR {response.status_code}: {error_content}")
+                                return f"❌ Warp API Error (HTTP {response.status_code}): {error_content}", None, None
+                        # Other errors, or failure on the second attempt
+                        logger.error(f"WARP API HTTP ERROR {response.status_code}: {error_content}")
+                        return f"❌ Warp API Error (HTTP {response.status_code}): {error_content}", None, None
+
+                    logger.info(f"✅ Received HTTP {response.status_code} response")
+                    logger.info("Processing SSE event stream...")
+
+                    import re as _re
+                    def _parse_payload_bytes(data_str: str):
+                        s = _re.sub(r"\s+", "", data_str or "")
+                        if not s:
+                            return None
+                        if _re.fullmatch(r"[0-9a-fA-F]+", s or ""):
+                            try:
+                                return bytes.fromhex(s)
+                            except Exception:
+                                pass
+                        pad = "=" * ((4 - (len(s) % 4)) % 4)
+                        try:
+                            import base64 as _b64
+                            return _b64.urlsafe_b64decode(s + pad)
+                        except Exception:
+                            try:
+                                return _b64.b64decode(s + pad)
+                            except Exception:
+                                return None
+
+                    current_data = ""
+
+                    async for line in response.aiter_lines():
+                        if line.startswith("data:"):
+                            payload = line[5:].strip()
+                            if not payload:
+                                continue
+                            if payload == "[DONE]":
+                                logger.info("Received [DONE] marker; stopping")
+                                break
+                            current_data += payload
+                            continue
+
+                        if (line.strip() == "") and current_data:
+                            raw_bytes = _parse_payload_bytes(current_data)
+                            current_data = ""
+                            if raw_bytes is None:
+                                logger.debug("Skipping unparseable SSE data block (not hex/base64, or incomplete)")
+                                continue
+                            try:
+                                event_data = protobuf_to_dict(raw_bytes, "warp.multi_agent.v1.ResponseEvent")
+                            except Exception as parse_error:
+                                logger.debug(f"Event parse failed; skipping: {str(parse_error)[:100]}")
+                                continue
+                            event_count += 1
+
+                            def _get(d: Dict[str, Any], *names: str) -> Any:
+                                for n in names:
+                                    if isinstance(d, dict) and n in d:
+                                        return d[n]
+                                return None
+
+                            event_type = _get_event_type(event_data)
+                            if show_all_events:
+                                all_events.append({"event_number": event_count, "event_type": event_type, "raw_data": event_data})
+                            logger.info(f"🔄 Event #{event_count}: {event_type}")
+                            if show_all_events:
+                                logger.info(f"    📋 Event data: {str(event_data)[:200]}...")
+
+                            if "init" in event_data:
+                                init_data = event_data["init"]
+                                conversation_id = init_data.get("conversation_id", conversation_id)
+                                task_id = init_data.get("task_id", task_id)
+                                logger.info(f"Conversation initialized: {conversation_id}")
+                            client_actions = _get(event_data, "client_actions", "clientActions")
+                            if isinstance(client_actions, dict):
+                                actions = _get(client_actions, "actions", "Actions") or []
+                                for i, action in enumerate(actions):
+                                    logger.info(f"    🎯 Action #{i+1}: {list(action.keys())}")
+                                    append_data = _get(action, "append_to_message_content", "appendToMessageContent")
+                                    if isinstance(append_data, dict):
+                                        message = append_data.get("message", {})
+                                        agent_output = _get(message, "agent_output", "agentOutput") or {}
+                                        text_content = agent_output.get("text", "")
+                                        if text_content:
+                                            complete_response.append(text_content)
+                                            logger.info(f"    📝 Text Fragment: {text_content[:100]}...")
+                                    messages_data = _get(action, "add_messages_to_task", "addMessagesToTask")
+                                    if isinstance(messages_data, dict):
+                                        messages = messages_data.get("messages", [])
+                                        task_id = messages_data.get("task_id", messages_data.get("taskId", task_id))
+                                        for j, message in enumerate(messages):
+                                            logger.info(f"    📨 Message #{j+1}: {list(message.keys())}")
+                                            if _get(message, "agent_output", "agentOutput") is not None:
+                                                agent_output = _get(message, "agent_output", "agentOutput") or {}
+                                                text_content = agent_output.get("text", "")
+                                                if text_content:
+                                                    complete_response.append(text_content)
+                                                    logger.info(f"    📝 Complete Message: {text_content[:100]}...")
+
+                    full_response = "".join(complete_response)
+                    logger.info("="*60)
+                    logger.info("📊 SSE STREAM SUMMARY")
+                    logger.info("="*60)
+                    logger.info(f"📈 Total Events Processed: {event_count}")
+                    logger.info(f"🆔 Conversation ID: {conversation_id}")
+                    logger.info(f"🆔 Task ID: {task_id}")
+                    logger.info(f"📝 Response Length: {len(full_response)} characters")
+                    logger.info("="*60)
+                    if full_response:
+                        logger.info(f"✅ Stream processing completed successfully")
+                        return full_response, conversation_id, task_id
+                    else:
+                        logger.warning("⚠️ No text content received in response")
+                        return "Warning: No response content received", conversation_id, task_id
+    except Exception as e:
+        import traceback
+        logger.error("="*60)
+        logger.error("WARP API CLIENT EXCEPTION")
+        logger.error("="*60)
+        logger.error(f"Exception Type: {type(e).__name__}")
+        logger.error(f"Exception Message: {str(e)}")
+        logger.error(f"Request URL: {warp_url if 'warp_url' in locals() else 'Unknown'}")
+        logger.error(f"Request Size: {len(protobuf_bytes) if 'protobuf_bytes' in locals() else 'Unknown'}")
+        logger.error("Python Traceback:")
+        logger.error(traceback.format_exc())
+        logger.error("="*60)
+        raise
+
+
+async def send_protobuf_to_warp_api_parsed(protobuf_bytes: bytes) -> tuple[str, Optional[str], Optional[str], list]:
+    """Send protobuf data to the Warp API and return the parsed SSE event data."""
+    try:
+        logger.info(f"Sending {len(protobuf_bytes)} bytes to Warp API (parsed mode)")
+        logger.info(f"First 32 bytes of packet (hex): {protobuf_bytes[:32].hex()}")
+
+        warp_url = CONFIG_WARP_URL
+
+        logger.info(f"Sending request to: {warp_url}")
+
+        conversation_id = None
+        task_id = None
+        complete_response = []
+        parsed_events = []
+        event_count = 0
+
+        verify_opt = True
+        insecure_env = os.getenv("WARP_INSECURE_TLS", "").lower()
+        if insecure_env in ("1", "true", "yes"):
+            verify_opt = False
+            logger.warning("TLS verification disabled via WARP_INSECURE_TLS for Warp API client")
+
+        async with httpx.AsyncClient(http2=True, timeout=httpx.Timeout(60.0), verify=verify_opt, trust_env=True) as client:
+            # Try at most twice: on a first-attempt quota 429, acquire an anonymous token and retry once
+            for attempt in range(2):
+                jwt = await get_valid_jwt() if attempt == 0 else jwt  # keep existing unless refreshed explicitly
+                headers = {
+                    "accept": "text/event-stream",
+                    "content-type": "application/x-protobuf",
+                    "x-warp-client-version": "v0.2025.08.06.08.12.stable_02",
+                    "x-warp-os-category": "Windows",
+                    "x-warp-os-name": "Windows",
+                    "x-warp-os-version": "11 (26100)",
+                    "authorization": f"Bearer {jwt}",
+                    "content-length": str(len(protobuf_bytes)),
+                }
+                async with client.stream("POST", warp_url, headers=headers, content=protobuf_bytes) as response:
+                    if response.status_code != 200:
+                        error_text = await response.aread()
+                        error_content = error_text.decode('utf-8') if error_text else "No error content"
+
+                        # Detect an invalid JWT and try refreshing it on the first failure
+                        if response.status_code == 401 and attempt == 0:
+                            logger.warning("WARP API returned 401 (invalid token, parsed mode). Refreshing the JWT and retrying once…")
+                            try:
+                                refresh_success = await refresh_jwt_if_needed()
+                                if refresh_success:
+                                    jwt = await get_valid_jwt()
+                                    logger.info("JWT refreshed; retrying the API call (parsed mode)")
+                                    continue
+                                else:
+                                    logger.warning("JWT refresh failed; trying to acquire an anonymous token (parsed mode)")
+                                    new_jwt = await acquire_anonymous_access_token()
+                                    if new_jwt:
+                                        jwt = new_jwt
+                                        continue
+                            except Exception as e:
+                                logger.warning(f"JWT refresh raised an error (parsed mode): {e}")
+                            logger.error(f"WARP API HTTP ERROR (parsed mode) {response.status_code}: {error_content}")
+                            return f"❌ Warp API Error (HTTP {response.status_code}): {error_content}", None, None, []
+
+                        # Detect quota exhaustion and try to obtain a new token on the first failure
+                        if response.status_code == 429 and attempt == 0 and (
+                            ("No remaining quota" in error_content) or ("No AI requests remaining" in error_content)
+                        ):
+                            logger.warning("WARP API returned 429 (quota exhausted, parsed mode). Removing the current token and acquiring a new one…")
+                            # Remove the current token and re-acquire
+                            from ..core.auth import remove_token_from_pool
+                            remove_token_from_pool(jwt)
+                            try:
+                                new_jwt = await get_valid_jwt()
+                            except Exception:
+                                new_jwt = None
+                            if new_jwt and new_jwt != jwt:
+                                jwt = new_jwt
+                                # Abandon this response and make the next attempt
+                                continue
+                            else:
+                                logger.error("Could not obtain a new valid token; retry failed (parsed mode).")
+                                logger.error(f"WARP API HTTP ERROR (parsed mode) {response.status_code}: {error_content}")
+                                return f"❌ Warp API Error (HTTP {response.status_code}): {error_content}", None, None, []
+                        # Other errors, or failure on the second attempt
+                        logger.error(f"WARP API HTTP ERROR (parsed mode) {response.status_code}: {error_content}")
+                        return f"❌ Warp API Error (HTTP {response.status_code}): {error_content}", None, None, []
+
+                    logger.info(f"✅ Received HTTP {response.status_code} response (parsed mode)")
+                    logger.info("Processing SSE event stream...")
+
+                    import re as _re2
+                    def _parse_payload_bytes2(data_str: str):
+                        s = _re2.sub(r"\s+", "", data_str or "")
+                        if not s:
+                            return None
+                        if _re2.fullmatch(r"[0-9a-fA-F]+", s or ""):
+                            try:
+                                return bytes.fromhex(s)
+                            except Exception:
+                                pass
+                        pad = "=" * ((4 - (len(s) % 4)) % 4)
+                        try:
+                            import base64 as _b642
+                            return _b642.urlsafe_b64decode(s + pad)
+                        except Exception:
+                            try:
+                                return _b642.b64decode(s + pad)
+                            except Exception:
+                                return None
+
+                    current_data = ""
+
+                    async for line in response.aiter_lines():
+                        if line.startswith("data:"):
+                            payload = line[5:].strip()
+                            if not payload:
+                                continue
+                            if payload == "[DONE]":
+                                logger.info("Received [DONE] marker; stopping")
+                                break
+                            current_data += payload
+                            continue
+
+                        if (line.strip() == "") and current_data:
+                            raw_bytes = _parse_payload_bytes2(current_data)
+                            current_data = ""
+                            if raw_bytes is None:
+                                logger.debug("Skipping unparseable SSE data block (not hex/base64, or incomplete)")
+                                continue
+                            try:
+                                event_data = protobuf_to_dict(raw_bytes, "warp.multi_agent.v1.ResponseEvent")
+                                event_count += 1
+                                event_type = _get_event_type(event_data)
+                                parsed_event = {"event_number": event_count, "event_type": event_type, "parsed_data": event_data}
+                                parsed_events.append(parsed_event)
+                                logger.info(f"🔄 Event #{event_count}: {event_type}")
+                                logger.debug(f"    📋 Event data: {str(event_data)[:200]}...")
+
+                                def _get(d: Dict[str, Any], *names: str) -> Any:
+                                    for n in names:
+                                        if isinstance(d, dict) and n in d:
+                                            return d[n]
+                                    return None
+
+                                if "init" in event_data:
+                                    init_data = event_data["init"]
+                                    conversation_id = init_data.get("conversation_id", conversation_id)
+                                    task_id = init_data.get("task_id", task_id)
+                                    logger.info(f"Conversation initialized: {conversation_id}")
+
+                                client_actions = _get(event_data, "client_actions", "clientActions")
+                                if isinstance(client_actions, dict):
+                                    actions = _get(client_actions, "actions", "Actions") or []
+                                    for i, action in enumerate(actions):
+                                        logger.info(f"    🎯 Action #{i+1}: {list(action.keys())}")
+                                        append_data = _get(action, "append_to_message_content", "appendToMessageContent")
+                                        if isinstance(append_data, dict):
+                                            message = append_data.get("message", {})
+                                            agent_output = _get(message, "agent_output", "agentOutput") or {}
+                                            text_content = agent_output.get("text", "")
+                                            if text_content:
+                                                complete_response.append(text_content)
+                                                logger.info(f"    📝 Text Fragment: {text_content[:100]}...")
+                                        messages_data = _get(action, "add_messages_to_task", "addMessagesToTask")
+                                        if isinstance(messages_data, dict):
+                                            messages = messages_data.get("messages", [])
+                                            task_id = messages_data.get("task_id", messages_data.get("taskId", task_id))
+                                            for j, message in enumerate(messages):
+                                                logger.info(f"    📨 Message #{j+1}: {list(message.keys())}")
+                                                if _get(message, "agent_output", "agentOutput") is not None:
+                                                    agent_output = _get(message, "agent_output", "agentOutput") or {}
+                                                    text_content = agent_output.get("text", "")
+                                                    if text_content:
+                                                        complete_response.append(text_content)
+                                                        logger.info(f"    📝 Complete Message: {text_content[:100]}...")
+                            except Exception as parse_err:
+                                logger.debug(f"Event parse failed; skipping: {str(parse_err)[:100]}")
+                                continue
+
+                    full_response = "".join(complete_response)
+                    logger.info("="*60)
+                    logger.info("📊 SSE STREAM SUMMARY (parsed mode)")
+                    logger.info("="*60)
+                    logger.info(f"📈 Total Events Processed: {event_count}")
+                    logger.info(f"🆔 Conversation ID: {conversation_id}")
+                    logger.info(f"🆔 Task ID: {task_id}")
+                    logger.info(f"📝 Response Length: {len(full_response)} characters")
+                    logger.info(f"🎯 Parsed Events Count: {len(parsed_events)}")
+                    logger.info("="*60)
+
+                    logger.info(f"✅ Stream processing completed successfully (parsed mode)")
+                    return full_response, conversation_id, task_id, parsed_events
+    except Exception as e:
+        import traceback
+        logger.error("="*60)
+        logger.error("WARP API CLIENT EXCEPTION (parsed mode)")
+        logger.error("="*60)
+        logger.error(f"Exception Type: {type(e).__name__}")
+        logger.error(f"Exception Message: {str(e)}")
+        logger.error(f"Request URL: {warp_url if 'warp_url' in locals() else 'Unknown'}")
+        logger.error(f"Request Size: {len(protobuf_bytes) if 'protobuf_bytes' in locals() else 'Unknown'}")
+        logger.error("Python Traceback:")
+        logger.error(traceback.format_exc())
+        logger.error("="*60)
+        raise
\ No newline at end of file
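An end-to-end sketch tying the encoder to this client. The request dict is hypothetical, and get_valid_jwt() must already be able to produce a working token:

    import asyncio
    from warp2protobuf.core.protobuf_utils import dict_to_protobuf_bytes
    from warp2protobuf.warp.api_client import send_protobuf_to_warp_api

    async def main():
        payload = dict_to_protobuf_bytes({}, "warp.multi_agent.v1.Request")  # empty, illustrative request
        text, conversation_id, task_id = await send_protobuf_to_warp_api(payload)
        print(conversation_id, task_id)
        print(text)

    # asyncio.run(main())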
diff --git a/warp2protobuf/warp/response.py b/warp2protobuf/warp/response.py
new file mode 100644
index 0000000000000000000000000000000000000000..8edd0d7e43d3b737348e217078207a1949c07161
--- /dev/null
+++ b/warp2protobuf/warp/response.py
@@ -0,0 +1,204 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Warp API response parsing
+
+Handles parsing of protobuf responses and extraction of OpenAI-compatible content.
+"""
+from typing import Optional, Dict, List, Any
+
+from ..core.logging import logger
+from ..core.protobuf import ensure_proto_runtime, msg_cls
+
+
+def extract_openai_content_from_response(payload: bytes) -> dict:
+    """
+    Extract OpenAI-compatible content from Warp API response payload.
+    """
+    if not payload:
+        logger.debug("extract_openai_content_from_response: payload is empty")
+        return {"content": None, "tool_calls": [], "finish_reason": None, "metadata": {}}
+
+    logger.debug(f"extract_openai_content_from_response: processing payload of {len(payload)} bytes")
+
+    hex_dump = payload.hex()
+    logger.debug(f"extract_openai_content_from_response: complete payload hex: {hex_dump}")
+
+    try:
+        ensure_proto_runtime()
+        ResponseEvent = msg_cls("warp.multi_agent.v1.ResponseEvent")
+        response = ResponseEvent()
+        response.ParseFromString(payload)
+
+        result = {"content": "", "tool_calls": [], "finish_reason": None, "metadata": {}}
+
+        if response.HasField("client_actions"):
+            for i, action in enumerate(response.client_actions.actions):
+                if action.HasField("append_to_message_content"):
+                    message = action.append_to_message_content.message
+                    if message.HasField("agent_output"):
+                        agent_output = message.agent_output
+                        if agent_output.text:
+                            result["content"] += agent_output.text
+                        if agent_output.reasoning:
+                            if "reasoning" not in result:
+                                result["reasoning"] = ""
+                            result["reasoning"] += agent_output.reasoning
+                    if message.HasField("tool_call"):
+                        tool_call = message.tool_call
+                        openai_tool_call = {
+                            "id": getattr(tool_call, 'id', f"call_{i}"),
+                            "type": "function",
+                            "function": {
+                                "name": getattr(tool_call, 'name', getattr(tool_call, 'function_name', 'unknown')),
+                                "arguments": getattr(tool_call, 'arguments', getattr(tool_call, 'parameters', '{}'))
+                            }
+                        }
+                        result["tool_calls"].append(openai_tool_call)
+                elif action.HasField("add_messages_to_task"):
+                    for j, msg in enumerate(action.add_messages_to_task.messages):
+                        if msg.HasField("agent_output") and msg.agent_output.text:
+                            result["content"] += msg.agent_output.text
+                        if msg.HasField("tool_call"):
+                            tool_call = msg.tool_call
+                            tool_name = "unknown"
+                            tool_args = "{}"
+                            tool_call_id = getattr(tool_call, 'tool_call_id', f"call_{i}_{j}")
+                            for field, value in tool_call.ListFields():
+                                if field.name == 'tool_call_id':
+                                    continue
+                                tool_name = field.name
+                                if hasattr(value, 'ListFields'):
+                                    tool_fields_dict = {}
+                                    for tool_field, tool_value in value.ListFields():
+                                        if isinstance(tool_value, str):
+                                            tool_fields_dict[tool_field.name] = tool_value
+                                        elif hasattr(tool_value, '__len__') and not isinstance(tool_value, str):
+                                            tool_fields_dict[tool_field.name] = list(tool_value)
+                                        else:
+                                            tool_fields_dict[tool_field.name] = str(tool_value)
+                                    if tool_fields_dict:
+                                        import json
+                                        tool_args = json.dumps(tool_fields_dict)
+                                break
+                            openai_tool_call = {
+                                "id": tool_call_id,
+                                "type": "function",
+                                "function": {"name": tool_name, "arguments": tool_args}
+                            }
+                            result["tool_calls"].append(openai_tool_call)
+                elif action.HasField("update_task_message"):
+                    umsg = action.update_task_message.message
+                    if umsg.HasField("agent_output") and umsg.agent_output.text:
+                        result["content"] += umsg.agent_output.text
+                elif action.HasField("create_task"):
+                    task = action.create_task.task
+                    for j, msg in enumerate(task.messages):
+                        if msg.HasField("agent_output") and msg.agent_output.text:
+                            result["content"] += msg.agent_output.text
+                elif action.HasField("update_task_summary"):
+                    summary = action.update_task_summary.summary
+                    if summary:
+                        result["content"] += summary
+        if response.HasField("finished"):
+            result["finish_reason"] = "stop"
+        result["metadata"] = {
+            "response_fields": [field.name for field, _ in response.ListFields()],
+            "has_client_actions": response.HasField("client_actions"),
+            "payload_size": len(payload)
+        }
+        return result
+    except Exception as e:
+        logger.error(f"extract_openai_content_from_response: exception occurred: {e}")
+        import traceback
+        logger.error(f"extract_openai_content_from_response: traceback: {traceback.format_exc()}")
+        return {"content": None, "tool_calls": [], "finish_reason": "error", "metadata": {"error": str(e)}}
+
+
+def extract_text_from_response(payload: bytes) -> Optional[str]:
+    result = extract_openai_content_from_response(payload)
+    return result["content"] if result["content"] else None
+
+
+def extract_openai_sse_deltas_from_response(payload: bytes) -> List[Dict[str, Any]]:
+    if not payload:
+        return []
+    try:
+        ensure_proto_runtime()
+        ResponseEvent = msg_cls("warp.multi_agent.v1.ResponseEvent")
+        response = ResponseEvent()
+        response.ParseFromString(payload)
+        deltas = []
+        if response.HasField("client_actions"):
+            for i, action in enumerate(response.client_actions.actions):
+                if action.HasField("append_to_message_content"):
+                    message = action.append_to_message_content.message
+                    if message.HasField("agent_output"):
+                        agent_output = message.agent_output
+                        if agent_output.text:
+                            deltas.append({"choices": [{"index": 0, "delta": {"content": agent_output.text}, "finish_reason": None}]})
+                        if agent_output.reasoning:
+                            deltas.append({"choices": [{"index": 0, "delta": {"reasoning": agent_output.reasoning}, "finish_reason": None}]})
+                    if message.HasField("tool_call"):
+                        tool_call = message.tool_call
+                        deltas.append({"choices": [{"index": 0, "delta": {"role": "assistant"}, "finish_reason": None}]})
+                        openai_tool_call = {
+                            "id": getattr(tool_call, 'tool_call_id', f"call_{i}"),
+                            "type": "function",
+                            "function": {
+                                "name": getattr(tool_call, 'name', 'unknown'),
+                                "arguments": getattr(tool_call, 'arguments', '{}')
+                            }
+                        }
+                        deltas.append({"choices": [{"index": 0, "delta": {"tool_calls": [openai_tool_call]}, "finish_reason": None}]})
+                elif action.HasField("add_messages_to_task"):
+                    for j, msg in enumerate(action.add_messages_to_task.messages):
+                        if msg.HasField("agent_output") and msg.agent_output.text:
+                            deltas.append({"choices": [{"index": 0, "delta": {"content": msg.agent_output.text}, "finish_reason": None}]})
+                        if msg.HasField("tool_call"):
+                            tool_call = msg.tool_call
+                            if j == 0:
+                                deltas.append({"choices": [{"index": 0, "delta": {"role": "assistant"}, "finish_reason": None}]})
+                            tool_call_id = getattr(tool_call, 'tool_call_id', f"call_{i}_{j}")
+                            tool_name = "unknown"
+                            tool_args = "{}"
+                            for field, value in tool_call.ListFields():
+                                if field.name == 'tool_call_id':
+                                    continue
+                                tool_name = field.name
+                                if hasattr(value, 'ListFields'):
+                                    tool_fields_dict = {}
+                                    for tool_field, tool_value in value.ListFields():
+                                        if isinstance(tool_value, str):
+                                            tool_fields_dict[tool_field.name] = tool_value
+                                        elif hasattr(tool_value, '__len__') and not isinstance(tool_value, str):
+                                            tool_fields_dict[tool_field.name] = list(tool_value)
+                                        else:
+                                            tool_fields_dict[tool_field.name] = str(tool_value)
+                                    if tool_fields_dict:
+                                        import json
+                                        tool_args = json.dumps(tool_fields_dict)
+                                break
+                            openai_tool_call = {"id": tool_call_id, "type": "function", "function": {"name": tool_name, "arguments": tool_args}}
+                            deltas.append({"choices": [{"index": 0, "delta": {"tool_calls": [openai_tool_call]}, "finish_reason": None}]})
+                elif action.HasField("update_task_message"):
+                    umsg = action.update_task_message.message
+                    if umsg.HasField("agent_output") and umsg.agent_output.text:
+                        deltas.append({"choices": [{"index": 0, "delta": {"content": umsg.agent_output.text}, "finish_reason": None}]})
+                elif action.HasField("create_task"):
+                    task = action.create_task.task
+                    for j, msg in enumerate(task.messages):
+                        if msg.HasField("agent_output") and msg.agent_output.text:
+                            deltas.append({"choices": [{"index": 0, "delta": {"content": msg.agent_output.text}, "finish_reason": None}]})
+                elif action.HasField("update_task_summary"):
+                    summary = action.update_task_summary.summary
+                    if summary:
+                        deltas.append({"choices": [{"index": 0, "delta": {"content": summary}, "finish_reason": None}]})
+        if response.HasField("finished"):
+            deltas.append({"choices": [{"index": 0, "delta": {}, "finish_reason": "stop"}]})
+        return deltas
+    except Exception as e:
+        logger.error(f"extract_openai_sse_deltas_from_response: exception occurred: {e}")
+        import traceback
+        logger.error(f"extract_openai_sse_deltas_from_response: traceback: {traceback.format_exc()}")
+        return []
\ No newline at end of file
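A usage sketch for the two extractors above; an empty payload simply yields no content and no deltas:

    from warp2protobuf.warp.response import (
        extract_openai_content_from_response,
        extract_openai_sse_deltas_from_response,
    )

    payload = b""  # replace with one decoded SSE event body from the Warp stream

    result = extract_openai_content_from_response(payload)
    if result["content"]:
        print(result["content"])

    for delta in extract_openai_sse_deltas_from_response(payload):
        print(delta["choices"][0]["delta"])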