Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +1 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/.hash/_cparser.pxd.hash +1 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/.hash/_find_header.pxd.hash +1 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/.hash/_http_writer.pyx.hash +1 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/.hash/hdrs.py.hash +1 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/__init__.py +264 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/_cparser.pxd +158 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/_find_header.pxd +2 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/_headers.pxi +83 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/_websocket/reader_c.py +468 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/abc.py +253 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/client_proto.py +307 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/client_reqrep.py +1315 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/client_ws.py +426 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/compression_utils.py +173 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/formdata.py +182 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/hdrs.py +121 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/helpers.py +944 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/http.py +72 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/http_exceptions.py +112 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/http_parser.py +1046 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/http_writer.py +234 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/log.py +8 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/payload.py +519 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/py.typed +1 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/pytest_plugin.py +436 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/resolver.py +187 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/streams.py +726 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/tcp_helpers.py +37 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/test_utils.py +770 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/typedefs.py +69 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/web.py +605 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/web_fileresponse.py +418 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/web_protocol.py +746 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/web_routedef.py +214 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/web_runner.py +399 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/web_server.py +84 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/worker.py +252 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiosignal-1.3.2.dist-info/INSTALLER +1 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiosignal-1.3.2.dist-info/LICENSE +201 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiosignal-1.3.2.dist-info/METADATA +123 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiosignal-1.3.2.dist-info/RECORD +10 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiosignal-1.3.2.dist-info/WHEEL +6 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiosignal-1.3.2.dist-info/top_level.txt +1 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/attr/__init__.py +104 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/attr/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/attr/__pycache__/_cmp.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/attr/__pycache__/_compat.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/attr/__pycache__/_config.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/attr/__pycache__/_funcs.cpython-311.pyc +0 -0
.gitattributes
CHANGED
|
@@ -161,3 +161,4 @@ tuning-competition-baseline/.venv/lib/python3.11/site-packages/torch/_inductor/_
|
|
| 161 |
.venv/lib/python3.11/site-packages/ray/core/libjemalloc.so filter=lfs diff=lfs merge=lfs -text
|
| 162 |
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/yarl/_quoting_c.cpython-311-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 163 |
.venv/lib/python3.11/site-packages/ray/dag/__pycache__/compiled_dag_node.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 161 |
.venv/lib/python3.11/site-packages/ray/core/libjemalloc.so filter=lfs diff=lfs merge=lfs -text
|
| 162 |
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/yarl/_quoting_c.cpython-311-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 163 |
.venv/lib/python3.11/site-packages/ray/dag/__pycache__/compiled_dag_node.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
|
| 164 |
+
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/multidict/_multidict.cpython-311-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/.hash/_cparser.pxd.hash
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
f2318883e549f69de597009a914603b0f1b10381e265ef5d98af499ad973fb98 /home/runner/work/aiohttp/aiohttp/aiohttp/_cparser.pxd
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/.hash/_find_header.pxd.hash
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
d067f01423cddb3c442933b5fcc039b18ab651fcec1bc91c577693aafc25cf78 /home/runner/work/aiohttp/aiohttp/aiohttp/_find_header.pxd
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/.hash/_http_writer.pyx.hash
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
7e209c93f1158118935fb56d028576025763b9eb093053debf84d677d171f23a /home/runner/work/aiohttp/aiohttp/aiohttp/_http_writer.pyx
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/.hash/hdrs.py.hash
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
dab8f933203eeb245d60f856e542a45b888d5a110094620e4811f90f816628d1 /home/runner/work/aiohttp/aiohttp/aiohttp/hdrs.py
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/__init__.py
ADDED
|
@@ -0,0 +1,264 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
__version__ = "3.11.11"
|
| 2 |
+
|
| 3 |
+
from typing import TYPE_CHECKING, Tuple
|
| 4 |
+
|
| 5 |
+
from . import hdrs as hdrs
|
| 6 |
+
from .client import (
|
| 7 |
+
BaseConnector,
|
| 8 |
+
ClientConnectionError,
|
| 9 |
+
ClientConnectionResetError,
|
| 10 |
+
ClientConnectorCertificateError,
|
| 11 |
+
ClientConnectorDNSError,
|
| 12 |
+
ClientConnectorError,
|
| 13 |
+
ClientConnectorSSLError,
|
| 14 |
+
ClientError,
|
| 15 |
+
ClientHttpProxyError,
|
| 16 |
+
ClientOSError,
|
| 17 |
+
ClientPayloadError,
|
| 18 |
+
ClientProxyConnectionError,
|
| 19 |
+
ClientRequest,
|
| 20 |
+
ClientResponse,
|
| 21 |
+
ClientResponseError,
|
| 22 |
+
ClientSession,
|
| 23 |
+
ClientSSLError,
|
| 24 |
+
ClientTimeout,
|
| 25 |
+
ClientWebSocketResponse,
|
| 26 |
+
ClientWSTimeout,
|
| 27 |
+
ConnectionTimeoutError,
|
| 28 |
+
ContentTypeError,
|
| 29 |
+
Fingerprint,
|
| 30 |
+
InvalidURL,
|
| 31 |
+
InvalidUrlClientError,
|
| 32 |
+
InvalidUrlRedirectClientError,
|
| 33 |
+
NamedPipeConnector,
|
| 34 |
+
NonHttpUrlClientError,
|
| 35 |
+
NonHttpUrlRedirectClientError,
|
| 36 |
+
RedirectClientError,
|
| 37 |
+
RequestInfo,
|
| 38 |
+
ServerConnectionError,
|
| 39 |
+
ServerDisconnectedError,
|
| 40 |
+
ServerFingerprintMismatch,
|
| 41 |
+
ServerTimeoutError,
|
| 42 |
+
SocketTimeoutError,
|
| 43 |
+
TCPConnector,
|
| 44 |
+
TooManyRedirects,
|
| 45 |
+
UnixConnector,
|
| 46 |
+
WSMessageTypeError,
|
| 47 |
+
WSServerHandshakeError,
|
| 48 |
+
request,
|
| 49 |
+
)
|
| 50 |
+
from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
|
| 51 |
+
from .formdata import FormData as FormData
|
| 52 |
+
from .helpers import BasicAuth, ChainMapProxy, ETag
|
| 53 |
+
from .http import (
|
| 54 |
+
HttpVersion as HttpVersion,
|
| 55 |
+
HttpVersion10 as HttpVersion10,
|
| 56 |
+
HttpVersion11 as HttpVersion11,
|
| 57 |
+
WebSocketError as WebSocketError,
|
| 58 |
+
WSCloseCode as WSCloseCode,
|
| 59 |
+
WSMessage as WSMessage,
|
| 60 |
+
WSMsgType as WSMsgType,
|
| 61 |
+
)
|
| 62 |
+
from .multipart import (
|
| 63 |
+
BadContentDispositionHeader as BadContentDispositionHeader,
|
| 64 |
+
BadContentDispositionParam as BadContentDispositionParam,
|
| 65 |
+
BodyPartReader as BodyPartReader,
|
| 66 |
+
MultipartReader as MultipartReader,
|
| 67 |
+
MultipartWriter as MultipartWriter,
|
| 68 |
+
content_disposition_filename as content_disposition_filename,
|
| 69 |
+
parse_content_disposition as parse_content_disposition,
|
| 70 |
+
)
|
| 71 |
+
from .payload import (
|
| 72 |
+
PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
|
| 73 |
+
AsyncIterablePayload as AsyncIterablePayload,
|
| 74 |
+
BufferedReaderPayload as BufferedReaderPayload,
|
| 75 |
+
BytesIOPayload as BytesIOPayload,
|
| 76 |
+
BytesPayload as BytesPayload,
|
| 77 |
+
IOBasePayload as IOBasePayload,
|
| 78 |
+
JsonPayload as JsonPayload,
|
| 79 |
+
Payload as Payload,
|
| 80 |
+
StringIOPayload as StringIOPayload,
|
| 81 |
+
StringPayload as StringPayload,
|
| 82 |
+
TextIOPayload as TextIOPayload,
|
| 83 |
+
get_payload as get_payload,
|
| 84 |
+
payload_type as payload_type,
|
| 85 |
+
)
|
| 86 |
+
from .payload_streamer import streamer as streamer
|
| 87 |
+
from .resolver import (
|
| 88 |
+
AsyncResolver as AsyncResolver,
|
| 89 |
+
DefaultResolver as DefaultResolver,
|
| 90 |
+
ThreadedResolver as ThreadedResolver,
|
| 91 |
+
)
|
| 92 |
+
from .streams import (
|
| 93 |
+
EMPTY_PAYLOAD as EMPTY_PAYLOAD,
|
| 94 |
+
DataQueue as DataQueue,
|
| 95 |
+
EofStream as EofStream,
|
| 96 |
+
FlowControlDataQueue as FlowControlDataQueue,
|
| 97 |
+
StreamReader as StreamReader,
|
| 98 |
+
)
|
| 99 |
+
from .tracing import (
|
| 100 |
+
TraceConfig as TraceConfig,
|
| 101 |
+
TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
|
| 102 |
+
TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
|
| 103 |
+
TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
|
| 104 |
+
TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
|
| 105 |
+
TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
|
| 106 |
+
TraceDnsCacheHitParams as TraceDnsCacheHitParams,
|
| 107 |
+
TraceDnsCacheMissParams as TraceDnsCacheMissParams,
|
| 108 |
+
TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
|
| 109 |
+
TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
|
| 110 |
+
TraceRequestChunkSentParams as TraceRequestChunkSentParams,
|
| 111 |
+
TraceRequestEndParams as TraceRequestEndParams,
|
| 112 |
+
TraceRequestExceptionParams as TraceRequestExceptionParams,
|
| 113 |
+
TraceRequestHeadersSentParams as TraceRequestHeadersSentParams,
|
| 114 |
+
TraceRequestRedirectParams as TraceRequestRedirectParams,
|
| 115 |
+
TraceRequestStartParams as TraceRequestStartParams,
|
| 116 |
+
TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
|
| 117 |
+
)
|
| 118 |
+
|
| 119 |
+
if TYPE_CHECKING:
|
| 120 |
+
# At runtime these are lazy-loaded at the bottom of the file.
|
| 121 |
+
from .worker import (
|
| 122 |
+
GunicornUVLoopWebWorker as GunicornUVLoopWebWorker,
|
| 123 |
+
GunicornWebWorker as GunicornWebWorker,
|
| 124 |
+
)
|
| 125 |
+
|
| 126 |
+
__all__: Tuple[str, ...] = (
|
| 127 |
+
"hdrs",
|
| 128 |
+
# client
|
| 129 |
+
"BaseConnector",
|
| 130 |
+
"ClientConnectionError",
|
| 131 |
+
"ClientConnectionResetError",
|
| 132 |
+
"ClientConnectorCertificateError",
|
| 133 |
+
"ClientConnectorDNSError",
|
| 134 |
+
"ClientConnectorError",
|
| 135 |
+
"ClientConnectorSSLError",
|
| 136 |
+
"ClientError",
|
| 137 |
+
"ClientHttpProxyError",
|
| 138 |
+
"ClientOSError",
|
| 139 |
+
"ClientPayloadError",
|
| 140 |
+
"ClientProxyConnectionError",
|
| 141 |
+
"ClientResponse",
|
| 142 |
+
"ClientRequest",
|
| 143 |
+
"ClientResponseError",
|
| 144 |
+
"ClientSSLError",
|
| 145 |
+
"ClientSession",
|
| 146 |
+
"ClientTimeout",
|
| 147 |
+
"ClientWebSocketResponse",
|
| 148 |
+
"ClientWSTimeout",
|
| 149 |
+
"ConnectionTimeoutError",
|
| 150 |
+
"ContentTypeError",
|
| 151 |
+
"Fingerprint",
|
| 152 |
+
"FlowControlDataQueue",
|
| 153 |
+
"InvalidURL",
|
| 154 |
+
"InvalidUrlClientError",
|
| 155 |
+
"InvalidUrlRedirectClientError",
|
| 156 |
+
"NonHttpUrlClientError",
|
| 157 |
+
"NonHttpUrlRedirectClientError",
|
| 158 |
+
"RedirectClientError",
|
| 159 |
+
"RequestInfo",
|
| 160 |
+
"ServerConnectionError",
|
| 161 |
+
"ServerDisconnectedError",
|
| 162 |
+
"ServerFingerprintMismatch",
|
| 163 |
+
"ServerTimeoutError",
|
| 164 |
+
"SocketTimeoutError",
|
| 165 |
+
"TCPConnector",
|
| 166 |
+
"TooManyRedirects",
|
| 167 |
+
"UnixConnector",
|
| 168 |
+
"NamedPipeConnector",
|
| 169 |
+
"WSServerHandshakeError",
|
| 170 |
+
"request",
|
| 171 |
+
# cookiejar
|
| 172 |
+
"CookieJar",
|
| 173 |
+
"DummyCookieJar",
|
| 174 |
+
# formdata
|
| 175 |
+
"FormData",
|
| 176 |
+
# helpers
|
| 177 |
+
"BasicAuth",
|
| 178 |
+
"ChainMapProxy",
|
| 179 |
+
"ETag",
|
| 180 |
+
# http
|
| 181 |
+
"HttpVersion",
|
| 182 |
+
"HttpVersion10",
|
| 183 |
+
"HttpVersion11",
|
| 184 |
+
"WSMsgType",
|
| 185 |
+
"WSCloseCode",
|
| 186 |
+
"WSMessage",
|
| 187 |
+
"WebSocketError",
|
| 188 |
+
# multipart
|
| 189 |
+
"BadContentDispositionHeader",
|
| 190 |
+
"BadContentDispositionParam",
|
| 191 |
+
"BodyPartReader",
|
| 192 |
+
"MultipartReader",
|
| 193 |
+
"MultipartWriter",
|
| 194 |
+
"content_disposition_filename",
|
| 195 |
+
"parse_content_disposition",
|
| 196 |
+
# payload
|
| 197 |
+
"AsyncIterablePayload",
|
| 198 |
+
"BufferedReaderPayload",
|
| 199 |
+
"BytesIOPayload",
|
| 200 |
+
"BytesPayload",
|
| 201 |
+
"IOBasePayload",
|
| 202 |
+
"JsonPayload",
|
| 203 |
+
"PAYLOAD_REGISTRY",
|
| 204 |
+
"Payload",
|
| 205 |
+
"StringIOPayload",
|
| 206 |
+
"StringPayload",
|
| 207 |
+
"TextIOPayload",
|
| 208 |
+
"get_payload",
|
| 209 |
+
"payload_type",
|
| 210 |
+
# payload_streamer
|
| 211 |
+
"streamer",
|
| 212 |
+
# resolver
|
| 213 |
+
"AsyncResolver",
|
| 214 |
+
"DefaultResolver",
|
| 215 |
+
"ThreadedResolver",
|
| 216 |
+
# streams
|
| 217 |
+
"DataQueue",
|
| 218 |
+
"EMPTY_PAYLOAD",
|
| 219 |
+
"EofStream",
|
| 220 |
+
"StreamReader",
|
| 221 |
+
# tracing
|
| 222 |
+
"TraceConfig",
|
| 223 |
+
"TraceConnectionCreateEndParams",
|
| 224 |
+
"TraceConnectionCreateStartParams",
|
| 225 |
+
"TraceConnectionQueuedEndParams",
|
| 226 |
+
"TraceConnectionQueuedStartParams",
|
| 227 |
+
"TraceConnectionReuseconnParams",
|
| 228 |
+
"TraceDnsCacheHitParams",
|
| 229 |
+
"TraceDnsCacheMissParams",
|
| 230 |
+
"TraceDnsResolveHostEndParams",
|
| 231 |
+
"TraceDnsResolveHostStartParams",
|
| 232 |
+
"TraceRequestChunkSentParams",
|
| 233 |
+
"TraceRequestEndParams",
|
| 234 |
+
"TraceRequestExceptionParams",
|
| 235 |
+
"TraceRequestHeadersSentParams",
|
| 236 |
+
"TraceRequestRedirectParams",
|
| 237 |
+
"TraceRequestStartParams",
|
| 238 |
+
"TraceResponseChunkReceivedParams",
|
| 239 |
+
# workers (imported lazily with __getattr__)
|
| 240 |
+
"GunicornUVLoopWebWorker",
|
| 241 |
+
"GunicornWebWorker",
|
| 242 |
+
"WSMessageTypeError",
|
| 243 |
+
)
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
def __dir__() -> Tuple[str, ...]:
|
| 247 |
+
return __all__ + ("__doc__",)
|
| 248 |
+
|
| 249 |
+
|
| 250 |
+
def __getattr__(name: str) -> object:
|
| 251 |
+
global GunicornUVLoopWebWorker, GunicornWebWorker
|
| 252 |
+
|
| 253 |
+
# Importing gunicorn takes a long time (>100ms), so only import if actually needed.
|
| 254 |
+
if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"):
|
| 255 |
+
try:
|
| 256 |
+
from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw
|
| 257 |
+
except ImportError:
|
| 258 |
+
return None
|
| 259 |
+
|
| 260 |
+
GunicornUVLoopWebWorker = guv # type: ignore[misc]
|
| 261 |
+
GunicornWebWorker = gw # type: ignore[misc]
|
| 262 |
+
return guv if name == "GunicornUVLoopWebWorker" else gw
|
| 263 |
+
|
| 264 |
+
raise AttributeError(f"module {__name__} has no attribute {name}")
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/_cparser.pxd
ADDED
|
@@ -0,0 +1,158 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
cdef extern from "../vendor/llhttp/build/llhttp.h":
|
| 5 |
+
|
| 6 |
+
struct llhttp__internal_s:
|
| 7 |
+
int32_t _index
|
| 8 |
+
void* _span_pos0
|
| 9 |
+
void* _span_cb0
|
| 10 |
+
int32_t error
|
| 11 |
+
const char* reason
|
| 12 |
+
const char* error_pos
|
| 13 |
+
void* data
|
| 14 |
+
void* _current
|
| 15 |
+
uint64_t content_length
|
| 16 |
+
uint8_t type
|
| 17 |
+
uint8_t method
|
| 18 |
+
uint8_t http_major
|
| 19 |
+
uint8_t http_minor
|
| 20 |
+
uint8_t header_state
|
| 21 |
+
uint8_t lenient_flags
|
| 22 |
+
uint8_t upgrade
|
| 23 |
+
uint8_t finish
|
| 24 |
+
uint16_t flags
|
| 25 |
+
uint16_t status_code
|
| 26 |
+
void* settings
|
| 27 |
+
|
| 28 |
+
ctypedef llhttp__internal_s llhttp__internal_t
|
| 29 |
+
ctypedef llhttp__internal_t llhttp_t
|
| 30 |
+
|
| 31 |
+
ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
|
| 32 |
+
ctypedef int (*llhttp_cb)(llhttp_t*) except -1
|
| 33 |
+
|
| 34 |
+
struct llhttp_settings_s:
|
| 35 |
+
llhttp_cb on_message_begin
|
| 36 |
+
llhttp_data_cb on_url
|
| 37 |
+
llhttp_data_cb on_status
|
| 38 |
+
llhttp_data_cb on_header_field
|
| 39 |
+
llhttp_data_cb on_header_value
|
| 40 |
+
llhttp_cb on_headers_complete
|
| 41 |
+
llhttp_data_cb on_body
|
| 42 |
+
llhttp_cb on_message_complete
|
| 43 |
+
llhttp_cb on_chunk_header
|
| 44 |
+
llhttp_cb on_chunk_complete
|
| 45 |
+
|
| 46 |
+
llhttp_cb on_url_complete
|
| 47 |
+
llhttp_cb on_status_complete
|
| 48 |
+
llhttp_cb on_header_field_complete
|
| 49 |
+
llhttp_cb on_header_value_complete
|
| 50 |
+
|
| 51 |
+
ctypedef llhttp_settings_s llhttp_settings_t
|
| 52 |
+
|
| 53 |
+
enum llhttp_errno:
|
| 54 |
+
HPE_OK,
|
| 55 |
+
HPE_INTERNAL,
|
| 56 |
+
HPE_STRICT,
|
| 57 |
+
HPE_LF_EXPECTED,
|
| 58 |
+
HPE_UNEXPECTED_CONTENT_LENGTH,
|
| 59 |
+
HPE_CLOSED_CONNECTION,
|
| 60 |
+
HPE_INVALID_METHOD,
|
| 61 |
+
HPE_INVALID_URL,
|
| 62 |
+
HPE_INVALID_CONSTANT,
|
| 63 |
+
HPE_INVALID_VERSION,
|
| 64 |
+
HPE_INVALID_HEADER_TOKEN,
|
| 65 |
+
HPE_INVALID_CONTENT_LENGTH,
|
| 66 |
+
HPE_INVALID_CHUNK_SIZE,
|
| 67 |
+
HPE_INVALID_STATUS,
|
| 68 |
+
HPE_INVALID_EOF_STATE,
|
| 69 |
+
HPE_INVALID_TRANSFER_ENCODING,
|
| 70 |
+
HPE_CB_MESSAGE_BEGIN,
|
| 71 |
+
HPE_CB_HEADERS_COMPLETE,
|
| 72 |
+
HPE_CB_MESSAGE_COMPLETE,
|
| 73 |
+
HPE_CB_CHUNK_HEADER,
|
| 74 |
+
HPE_CB_CHUNK_COMPLETE,
|
| 75 |
+
HPE_PAUSED,
|
| 76 |
+
HPE_PAUSED_UPGRADE,
|
| 77 |
+
HPE_USER
|
| 78 |
+
|
| 79 |
+
ctypedef llhttp_errno llhttp_errno_t
|
| 80 |
+
|
| 81 |
+
enum llhttp_flags:
|
| 82 |
+
F_CHUNKED,
|
| 83 |
+
F_CONTENT_LENGTH
|
| 84 |
+
|
| 85 |
+
enum llhttp_type:
|
| 86 |
+
HTTP_REQUEST,
|
| 87 |
+
HTTP_RESPONSE,
|
| 88 |
+
HTTP_BOTH
|
| 89 |
+
|
| 90 |
+
enum llhttp_method:
|
| 91 |
+
HTTP_DELETE,
|
| 92 |
+
HTTP_GET,
|
| 93 |
+
HTTP_HEAD,
|
| 94 |
+
HTTP_POST,
|
| 95 |
+
HTTP_PUT,
|
| 96 |
+
HTTP_CONNECT,
|
| 97 |
+
HTTP_OPTIONS,
|
| 98 |
+
HTTP_TRACE,
|
| 99 |
+
HTTP_COPY,
|
| 100 |
+
HTTP_LOCK,
|
| 101 |
+
HTTP_MKCOL,
|
| 102 |
+
HTTP_MOVE,
|
| 103 |
+
HTTP_PROPFIND,
|
| 104 |
+
HTTP_PROPPATCH,
|
| 105 |
+
HTTP_SEARCH,
|
| 106 |
+
HTTP_UNLOCK,
|
| 107 |
+
HTTP_BIND,
|
| 108 |
+
HTTP_REBIND,
|
| 109 |
+
HTTP_UNBIND,
|
| 110 |
+
HTTP_ACL,
|
| 111 |
+
HTTP_REPORT,
|
| 112 |
+
HTTP_MKACTIVITY,
|
| 113 |
+
HTTP_CHECKOUT,
|
| 114 |
+
HTTP_MERGE,
|
| 115 |
+
HTTP_MSEARCH,
|
| 116 |
+
HTTP_NOTIFY,
|
| 117 |
+
HTTP_SUBSCRIBE,
|
| 118 |
+
HTTP_UNSUBSCRIBE,
|
| 119 |
+
HTTP_PATCH,
|
| 120 |
+
HTTP_PURGE,
|
| 121 |
+
HTTP_MKCALENDAR,
|
| 122 |
+
HTTP_LINK,
|
| 123 |
+
HTTP_UNLINK,
|
| 124 |
+
HTTP_SOURCE,
|
| 125 |
+
HTTP_PRI,
|
| 126 |
+
HTTP_DESCRIBE,
|
| 127 |
+
HTTP_ANNOUNCE,
|
| 128 |
+
HTTP_SETUP,
|
| 129 |
+
HTTP_PLAY,
|
| 130 |
+
HTTP_PAUSE,
|
| 131 |
+
HTTP_TEARDOWN,
|
| 132 |
+
HTTP_GET_PARAMETER,
|
| 133 |
+
HTTP_SET_PARAMETER,
|
| 134 |
+
HTTP_REDIRECT,
|
| 135 |
+
HTTP_RECORD,
|
| 136 |
+
HTTP_FLUSH
|
| 137 |
+
|
| 138 |
+
ctypedef llhttp_method llhttp_method_t;
|
| 139 |
+
|
| 140 |
+
void llhttp_settings_init(llhttp_settings_t* settings)
|
| 141 |
+
void llhttp_init(llhttp_t* parser, llhttp_type type,
|
| 142 |
+
const llhttp_settings_t* settings)
|
| 143 |
+
|
| 144 |
+
llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
|
| 145 |
+
|
| 146 |
+
int llhttp_should_keep_alive(const llhttp_t* parser)
|
| 147 |
+
|
| 148 |
+
void llhttp_resume_after_upgrade(llhttp_t* parser)
|
| 149 |
+
|
| 150 |
+
llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
|
| 151 |
+
const char* llhttp_get_error_reason(const llhttp_t* parser)
|
| 152 |
+
const char* llhttp_get_error_pos(const llhttp_t* parser)
|
| 153 |
+
|
| 154 |
+
const char* llhttp_method_name(llhttp_method_t method)
|
| 155 |
+
|
| 156 |
+
void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
|
| 157 |
+
void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled)
|
| 158 |
+
void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled)
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/_find_header.pxd
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cdef extern from "_find_header.h":
|
| 2 |
+
int find_header(char *, int)
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/_headers.pxi
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# The file is autogenerated from aiohttp/hdrs.py
|
| 2 |
+
# Run ./tools/gen.py to update it after the origin changing.
|
| 3 |
+
|
| 4 |
+
from . import hdrs
|
| 5 |
+
cdef tuple headers = (
|
| 6 |
+
hdrs.ACCEPT,
|
| 7 |
+
hdrs.ACCEPT_CHARSET,
|
| 8 |
+
hdrs.ACCEPT_ENCODING,
|
| 9 |
+
hdrs.ACCEPT_LANGUAGE,
|
| 10 |
+
hdrs.ACCEPT_RANGES,
|
| 11 |
+
hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
|
| 12 |
+
hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
|
| 13 |
+
hdrs.ACCESS_CONTROL_ALLOW_METHODS,
|
| 14 |
+
hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
|
| 15 |
+
hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
|
| 16 |
+
hdrs.ACCESS_CONTROL_MAX_AGE,
|
| 17 |
+
hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
|
| 18 |
+
hdrs.ACCESS_CONTROL_REQUEST_METHOD,
|
| 19 |
+
hdrs.AGE,
|
| 20 |
+
hdrs.ALLOW,
|
| 21 |
+
hdrs.AUTHORIZATION,
|
| 22 |
+
hdrs.CACHE_CONTROL,
|
| 23 |
+
hdrs.CONNECTION,
|
| 24 |
+
hdrs.CONTENT_DISPOSITION,
|
| 25 |
+
hdrs.CONTENT_ENCODING,
|
| 26 |
+
hdrs.CONTENT_LANGUAGE,
|
| 27 |
+
hdrs.CONTENT_LENGTH,
|
| 28 |
+
hdrs.CONTENT_LOCATION,
|
| 29 |
+
hdrs.CONTENT_MD5,
|
| 30 |
+
hdrs.CONTENT_RANGE,
|
| 31 |
+
hdrs.CONTENT_TRANSFER_ENCODING,
|
| 32 |
+
hdrs.CONTENT_TYPE,
|
| 33 |
+
hdrs.COOKIE,
|
| 34 |
+
hdrs.DATE,
|
| 35 |
+
hdrs.DESTINATION,
|
| 36 |
+
hdrs.DIGEST,
|
| 37 |
+
hdrs.ETAG,
|
| 38 |
+
hdrs.EXPECT,
|
| 39 |
+
hdrs.EXPIRES,
|
| 40 |
+
hdrs.FORWARDED,
|
| 41 |
+
hdrs.FROM,
|
| 42 |
+
hdrs.HOST,
|
| 43 |
+
hdrs.IF_MATCH,
|
| 44 |
+
hdrs.IF_MODIFIED_SINCE,
|
| 45 |
+
hdrs.IF_NONE_MATCH,
|
| 46 |
+
hdrs.IF_RANGE,
|
| 47 |
+
hdrs.IF_UNMODIFIED_SINCE,
|
| 48 |
+
hdrs.KEEP_ALIVE,
|
| 49 |
+
hdrs.LAST_EVENT_ID,
|
| 50 |
+
hdrs.LAST_MODIFIED,
|
| 51 |
+
hdrs.LINK,
|
| 52 |
+
hdrs.LOCATION,
|
| 53 |
+
hdrs.MAX_FORWARDS,
|
| 54 |
+
hdrs.ORIGIN,
|
| 55 |
+
hdrs.PRAGMA,
|
| 56 |
+
hdrs.PROXY_AUTHENTICATE,
|
| 57 |
+
hdrs.PROXY_AUTHORIZATION,
|
| 58 |
+
hdrs.RANGE,
|
| 59 |
+
hdrs.REFERER,
|
| 60 |
+
hdrs.RETRY_AFTER,
|
| 61 |
+
hdrs.SEC_WEBSOCKET_ACCEPT,
|
| 62 |
+
hdrs.SEC_WEBSOCKET_EXTENSIONS,
|
| 63 |
+
hdrs.SEC_WEBSOCKET_KEY,
|
| 64 |
+
hdrs.SEC_WEBSOCKET_KEY1,
|
| 65 |
+
hdrs.SEC_WEBSOCKET_PROTOCOL,
|
| 66 |
+
hdrs.SEC_WEBSOCKET_VERSION,
|
| 67 |
+
hdrs.SERVER,
|
| 68 |
+
hdrs.SET_COOKIE,
|
| 69 |
+
hdrs.TE,
|
| 70 |
+
hdrs.TRAILER,
|
| 71 |
+
hdrs.TRANSFER_ENCODING,
|
| 72 |
+
hdrs.URI,
|
| 73 |
+
hdrs.UPGRADE,
|
| 74 |
+
hdrs.USER_AGENT,
|
| 75 |
+
hdrs.VARY,
|
| 76 |
+
hdrs.VIA,
|
| 77 |
+
hdrs.WWW_AUTHENTICATE,
|
| 78 |
+
hdrs.WANT_DIGEST,
|
| 79 |
+
hdrs.WARNING,
|
| 80 |
+
hdrs.X_FORWARDED_FOR,
|
| 81 |
+
hdrs.X_FORWARDED_HOST,
|
| 82 |
+
hdrs.X_FORWARDED_PROTO,
|
| 83 |
+
)
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/_websocket/reader_c.py
ADDED
|
@@ -0,0 +1,468 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Reader for WebSocket protocol versions 13 and 8."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import builtins
|
| 5 |
+
from collections import deque
|
| 6 |
+
from typing import Deque, Final, List, Optional, Set, Tuple, Union
|
| 7 |
+
|
| 8 |
+
from ..base_protocol import BaseProtocol
|
| 9 |
+
from ..compression_utils import ZLibDecompressor
|
| 10 |
+
from ..helpers import _EXC_SENTINEL, set_exception
|
| 11 |
+
from ..streams import EofStream
|
| 12 |
+
from .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN3, websocket_mask
|
| 13 |
+
from .models import (
|
| 14 |
+
WS_DEFLATE_TRAILING,
|
| 15 |
+
WebSocketError,
|
| 16 |
+
WSCloseCode,
|
| 17 |
+
WSMessage,
|
| 18 |
+
WSMsgType,
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
|
| 22 |
+
|
| 23 |
+
# States for the reader, used to parse the WebSocket frame
|
| 24 |
+
# integer values are used so they can be cythonized
|
| 25 |
+
READ_HEADER = 1
|
| 26 |
+
READ_PAYLOAD_LENGTH = 2
|
| 27 |
+
READ_PAYLOAD_MASK = 3
|
| 28 |
+
READ_PAYLOAD = 4
|
| 29 |
+
|
| 30 |
+
WS_MSG_TYPE_BINARY = WSMsgType.BINARY
|
| 31 |
+
WS_MSG_TYPE_TEXT = WSMsgType.TEXT
|
| 32 |
+
|
| 33 |
+
# WSMsgType values unpacked so they can by cythonized to ints
|
| 34 |
+
OP_CODE_CONTINUATION = WSMsgType.CONTINUATION.value
|
| 35 |
+
OP_CODE_TEXT = WSMsgType.TEXT.value
|
| 36 |
+
OP_CODE_BINARY = WSMsgType.BINARY.value
|
| 37 |
+
OP_CODE_CLOSE = WSMsgType.CLOSE.value
|
| 38 |
+
OP_CODE_PING = WSMsgType.PING.value
|
| 39 |
+
OP_CODE_PONG = WSMsgType.PONG.value
|
| 40 |
+
|
| 41 |
+
EMPTY_FRAME_ERROR = (True, b"")
|
| 42 |
+
EMPTY_FRAME = (False, b"")
|
| 43 |
+
|
| 44 |
+
TUPLE_NEW = tuple.__new__
|
| 45 |
+
|
| 46 |
+
int_ = int # Prevent Cython from converting to PyInt
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class WebSocketDataQueue:
|
| 50 |
+
"""WebSocketDataQueue resumes and pauses an underlying stream.
|
| 51 |
+
|
| 52 |
+
It is a destination for WebSocket data.
|
| 53 |
+
"""
|
| 54 |
+
|
| 55 |
+
def __init__(
|
| 56 |
+
self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
|
| 57 |
+
) -> None:
|
| 58 |
+
self._size = 0
|
| 59 |
+
self._protocol = protocol
|
| 60 |
+
self._limit = limit * 2
|
| 61 |
+
self._loop = loop
|
| 62 |
+
self._eof = False
|
| 63 |
+
self._waiter: Optional[asyncio.Future[None]] = None
|
| 64 |
+
self._exception: Union[BaseException, None] = None
|
| 65 |
+
self._buffer: Deque[Tuple[WSMessage, int]] = deque()
|
| 66 |
+
self._get_buffer = self._buffer.popleft
|
| 67 |
+
self._put_buffer = self._buffer.append
|
| 68 |
+
|
| 69 |
+
def is_eof(self) -> bool:
|
| 70 |
+
return self._eof
|
| 71 |
+
|
| 72 |
+
def exception(self) -> Optional[BaseException]:
|
| 73 |
+
return self._exception
|
| 74 |
+
|
| 75 |
+
def set_exception(
|
| 76 |
+
self,
|
| 77 |
+
exc: "BaseException",
|
| 78 |
+
exc_cause: builtins.BaseException = _EXC_SENTINEL,
|
| 79 |
+
) -> None:
|
| 80 |
+
self._eof = True
|
| 81 |
+
self._exception = exc
|
| 82 |
+
if (waiter := self._waiter) is not None:
|
| 83 |
+
self._waiter = None
|
| 84 |
+
set_exception(waiter, exc, exc_cause)
|
| 85 |
+
|
| 86 |
+
def _release_waiter(self) -> None:
|
| 87 |
+
if (waiter := self._waiter) is None:
|
| 88 |
+
return
|
| 89 |
+
self._waiter = None
|
| 90 |
+
if not waiter.done():
|
| 91 |
+
waiter.set_result(None)
|
| 92 |
+
|
| 93 |
+
def feed_eof(self) -> None:
|
| 94 |
+
self._eof = True
|
| 95 |
+
self._release_waiter()
|
| 96 |
+
|
| 97 |
+
def feed_data(self, data: "WSMessage", size: "int_") -> None:
|
| 98 |
+
self._size += size
|
| 99 |
+
self._put_buffer((data, size))
|
| 100 |
+
self._release_waiter()
|
| 101 |
+
if self._size > self._limit and not self._protocol._reading_paused:
|
| 102 |
+
self._protocol.pause_reading()
|
| 103 |
+
|
| 104 |
+
async def read(self) -> WSMessage:
|
| 105 |
+
if not self._buffer and not self._eof:
|
| 106 |
+
assert not self._waiter
|
| 107 |
+
self._waiter = self._loop.create_future()
|
| 108 |
+
try:
|
| 109 |
+
await self._waiter
|
| 110 |
+
except (asyncio.CancelledError, asyncio.TimeoutError):
|
| 111 |
+
self._waiter = None
|
| 112 |
+
raise
|
| 113 |
+
return self._read_from_buffer()
|
| 114 |
+
|
| 115 |
+
def _read_from_buffer(self) -> WSMessage:
|
| 116 |
+
if self._buffer:
|
| 117 |
+
data, size = self._get_buffer()
|
| 118 |
+
self._size -= size
|
| 119 |
+
if self._size < self._limit and self._protocol._reading_paused:
|
| 120 |
+
self._protocol.resume_reading()
|
| 121 |
+
return data
|
| 122 |
+
if self._exception is not None:
|
| 123 |
+
raise self._exception
|
| 124 |
+
raise EofStream
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
class WebSocketReader:
|
| 128 |
+
def __init__(
|
| 129 |
+
self, queue: WebSocketDataQueue, max_msg_size: int, compress: bool = True
|
| 130 |
+
) -> None:
|
| 131 |
+
self.queue = queue
|
| 132 |
+
self._max_msg_size = max_msg_size
|
| 133 |
+
|
| 134 |
+
self._exc: Optional[Exception] = None
|
| 135 |
+
self._partial = bytearray()
|
| 136 |
+
self._state = READ_HEADER
|
| 137 |
+
|
| 138 |
+
self._opcode: Optional[int] = None
|
| 139 |
+
self._frame_fin = False
|
| 140 |
+
self._frame_opcode: Optional[int] = None
|
| 141 |
+
self._frame_payload: Union[bytes, bytearray] = b""
|
| 142 |
+
self._frame_payload_len = 0
|
| 143 |
+
|
| 144 |
+
self._tail: bytes = b""
|
| 145 |
+
self._has_mask = False
|
| 146 |
+
self._frame_mask: Optional[bytes] = None
|
| 147 |
+
self._payload_length = 0
|
| 148 |
+
self._payload_length_flag = 0
|
| 149 |
+
self._compressed: Optional[bool] = None
|
| 150 |
+
self._decompressobj: Optional[ZLibDecompressor] = None
|
| 151 |
+
self._compress = compress
|
| 152 |
+
|
| 153 |
+
def feed_eof(self) -> None:
|
| 154 |
+
self.queue.feed_eof()
|
| 155 |
+
|
| 156 |
+
# data can be bytearray on Windows because proactor event loop uses bytearray
|
| 157 |
+
# and asyncio types this to Union[bytes, bytearray, memoryview] so we need
|
| 158 |
+
# coerce data to bytes if it is not
|
| 159 |
+
def feed_data(
|
| 160 |
+
self, data: Union[bytes, bytearray, memoryview]
|
| 161 |
+
) -> Tuple[bool, bytes]:
|
| 162 |
+
if type(data) is not bytes:
|
| 163 |
+
data = bytes(data)
|
| 164 |
+
|
| 165 |
+
if self._exc is not None:
|
| 166 |
+
return True, data
|
| 167 |
+
|
| 168 |
+
try:
|
| 169 |
+
self._feed_data(data)
|
| 170 |
+
except Exception as exc:
|
| 171 |
+
self._exc = exc
|
| 172 |
+
set_exception(self.queue, exc)
|
| 173 |
+
return EMPTY_FRAME_ERROR
|
| 174 |
+
|
| 175 |
+
return EMPTY_FRAME
|
| 176 |
+
|
| 177 |
+
def _feed_data(self, data: bytes) -> None:
|
| 178 |
+
msg: WSMessage
|
| 179 |
+
for frame in self.parse_frame(data):
|
| 180 |
+
fin = frame[0]
|
| 181 |
+
opcode = frame[1]
|
| 182 |
+
payload = frame[2]
|
| 183 |
+
compressed = frame[3]
|
| 184 |
+
|
| 185 |
+
is_continuation = opcode == OP_CODE_CONTINUATION
|
| 186 |
+
if opcode == OP_CODE_TEXT or opcode == OP_CODE_BINARY or is_continuation:
|
| 187 |
+
# load text/binary
|
| 188 |
+
if not fin:
|
| 189 |
+
# got partial frame payload
|
| 190 |
+
if not is_continuation:
|
| 191 |
+
self._opcode = opcode
|
| 192 |
+
self._partial += payload
|
| 193 |
+
if self._max_msg_size and len(self._partial) >= self._max_msg_size:
|
| 194 |
+
raise WebSocketError(
|
| 195 |
+
WSCloseCode.MESSAGE_TOO_BIG,
|
| 196 |
+
"Message size {} exceeds limit {}".format(
|
| 197 |
+
len(self._partial), self._max_msg_size
|
| 198 |
+
),
|
| 199 |
+
)
|
| 200 |
+
continue
|
| 201 |
+
|
| 202 |
+
has_partial = bool(self._partial)
|
| 203 |
+
if is_continuation:
|
| 204 |
+
if self._opcode is None:
|
| 205 |
+
raise WebSocketError(
|
| 206 |
+
WSCloseCode.PROTOCOL_ERROR,
|
| 207 |
+
"Continuation frame for non started message",
|
| 208 |
+
)
|
| 209 |
+
opcode = self._opcode
|
| 210 |
+
self._opcode = None
|
| 211 |
+
# previous frame was non finished
|
| 212 |
+
# we should get continuation opcode
|
| 213 |
+
elif has_partial:
|
| 214 |
+
raise WebSocketError(
|
| 215 |
+
WSCloseCode.PROTOCOL_ERROR,
|
| 216 |
+
"The opcode in non-fin frame is expected "
|
| 217 |
+
"to be zero, got {!r}".format(opcode),
|
| 218 |
+
)
|
| 219 |
+
|
| 220 |
+
assembled_payload: Union[bytes, bytearray]
|
| 221 |
+
if has_partial:
|
| 222 |
+
assembled_payload = self._partial + payload
|
| 223 |
+
self._partial.clear()
|
| 224 |
+
else:
|
| 225 |
+
assembled_payload = payload
|
| 226 |
+
|
| 227 |
+
if self._max_msg_size and len(assembled_payload) >= self._max_msg_size:
|
| 228 |
+
raise WebSocketError(
|
| 229 |
+
WSCloseCode.MESSAGE_TOO_BIG,
|
| 230 |
+
"Message size {} exceeds limit {}".format(
|
| 231 |
+
len(assembled_payload), self._max_msg_size
|
| 232 |
+
),
|
| 233 |
+
)
|
| 234 |
+
|
| 235 |
+
# Decompress process must to be done after all packets
|
| 236 |
+
# received.
|
| 237 |
+
if compressed:
|
| 238 |
+
if not self._decompressobj:
|
| 239 |
+
self._decompressobj = ZLibDecompressor(
|
| 240 |
+
suppress_deflate_header=True
|
| 241 |
+
)
|
| 242 |
+
payload_merged = self._decompressobj.decompress_sync(
|
| 243 |
+
assembled_payload + WS_DEFLATE_TRAILING, self._max_msg_size
|
| 244 |
+
)
|
| 245 |
+
if self._decompressobj.unconsumed_tail:
|
| 246 |
+
left = len(self._decompressobj.unconsumed_tail)
|
| 247 |
+
raise WebSocketError(
|
| 248 |
+
WSCloseCode.MESSAGE_TOO_BIG,
|
| 249 |
+
"Decompressed message size {} exceeds limit {}".format(
|
| 250 |
+
self._max_msg_size + left, self._max_msg_size
|
| 251 |
+
),
|
| 252 |
+
)
|
| 253 |
+
elif type(assembled_payload) is bytes:
|
| 254 |
+
payload_merged = assembled_payload
|
| 255 |
+
else:
|
| 256 |
+
payload_merged = bytes(assembled_payload)
|
| 257 |
+
|
| 258 |
+
if opcode == OP_CODE_TEXT:
|
| 259 |
+
try:
|
| 260 |
+
text = payload_merged.decode("utf-8")
|
| 261 |
+
except UnicodeDecodeError as exc:
|
| 262 |
+
raise WebSocketError(
|
| 263 |
+
WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
|
| 264 |
+
) from exc
|
| 265 |
+
|
| 266 |
+
# XXX: The Text and Binary messages here can be a performance
|
| 267 |
+
# bottleneck, so we use tuple.__new__ to improve performance.
|
| 268 |
+
# This is not type safe, but many tests should fail in
|
| 269 |
+
# test_client_ws_functional.py if this is wrong.
|
| 270 |
+
self.queue.feed_data(
|
| 271 |
+
TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")),
|
| 272 |
+
len(payload_merged),
|
| 273 |
+
)
|
| 274 |
+
else:
|
| 275 |
+
self.queue.feed_data(
|
| 276 |
+
TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")),
|
| 277 |
+
len(payload_merged),
|
| 278 |
+
)
|
| 279 |
+
elif opcode == OP_CODE_CLOSE:
|
| 280 |
+
if len(payload) >= 2:
|
| 281 |
+
close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
|
| 282 |
+
if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
|
| 283 |
+
raise WebSocketError(
|
| 284 |
+
WSCloseCode.PROTOCOL_ERROR,
|
| 285 |
+
f"Invalid close code: {close_code}",
|
| 286 |
+
)
|
| 287 |
+
try:
|
| 288 |
+
close_message = payload[2:].decode("utf-8")
|
| 289 |
+
except UnicodeDecodeError as exc:
|
| 290 |
+
raise WebSocketError(
|
| 291 |
+
WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
|
| 292 |
+
) from exc
|
| 293 |
+
msg = TUPLE_NEW(
|
| 294 |
+
WSMessage, (WSMsgType.CLOSE, close_code, close_message)
|
| 295 |
+
)
|
| 296 |
+
elif payload:
|
| 297 |
+
raise WebSocketError(
|
| 298 |
+
WSCloseCode.PROTOCOL_ERROR,
|
| 299 |
+
f"Invalid close frame: {fin} {opcode} {payload!r}",
|
| 300 |
+
)
|
| 301 |
+
else:
|
| 302 |
+
msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, 0, ""))
|
| 303 |
+
|
| 304 |
+
self.queue.feed_data(msg, 0)
|
| 305 |
+
elif opcode == OP_CODE_PING:
|
| 306 |
+
msg = TUPLE_NEW(WSMessage, (WSMsgType.PING, payload, ""))
|
| 307 |
+
self.queue.feed_data(msg, len(payload))
|
| 308 |
+
|
| 309 |
+
elif opcode == OP_CODE_PONG:
|
| 310 |
+
msg = TUPLE_NEW(WSMessage, (WSMsgType.PONG, payload, ""))
|
| 311 |
+
self.queue.feed_data(msg, len(payload))
|
| 312 |
+
|
| 313 |
+
else:
|
| 314 |
+
raise WebSocketError(
|
| 315 |
+
WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
|
| 316 |
+
)
|
| 317 |
+
|
| 318 |
+
def parse_frame(
|
| 319 |
+
self, buf: bytes
|
| 320 |
+
) -> List[Tuple[bool, Optional[int], Union[bytes, bytearray], Optional[bool]]]:
|
| 321 |
+
"""Return the next frame from the socket."""
|
| 322 |
+
frames: List[
|
| 323 |
+
Tuple[bool, Optional[int], Union[bytes, bytearray], Optional[bool]]
|
| 324 |
+
] = []
|
| 325 |
+
if self._tail:
|
| 326 |
+
buf, self._tail = self._tail + buf, b""
|
| 327 |
+
|
| 328 |
+
start_pos: int = 0
|
| 329 |
+
buf_length = len(buf)
|
| 330 |
+
|
| 331 |
+
while True:
|
| 332 |
+
# read header
|
| 333 |
+
if self._state == READ_HEADER:
|
| 334 |
+
if buf_length - start_pos < 2:
|
| 335 |
+
break
|
| 336 |
+
first_byte = buf[start_pos]
|
| 337 |
+
second_byte = buf[start_pos + 1]
|
| 338 |
+
start_pos += 2
|
| 339 |
+
|
| 340 |
+
fin = (first_byte >> 7) & 1
|
| 341 |
+
rsv1 = (first_byte >> 6) & 1
|
| 342 |
+
rsv2 = (first_byte >> 5) & 1
|
| 343 |
+
rsv3 = (first_byte >> 4) & 1
|
| 344 |
+
opcode = first_byte & 0xF
|
| 345 |
+
|
| 346 |
+
# frame-fin = %x0 ; more frames of this message follow
|
| 347 |
+
# / %x1 ; final frame of this message
|
| 348 |
+
# frame-rsv1 = %x0 ;
|
| 349 |
+
# 1 bit, MUST be 0 unless negotiated otherwise
|
| 350 |
+
# frame-rsv2 = %x0 ;
|
| 351 |
+
# 1 bit, MUST be 0 unless negotiated otherwise
|
| 352 |
+
# frame-rsv3 = %x0 ;
|
| 353 |
+
# 1 bit, MUST be 0 unless negotiated otherwise
|
| 354 |
+
#
|
| 355 |
+
# Remove rsv1 from this test for deflate development
|
| 356 |
+
if rsv2 or rsv3 or (rsv1 and not self._compress):
|
| 357 |
+
raise WebSocketError(
|
| 358 |
+
WSCloseCode.PROTOCOL_ERROR,
|
| 359 |
+
"Received frame with non-zero reserved bits",
|
| 360 |
+
)
|
| 361 |
+
|
| 362 |
+
if opcode > 0x7 and fin == 0:
|
| 363 |
+
raise WebSocketError(
|
| 364 |
+
WSCloseCode.PROTOCOL_ERROR,
|
| 365 |
+
"Received fragmented control frame",
|
| 366 |
+
)
|
| 367 |
+
|
| 368 |
+
has_mask = (second_byte >> 7) & 1
|
| 369 |
+
length = second_byte & 0x7F
|
| 370 |
+
|
| 371 |
+
# Control frames MUST have a payload
|
| 372 |
+
# length of 125 bytes or less
|
| 373 |
+
if opcode > 0x7 and length > 125:
|
| 374 |
+
raise WebSocketError(
|
| 375 |
+
WSCloseCode.PROTOCOL_ERROR,
|
| 376 |
+
"Control frame payload cannot be larger than 125 bytes",
|
| 377 |
+
)
|
| 378 |
+
|
| 379 |
+
# Set compress status if last package is FIN
|
| 380 |
+
# OR set compress status if this is first fragment
|
| 381 |
+
# Raise error if not first fragment with rsv1 = 0x1
|
| 382 |
+
if self._frame_fin or self._compressed is None:
|
| 383 |
+
self._compressed = True if rsv1 else False
|
| 384 |
+
elif rsv1:
|
| 385 |
+
raise WebSocketError(
|
| 386 |
+
WSCloseCode.PROTOCOL_ERROR,
|
| 387 |
+
"Received frame with non-zero reserved bits",
|
| 388 |
+
)
|
| 389 |
+
|
| 390 |
+
self._frame_fin = bool(fin)
|
| 391 |
+
self._frame_opcode = opcode
|
| 392 |
+
self._has_mask = bool(has_mask)
|
| 393 |
+
self._payload_length_flag = length
|
| 394 |
+
self._state = READ_PAYLOAD_LENGTH
|
| 395 |
+
|
| 396 |
+
# read payload length
|
| 397 |
+
if self._state == READ_PAYLOAD_LENGTH:
|
| 398 |
+
length_flag = self._payload_length_flag
|
| 399 |
+
if length_flag == 126:
|
| 400 |
+
if buf_length - start_pos < 2:
|
| 401 |
+
break
|
| 402 |
+
first_byte = buf[start_pos]
|
| 403 |
+
second_byte = buf[start_pos + 1]
|
| 404 |
+
start_pos += 2
|
| 405 |
+
self._payload_length = first_byte << 8 | second_byte
|
| 406 |
+
elif length_flag > 126:
|
| 407 |
+
if buf_length - start_pos < 8:
|
| 408 |
+
break
|
| 409 |
+
data = buf[start_pos : start_pos + 8]
|
| 410 |
+
start_pos += 8
|
| 411 |
+
self._payload_length = UNPACK_LEN3(data)[0]
|
| 412 |
+
else:
|
| 413 |
+
self._payload_length = length_flag
|
| 414 |
+
|
| 415 |
+
self._state = READ_PAYLOAD_MASK if self._has_mask else READ_PAYLOAD
|
| 416 |
+
|
| 417 |
+
# read payload mask
|
| 418 |
+
if self._state == READ_PAYLOAD_MASK:
|
| 419 |
+
if buf_length - start_pos < 4:
|
| 420 |
+
break
|
| 421 |
+
self._frame_mask = buf[start_pos : start_pos + 4]
|
| 422 |
+
start_pos += 4
|
| 423 |
+
self._state = READ_PAYLOAD
|
| 424 |
+
|
| 425 |
+
if self._state == READ_PAYLOAD:
|
| 426 |
+
chunk_len = buf_length - start_pos
|
| 427 |
+
if self._payload_length >= chunk_len:
|
| 428 |
+
end_pos = buf_length
|
| 429 |
+
self._payload_length -= chunk_len
|
| 430 |
+
else:
|
| 431 |
+
end_pos = start_pos + self._payload_length
|
| 432 |
+
self._payload_length = 0
|
| 433 |
+
|
| 434 |
+
if self._frame_payload_len:
|
| 435 |
+
if type(self._frame_payload) is not bytearray:
|
| 436 |
+
self._frame_payload = bytearray(self._frame_payload)
|
| 437 |
+
self._frame_payload += buf[start_pos:end_pos]
|
| 438 |
+
else:
|
| 439 |
+
# Fast path for the first frame
|
| 440 |
+
self._frame_payload = buf[start_pos:end_pos]
|
| 441 |
+
|
| 442 |
+
self._frame_payload_len += end_pos - start_pos
|
| 443 |
+
start_pos = end_pos
|
| 444 |
+
|
| 445 |
+
if self._payload_length != 0:
|
| 446 |
+
break
|
| 447 |
+
|
| 448 |
+
if self._has_mask:
|
| 449 |
+
assert self._frame_mask is not None
|
| 450 |
+
if type(self._frame_payload) is not bytearray:
|
| 451 |
+
self._frame_payload = bytearray(self._frame_payload)
|
| 452 |
+
websocket_mask(self._frame_mask, self._frame_payload)
|
| 453 |
+
|
| 454 |
+
frames.append(
|
| 455 |
+
(
|
| 456 |
+
self._frame_fin,
|
| 457 |
+
self._frame_opcode,
|
| 458 |
+
self._frame_payload,
|
| 459 |
+
self._compressed,
|
| 460 |
+
)
|
| 461 |
+
)
|
| 462 |
+
self._frame_payload = b""
|
| 463 |
+
self._frame_payload_len = 0
|
| 464 |
+
self._state = READ_HEADER
|
| 465 |
+
|
| 466 |
+
self._tail = buf[start_pos:] if start_pos < buf_length else b""
|
| 467 |
+
|
| 468 |
+
return frames
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/abc.py
ADDED
|
@@ -0,0 +1,253 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import logging
|
| 3 |
+
import socket
|
| 4 |
+
import zlib
|
| 5 |
+
from abc import ABC, abstractmethod
|
| 6 |
+
from collections.abc import Sized
|
| 7 |
+
from http.cookies import BaseCookie, Morsel
|
| 8 |
+
from typing import (
|
| 9 |
+
TYPE_CHECKING,
|
| 10 |
+
Any,
|
| 11 |
+
Awaitable,
|
| 12 |
+
Callable,
|
| 13 |
+
Dict,
|
| 14 |
+
Generator,
|
| 15 |
+
Iterable,
|
| 16 |
+
List,
|
| 17 |
+
Optional,
|
| 18 |
+
Tuple,
|
| 19 |
+
TypedDict,
|
| 20 |
+
Union,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
from multidict import CIMultiDict
|
| 24 |
+
from yarl import URL
|
| 25 |
+
|
| 26 |
+
from .typedefs import LooseCookies
|
| 27 |
+
|
| 28 |
+
if TYPE_CHECKING:
|
| 29 |
+
from .web_app import Application
|
| 30 |
+
from .web_exceptions import HTTPException
|
| 31 |
+
from .web_request import BaseRequest, Request
|
| 32 |
+
from .web_response import StreamResponse
|
| 33 |
+
else:
|
| 34 |
+
BaseRequest = Request = Application = StreamResponse = None
|
| 35 |
+
HTTPException = None
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class AbstractRouter(ABC):
|
| 39 |
+
def __init__(self) -> None:
|
| 40 |
+
self._frozen = False
|
| 41 |
+
|
| 42 |
+
def post_init(self, app: Application) -> None:
|
| 43 |
+
"""Post init stage.
|
| 44 |
+
|
| 45 |
+
Not an abstract method for sake of backward compatibility,
|
| 46 |
+
but if the router wants to be aware of the application
|
| 47 |
+
it can override this.
|
| 48 |
+
"""
|
| 49 |
+
|
| 50 |
+
@property
|
| 51 |
+
def frozen(self) -> bool:
|
| 52 |
+
return self._frozen
|
| 53 |
+
|
| 54 |
+
def freeze(self) -> None:
|
| 55 |
+
"""Freeze router."""
|
| 56 |
+
self._frozen = True
|
| 57 |
+
|
| 58 |
+
@abstractmethod
|
| 59 |
+
async def resolve(self, request: Request) -> "AbstractMatchInfo":
|
| 60 |
+
"""Return MATCH_INFO for given request"""
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class AbstractMatchInfo(ABC):
|
| 64 |
+
|
| 65 |
+
__slots__ = ()
|
| 66 |
+
|
| 67 |
+
@property # pragma: no branch
|
| 68 |
+
@abstractmethod
|
| 69 |
+
def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
|
| 70 |
+
"""Execute matched request handler"""
|
| 71 |
+
|
| 72 |
+
@property
|
| 73 |
+
@abstractmethod
|
| 74 |
+
def expect_handler(
|
| 75 |
+
self,
|
| 76 |
+
) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]:
|
| 77 |
+
"""Expect handler for 100-continue processing"""
|
| 78 |
+
|
| 79 |
+
@property # pragma: no branch
|
| 80 |
+
@abstractmethod
|
| 81 |
+
def http_exception(self) -> Optional[HTTPException]:
|
| 82 |
+
"""HTTPException instance raised on router's resolving, or None"""
|
| 83 |
+
|
| 84 |
+
@abstractmethod # pragma: no branch
|
| 85 |
+
def get_info(self) -> Dict[str, Any]:
|
| 86 |
+
"""Return a dict with additional info useful for introspection"""
|
| 87 |
+
|
| 88 |
+
@property # pragma: no branch
|
| 89 |
+
@abstractmethod
|
| 90 |
+
def apps(self) -> Tuple[Application, ...]:
|
| 91 |
+
"""Stack of nested applications.
|
| 92 |
+
|
| 93 |
+
Top level application is left-most element.
|
| 94 |
+
|
| 95 |
+
"""
|
| 96 |
+
|
| 97 |
+
@abstractmethod
|
| 98 |
+
def add_app(self, app: Application) -> None:
|
| 99 |
+
"""Add application to the nested apps stack."""
|
| 100 |
+
|
| 101 |
+
@abstractmethod
|
| 102 |
+
def freeze(self) -> None:
|
| 103 |
+
"""Freeze the match info.
|
| 104 |
+
|
| 105 |
+
The method is called after route resolution.
|
| 106 |
+
|
| 107 |
+
After the call .add_app() is forbidden.
|
| 108 |
+
|
| 109 |
+
"""
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
class AbstractView(ABC):
|
| 113 |
+
"""Abstract class based view."""
|
| 114 |
+
|
| 115 |
+
def __init__(self, request: Request) -> None:
|
| 116 |
+
self._request = request
|
| 117 |
+
|
| 118 |
+
@property
|
| 119 |
+
def request(self) -> Request:
|
| 120 |
+
"""Request instance."""
|
| 121 |
+
return self._request
|
| 122 |
+
|
| 123 |
+
@abstractmethod
|
| 124 |
+
def __await__(self) -> Generator[Any, None, StreamResponse]:
|
| 125 |
+
"""Execute the view handler."""
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
class ResolveResult(TypedDict):
|
| 129 |
+
"""Resolve result.
|
| 130 |
+
|
| 131 |
+
This is the result returned from an AbstractResolver's
|
| 132 |
+
resolve method.
|
| 133 |
+
|
| 134 |
+
:param hostname: The hostname that was provided.
|
| 135 |
+
:param host: The IP address that was resolved.
|
| 136 |
+
:param port: The port that was resolved.
|
| 137 |
+
:param family: The address family that was resolved.
|
| 138 |
+
:param proto: The protocol that was resolved.
|
| 139 |
+
:param flags: The flags that were resolved.
|
| 140 |
+
"""
|
| 141 |
+
|
| 142 |
+
hostname: str
|
| 143 |
+
host: str
|
| 144 |
+
port: int
|
| 145 |
+
family: int
|
| 146 |
+
proto: int
|
| 147 |
+
flags: int
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
class AbstractResolver(ABC):
|
| 151 |
+
"""Abstract DNS resolver."""
|
| 152 |
+
|
| 153 |
+
@abstractmethod
|
| 154 |
+
async def resolve(
|
| 155 |
+
self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
|
| 156 |
+
) -> List[ResolveResult]:
|
| 157 |
+
"""Return IP address for given hostname"""
|
| 158 |
+
|
| 159 |
+
@abstractmethod
|
| 160 |
+
async def close(self) -> None:
|
| 161 |
+
"""Release resolver"""
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
if TYPE_CHECKING:
|
| 165 |
+
IterableBase = Iterable[Morsel[str]]
|
| 166 |
+
else:
|
| 167 |
+
IterableBase = Iterable
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
ClearCookiePredicate = Callable[["Morsel[str]"], bool]
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
class AbstractCookieJar(Sized, IterableBase):
|
| 174 |
+
"""Abstract Cookie Jar."""
|
| 175 |
+
|
| 176 |
+
def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
|
| 177 |
+
self._loop = loop or asyncio.get_running_loop()
|
| 178 |
+
|
| 179 |
+
@property
|
| 180 |
+
@abstractmethod
|
| 181 |
+
def quote_cookie(self) -> bool:
|
| 182 |
+
"""Return True if cookies should be quoted."""
|
| 183 |
+
|
| 184 |
+
@abstractmethod
|
| 185 |
+
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
|
| 186 |
+
"""Clear all cookies if no predicate is passed."""
|
| 187 |
+
|
| 188 |
+
@abstractmethod
|
| 189 |
+
def clear_domain(self, domain: str) -> None:
|
| 190 |
+
"""Clear all cookies for domain and all subdomains."""
|
| 191 |
+
|
| 192 |
+
@abstractmethod
|
| 193 |
+
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
|
| 194 |
+
"""Update cookies."""
|
| 195 |
+
|
| 196 |
+
@abstractmethod
|
| 197 |
+
def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
|
| 198 |
+
"""Return the jar's cookies filtered by their attributes."""
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
class AbstractStreamWriter(ABC):
|
| 202 |
+
"""Abstract stream writer."""
|
| 203 |
+
|
| 204 |
+
buffer_size: int = 0
|
| 205 |
+
output_size: int = 0
|
| 206 |
+
length: Optional[int] = 0
|
| 207 |
+
|
| 208 |
+
@abstractmethod
|
| 209 |
+
async def write(self, chunk: Union[bytes, bytearray, memoryview]) -> None:
|
| 210 |
+
"""Write chunk into stream."""
|
| 211 |
+
|
| 212 |
+
@abstractmethod
|
| 213 |
+
async def write_eof(self, chunk: bytes = b"") -> None:
|
| 214 |
+
"""Write last chunk."""
|
| 215 |
+
|
| 216 |
+
@abstractmethod
|
| 217 |
+
async def drain(self) -> None:
|
| 218 |
+
"""Flush the write buffer."""
|
| 219 |
+
|
| 220 |
+
@abstractmethod
|
| 221 |
+
def enable_compression(
|
| 222 |
+
self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
|
| 223 |
+
) -> None:
|
| 224 |
+
"""Enable HTTP body compression"""
|
| 225 |
+
|
| 226 |
+
@abstractmethod
|
| 227 |
+
def enable_chunking(self) -> None:
|
| 228 |
+
"""Enable HTTP chunked mode"""
|
| 229 |
+
|
| 230 |
+
@abstractmethod
|
| 231 |
+
async def write_headers(
|
| 232 |
+
self, status_line: str, headers: "CIMultiDict[str]"
|
| 233 |
+
) -> None:
|
| 234 |
+
"""Write HTTP headers"""
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
class AbstractAccessLogger(ABC):
|
| 238 |
+
"""Abstract writer to access log."""
|
| 239 |
+
|
| 240 |
+
__slots__ = ("logger", "log_format")
|
| 241 |
+
|
| 242 |
+
def __init__(self, logger: logging.Logger, log_format: str) -> None:
|
| 243 |
+
self.logger = logger
|
| 244 |
+
self.log_format = log_format
|
| 245 |
+
|
| 246 |
+
@abstractmethod
|
| 247 |
+
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
|
| 248 |
+
"""Emit log to logger."""
|
| 249 |
+
|
| 250 |
+
@property
|
| 251 |
+
def enabled(self) -> bool:
|
| 252 |
+
"""Check if logger is enabled."""
|
| 253 |
+
return True
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/client_proto.py
ADDED
|
@@ -0,0 +1,307 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
from contextlib import suppress
|
| 3 |
+
from typing import Any, Optional, Tuple
|
| 4 |
+
|
| 5 |
+
from .base_protocol import BaseProtocol
|
| 6 |
+
from .client_exceptions import (
|
| 7 |
+
ClientOSError,
|
| 8 |
+
ClientPayloadError,
|
| 9 |
+
ServerDisconnectedError,
|
| 10 |
+
SocketTimeoutError,
|
| 11 |
+
)
|
| 12 |
+
from .helpers import (
|
| 13 |
+
_EXC_SENTINEL,
|
| 14 |
+
EMPTY_BODY_STATUS_CODES,
|
| 15 |
+
BaseTimerContext,
|
| 16 |
+
set_exception,
|
| 17 |
+
)
|
| 18 |
+
from .http import HttpResponseParser, RawResponseMessage
|
| 19 |
+
from .http_exceptions import HttpProcessingError
|
| 20 |
+
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
    """Helper class to adapt between Protocol and StreamReader."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        BaseProtocol.__init__(self, loop=loop)
        DataQueue.__init__(self, loop)

        # Set when this connection must not go back into the pool.
        self._should_close = False

        # Payload stream of the response currently being read.
        self._payload: Optional[StreamReader] = None
        self._skip_payload = False
        # Custom payload parser installed via set_parser() (websockets).
        self._payload_parser = None

        self._timer = None

        # Bytes received while no parser was attached (e.g. after upgrade,
        # before the websocket parser is installed).
        self._tail = b""
        self._upgraded = False
        self._parser: Optional[HttpResponseParser] = None

        # Read-timeout bookkeeping; rescheduled on every data_received().
        self._read_timeout: Optional[float] = None
        self._read_timeout_handle: Optional[asyncio.TimerHandle] = None

        self._timeout_ceil_threshold: Optional[float] = 5

    @property
    def upgraded(self) -> bool:
        # True once the HTTP parser reported a protocol upgrade.
        return self._upgraded

    @property
    def should_close(self) -> bool:
        # The connection cannot be reused if any state is still pending:
        # unfinished payload, an exception, an upgrade, a custom parser,
        # or buffered/unparsed bytes.
        return bool(
            self._should_close
            or (self._payload is not None and not self._payload.is_eof())
            or self._upgraded
            or self._exception is not None
            or self._payload_parser is not None
            or self._buffer
            or self._tail
        )

    def force_close(self) -> None:
        # Mark the connection as non-reusable.
        self._should_close = True

    def close(self) -> None:
        transport = self.transport
        if transport is not None:
            transport.close()
            self.transport = None
            self._payload = None
            self._drop_timeout()

    def is_connected(self) -> bool:
        return self.transport is not None and not self.transport.is_closing()

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        self._drop_timeout()

        original_connection_error = exc
        reraised_exc = original_connection_error

        connection_closed_cleanly = original_connection_error is None

        # Flush any custom (websocket) parser first.
        if self._payload_parser is not None:
            with suppress(Exception):  # FIXME: log this somehow?
                self._payload_parser.feed_eof()

        uncompleted = None
        if self._parser is not None:
            try:
                uncompleted = self._parser.feed_eof()
            except Exception as underlying_exc:
                # The HTTP parser rejected EOF: the response body was cut
                # short.  Propagate a ClientPayloadError to payload readers,
                # chaining the parser error (and the transport error, if any).
                if self._payload is not None:
                    client_payload_exc_msg = (
                        f"Response payload is not completed: {underlying_exc!r}"
                    )
                    if not connection_closed_cleanly:
                        client_payload_exc_msg = (
                            f"{client_payload_exc_msg!s}. "
                            f"{original_connection_error!r}"
                        )
                    set_exception(
                        self._payload,
                        ClientPayloadError(client_payload_exc_msg),
                        underlying_exc,
                    )

        if not self.is_eof():
            # The message stream is not finished: surface the failure to
            # whoever is waiting on this DataQueue.
            if isinstance(original_connection_error, OSError):
                reraised_exc = ClientOSError(*original_connection_error.args)
            if connection_closed_cleanly:
                reraised_exc = ServerDisconnectedError(uncompleted)
            # assigns self._should_close to True as side effect,
            # we do it anyway below
            underlying_non_eof_exc = (
                _EXC_SENTINEL
                if connection_closed_cleanly
                else original_connection_error
            )
            assert underlying_non_eof_exc is not None
            assert reraised_exc is not None
            self.set_exception(reraised_exc, underlying_non_eof_exc)

        # Reset parser/payload state; the connection is done either way.
        self._should_close = True
        self._parser = None
        self._payload = None
        self._payload_parser = None
        self._reading_paused = False

        super().connection_lost(reraised_exc)

    def eof_received(self) -> None:
        # should call parser.feed_eof() most likely
        self._drop_timeout()

    def pause_reading(self) -> None:
        # No data can arrive while paused, so the read timeout is suspended.
        super().pause_reading()
        self._drop_timeout()

    def resume_reading(self) -> None:
        super().resume_reading()
        self._reschedule_timeout()

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        # Any error poisons the connection for reuse.
        self._should_close = True
        self._drop_timeout()
        super().set_exception(exc, exc_cause)

    def set_parser(self, parser: Any, payload: Any) -> None:
        # TODO: actual types are:
        #   parser: WebSocketReader
        #   payload: WebSocketDataQueue
        # but they are not generic enough
        # Need an ABC for both types
        self._payload = payload
        self._payload_parser = parser

        self._drop_timeout()

        # Re-feed bytes that arrived before the parser was installed.
        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def set_response_params(
        self,
        *,
        timer: Optional[BaseTimerContext] = None,
        skip_payload: bool = False,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
        read_timeout: Optional[float] = None,
        read_bufsize: int = 2**16,
        timeout_ceil_threshold: float = 5,
        max_line_size: int = 8190,
        max_field_size: int = 8190,
    ) -> None:
        # Configure and install the HTTP response parser for one request.
        self._skip_payload = skip_payload

        self._read_timeout = read_timeout

        self._timeout_ceil_threshold = timeout_ceil_threshold

        self._parser = HttpResponseParser(
            self,
            self._loop,
            read_bufsize,
            timer=timer,
            payload_exception=ClientPayloadError,
            response_with_body=not skip_payload,
            read_until_eof=read_until_eof,
            auto_decompress=auto_decompress,
            max_line_size=max_line_size,
            max_field_size=max_field_size,
        )

        # Re-feed bytes that arrived before the parser was installed.
        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def _drop_timeout(self) -> None:
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()
            self._read_timeout_handle = None

    def _reschedule_timeout(self) -> None:
        # Restart the read-timeout clock from "now".
        timeout = self._read_timeout
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()

        if timeout:
            self._read_timeout_handle = self._loop.call_later(
                timeout, self._on_read_timeout
            )
        else:
            self._read_timeout_handle = None

    def start_timeout(self) -> None:
        self._reschedule_timeout()

    @property
    def read_timeout(self) -> Optional[float]:
        return self._read_timeout

    @read_timeout.setter
    def read_timeout(self, read_timeout: Optional[float]) -> None:
        self._read_timeout = read_timeout

    def _on_read_timeout(self) -> None:
        # Fired by the event loop when no data arrived within read_timeout.
        exc = SocketTimeoutError("Timeout on reading data from socket")
        self.set_exception(exc)
        if self._payload is not None:
            set_exception(self._payload, exc)

    def data_received(self, data: bytes) -> None:
        self._reschedule_timeout()

        if not data:
            return

        # custom payload parser - currently always WebSocketReader
        if self._payload_parser is not None:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self._payload = None
                self._payload_parser = None

                if tail:
                    self.data_received(tail)
            return

        if self._upgraded or self._parser is None:
            # i.e. websocket connection, websocket parser is not set yet
            self._tail += data
            return

        # parse http messages
        try:
            messages, upgraded, tail = self._parser.feed_data(data)
        except BaseException as underlying_exc:
            if self.transport is not None:
                # connection.release() could be called BEFORE
                # data_received(), the transport is already
                # closed in this case
                self.transport.close()
            # should_close is True after the call
            if isinstance(underlying_exc, HttpProcessingError):
                exc = HttpProcessingError(
                    code=underlying_exc.code,
                    message=underlying_exc.message,
                    headers=underlying_exc.headers,
                )
            else:
                exc = HttpProcessingError()
            self.set_exception(exc, underlying_exc)
            return

        self._upgraded = upgraded

        payload: Optional[StreamReader] = None
        for message, payload in messages:
            if message.should_close:
                self._should_close = True

            self._payload = payload

            # Responses with no body (HEAD, 1xx/204/304) get EMPTY_PAYLOAD.
            if self._skip_payload or message.code in EMPTY_BODY_STATUS_CODES:
                self.feed_data((message, EMPTY_PAYLOAD), 0)
            else:
                self.feed_data((message, payload), 0)

        if payload is not None:
            # new message(s) was processed
            # register timeout handler unsubscribing
            # either on end-of-stream or immediately for
            # EMPTY_PAYLOAD
            if payload is not EMPTY_PAYLOAD:
                payload.on_eof(self._drop_timeout)
            else:
                self._drop_timeout()

        if upgraded and tail:
            self.data_received(tail)
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/client_reqrep.py
ADDED
|
@@ -0,0 +1,1315 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import codecs
|
| 3 |
+
import contextlib
|
| 4 |
+
import functools
|
| 5 |
+
import io
|
| 6 |
+
import re
|
| 7 |
+
import sys
|
| 8 |
+
import traceback
|
| 9 |
+
import warnings
|
| 10 |
+
from hashlib import md5, sha1, sha256
|
| 11 |
+
from http.cookies import CookieError, Morsel, SimpleCookie
|
| 12 |
+
from types import MappingProxyType, TracebackType
|
| 13 |
+
from typing import (
|
| 14 |
+
TYPE_CHECKING,
|
| 15 |
+
Any,
|
| 16 |
+
Callable,
|
| 17 |
+
Dict,
|
| 18 |
+
Iterable,
|
| 19 |
+
List,
|
| 20 |
+
Mapping,
|
| 21 |
+
NamedTuple,
|
| 22 |
+
Optional,
|
| 23 |
+
Tuple,
|
| 24 |
+
Type,
|
| 25 |
+
Union,
|
| 26 |
+
)
|
| 27 |
+
|
| 28 |
+
import attr
|
| 29 |
+
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
|
| 30 |
+
from yarl import URL
|
| 31 |
+
|
| 32 |
+
from . import hdrs, helpers, http, multipart, payload
|
| 33 |
+
from .abc import AbstractStreamWriter
|
| 34 |
+
from .client_exceptions import (
|
| 35 |
+
ClientConnectionError,
|
| 36 |
+
ClientOSError,
|
| 37 |
+
ClientResponseError,
|
| 38 |
+
ContentTypeError,
|
| 39 |
+
InvalidURL,
|
| 40 |
+
ServerFingerprintMismatch,
|
| 41 |
+
)
|
| 42 |
+
from .compression_utils import HAS_BROTLI
|
| 43 |
+
from .formdata import FormData
|
| 44 |
+
from .helpers import (
|
| 45 |
+
_SENTINEL,
|
| 46 |
+
BaseTimerContext,
|
| 47 |
+
BasicAuth,
|
| 48 |
+
HeadersMixin,
|
| 49 |
+
TimerNoop,
|
| 50 |
+
basicauth_from_netrc,
|
| 51 |
+
netrc_from_env,
|
| 52 |
+
noop,
|
| 53 |
+
reify,
|
| 54 |
+
set_exception,
|
| 55 |
+
set_result,
|
| 56 |
+
)
|
| 57 |
+
from .http import (
|
| 58 |
+
SERVER_SOFTWARE,
|
| 59 |
+
HttpVersion,
|
| 60 |
+
HttpVersion10,
|
| 61 |
+
HttpVersion11,
|
| 62 |
+
StreamWriter,
|
| 63 |
+
)
|
| 64 |
+
from .log import client_logger
|
| 65 |
+
from .streams import StreamReader
|
| 66 |
+
from .typedefs import (
|
| 67 |
+
DEFAULT_JSON_DECODER,
|
| 68 |
+
JSONDecoder,
|
| 69 |
+
LooseCookies,
|
| 70 |
+
LooseHeaders,
|
| 71 |
+
Query,
|
| 72 |
+
RawHeaders,
|
| 73 |
+
)
|
| 74 |
+
|
| 75 |
+
if TYPE_CHECKING:
|
| 76 |
+
import ssl
|
| 77 |
+
from ssl import SSLContext
|
| 78 |
+
else:
|
| 79 |
+
try:
|
| 80 |
+
import ssl
|
| 81 |
+
from ssl import SSLContext
|
| 82 |
+
except ImportError: # pragma: no cover
|
| 83 |
+
ssl = None # type: ignore[assignment]
|
| 84 |
+
SSLContext = object # type: ignore[misc,assignment]
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
if TYPE_CHECKING:
|
| 91 |
+
from .client import ClientSession
|
| 92 |
+
from .connector import Connection
|
| 93 |
+
from .tracing import Trace
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
# Characters outside this set are not valid HTTP method token characters.
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
# Matches JSON media types, including structured-syntax suffixes
# such as application/vnd.api+json.
json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json")
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
def _gen_default_accept_encoding() -> str:
    """Build the default ``Accept-Encoding`` header value.

    Brotli is advertised only when the optional brotli codec is available.
    """
    if HAS_BROTLI:
        return "gzip, deflate, br"
    return "gzip, deflate"
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ContentDisposition:
    """Parsed ``Content-Disposition`` header of a response part."""

    # Disposition type, e.g. "attachment" or "inline"; None if absent.
    type: Optional[str]
    # Remaining header parameters as an immutable mapping.
    parameters: "MappingProxyType[str, str]"
    # The "filename" parameter, if present.
    filename: Optional[str]
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
class _RequestInfo(NamedTuple):
    """Immutable snapshot of a request, exposed as ``ClientResponse.request_info``."""

    # Effective request URL (fragment stripped).
    url: URL
    # Upper-cased HTTP method.
    method: str
    # Read-only view of the request headers.
    headers: "CIMultiDictProxy[str]"
    # URL as originally passed by the caller (fragment preserved).
    real_url: URL
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
class RequestInfo(_RequestInfo):

    def __new__(
        cls,
        url: URL,
        method: str,
        headers: "CIMultiDictProxy[str]",
        real_url: URL = _SENTINEL,  # type: ignore[assignment]
    ) -> "RequestInfo":
        """Create a new RequestInfo instance.

        For backwards compatibility, the real_url parameter is optional.
        """
        # When real_url was omitted, fall back to url itself.
        effective_real_url = url if real_url is _SENTINEL else real_url
        return tuple.__new__(cls, (url, method, headers, effective_real_url))
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
class Fingerprint:
    """Pin a peer certificate by the digest of its DER-encoded form.

    Only SHA-256 digests (32 bytes) are accepted; MD5 and SHA-1 are
    rejected as insecure.
    """

    HASHFUNC_BY_DIGESTLEN = {
        16: md5,
        20: sha1,
        32: sha256,
    }

    def __init__(self, fingerprint: bytes) -> None:
        # Infer the hash function from the digest length.
        digest_fn = self.HASHFUNC_BY_DIGESTLEN.get(len(fingerprint))
        if digest_fn is None:
            raise ValueError("fingerprint has invalid length")
        if digest_fn in (md5, sha1):
            raise ValueError("md5 and sha1 are insecure and not supported. Use sha256.")
        self._hashfunc = digest_fn
        self._fingerprint = fingerprint

    @property
    def fingerprint(self) -> bytes:
        """The expected digest, as raw bytes."""
        return self._fingerprint

    def check(self, transport: asyncio.Transport) -> None:
        """Verify the peer certificate of *transport* against the pinned digest.

        No-op for non-TLS transports; raises ServerFingerprintMismatch
        when the digest of the presented certificate differs.
        """
        if not transport.get_extra_info("sslcontext"):
            return
        ssl_object = transport.get_extra_info("ssl_object")
        peer_cert = ssl_object.getpeercert(binary_form=True)
        actual_digest = self._hashfunc(peer_cert).digest()
        if actual_digest != self._fingerprint:
            host, port, *_ = transport.get_extra_info("peername")
            raise ServerFingerprintMismatch(self._fingerprint, actual_digest, host, port)
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
# Types accepted for the ``ssl`` request parameter; Fingerprint/SSLContext
# are only available when the ssl module could be imported.
if ssl is not None:
    SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None))
else:  # pragma: no cover
    SSL_ALLOWED_TYPES = (bool, type(None))
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
def _merge_ssl_params(
    ssl: Union["SSLContext", bool, Fingerprint],
    verify_ssl: Optional[bool],
    ssl_context: Optional["SSLContext"],
    fingerprint: Optional[bytes],
) -> Union["SSLContext", bool, Fingerprint]:
    """Collapse the deprecated verify_ssl/ssl_context/fingerprint arguments
    into the single ``ssl`` value, warning on each deprecated use and
    rejecting conflicting combinations.
    """
    if ssl is None:
        ssl = True  # Double check for backwards compatibility
    # Shared error text for any conflicting combination of parameters.
    mutually_exclusive = (
        "verify_ssl, ssl_context, fingerprint and ssl "
        "parameters are mutually exclusive"
    )
    if verify_ssl is not None and not verify_ssl:
        warnings.warn(
            "verify_ssl is deprecated, use ssl=False instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(mutually_exclusive)
        ssl = False
    if ssl_context is not None:
        warnings.warn(
            "ssl_context is deprecated, use ssl=context instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(mutually_exclusive)
        ssl = ssl_context
    if fingerprint is not None:
        warnings.warn(
            "fingerprint is deprecated, use ssl=Fingerprint(fingerprint) instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(mutually_exclusive)
        ssl = Fingerprint(fingerprint)
    if not isinstance(ssl, SSL_ALLOWED_TYPES):
        raise TypeError(
            "ssl should be SSLContext, bool, Fingerprint or None, "
            f"got {ssl!r} instead."
        )
    return ssl
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
# URL schemes that require TLS on the underlying transport.
_SSL_SCHEMES = frozenset(("https", "wss"))
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
# ConnectionKey is a NamedTuple because it is used as a key in a dict
# and a set in the connector. Since a NamedTuple is a tuple it uses
# the fast native tuple __hash__ and __eq__ implementation in CPython.
class ConnectionKey(NamedTuple):
    # the key should contain an information about used proxy / TLS
    # to prevent reusing wrong connections from a pool
    host: str
    port: Optional[int]
    is_ssl: bool
    ssl: Union[SSLContext, bool, Fingerprint]
    proxy: Optional[URL]
    proxy_auth: Optional[BasicAuth]
    proxy_headers_hash: Optional[int]  # hash(CIMultiDict)
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
def _is_expected_content_type(
    response_content_type: str, expected_content_type: str
) -> bool:
    """Return True if the response Content-Type satisfies the expected one.

    For "application/json" any JSON media type is accepted, including
    structured-syntax suffixes (e.g. application/vnd.api+json); otherwise
    a plain substring containment check is used.
    """
    if expected_content_type != "application/json":
        return expected_content_type in response_content_type
    return json_re.match(response_content_type) is not None
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
class ClientRequest:
|
| 256 |
+
GET_METHODS = {
|
| 257 |
+
hdrs.METH_GET,
|
| 258 |
+
hdrs.METH_HEAD,
|
| 259 |
+
hdrs.METH_OPTIONS,
|
| 260 |
+
hdrs.METH_TRACE,
|
| 261 |
+
}
|
| 262 |
+
POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
|
| 263 |
+
ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})
|
| 264 |
+
|
| 265 |
+
DEFAULT_HEADERS = {
|
| 266 |
+
hdrs.ACCEPT: "*/*",
|
| 267 |
+
hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(),
|
| 268 |
+
}
|
| 269 |
+
|
| 270 |
+
# Type of body depends on PAYLOAD_REGISTRY, which is dynamic.
|
| 271 |
+
body: Any = b""
|
| 272 |
+
auth = None
|
| 273 |
+
response = None
|
| 274 |
+
|
| 275 |
+
__writer = None # async task for streaming data
|
| 276 |
+
_continue = None # waiter future for '100 Continue' response
|
| 277 |
+
|
| 278 |
+
_skip_auto_headers: Optional["CIMultiDict[None]"] = None
|
| 279 |
+
|
| 280 |
+
# N.B.
|
| 281 |
+
# Adding __del__ method with self._writer closing doesn't make sense
|
| 282 |
+
# because _writer is instance method, thus it keeps a reference to self.
|
| 283 |
+
# Until writer has finished finalizer will not be called.
|
| 284 |
+
|
| 285 |
+
def __init__(
    self,
    method: str,
    url: URL,
    *,
    params: Query = None,
    headers: Optional[LooseHeaders] = None,
    skip_auto_headers: Optional[Iterable[str]] = None,
    data: Any = None,
    cookies: Optional[LooseCookies] = None,
    auth: Optional[BasicAuth] = None,
    version: http.HttpVersion = http.HttpVersion11,
    compress: Union[str, bool, None] = None,
    chunked: Optional[bool] = None,
    expect100: bool = False,
    loop: Optional[asyncio.AbstractEventLoop] = None,
    response_class: Optional[Type["ClientResponse"]] = None,
    proxy: Optional[URL] = None,
    proxy_auth: Optional[BasicAuth] = None,
    timer: Optional[BaseTimerContext] = None,
    session: Optional["ClientSession"] = None,
    ssl: Union[SSLContext, bool, Fingerprint] = True,
    proxy_headers: Optional[LooseHeaders] = None,
    traces: Optional[List["Trace"]] = None,
    trust_env: bool = False,
    server_hostname: Optional[str] = None,
):
    """Build a single outgoing HTTP request.

    Validates the method and URL, stores the request configuration, then
    runs the update_* pipeline to derive headers, auth, cookies, proxy
    settings and body from the supplied arguments.
    """
    if loop is None:
        loop = asyncio.get_event_loop()
    # Reject methods containing characters outside the HTTP token set.
    if match := _CONTAINS_CONTROL_CHAR_RE.search(method):
        raise ValueError(
            f"Method cannot contain non-token characters {method!r} "
            f"(found at least {match.group()!r})"
        )
    # URL forbids subclasses, so a simple type check is enough.
    assert type(url) is URL, url
    if proxy is not None:
        assert type(proxy) is URL, proxy
    # FIXME: session is None in tests only, need to fix tests
    # assert session is not None
    if TYPE_CHECKING:
        assert session is not None
    self._session = session
    if params:
        url = url.extend_query(params)
    # Keep the caller's URL (with fragment) for request_info; send without it.
    self.original_url = url
    self.url = url.with_fragment(None) if url.raw_fragment else url
    self.method = method.upper()
    self.chunked = chunked
    self.compress = compress
    self.loop = loop
    self.length = None
    if response_class is None:
        real_response_class = ClientResponse
    else:
        real_response_class = response_class
    self.response_class: Type[ClientResponse] = real_response_class
    self._timer = timer if timer is not None else TimerNoop()
    self._ssl = ssl if ssl is not None else True
    self.server_hostname = server_hostname

    if loop.get_debug():
        self._source_traceback = traceback.extract_stack(sys._getframe(1))

    # NOTE: the order of these calls matters; later steps read state
    # established by earlier ones (e.g. headers before auto-headers).
    self.update_version(version)
    self.update_host(url)
    self.update_headers(headers)
    self.update_auto_headers(skip_auto_headers)
    self.update_cookies(cookies)
    self.update_content_encoding(data)
    self.update_auth(auth, trust_env)
    self.update_proxy(proxy, proxy_auth, proxy_headers)

    self.update_body_from_data(data)
    # Bodyless GET-family requests skip transfer-encoding negotiation.
    if data is not None or self.method not in self.GET_METHODS:
        self.update_transfer_encoding()
    self.update_expect_continue(expect100)
    self._traces = [] if traces is None else traces
|
| 363 |
+
|
| 364 |
+
def __reset_writer(self, _: object = None) -> None:
    # Done-callback for the writer task: drop the reference so the
    # request does not keep the finished task alive.
    self.__writer = None
|
| 366 |
+
|
| 367 |
+
@property
def skip_auto_headers(self) -> CIMultiDict[None]:
    """Headers the client must not add automatically (empty dict if unset)."""
    return self._skip_auto_headers or CIMultiDict()
|
| 370 |
+
|
| 371 |
+
@property
def _writer(self) -> Optional["asyncio.Task[None]"]:
    """The async task currently streaming the request body, if any."""
    return self.__writer

@_writer.setter
def _writer(self, writer: "asyncio.Task[None]") -> None:
    # Detach the reset callback from any previous writer before
    # replacing it, so a stale task cannot clear the new reference.
    if self.__writer is not None:
        self.__writer.remove_done_callback(self.__reset_writer)
    self.__writer = writer
    writer.add_done_callback(self.__reset_writer)
|
| 381 |
+
|
| 382 |
+
def is_ssl(self) -> bool:
    """True when the request URL scheme requires TLS (https/wss)."""
    return self.url.scheme in _SSL_SCHEMES
|
| 384 |
+
|
| 385 |
+
@property
def ssl(self) -> Union["SSLContext", bool, Fingerprint]:
    """The ssl parameter passed for this request (context, bool or pin)."""
    return self._ssl
|
| 388 |
+
|
| 389 |
+
@property
def connection_key(self) -> ConnectionKey:
    """Key identifying which pooled connections this request may reuse."""
    # Proxy headers participate in the key only via their hash; a
    # different proxy-header set must not reuse the same connection.
    if proxy_headers := self.proxy_headers:
        h: Optional[int] = hash(tuple(proxy_headers.items()))
    else:
        h = None
    url = self.url
    # tuple.__new__ avoids NamedTuple's slower keyword construction on
    # this hot path.
    return tuple.__new__(
        ConnectionKey,
        (
            url.raw_host or "",
            url.port,
            url.scheme in _SSL_SCHEMES,
            self._ssl,
            self.proxy,
            self.proxy_auth,
            h,
        ),
    )
|
| 408 |
+
|
| 409 |
+
@property
def host(self) -> str:
    """Destination host from the request URL (never None after update_host)."""
    ret = self.url.raw_host
    assert ret is not None
    return ret

@property
def port(self) -> Optional[int]:
    """Destination port from the request URL."""
    return self.url.port
|
| 418 |
+
|
| 419 |
+
@property
def request_info(self) -> RequestInfo:
    """Immutable snapshot (url, method, headers, original url) of this request."""
    headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers)
    # These are created on every request, so we use a NamedTuple
    # for performance reasons. We don't use the RequestInfo.__new__
    # method because it has a different signature which is provided
    # for backwards compatibility only.
    return tuple.__new__(
        RequestInfo, (self.url, self.method, headers, self.original_url)
    )
|
| 429 |
+
|
| 430 |
+
    def update_host(self, url: URL) -> None:
        """Update destination host, port and connection type (ssl)."""
        # get host/port; a URL without a host cannot be connected to.
        if not url.raw_host:
            raise InvalidURL(url)

        # basic auth info embedded in the URL (user:password@host) wins.
        if url.raw_user or url.raw_password:
            self.auth = helpers.BasicAuth(url.user or "", url.password or "")
|
| 439 |
+
|
| 440 |
+
def update_version(self, version: Union[http.HttpVersion, str]) -> None:
|
| 441 |
+
"""Convert request version to two elements tuple.
|
| 442 |
+
|
| 443 |
+
parser HTTP version '1.1' => (1, 1)
|
| 444 |
+
"""
|
| 445 |
+
if isinstance(version, str):
|
| 446 |
+
v = [part.strip() for part in version.split(".", 1)]
|
| 447 |
+
try:
|
| 448 |
+
version = http.HttpVersion(int(v[0]), int(v[1]))
|
| 449 |
+
except ValueError:
|
| 450 |
+
raise ValueError(
|
| 451 |
+
f"Can not parse http version number: {version}"
|
| 452 |
+
) from None
|
| 453 |
+
self.version = version
|
| 454 |
+
|
| 455 |
+
    def update_headers(self, headers: Optional[LooseHeaders]) -> None:
        """Update request headers.

        Always (re)builds ``self.headers`` with a Host header derived from the
        URL; user-supplied headers are merged on top.
        """
        self.headers: CIMultiDict[str] = CIMultiDict()

        # Build the host header
        host = self.url.host_port_subcomponent

        # host_port_subcomponent is None when the URL is a relative URL.
        # but we know we do not have a relative URL here.
        assert host is not None
        self.headers[hdrs.HOST] = host

        if not headers:
            return

        if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
            headers = headers.items()

        for key, value in headers:  # type: ignore[misc]
            # A special case for Host header: replace instead of add so a
            # user-supplied Host overrides the URL-derived one.
            if key in hdrs.HOST_ALL:
                self.headers[key] = value
            else:
                self.headers.add(key, value)
|
| 479 |
+
|
| 480 |
+
    def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None:
        """Fill in default headers, honouring the user's skip list."""
        if skip_auto_headers is not None:
            # sorted() keeps the skip-header dict deterministic.
            self._skip_auto_headers = CIMultiDict(
                (hdr, None) for hdr in sorted(skip_auto_headers)
            )
            used_headers = self.headers.copy()
            used_headers.extend(self._skip_auto_headers)  # type: ignore[arg-type]
        else:
            # Fast path when there are no headers to skip
            # which is the most common case.
            used_headers = self.headers

        for hdr, val in self.DEFAULT_HEADERS.items():
            if hdr not in used_headers:
                self.headers[hdr] = val

        if hdrs.USER_AGENT not in used_headers:
            self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE
|
| 498 |
+
|
| 499 |
+
    def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
        """Update request cookies header."""
        if not cookies:
            return

        c = SimpleCookie()
        # Merge with any cookies already present in the header, then rewrite it.
        if hdrs.COOKIE in self.headers:
            c.load(self.headers.get(hdrs.COOKIE, ""))
            del self.headers[hdrs.COOKIE]

        if isinstance(cookies, Mapping):
            iter_cookies = cookies.items()
        else:
            iter_cookies = cookies  # type: ignore[assignment]
        for name, value in iter_cookies:
            if isinstance(value, Morsel):
                # Preserve coded_value
                mrsl_val = value.get(value.key, Morsel())
                mrsl_val.set(value.key, value.value, value.coded_value)
                c[name] = mrsl_val
            else:
                c[name] = value  # type: ignore[assignment]

        self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()
|
| 523 |
+
|
| 524 |
+
    def update_content_encoding(self, data: Any) -> None:
        """Set request content encoding."""
        if not data:
            # Don't compress an empty body.
            self.compress = None
            return

        if self.headers.get(hdrs.CONTENT_ENCODING):
            # An explicit Content-Encoding header and the compress flag are
            # mutually exclusive.
            if self.compress:
                raise ValueError(
                    "compress can not be set if Content-Encoding header is set"
                )
        elif self.compress:
            if not isinstance(self.compress, str):
                self.compress = "deflate"
            self.headers[hdrs.CONTENT_ENCODING] = self.compress
            self.chunked = True  # enable chunked, no need to deal with length
|
| 541 |
+
|
| 542 |
+
    def update_transfer_encoding(self) -> None:
        """Analyze transfer-encoding header."""
        te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()

        if "chunked" in te:
            # User already set the header; setting chunked too is redundant
            # and treated as an error.
            if self.chunked:
                raise ValueError(
                    "chunked can not be set "
                    'if "Transfer-Encoding: chunked" header is set'
                )

        elif self.chunked:
            # Chunked transfer and Content-Length are mutually exclusive.
            if hdrs.CONTENT_LENGTH in self.headers:
                raise ValueError(
                    "chunked can not be set if Content-Length header is set"
                )

            self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
        else:
            if hdrs.CONTENT_LENGTH not in self.headers:
                self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
|
| 563 |
+
|
| 564 |
+
    def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None:
        """Set basic auth.

        Resolution order: explicit argument, auth derived from the URL,
        then (if trust_env) the netrc file.
        """
        if auth is None:
            auth = self.auth
        if auth is None and trust_env and self.url.host is not None:
            netrc_obj = netrc_from_env()
            with contextlib.suppress(LookupError):
                auth = basicauth_from_netrc(netrc_obj, self.url.host)
        if auth is None:
            return

        if not isinstance(auth, helpers.BasicAuth):
            raise TypeError("BasicAuth() tuple is required instead")

        self.headers[hdrs.AUTHORIZATION] = auth.encode()
|
| 579 |
+
|
| 580 |
+
    def update_body_from_data(self, body: Any) -> None:
        """Wrap *body* in a payload object and sync headers with it."""
        if body is None:
            return

        # FormData
        if isinstance(body, FormData):
            body = body()

        try:
            body = payload.PAYLOAD_REGISTRY.get(body, disposition=None)
        except payload.LookupError:
            # Unknown body type: treat it as form fields.
            body = FormData(body)()

        self.body = body

        # enable chunked encoding if needed: payloads of unknown size
        # must be streamed chunked.
        if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers:
            if (size := body.size) is not None:
                self.headers[hdrs.CONTENT_LENGTH] = str(size)
            else:
                self.chunked = True

        # copy payload headers, without overriding user-set or skipped ones
        assert body.headers
        headers = self.headers
        skip_headers = self._skip_auto_headers
        for key, value in body.headers.items():
            if key in headers or (skip_headers is not None and key in skip_headers):
                continue
            headers[key] = value
|
| 610 |
+
|
| 611 |
+
def update_expect_continue(self, expect: bool = False) -> None:
|
| 612 |
+
if expect:
|
| 613 |
+
self.headers[hdrs.EXPECT] = "100-continue"
|
| 614 |
+
elif (
|
| 615 |
+
hdrs.EXPECT in self.headers
|
| 616 |
+
and self.headers[hdrs.EXPECT].lower() == "100-continue"
|
| 617 |
+
):
|
| 618 |
+
expect = True
|
| 619 |
+
|
| 620 |
+
if expect:
|
| 621 |
+
self._continue = self.loop.create_future()
|
| 622 |
+
|
| 623 |
+
    def update_proxy(
        self,
        proxy: Optional[URL],
        proxy_auth: Optional[BasicAuth],
        proxy_headers: Optional[LooseHeaders],
    ) -> None:
        """Record proxy settings; clears auth/headers when no proxy is given."""
        self.proxy = proxy
        if proxy is None:
            self.proxy_auth = None
            self.proxy_headers = None
            return

        if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
            raise ValueError("proxy_auth must be None or BasicAuth() tuple")
        self.proxy_auth = proxy_auth

        # Normalize to a case-insensitive multidict for consistent lookups.
        if proxy_headers is not None and not isinstance(
            proxy_headers, (MultiDict, MultiDictProxy)
        ):
            proxy_headers = CIMultiDict(proxy_headers)
        self.proxy_headers = proxy_headers
|
| 644 |
+
|
| 645 |
+
    async def write_bytes(
        self, writer: AbstractStreamWriter, conn: "Connection"
    ) -> None:
        """Support coroutines that yields bytes objects.

        Errors while writing are routed to the protocol via set_exception()
        rather than raised, except cancellation which closes the connection
        and propagates.
        """
        # 100 response: wait for the server's go-ahead before sending the body.
        if self._continue is not None:
            await writer.drain()
            await self._continue

        protocol = conn.protocol
        assert protocol is not None
        try:
            if isinstance(self.body, payload.Payload):
                await self.body.write(writer)
            else:
                # Normalize a single bytes-like body into an iterable of chunks.
                if isinstance(self.body, (bytes, bytearray)):
                    self.body = (self.body,)

                for chunk in self.body:
                    await writer.write(chunk)
        except OSError as underlying_exc:
            reraised_exc = underlying_exc

            # An OSError with no errno that is an asyncio.TimeoutError is a
            # timeout, not a transport failure; everything else is wrapped.
            exc_is_not_timeout = underlying_exc.errno is not None or not isinstance(
                underlying_exc, asyncio.TimeoutError
            )
            if exc_is_not_timeout:
                reraised_exc = ClientOSError(
                    underlying_exc.errno,
                    f"Can not write request body for {self.url !s}",
                )

            set_exception(protocol, reraised_exc, underlying_exc)
        except asyncio.CancelledError:
            # Body hasn't been fully sent, so connection can't be reused.
            conn.close()
            raise
        except Exception as underlying_exc:
            set_exception(
                protocol,
                ClientConnectionError(
                    f"Failed to send bytes into the underlying connection {conn !s}",
                ),
                underlying_exc,
            )
        else:
            # Body fully sent: finish the message and start the read timeout.
            await writer.write_eof()
            protocol.start_timeout()
|
| 693 |
+
|
| 694 |
+
    async def send(self, conn: "Connection") -> "ClientResponse":
        """Write the request to *conn* and return the (not yet started) response.

        Headers are written synchronously; the body (if any) is streamed by a
        background task stored in self._writer.
        """
        # Specify request target:
        # - CONNECT request must send authority form URI
        # - not CONNECT proxy must send absolute form URI
        # - most common is origin form URI
        if self.method == hdrs.METH_CONNECT:
            connect_host = self.url.host_subcomponent
            assert connect_host is not None
            path = f"{connect_host}:{self.url.port}"
        elif self.proxy and not self.is_ssl():
            path = str(self.url)
        else:
            path = self.url.raw_path_qs

        protocol = conn.protocol
        assert protocol is not None
        # Tracing callbacks are only attached when traces are configured.
        writer = StreamWriter(
            protocol,
            self.loop,
            on_chunk_sent=(
                functools.partial(self._on_chunk_request_sent, self.method, self.url)
                if self._traces
                else None
            ),
            on_headers_sent=(
                functools.partial(self._on_headers_request_sent, self.method, self.url)
                if self._traces
                else None
            ),
        )

        if self.compress:
            writer.enable_compression(self.compress)  # type: ignore[arg-type]

        if self.chunked is not None:
            writer.enable_chunking()

        # set default content-type
        if (
            self.method in self.POST_METHODS
            and (
                self._skip_auto_headers is None
                or hdrs.CONTENT_TYPE not in self._skip_auto_headers
            )
            and hdrs.CONTENT_TYPE not in self.headers
        ):
            self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"

        # Connection header defaults depend on HTTP version semantics:
        # HTTP/1.1 keeps alive unless told otherwise, HTTP/1.0 closes.
        v = self.version
        if hdrs.CONNECTION not in self.headers:
            if conn._connector.force_close:
                if v == HttpVersion11:
                    self.headers[hdrs.CONNECTION] = "close"
            elif v == HttpVersion10:
                self.headers[hdrs.CONNECTION] = "keep-alive"

        # status + headers
        status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}"
        await writer.write_headers(status_line, self.headers)
        task: Optional["asyncio.Task[None]"]
        if self.body or self._continue is not None or protocol.writing_paused:
            coro = self.write_bytes(writer, conn)
            if sys.version_info >= (3, 12):
                # Optimization for Python 3.12, try to write
                # bytes immediately to avoid having to schedule
                # the task on the event loop.
                task = asyncio.Task(coro, loop=self.loop, eager_start=True)
            else:
                task = self.loop.create_task(coro)
            if task.done():
                task = None
            else:
                self._writer = task
        else:
            # We have nothing to write because
            # - there is no body
            # - the protocol does not have writing paused
            # - we are not waiting for a 100-continue response
            protocol.start_timeout()
            writer.set_eof()
            task = None
        response_class = self.response_class
        assert response_class is not None
        self.response = response_class(
            self.method,
            self.original_url,
            writer=task,
            continue100=self._continue,
            timer=self._timer,
            request_info=self.request_info,
            traces=self._traces,
            loop=self.loop,
            session=self._session,
        )
        return self.response
|
| 789 |
+
|
| 790 |
+
    async def close(self) -> None:
        """Wait for the body-writer task to finish, if one is running."""
        if self.__writer is not None:
            try:
                await self.__writer
            except asyncio.CancelledError:
                # Only propagate cancellation when it was requested from
                # outside (the current task is itself being cancelled);
                # Python 3.11+ exposes this via Task.cancelling().
                if (
                    sys.version_info >= (3, 11)
                    and (task := asyncio.current_task())
                    and task.cancelling()
                ):
                    raise
|
| 801 |
+
|
| 802 |
+
    def terminate(self) -> None:
        """Cancel the body-writer task and detach it synchronously."""
        if self.__writer is not None:
            # Cancelling on a closed loop would raise; skip in that case.
            if not self.loop.is_closed():
                self.__writer.cancel()
            self.__writer.remove_done_callback(self.__reset_writer)
            self.__writer = None
|
| 808 |
+
|
| 809 |
+
    async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
        # Fan the chunk-sent event out to all configured traces.
        for trace in self._traces:
            await trace.send_request_chunk_sent(method, url, chunk)
|
| 812 |
+
|
| 813 |
+
    async def _on_headers_request_sent(
        self, method: str, url: URL, headers: "CIMultiDict[str]"
    ) -> None:
        # Fan the headers-sent event out to all configured traces.
        for trace in self._traces:
            await trace.send_request_headers(method, url, headers)
|
| 818 |
+
|
| 819 |
+
|
| 820 |
+
# Shared singleton used when notifying a response body stream that its
# connection went away (see ClientResponse._notify_content).
_CONNECTION_CLOSED_EXCEPTION = ClientConnectionError("Connection closed")
|
| 821 |
+
|
| 822 |
+
|
| 823 |
+
class ClientResponse(HeadersMixin):

    # Some of these attributes are None when created,
    # but will be set by the start() method.
    # As the end user will likely never see the None values, we cheat the types below.
    # from the Status-Line of the response
    version: Optional[HttpVersion] = None  # HTTP-Version
    status: int = None  # type: ignore[assignment]  # Status-Code
    reason: Optional[str] = None  # Reason-Phrase

    content: StreamReader = None  # type: ignore[assignment]  # Payload stream
    _body: Optional[bytes] = None  # cached raw body, filled by read()
    _headers: CIMultiDictProxy[str] = None  # type: ignore[assignment]
    _history: Tuple["ClientResponse", ...] = ()  # redirect chain
    _raw_headers: RawHeaders = None  # type: ignore[assignment]

    _connection: Optional["Connection"] = None  # current connection
    _cookies: Optional[SimpleCookie] = None  # lazily created by .cookies
    _continue: Optional["asyncio.Future[bool]"] = None  # 100-continue future
    _source_traceback: Optional[traceback.StackSummary] = None
    _session: Optional["ClientSession"] = None
    # set up by ClientRequest after ClientResponse object creation
    # post-init stage allows to not change ctor signature
    _closed = True  # to allow __del__ for non-initialized properly response
    _released = False
    _in_context = False  # True while inside "async with response"

    # fallback charset resolver; overridden from the session in __init__
    _resolve_charset: Callable[["ClientResponse", bytes], str] = lambda *_: "utf-8"

    # body-writer task; managed through the _writer property
    __writer: Optional["asyncio.Task[None]"] = None
|
| 853 |
+
|
| 854 |
+
    def __init__(
        self,
        method: str,
        url: URL,
        *,
        writer: "Optional[asyncio.Task[None]]",
        continue100: Optional["asyncio.Future[bool]"],
        timer: BaseTimerContext,
        request_info: RequestInfo,
        traces: List["Trace"],
        loop: asyncio.AbstractEventLoop,
        session: "ClientSession",
    ) -> None:
        """Initialize the response shell; network state is filled by start()."""
        # URL forbids subclasses, so a simple type check is enough.
        assert type(url) is URL

        self.method = method

        self._real_url = url
        # The fragment is stripped for the effective URL but preserved
        # in real_url.
        self._url = url.with_fragment(None) if url.raw_fragment else url
        if writer is not None:
            self._writer = writer
        if continue100 is not None:
            self._continue = continue100
        self._request_info = request_info
        self._timer = timer if timer is not None else TimerNoop()
        self._cache: Dict[str, Any] = {}
        self._traces = traces
        self._loop = loop
        # Save reference to _resolve_charset, so that get_encoding() will still
        # work after the response has finished reading the body.
        # TODO: Fix session=None in tests (see ClientRequest.__init__).
        if session is not None:
            # store a reference to session #1985
            self._session = session
            self._resolve_charset = session._resolve_charset
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))
|
| 892 |
+
|
| 893 |
+
    def __reset_writer(self, _: object = None) -> None:
        # Done-callback for the writer task: drop our reference once it finishes.
        self.__writer = None
|
| 895 |
+
|
| 896 |
+
    @property
    def _writer(self) -> Optional["asyncio.Task[None]"]:
        """The writer task for streaming data.

        _writer is only provided for backwards compatibility
        for subclasses that may need to access it.
        """
        return self.__writer
|
| 904 |
+
|
| 905 |
+
    @_writer.setter
    def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
        """Set the writer task for streaming data."""
        if self.__writer is not None:
            self.__writer.remove_done_callback(self.__reset_writer)
        self.__writer = writer
        if writer is None:
            return
        if writer.done():
            # The writer is already done, so we can clear it immediately.
            self.__writer = None
        else:
            writer.add_done_callback(self.__reset_writer)
|
| 918 |
+
|
| 919 |
+
@property
|
| 920 |
+
def cookies(self) -> SimpleCookie:
|
| 921 |
+
if self._cookies is None:
|
| 922 |
+
self._cookies = SimpleCookie()
|
| 923 |
+
return self._cookies
|
| 924 |
+
|
| 925 |
+
    @cookies.setter
    def cookies(self, cookies: SimpleCookie) -> None:
        # Replace the stored cookie jar wholesale.
        self._cookies = cookies
|
| 928 |
+
|
| 929 |
+
    @reify
    def url(self) -> URL:
        """Effective request URL (fragment stripped)."""
        return self._url
|
| 932 |
+
|
| 933 |
+
    @reify
    def url_obj(self) -> URL:
        """Deprecated alias for .url; emits a DeprecationWarning."""
        warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
        return self._url
|
| 937 |
+
|
| 938 |
+
    @reify
    def real_url(self) -> URL:
        """Original request URL, including any fragment."""
        return self._real_url
|
| 941 |
+
|
| 942 |
+
@reify
|
| 943 |
+
def host(self) -> str:
|
| 944 |
+
assert self._url.host is not None
|
| 945 |
+
return self._url.host
|
| 946 |
+
|
| 947 |
+
    @reify
    def headers(self) -> "CIMultiDictProxy[str]":
        """Parsed response headers (case-insensitive multidict)."""
        return self._headers
|
| 950 |
+
|
| 951 |
+
    @reify
    def raw_headers(self) -> RawHeaders:
        """Unparsed response headers as received on the wire."""
        return self._raw_headers
|
| 954 |
+
|
| 955 |
+
    @reify
    def request_info(self) -> RequestInfo:
        """Snapshot of the request that produced this response."""
        return self._request_info
|
| 958 |
+
|
| 959 |
+
    @reify
    def content_disposition(self) -> Optional[ContentDisposition]:
        """Parsed Content-Disposition header, or None when absent."""
        raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
        if raw is None:
            return None
        disposition_type, params_dct = multipart.parse_content_disposition(raw)
        # Expose parameters read-only.
        params = MappingProxyType(params_dct)
        filename = multipart.content_disposition_filename(params)
        return ContentDisposition(disposition_type, params, filename)
|
| 968 |
+
|
| 969 |
+
    def __del__(self, _warnings: Any = warnings) -> None:
        # Best-effort cleanup for responses the user forgot to close.
        if self._closed:
            return

        if self._connection is not None:
            self._connection.release()
            self._cleanup_writer()

        # In debug mode, surface the leak via ResourceWarning and the
        # loop's exception handler.
        if self._loop.get_debug():
            kwargs = {"source": self}
            _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
            context = {"client_response": self, "message": "Unclosed response"}
            if self._source_traceback:
                context["source_traceback"] = self._source_traceback
            self._loop.call_exception_handler(context)
|
| 984 |
+
|
| 985 |
+
def __repr__(self) -> str:
|
| 986 |
+
out = io.StringIO()
|
| 987 |
+
ascii_encodable_url = str(self.url)
|
| 988 |
+
if self.reason:
|
| 989 |
+
ascii_encodable_reason = self.reason.encode(
|
| 990 |
+
"ascii", "backslashreplace"
|
| 991 |
+
).decode("ascii")
|
| 992 |
+
else:
|
| 993 |
+
ascii_encodable_reason = "None"
|
| 994 |
+
print(
|
| 995 |
+
"<ClientResponse({}) [{} {}]>".format(
|
| 996 |
+
ascii_encodable_url, self.status, ascii_encodable_reason
|
| 997 |
+
),
|
| 998 |
+
file=out,
|
| 999 |
+
)
|
| 1000 |
+
print(self.headers, file=out)
|
| 1001 |
+
return out.getvalue()
|
| 1002 |
+
|
| 1003 |
+
    @property
    def connection(self) -> Optional["Connection"]:
        """The underlying connection, or None once released/detached."""
        return self._connection
|
| 1006 |
+
|
| 1007 |
+
    @reify
    def history(self) -> Tuple["ClientResponse", ...]:
        """A sequence of responses, if redirects occurred."""
        return self._history
|
| 1011 |
+
|
| 1012 |
+
    @reify
    def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
        """Parse the Link header(s) into a read-only multidict keyed by rel/url."""
        links_str = ", ".join(self.headers.getall("link", []))

        if not links_str:
            return MultiDictProxy(MultiDict())

        links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()

        # Split on commas that precede a "<...>" target to avoid splitting
        # inside parameter values.
        for val in re.split(r",(?=\s*<)", links_str):
            match = re.match(r"\s*<(.*)>(.*)", val)
            if match is None:  # pragma: no cover
                # the check exists to suppress mypy error
                continue
            url, params_str = match.groups()
            params = params_str.split(";")[1:]

            link: MultiDict[Union[str, URL]] = MultiDict()

            for param in params:
                # key=value, with optional single/double quoting of value.
                match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
                if match is None:  # pragma: no cover
                    # the check exists to suppress mypy error
                    continue
                key, _, value, _ = match.groups()

                link.add(key, value)

            # Prefer the "rel" parameter as the outer key; fall back to the URL.
            key = link.get("rel", url)

            # Resolve the link target relative to the response URL.
            link.add("url", self.url.join(URL(url)))

            links.add(str(key), MultiDictProxy(link))

        return MultiDictProxy(links)
|
| 1047 |
+
|
| 1048 |
+
    async def start(self, connection: "Connection") -> "ClientResponse":
        """Start response processing."""
        self._closed = False
        self._protocol = connection.protocol
        self._connection = connection

        with self._timer:
            while True:
                # read response
                try:
                    protocol = self._protocol
                    message, payload = await protocol.read()  # type: ignore[union-attr]
                except http.HttpProcessingError as exc:
                    raise ClientResponseError(
                        self.request_info,
                        self.history,
                        status=exc.code,
                        message=exc.message,
                        headers=exc.headers,
                    ) from exc

                # Interim 1xx responses (except 101) are skipped; keep reading
                # until the final response arrives.
                if message.code < 100 or message.code > 199 or message.code == 101:
                    break

                # Any 1xx counts as the go-ahead for an Expect: 100-continue body.
                if self._continue is not None:
                    set_result(self._continue, True)
                    self._continue = None

        # payload eof handler
        payload.on_eof(self._response_eof)

        # response status
        self.version = message.version
        self.status = message.code
        self.reason = message.reason

        # headers
        self._headers = message.headers  # type is CIMultiDictProxy
        self._raw_headers = message.raw_headers  # type is Tuple[bytes, bytes]

        # payload
        self.content = payload

        # cookies: malformed Set-Cookie values are logged and skipped,
        # not fatal.
        if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()):
            cookies = SimpleCookie()
            for hdr in cookie_hdrs:
                try:
                    cookies.load(hdr)
                except CookieError as exc:
                    client_logger.warning("Can not load response cookies: %s", exc)
            self._cookies = cookies
        return self
|
| 1101 |
+
|
| 1102 |
+
    def _response_eof(self) -> None:
        """Payload-EOF callback: close out the response and recycle the connection."""
        if self._closed:
            return

        # protocol could be None because connection could be detached
        protocol = self._connection and self._connection.protocol
        if protocol is not None and protocol.upgraded:
            # Upgraded connections (e.g. after 101) stay open for the caller.
            return

        self._closed = True
        self._cleanup_writer()
        self._release_connection()
|
| 1114 |
+
|
| 1115 |
+
    @property
    def closed(self) -> bool:
        """True once the response has been closed or fully released."""
        return self._closed
|
| 1118 |
+
|
| 1119 |
+
    def close(self) -> None:
        """Hard-close the response: the connection is closed, not recycled."""
        if not self._released:
            self._notify_content()

        self._closed = True
        # Nothing to tear down if the loop is already gone.
        if self._loop is None or self._loop.is_closed():
            return

        self._cleanup_writer()
        if self._connection is not None:
            self._connection.close()
            self._connection = None
|
| 1131 |
+
|
| 1132 |
+
    def release(self) -> Any:
        """Release the response; the connection may be returned to the pool."""
        if not self._released:
            self._notify_content()

        self._closed = True

        self._cleanup_writer()
        self._release_connection()
        # noop() keeps the historical awaitable return value.
        return noop()
|
| 1141 |
+
|
| 1142 |
+
@property
|
| 1143 |
+
def ok(self) -> bool:
|
| 1144 |
+
"""Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.
|
| 1145 |
+
|
| 1146 |
+
This is **not** a check for ``200 OK`` but a check that the response
|
| 1147 |
+
status is under 400.
|
| 1148 |
+
"""
|
| 1149 |
+
return 400 > self.status
|
| 1150 |
+
|
| 1151 |
+
    def raise_for_status(self) -> None:
        """Raise ClientResponseError for 4xx/5xx responses; no-op otherwise."""
        if not self.ok:
            # reason should always be not None for a started response
            assert self.reason is not None

            # If we're in a context we can rely on __aexit__() to release as the
            # exception propagates.
            if not self._in_context:
                self.release()

            raise ClientResponseError(
                self.request_info,
                self.history,
                status=self.status,
                message=self.reason,
                headers=self.headers,
            )
|
| 1168 |
+
|
| 1169 |
+
    def _release_connection(self) -> None:
        """Return the connection to the pool once the writer is done."""
        if self._connection is not None:
            if self.__writer is None:
                self._connection.release()
                self._connection = None
            else:
                # Defer the release until the body writer finishes.
                self.__writer.add_done_callback(lambda f: self._release_connection())
|
| 1176 |
+
|
| 1177 |
+
    async def _wait_released(self) -> None:
        """Await the writer task (if any) and then release the connection."""
        if self.__writer is not None:
            try:
                await self.__writer
            except asyncio.CancelledError:
                # Only propagate cancellation requested from outside
                # (Python 3.11+ Task.cancelling()).
                if (
                    sys.version_info >= (3, 11)
                    and (task := asyncio.current_task())
                    and task.cancelling()
                ):
                    raise
        self._release_connection()
|
| 1189 |
+
|
| 1190 |
+
    def _cleanup_writer(self) -> None:
        # Cancel any in-flight body writer and drop the session reference.
        if self.__writer is not None:
            self.__writer.cancel()
        self._session = None
|
| 1194 |
+
|
| 1195 |
+
    def _notify_content(self) -> None:
        """Mark the body stream as broken so pending readers fail fast."""
        content = self.content
        # Don't clobber an exception the stream already carries.
        if content and content.exception() is None:
            set_exception(content, _CONNECTION_CLOSED_EXCEPTION)
        self._released = True
|
| 1200 |
+
|
| 1201 |
+
    async def wait_for_close(self) -> None:
        """Await the writer task (if any) and then release the response."""
        if self.__writer is not None:
            try:
                await self.__writer
            except asyncio.CancelledError:
                # Only propagate cancellation requested from outside
                # (Python 3.11+ Task.cancelling()).
                if (
                    sys.version_info >= (3, 11)
                    and (task := asyncio.current_task())
                    and task.cancelling()
                ):
                    raise
        self.release()
|
| 1213 |
+
|
| 1214 |
+
    async def read(self) -> bytes:
        """Read response payload.

        The body is cached after the first read; subsequent calls return the
        cached bytes. Raises ClientConnectionError if the response was
        released before the body was read.
        """
        if self._body is None:
            try:
                self._body = await self.content.read()
                for trace in self._traces:
                    await trace.send_response_chunk_received(
                        self.method, self.url, self._body
                    )
            except BaseException:
                # A partial read leaves the connection unusable.
                self.close()
                raise
        elif self._released:  # Response explicitly released
            raise ClientConnectionError("Connection closed")

        protocol = self._connection and self._connection.protocol
        if protocol is None or not protocol.upgraded:
            await self._wait_released()  # Underlying connection released
        return self._body
|
| 1233 |
+
|
| 1234 |
+
    def get_encoding(self) -> str:
        """Determine the body's text encoding.

        Order: charset from Content-Type, JSON/RDAP default (UTF-8), then the
        session's fallback charset resolver (requires the body to be read).
        """
        ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
        mimetype = helpers.parse_mimetype(ctype)

        encoding = mimetype.parameters.get("charset")
        if encoding:
            # Unknown/invalid charset names fall through to the next strategy.
            with contextlib.suppress(LookupError, ValueError):
                return codecs.lookup(encoding).name

        if mimetype.type == "application" and (
            mimetype.subtype == "json" or mimetype.subtype == "rdap"
        ):
            # RFC 7159 states that the default encoding is UTF-8.
            # RFC 7483 defines application/rdap+json
            return "utf-8"

        if self._body is None:
            raise RuntimeError(
                "Cannot compute fallback encoding of a not yet read body"
            )

        return self._resolve_charset(self, self._body)
|
| 1256 |
+
|
| 1257 |
+
async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
    """Read the response payload and decode it to ``str``.

    When *encoding* is not given it is detected via ``get_encoding()``.
    """
    if self._body is None:
        await self.read()

    resolved = encoding if encoding is not None else self.get_encoding()
    return self._body.decode(resolved, errors=errors)  # type: ignore[union-attr]
|
| 1266 |
+
|
| 1267 |
+
async def json(
    self,
    *,
    encoding: Optional[str] = None,
    loads: JSONDecoder = DEFAULT_JSON_DECODER,
    content_type: Optional[str] = "application/json",
) -> Any:
    """Read and decodes JSON response.

    Args:
        encoding: charset used to decode the body; detected via
            ``get_encoding()`` when not given.
        loads: JSON decoder callable applied to the decoded text.
        content_type: expected mimetype; pass ``None`` to skip the
            Content-Type check.

    Returns:
        The decoded object, or ``None`` for an empty/whitespace body.

    Raises:
        ContentTypeError: if the response Content-Type does not match
            *content_type*.
    """
    if self._body is None:
        await self.read()

    if content_type:
        ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
        if not _is_expected_content_type(ctype, content_type):
            raise ContentTypeError(
                self.request_info,
                self.history,
                status=self.status,
                message=(
                    "Attempt to decode JSON with unexpected mimetype: %s" % ctype
                ),
                headers=self.headers,
            )

    stripped = self._body.strip()  # type: ignore[union-attr]
    if not stripped:
        return None

    if encoding is None:
        encoding = self.get_encoding()

    return loads(stripped.decode(encoding))
|
| 1299 |
+
|
| 1300 |
+
async def __aenter__(self) -> "ClientResponse":
    """Enter the async context; marks the response as context-managed."""
    self._in_context = True
    return self
|
| 1303 |
+
|
| 1304 |
+
async def __aexit__(
    self,
    exc_type: Optional[Type[BaseException]],
    exc_val: Optional[BaseException],
    exc_tb: Optional[TracebackType],
) -> None:
    """Exit the async context: release the response and wait for close."""
    self._in_context = False
    # similar to _RequestContextManager, we do not need to check
    # for exceptions, response object can close connection
    # if state is broken
    self.release()
    await self.wait_for_close()
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/client_ws.py
ADDED
|
@@ -0,0 +1,426 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""WebSocket client for asyncio."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import sys
|
| 5 |
+
from types import TracebackType
|
| 6 |
+
from typing import Any, Optional, Type, cast
|
| 7 |
+
|
| 8 |
+
import attr
|
| 9 |
+
|
| 10 |
+
from ._websocket.reader import WebSocketDataQueue
|
| 11 |
+
from .client_exceptions import ClientError, ServerTimeoutError, WSMessageTypeError
|
| 12 |
+
from .client_reqrep import ClientResponse
|
| 13 |
+
from .helpers import calculate_timeout_when, set_result
|
| 14 |
+
from .http import (
|
| 15 |
+
WS_CLOSED_MESSAGE,
|
| 16 |
+
WS_CLOSING_MESSAGE,
|
| 17 |
+
WebSocketError,
|
| 18 |
+
WSCloseCode,
|
| 19 |
+
WSMessage,
|
| 20 |
+
WSMsgType,
|
| 21 |
+
)
|
| 22 |
+
from .http_websocket import _INTERNAL_RECEIVE_TYPES, WebSocketWriter
|
| 23 |
+
from .streams import EofStream
|
| 24 |
+
from .typedefs import (
|
| 25 |
+
DEFAULT_JSON_DECODER,
|
| 26 |
+
DEFAULT_JSON_ENCODER,
|
| 27 |
+
JSONDecoder,
|
| 28 |
+
JSONEncoder,
|
| 29 |
+
)
|
| 30 |
+
|
| 31 |
+
if sys.version_info >= (3, 11):
|
| 32 |
+
import asyncio as async_timeout
|
| 33 |
+
else:
|
| 34 |
+
import async_timeout
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
@attr.s(frozen=True, slots=True)
class ClientWSTimeout:
    """Timeouts for websocket client operations (seconds; None = no limit)."""

    # Timeout applied to a single receive() call.
    ws_receive = attr.ib(type=Optional[float], default=None)
    # Timeout for the closing handshake performed by close().
    ws_close = attr.ib(type=Optional[float], default=None)


# Default: no receive timeout, 10 seconds to complete the close handshake.
DEFAULT_WS_CLIENT_TIMEOUT = ClientWSTimeout(ws_receive=None, ws_close=10.0)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
class ClientWebSocketResponse:
    """Client-side websocket connection.

    Wraps a reader/writer pair obtained after a successful websocket
    upgrade and provides send/receive helpers, optional automatic
    ping/pong handling, an optional heartbeat, and the closing handshake.
    """

    def __init__(
        self,
        reader: WebSocketDataQueue,
        writer: WebSocketWriter,
        protocol: Optional[str],
        response: ClientResponse,
        timeout: ClientWSTimeout,
        autoclose: bool,
        autoping: bool,
        loop: asyncio.AbstractEventLoop,
        *,
        heartbeat: Optional[float] = None,
        compress: int = 0,
        client_notakeover: bool = False,
    ) -> None:
        self._response = response
        self._conn = response.connection

        self._writer = writer
        self._reader = reader
        self._protocol = protocol
        self._closed = False
        self._closing = False
        self._close_code: Optional[int] = None
        self._timeout = timeout
        self._autoclose = autoclose
        self._autoping = autoping
        self._heartbeat = heartbeat
        self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
        self._heartbeat_when: float = 0.0
        if heartbeat is not None:
            # Expect a pong within half the heartbeat interval.
            self._pong_heartbeat = heartbeat / 2.0
        self._pong_response_cb: Optional[asyncio.TimerHandle] = None
        self._loop = loop
        self._waiting: bool = False
        self._close_wait: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        self._compress = compress
        self._client_notakeover = client_notakeover
        self._ping_task: Optional[asyncio.Task[None]] = None

        self._reset_heartbeat()

    def _cancel_heartbeat(self) -> None:
        """Cancel the heartbeat timer, pong timer, and in-flight ping task."""
        self._cancel_pong_response_cb()
        if self._heartbeat_cb is not None:
            self._heartbeat_cb.cancel()
            self._heartbeat_cb = None
        if self._ping_task is not None:
            self._ping_task.cancel()
            self._ping_task = None

    def _cancel_pong_response_cb(self) -> None:
        """Cancel the pending pong-timeout callback, if any."""
        if self._pong_response_cb is not None:
            self._pong_response_cb.cancel()
            self._pong_response_cb = None

    def _reset_heartbeat(self) -> None:
        """(Re)arm the heartbeat timer; no-op when heartbeat is disabled."""
        if self._heartbeat is None:
            return
        self._cancel_pong_response_cb()
        loop = self._loop
        assert loop is not None
        conn = self._conn
        timeout_ceil_threshold = (
            conn._connector._timeout_ceil_threshold if conn is not None else 5
        )
        now = loop.time()
        when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold)
        self._heartbeat_when = when
        if self._heartbeat_cb is None:
            # We do not cancel the previous heartbeat_cb here because
            # it generates a significant amount of TimerHandle churn
            # which causes asyncio to rebuild the heap frequently.
            # Instead _send_heartbeat() will reschedule the next
            # heartbeat if it fires too early.
            self._heartbeat_cb = loop.call_at(when, self._send_heartbeat)

    def _send_heartbeat(self) -> None:
        """Timer callback: send a PING and arm the pong-timeout timer."""
        self._heartbeat_cb = None
        loop = self._loop
        now = loop.time()
        if now < self._heartbeat_when:
            # Heartbeat fired too early, reschedule
            self._heartbeat_cb = loop.call_at(
                self._heartbeat_when, self._send_heartbeat
            )
            return

        conn = self._conn
        timeout_ceil_threshold = (
            conn._connector._timeout_ceil_threshold if conn is not None else 5
        )
        when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold)
        self._cancel_pong_response_cb()
        self._pong_response_cb = loop.call_at(when, self._pong_not_received)

        coro = self._writer.send_frame(b"", WSMsgType.PING)
        if sys.version_info >= (3, 12):
            # Optimization for Python 3.12, try to send the ping
            # immediately to avoid having to schedule
            # the task on the event loop.
            ping_task = asyncio.Task(coro, loop=loop, eager_start=True)
        else:
            ping_task = loop.create_task(coro)

        if not ping_task.done():
            self._ping_task = ping_task
            ping_task.add_done_callback(self._ping_task_done)
        else:
            self._ping_task_done(ping_task)

    def _ping_task_done(self, task: "asyncio.Task[None]") -> None:
        """Callback for when the ping task completes."""
        if not task.cancelled() and (exc := task.exception()):
            self._handle_ping_pong_exception(exc)
        self._ping_task = None

    def _pong_not_received(self) -> None:
        """Timer callback: no pong arrived in time; treat as a timeout."""
        self._handle_ping_pong_exception(ServerTimeoutError())

    def _handle_ping_pong_exception(self, exc: BaseException) -> None:
        """Handle exceptions raised during ping/pong processing."""
        if self._closed:
            return
        self._set_closed()
        self._close_code = WSCloseCode.ABNORMAL_CLOSURE
        self._exception = exc
        self._response.close()
        if self._waiting and not self._closing:
            # Wake a pending receive() with an ERROR message.
            self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None), 0)

    def _set_closed(self) -> None:
        """Set the connection to closed.

        Cancel any heartbeat timers and set the closed flag.
        """
        self._closed = True
        self._cancel_heartbeat()

    def _set_closing(self) -> None:
        """Set the connection to closing.

        Cancel any heartbeat timers and set the closing flag.
        """
        self._closing = True
        self._cancel_heartbeat()

    @property
    def closed(self) -> bool:
        """True once the connection has been closed."""
        return self._closed

    @property
    def close_code(self) -> Optional[int]:
        """Close code of the connection, or None while still open."""
        return self._close_code

    @property
    def protocol(self) -> Optional[str]:
        """Negotiated websocket subprotocol, if any."""
        return self._protocol

    @property
    def compress(self) -> int:
        """Negotiated per-message compression window size (0 = off)."""
        return self._compress

    @property
    def client_notakeover(self) -> bool:
        """True when client_no_context_takeover was negotiated."""
        return self._client_notakeover

    def get_extra_info(self, name: str, default: Any = None) -> Any:
        """extra info from connection transport"""
        conn = self._response.connection
        if conn is None:
            return default
        transport = conn.transport
        if transport is None:
            return default
        return transport.get_extra_info(name, default)

    def exception(self) -> Optional[BaseException]:
        """Last exception recorded on this connection, if any."""
        return self._exception

    async def ping(self, message: bytes = b"") -> None:
        """Send a PING frame with an optional payload."""
        await self._writer.send_frame(message, WSMsgType.PING)

    async def pong(self, message: bytes = b"") -> None:
        """Send a PONG frame with an optional payload."""
        await self._writer.send_frame(message, WSMsgType.PONG)

    async def send_frame(
        self, message: bytes, opcode: WSMsgType, compress: Optional[int] = None
    ) -> None:
        """Send a frame over the websocket."""
        await self._writer.send_frame(message, opcode, compress)

    async def send_str(self, data: str, compress: Optional[int] = None) -> None:
        """Send *data* as a TEXT frame; raises TypeError for non-str input."""
        if not isinstance(data, str):
            raise TypeError("data argument must be str (%r)" % type(data))
        await self._writer.send_frame(
            data.encode("utf-8"), WSMsgType.TEXT, compress=compress
        )

    async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
        """Send *data* as a BINARY frame; raises TypeError for non-bytes-like input."""
        if not isinstance(data, (bytes, bytearray, memoryview)):
            raise TypeError("data argument must be byte-ish (%r)" % type(data))
        await self._writer.send_frame(data, WSMsgType.BINARY, compress=compress)

    async def send_json(
        self,
        data: Any,
        compress: Optional[int] = None,
        *,
        dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
    ) -> None:
        """Serialize *data* with *dumps* and send it as a TEXT frame."""
        await self.send_str(dumps(data), compress=compress)

    async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
        """Perform the closing handshake.

        Returns True if this call closed the connection, False if it was
        already closed.
        """
        # we need to break `receive()` cycle first,
        # `close()` may be called from different task
        if self._waiting and not self._closing:
            assert self._loop is not None
            self._close_wait = self._loop.create_future()
            self._set_closing()
            self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
            await self._close_wait

        if self._closed:
            return False

        self._set_closed()
        try:
            await self._writer.close(code, message)
        except asyncio.CancelledError:
            self._close_code = WSCloseCode.ABNORMAL_CLOSURE
            self._response.close()
            raise
        except Exception as exc:
            self._close_code = WSCloseCode.ABNORMAL_CLOSURE
            self._exception = exc
            self._response.close()
            return True

        if self._close_code:
            self._response.close()
            return True

        # Drain incoming frames until the peer's CLOSE frame arrives
        # (bounded by the ws_close timeout).
        while True:
            try:
                async with async_timeout.timeout(self._timeout.ws_close):
                    msg = await self._reader.read()
            except asyncio.CancelledError:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._response.close()
                raise
            except Exception as exc:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._exception = exc
                self._response.close()
                return True

            if msg.type is WSMsgType.CLOSE:
                self._close_code = msg.data
                self._response.close()
                return True

    async def receive(self, timeout: Optional[float] = None) -> WSMessage:
        """Receive the next message, handling control frames internally.

        PING/PONG frames are answered/skipped when autoping is enabled;
        CLOSE/CLOSING frames update connection state (and trigger close()
        when autoclose is enabled).  Returns WS_CLOSED_MESSAGE once the
        connection is closed.
        """
        receive_timeout = timeout or self._timeout.ws_receive

        while True:
            if self._waiting:
                raise RuntimeError("Concurrent call to receive() is not allowed")

            if self._closed:
                return WS_CLOSED_MESSAGE
            elif self._closing:
                await self.close()
                return WS_CLOSED_MESSAGE

            try:
                self._waiting = True
                try:
                    if receive_timeout:
                        # Entering the context manager and creating
                        # Timeout() object can take almost 50% of the
                        # run time in this loop so we avoid it if
                        # there is no read timeout.
                        async with async_timeout.timeout(receive_timeout):
                            msg = await self._reader.read()
                    else:
                        msg = await self._reader.read()
                    self._reset_heartbeat()
                finally:
                    self._waiting = False
                    if self._close_wait:
                        set_result(self._close_wait, None)
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                raise
            except EofStream:
                self._close_code = WSCloseCode.OK
                await self.close()
                return WSMessage(WSMsgType.CLOSED, None, None)
            except ClientError:
                # Likely ServerDisconnectedError when connection is lost
                self._set_closed()
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                return WS_CLOSED_MESSAGE
            except WebSocketError as exc:
                self._close_code = exc.code
                await self.close(code=exc.code)
                return WSMessage(WSMsgType.ERROR, exc, None)
            except Exception as exc:
                self._exception = exc
                self._set_closing()
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                await self.close()
                return WSMessage(WSMsgType.ERROR, exc, None)

            if msg.type not in _INTERNAL_RECEIVE_TYPES:
                # If its not a close/closing/ping/pong message
                # we can return it immediately
                return msg

            if msg.type is WSMsgType.CLOSE:
                self._set_closing()
                self._close_code = msg.data
                if not self._closed and self._autoclose:
                    await self.close()
            elif msg.type is WSMsgType.CLOSING:
                self._set_closing()
            elif msg.type is WSMsgType.PING and self._autoping:
                await self.pong(msg.data)
                continue
            elif msg.type is WSMsgType.PONG and self._autoping:
                continue

            return msg

    async def receive_str(self, *, timeout: Optional[float] = None) -> str:
        """Receive the next message and return its TEXT payload."""
        msg = await self.receive(timeout)
        if msg.type is not WSMsgType.TEXT:
            raise WSMessageTypeError(
                f"Received message {msg.type}:{msg.data!r} is not WSMsgType.TEXT"
            )
        return cast(str, msg.data)

    async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
        """Receive the next message and return its BINARY payload."""
        msg = await self.receive(timeout)
        if msg.type is not WSMsgType.BINARY:
            raise WSMessageTypeError(
                f"Received message {msg.type}:{msg.data!r} is not WSMsgType.BINARY"
            )
        return cast(bytes, msg.data)

    async def receive_json(
        self,
        *,
        loads: JSONDecoder = DEFAULT_JSON_DECODER,
        timeout: Optional[float] = None,
    ) -> Any:
        """Receive a TEXT message and decode it with *loads*."""
        data = await self.receive_str(timeout=timeout)
        return loads(data)

    def __aiter__(self) -> "ClientWebSocketResponse":
        """Iterate over incoming messages until the connection closes."""
        return self

    async def __anext__(self) -> WSMessage:
        """Return the next data message; stop iteration on close."""
        msg = await self.receive()
        if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
            raise StopAsyncIteration
        return msg

    async def __aenter__(self) -> "ClientWebSocketResponse":
        """Enter the async context manager."""
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        """Close the websocket on context exit."""
        await self.close()
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/compression_utils.py
ADDED
|
@@ -0,0 +1,173 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import zlib
|
| 3 |
+
from concurrent.futures import Executor
|
| 4 |
+
from typing import Optional, cast
|
| 5 |
+
|
| 6 |
+
try:
|
| 7 |
+
try:
|
| 8 |
+
import brotlicffi as brotli
|
| 9 |
+
except ImportError:
|
| 10 |
+
import brotli
|
| 11 |
+
|
| 12 |
+
HAS_BROTLI = True
|
| 13 |
+
except ImportError: # pragma: no cover
|
| 14 |
+
HAS_BROTLI = False
|
| 15 |
+
|
| 16 |
+
MAX_SYNC_CHUNK_SIZE = 1024
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def encoding_to_mode(
    encoding: Optional[str] = None,
    suppress_deflate_header: bool = False,
) -> int:
    """Map a content encoding to the zlib window-bits ("wbits") value.

    "gzip" selects gzip framing; anything else selects raw deflate
    (negative wbits) when *suppress_deflate_header* is set, otherwise
    zlib-wrapped deflate.
    """
    if encoding == "gzip":
        # Adding 16 to MAX_WBITS enables the gzip container format.
        return 16 + zlib.MAX_WBITS
    if suppress_deflate_header:
        return -zlib.MAX_WBITS
    return zlib.MAX_WBITS
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class ZlibBaseHandler:
    """Shared configuration for the zlib compressor/decompressor wrappers."""

    def __init__(
        self,
        mode: int,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        # zlib window-bits mode (see encoding_to_mode()).
        self._mode = mode
        # Chunks larger than this are processed in the executor;
        # None disables offloading entirely.
        self._max_sync_chunk_size = max_sync_chunk_size
        # Executor used for offloaded work (None -> loop default executor).
        self._executor = executor
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class ZLibCompressor(ZlibBaseHandler):
    """Asynchronous wrapper around :func:`zlib.compressobj`.

    Large chunks are compressed in an executor so the event loop is not
    blocked; small chunks are compressed inline.
    """

    def __init__(
        self,
        encoding: Optional[str] = None,
        suppress_deflate_header: bool = False,
        level: Optional[int] = None,
        wbits: Optional[int] = None,
        strategy: int = zlib.Z_DEFAULT_STRATEGY,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        # An explicit wbits value overrides the encoding-derived mode.
        if wbits is None:
            mode = encoding_to_mode(encoding, suppress_deflate_header)
        else:
            mode = wbits
        super().__init__(
            mode=mode,
            executor=executor,
            max_sync_chunk_size=max_sync_chunk_size,
        )
        # Only pass level through when explicitly requested so the zlib
        # default is used otherwise.
        kwargs = {"wbits": self._mode, "strategy": strategy}
        if level is not None:
            kwargs["level"] = level
        self._compressor = zlib.compressobj(**kwargs)
        # Serializes concurrent writers so the compressed stream stays valid.
        self._compress_lock = asyncio.Lock()

    def compress_sync(self, data: bytes) -> bytes:
        """Compress *data* synchronously on the calling thread."""
        return self._compressor.compress(data)

    async def compress(self, data: bytes) -> bytes:
        """Compress the data and returned the compressed bytes.

        Note that flush() must be called after the last call to compress()

        If the data size is large than the max_sync_chunk_size, the compression
        will be done in the executor. Otherwise, the compression will be done
        in the event loop.
        """
        async with self._compress_lock:
            # To ensure the stream is consistent in the event
            # there are multiple writers, we need to lock
            # the compressor so that only one writer can
            # compress at a time.
            offload = (
                self._max_sync_chunk_size is not None
                and len(data) > self._max_sync_chunk_size
            )
            if offload:
                return await asyncio.get_running_loop().run_in_executor(
                    self._executor, self._compressor.compress, data
                )
            return self.compress_sync(data)

    def flush(self, mode: int = zlib.Z_FINISH) -> bytes:
        """Flush pending compressed output (Z_FINISH by default)."""
        return self._compressor.flush(mode)
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class ZLibDecompressor(ZlibBaseHandler):
    """Asynchronous wrapper around :func:`zlib.decompressobj`."""

    def __init__(
        self,
        encoding: Optional[str] = None,
        suppress_deflate_header: bool = False,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        super().__init__(
            mode=encoding_to_mode(encoding, suppress_deflate_header),
            executor=executor,
            max_sync_chunk_size=max_sync_chunk_size,
        )
        self._decompressor = zlib.decompressobj(wbits=self._mode)

    def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes:
        """Decompress *data* synchronously on the calling thread."""
        return self._decompressor.decompress(data, max_length)

    async def decompress(self, data: bytes, max_length: int = 0) -> bytes:
        """Decompress the data and return the decompressed bytes.

        If the data size is large than the max_sync_chunk_size, the decompression
        will be done in the executor. Otherwise, the decompression will be done
        in the event loop.
        """
        offload = (
            self._max_sync_chunk_size is not None
            and len(data) > self._max_sync_chunk_size
        )
        if offload:
            return await asyncio.get_running_loop().run_in_executor(
                self._executor, self._decompressor.decompress, data, max_length
            )
        return self.decompress_sync(data, max_length)

    def flush(self, length: int = 0) -> bytes:
        """Flush remaining decompressed output."""
        if length > 0:
            return self._decompressor.flush(length)
        return self._decompressor.flush()

    @property
    def eof(self) -> bool:
        """True once the end of the compressed stream has been reached."""
        return self._decompressor.eof

    @property
    def unconsumed_tail(self) -> bytes:
        """Input that exceeded the max_length limit of the last call."""
        return self._decompressor.unconsumed_tail

    @property
    def unused_data(self) -> bytes:
        """Bytes found past the end of the compressed stream."""
        return self._decompressor.unused_data
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
class BrotliDecompressor:
    """Incremental brotli decompressor.

    Supports both the 'brotlipy' and 'Brotli' packages, which share an
    import name but expose slightly different decompressor APIs.
    """

    def __init__(self) -> None:
        if not HAS_BROTLI:
            raise RuntimeError(
                "The brotli decompression is not available. "
                "Please install `Brotli` module"
            )
        self._obj = brotli.Decompressor()

    def decompress_sync(self, data: bytes) -> bytes:
        """Feed *data* to the decompressor and return any available output."""
        # 'Brotli' exposes decompress(); 'brotlipy' exposes process().
        decompress = getattr(self._obj, "decompress", None)
        if decompress is not None:
            return cast(bytes, decompress(data))
        return cast(bytes, self._obj.process(data))

    def flush(self) -> bytes:
        """Flush pending output; a no-op for backends without flush()."""
        flush = getattr(self._obj, "flush", None)
        return cast(bytes, flush()) if flush is not None else b""
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/formdata.py
ADDED
|
@@ -0,0 +1,182 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import warnings
|
| 3 |
+
from typing import Any, Iterable, List, Optional
|
| 4 |
+
from urllib.parse import urlencode
|
| 5 |
+
|
| 6 |
+
from multidict import MultiDict, MultiDictProxy
|
| 7 |
+
|
| 8 |
+
from . import hdrs, multipart, payload
|
| 9 |
+
from .helpers import guess_filename
|
| 10 |
+
from .payload import Payload
|
| 11 |
+
|
| 12 |
+
__all__ = ("FormData",)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class FormData:
    """Helper class for form body generation.

    Supports multipart/form-data and application/x-www-form-urlencoded.
    """

    def __init__(
        self,
        fields: Iterable[Any] = (),
        quote_fields: bool = True,
        charset: Optional[str] = None,
        *,
        default_to_multipart: bool = False,
    ) -> None:
        # Writer used when the form is serialized as multipart/form-data.
        self._writer = multipart.MultipartWriter("form-data")
        # Accumulated (disposition-params, headers, value) triples.
        self._fields: List[Any] = []
        # Flag flips to True as soon as any field requires multipart encoding.
        self._is_multipart = default_to_multipart
        # Guards against serializing the multipart writer twice.
        self._is_processed = False
        self._quote_fields = quote_fields
        self._charset = charset

        # Normalize the accepted input shapes (dict, single item, sequence).
        if isinstance(fields, dict):
            fields = list(fields.items())
        elif not isinstance(fields, (list, tuple)):
            fields = (fields,)
        self.add_fields(*fields)

    @property
    def is_multipart(self) -> bool:
        # True when the body will be encoded as multipart/form-data.
        return self._is_multipart

    def add_field(
        self,
        name: str,
        value: Any,
        *,
        content_type: Optional[str] = None,
        filename: Optional[str] = None,
        content_transfer_encoding: Optional[str] = None,
    ) -> None:
        """Add a single field.

        File-like values, an explicit filename, content_type or
        content_transfer_encoding all force multipart encoding.
        """
        if isinstance(value, io.IOBase):
            self._is_multipart = True
        elif isinstance(value, (bytes, bytearray, memoryview)):
            msg = (
                "In v4, passing bytes will no longer create a file field. "
                "Please explicitly use the filename parameter or pass a BytesIO object."
            )
            # Deprecated legacy behavior: bare bytes become a file field
            # named after the form field.
            if filename is None and content_transfer_encoding is None:
                warnings.warn(msg, DeprecationWarning)
                filename = name

        type_options: MultiDict[str] = MultiDict({"name": name})
        if filename is not None and not isinstance(filename, str):
            raise TypeError("filename must be an instance of str. Got: %s" % filename)
        if filename is None and isinstance(value, io.IOBase):
            # Try to derive a filename from the file object itself.
            filename = guess_filename(value, name)
        if filename is not None:
            type_options["filename"] = filename
            self._is_multipart = True

        headers = {}
        if content_type is not None:
            if not isinstance(content_type, str):
                raise TypeError(
                    "content_type must be an instance of str. Got: %s" % content_type
                )
            headers[hdrs.CONTENT_TYPE] = content_type
            self._is_multipart = True
        if content_transfer_encoding is not None:
            if not isinstance(content_transfer_encoding, str):
                raise TypeError(
                    "content_transfer_encoding must be an instance"
                    " of str. Got: %s" % content_transfer_encoding
                )
            msg = (
                "content_transfer_encoding is deprecated. "
                "To maintain compatibility with v4 please pass a BytesPayload."
            )
            warnings.warn(msg, DeprecationWarning)
            self._is_multipart = True

        self._fields.append((type_options, headers, value))

    def add_fields(self, *fields: Any) -> None:
        """Add multiple fields.

        Accepts file objects, multidicts and (name, value) pairs.
        """
        to_add = list(fields)

        while to_add:
            rec = to_add.pop(0)

            if isinstance(rec, io.IOBase):
                k = guess_filename(rec, "unknown")
                self.add_field(k, rec)  # type: ignore[arg-type]

            elif isinstance(rec, (MultiDictProxy, MultiDict)):
                # Flatten multidicts into (name, value) pairs.
                to_add.extend(rec.items())

            elif isinstance(rec, (list, tuple)) and len(rec) == 2:
                k, fp = rec
                self.add_field(k, fp)  # type: ignore[arg-type]

            else:
                raise TypeError(
                    "Only io.IOBase, multidict and (name, file) "
                    "pairs allowed, use .add_field() for passing "
                    "more complex parameters, got {!r}".format(rec)
                )

    def _gen_form_urlencoded(self) -> payload.BytesPayload:
        """Serialize fields as an application/x-www-form-urlencoded payload."""
        # form data (x-www-form-urlencoded)
        data = []
        for type_options, _, value in self._fields:
            data.append((type_options["name"], value))

        charset = self._charset if self._charset is not None else "utf-8"

        # The charset parameter is only emitted for non-default charsets.
        if charset == "utf-8":
            content_type = "application/x-www-form-urlencoded"
        else:
            content_type = "application/x-www-form-urlencoded; charset=%s" % charset

        return payload.BytesPayload(
            urlencode(data, doseq=True, encoding=charset).encode(),
            content_type=content_type,
        )

    def _gen_form_data(self) -> multipart.MultipartWriter:
        """Encode a list of fields using the multipart/form-data MIME format"""
        if self._is_processed:
            raise RuntimeError("Form data has been processed already")
        for dispparams, headers, value in self._fields:
            try:
                if hdrs.CONTENT_TYPE in headers:
                    part = payload.get_payload(
                        value,
                        content_type=headers[hdrs.CONTENT_TYPE],
                        headers=headers,
                        encoding=self._charset,
                    )
                else:
                    part = payload.get_payload(
                        value, headers=headers, encoding=self._charset
                    )
            except Exception as exc:
                raise TypeError(
                    "Can not serialize value type: %r\n "
                    "headers: %r\n value: %r" % (type(value), headers, value)
                ) from exc

            if dispparams:
                part.set_content_disposition(
                    "form-data", quote_fields=self._quote_fields, **dispparams
                )
                # FIXME cgi.FieldStorage doesn't likes body parts with
                # Content-Length which were sent via chunked transfer encoding
                assert part.headers is not None
                part.headers.popall(hdrs.CONTENT_LENGTH, None)

            self._writer.append_payload(part)

        # One-shot: the multipart writer cannot be rebuilt from the same fields.
        self._is_processed = True
        return self._writer

    def __call__(self) -> Payload:
        # Dispatch to the encoder matching the accumulated fields.
        if self._is_multipart:
            return self._gen_form_data()
        else:
            return self._gen_form_urlencoded()
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/hdrs.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""HTTP Headers constants."""
|
| 2 |
+
|
| 3 |
+
# After changing the file content call ./tools/gen.py
|
| 4 |
+
# to regenerate the headers parser
|
| 5 |
+
import itertools
|
| 6 |
+
from typing import Final, Set
|
| 7 |
+
|
| 8 |
+
from multidict import istr
|
| 9 |
+
|
| 10 |
+
METH_ANY: Final[str] = "*"
|
| 11 |
+
METH_CONNECT: Final[str] = "CONNECT"
|
| 12 |
+
METH_HEAD: Final[str] = "HEAD"
|
| 13 |
+
METH_GET: Final[str] = "GET"
|
| 14 |
+
METH_DELETE: Final[str] = "DELETE"
|
| 15 |
+
METH_OPTIONS: Final[str] = "OPTIONS"
|
| 16 |
+
METH_PATCH: Final[str] = "PATCH"
|
| 17 |
+
METH_POST: Final[str] = "POST"
|
| 18 |
+
METH_PUT: Final[str] = "PUT"
|
| 19 |
+
METH_TRACE: Final[str] = "TRACE"
|
| 20 |
+
|
| 21 |
+
METH_ALL: Final[Set[str]] = {
|
| 22 |
+
METH_CONNECT,
|
| 23 |
+
METH_HEAD,
|
| 24 |
+
METH_GET,
|
| 25 |
+
METH_DELETE,
|
| 26 |
+
METH_OPTIONS,
|
| 27 |
+
METH_PATCH,
|
| 28 |
+
METH_POST,
|
| 29 |
+
METH_PUT,
|
| 30 |
+
METH_TRACE,
|
| 31 |
+
}
|
| 32 |
+
|
| 33 |
+
ACCEPT: Final[istr] = istr("Accept")
|
| 34 |
+
ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset")
|
| 35 |
+
ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding")
|
| 36 |
+
ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language")
|
| 37 |
+
ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges")
|
| 38 |
+
ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age")
|
| 39 |
+
ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials")
|
| 40 |
+
ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers")
|
| 41 |
+
ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods")
|
| 42 |
+
ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin")
|
| 43 |
+
ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers")
|
| 44 |
+
ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers")
|
| 45 |
+
ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method")
|
| 46 |
+
AGE: Final[istr] = istr("Age")
|
| 47 |
+
ALLOW: Final[istr] = istr("Allow")
|
| 48 |
+
AUTHORIZATION: Final[istr] = istr("Authorization")
|
| 49 |
+
CACHE_CONTROL: Final[istr] = istr("Cache-Control")
|
| 50 |
+
CONNECTION: Final[istr] = istr("Connection")
|
| 51 |
+
CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition")
|
| 52 |
+
CONTENT_ENCODING: Final[istr] = istr("Content-Encoding")
|
| 53 |
+
CONTENT_LANGUAGE: Final[istr] = istr("Content-Language")
|
| 54 |
+
CONTENT_LENGTH: Final[istr] = istr("Content-Length")
|
| 55 |
+
CONTENT_LOCATION: Final[istr] = istr("Content-Location")
|
| 56 |
+
CONTENT_MD5: Final[istr] = istr("Content-MD5")
|
| 57 |
+
CONTENT_RANGE: Final[istr] = istr("Content-Range")
|
| 58 |
+
CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding")
|
| 59 |
+
CONTENT_TYPE: Final[istr] = istr("Content-Type")
|
| 60 |
+
COOKIE: Final[istr] = istr("Cookie")
|
| 61 |
+
DATE: Final[istr] = istr("Date")
|
| 62 |
+
DESTINATION: Final[istr] = istr("Destination")
|
| 63 |
+
DIGEST: Final[istr] = istr("Digest")
|
| 64 |
+
ETAG: Final[istr] = istr("Etag")
|
| 65 |
+
EXPECT: Final[istr] = istr("Expect")
|
| 66 |
+
EXPIRES: Final[istr] = istr("Expires")
|
| 67 |
+
FORWARDED: Final[istr] = istr("Forwarded")
|
| 68 |
+
FROM: Final[istr] = istr("From")
|
| 69 |
+
HOST: Final[istr] = istr("Host")
|
| 70 |
+
IF_MATCH: Final[istr] = istr("If-Match")
|
| 71 |
+
IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since")
|
| 72 |
+
IF_NONE_MATCH: Final[istr] = istr("If-None-Match")
|
| 73 |
+
IF_RANGE: Final[istr] = istr("If-Range")
|
| 74 |
+
IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since")
|
| 75 |
+
KEEP_ALIVE: Final[istr] = istr("Keep-Alive")
|
| 76 |
+
LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID")
|
| 77 |
+
LAST_MODIFIED: Final[istr] = istr("Last-Modified")
|
| 78 |
+
LINK: Final[istr] = istr("Link")
|
| 79 |
+
LOCATION: Final[istr] = istr("Location")
|
| 80 |
+
MAX_FORWARDS: Final[istr] = istr("Max-Forwards")
|
| 81 |
+
ORIGIN: Final[istr] = istr("Origin")
|
| 82 |
+
PRAGMA: Final[istr] = istr("Pragma")
|
| 83 |
+
PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate")
|
| 84 |
+
PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization")
|
| 85 |
+
RANGE: Final[istr] = istr("Range")
|
| 86 |
+
REFERER: Final[istr] = istr("Referer")
|
| 87 |
+
RETRY_AFTER: Final[istr] = istr("Retry-After")
|
| 88 |
+
SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept")
|
| 89 |
+
SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version")
|
| 90 |
+
SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol")
|
| 91 |
+
SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions")
|
| 92 |
+
SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key")
|
| 93 |
+
SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1")
|
| 94 |
+
SERVER: Final[istr] = istr("Server")
|
| 95 |
+
SET_COOKIE: Final[istr] = istr("Set-Cookie")
|
| 96 |
+
TE: Final[istr] = istr("TE")
|
| 97 |
+
TRAILER: Final[istr] = istr("Trailer")
|
| 98 |
+
TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding")
|
| 99 |
+
UPGRADE: Final[istr] = istr("Upgrade")
|
| 100 |
+
URI: Final[istr] = istr("URI")
|
| 101 |
+
USER_AGENT: Final[istr] = istr("User-Agent")
|
| 102 |
+
VARY: Final[istr] = istr("Vary")
|
| 103 |
+
VIA: Final[istr] = istr("Via")
|
| 104 |
+
WANT_DIGEST: Final[istr] = istr("Want-Digest")
|
| 105 |
+
WARNING: Final[istr] = istr("Warning")
|
| 106 |
+
WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate")
|
| 107 |
+
X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For")
|
| 108 |
+
X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host")
|
| 109 |
+
X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto")
|
| 110 |
+
|
| 111 |
+
# These are the upper/lower case variants of the headers/methods
|
| 112 |
+
# Example: {'hOst', 'host', 'HoST', 'HOSt', 'hOsT', 'HosT', 'hoSt', ...}
|
| 113 |
+
METH_HEAD_ALL: Final = frozenset(
|
| 114 |
+
map("".join, itertools.product(*zip(METH_HEAD.upper(), METH_HEAD.lower())))
|
| 115 |
+
)
|
| 116 |
+
METH_CONNECT_ALL: Final = frozenset(
|
| 117 |
+
map("".join, itertools.product(*zip(METH_CONNECT.upper(), METH_CONNECT.lower())))
|
| 118 |
+
)
|
| 119 |
+
HOST_ALL: Final = frozenset(
|
| 120 |
+
map("".join, itertools.product(*zip(HOST.upper(), HOST.lower())))
|
| 121 |
+
)
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/helpers.py
ADDED
|
@@ -0,0 +1,944 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Various helper functions"""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import base64
|
| 5 |
+
import binascii
|
| 6 |
+
import contextlib
|
| 7 |
+
import datetime
|
| 8 |
+
import enum
|
| 9 |
+
import functools
|
| 10 |
+
import inspect
|
| 11 |
+
import netrc
|
| 12 |
+
import os
|
| 13 |
+
import platform
|
| 14 |
+
import re
|
| 15 |
+
import sys
|
| 16 |
+
import time
|
| 17 |
+
import weakref
|
| 18 |
+
from collections import namedtuple
|
| 19 |
+
from contextlib import suppress
|
| 20 |
+
from email.parser import HeaderParser
|
| 21 |
+
from email.utils import parsedate
|
| 22 |
+
from math import ceil
|
| 23 |
+
from pathlib import Path
|
| 24 |
+
from types import TracebackType
|
| 25 |
+
from typing import (
|
| 26 |
+
Any,
|
| 27 |
+
Callable,
|
| 28 |
+
ContextManager,
|
| 29 |
+
Dict,
|
| 30 |
+
Generator,
|
| 31 |
+
Generic,
|
| 32 |
+
Iterable,
|
| 33 |
+
Iterator,
|
| 34 |
+
List,
|
| 35 |
+
Mapping,
|
| 36 |
+
Optional,
|
| 37 |
+
Protocol,
|
| 38 |
+
Tuple,
|
| 39 |
+
Type,
|
| 40 |
+
TypeVar,
|
| 41 |
+
Union,
|
| 42 |
+
get_args,
|
| 43 |
+
overload,
|
| 44 |
+
)
|
| 45 |
+
from urllib.parse import quote
|
| 46 |
+
from urllib.request import getproxies, proxy_bypass
|
| 47 |
+
|
| 48 |
+
import attr
|
| 49 |
+
from multidict import MultiDict, MultiDictProxy, MultiMapping
|
| 50 |
+
from propcache.api import under_cached_property as reify
|
| 51 |
+
from yarl import URL
|
| 52 |
+
|
| 53 |
+
from . import hdrs
|
| 54 |
+
from .log import client_logger
|
| 55 |
+
|
| 56 |
+
if sys.version_info >= (3, 11):
|
| 57 |
+
import asyncio as async_timeout
|
| 58 |
+
else:
|
| 59 |
+
import async_timeout
|
| 60 |
+
|
| 61 |
+
__all__ = ("BasicAuth", "ChainMapProxy", "ETag", "reify")

# Platform detection flags.
IS_MACOS = platform.system() == "Darwin"
IS_WINDOWS = platform.system() == "Windows"

# Python version feature flags.
PY_310 = sys.version_info >= (3, 10)
PY_311 = sys.version_info >= (3, 11)


_T = TypeVar("_T")
_S = TypeVar("_S")

# Unique sentinel used to distinguish "argument not passed" from None.
_SENTINEL = enum.Enum("_SENTINEL", "sentinel")
sentinel = _SENTINEL.sentinel

# When the env var is set, C speedup extensions are not used.
NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))

# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
EMPTY_BODY_STATUS_CODES = frozenset((204, 304, *range(100, 200)))
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
EMPTY_BODY_METHODS = hdrs.METH_HEAD_ALL

# Debug mode mirrors asyncio: -X dev or PYTHONASYNCIODEBUG (unless env ignored).
DEBUG = sys.flags.dev_mode or (
    not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
)


# Character classes used for HTTP token validation (RFC 2616 grammar):
# CHAR is 7-bit ASCII, CTL the control characters, SEPARATORS the
# token delimiters; TOKEN is CHAR minus CTL minus SEPARATORS.
CHAR = {chr(i) for i in range(0, 128)}
CTL = {chr(i) for i in range(0, 32)} | {
    chr(127),
}
SEPARATORS = {
    "(",
    ")",
    "<",
    ">",
    "@",
    ",",
    ";",
    ":",
    "\\",
    '"',
    "/",
    "[",
    "]",
    "?",
    "=",
    "{",
    "}",
    " ",
    chr(9),
}
TOKEN = CHAR ^ CTL ^ SEPARATORS
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
class noop:
    """Trivial awaitable: awaiting an instance yields control once and
    produces no result."""

    def __await__(self) -> Generator[None, None, None]:
        # A single bare yield is what makes instances awaitable.
        yield
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
    """Http basic authentication helper.

    An immutable (login, password, encoding) triple that can serialize
    itself to, and parse itself from, an ``Authorization: Basic`` header.
    """

    def __new__(
        cls, login: str, password: str = "", encoding: str = "latin1"
    ) -> "BasicAuth":
        # Reject values that cannot legally appear in a Basic auth header.
        if login is None:
            raise ValueError("None is not allowed as login value")
        if password is None:
            raise ValueError("None is not allowed as password value")
        if ":" in login:
            raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')
        return super().__new__(cls, login, password, encoding)

    @classmethod
    def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
        """Create a BasicAuth object from an Authorization HTTP header."""
        scheme, sep, b64_credentials = auth_header.partition(" ")
        if not sep:
            raise ValueError("Could not parse authorization header.")

        if scheme.lower() != "basic":
            raise ValueError("Unknown authorization method %s" % scheme)

        try:
            raw_credentials = base64.b64decode(
                b64_credentials.encode("ascii"), validate=True
            )
        except binascii.Error:
            raise ValueError("Invalid base64 encoding.")

        # RFC 2617 HTTP Authentication
        # https://www.ietf.org/rfc/rfc2617.txt
        # the colon must be present, but the username and password may be
        # otherwise blank.
        username, sep, password = raw_credentials.decode(encoding).partition(":")
        if not sep:
            raise ValueError("Invalid credentials.")

        return cls(username, password, encoding=encoding)

    @classmethod
    def from_url(cls, url: "URL", *, encoding: str = "latin1") -> Optional["BasicAuth"]:
        """Create BasicAuth from url."""
        if not isinstance(url, URL):
            raise TypeError("url should be yarl.URL instance")
        # Check raw_user and raw_password first as yarl is likely
        # to already have these values parsed from the netloc in the cache.
        if url.raw_user is None and url.raw_password is None:
            return None
        return cls(url.user or "", url.password or "", encoding=encoding)

    def encode(self) -> str:
        """Encode credentials."""
        raw = f"{self.login}:{self.password}".encode(self.encoding)
        return "Basic %s" % base64.b64encode(raw).decode(self.encoding)
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
def strip_auth_from_url(url: "URL") -> "Tuple[URL, Optional[BasicAuth]]":
    """Remove user and password from URL if present and return BasicAuth object."""
    # Check raw_user and raw_password first as yarl is likely
    # to already have these values parsed from the netloc in the cache.
    has_credentials = url.raw_user is not None or url.raw_password is not None
    if not has_credentials:
        return url, None
    auth = BasicAuth(url.user or "", url.password or "")
    return url.with_user(None), auth
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
def netrc_from_env() -> Optional[netrc.netrc]:
    """Load netrc from file.

    Attempt to load it from the path specified by the env-var
    NETRC or in the default location in the user's home directory.

    Returns None if it couldn't be found or fails to parse.
    """
    # An explicit NETRC env var wins over the per-user default path.
    netrc_env = os.environ.get("NETRC")

    if netrc_env is not None:
        netrc_path = Path(netrc_env)
    else:
        try:
            home_dir = Path.home()
        except RuntimeError as e:  # pragma: no cover
            # if pathlib can't resolve home, it may raise a RuntimeError
            client_logger.debug(
                "Could not resolve home directory when "
                "trying to look for .netrc file: %s",
                e,
            )
            return None

        # Windows convention uses "_netrc"; everything else ".netrc".
        netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")

    try:
        return netrc.netrc(str(netrc_path))
    except netrc.NetrcParseError as e:
        client_logger.warning("Could not parse .netrc file: %s", e)
    except OSError as e:
        netrc_exists = False
        with contextlib.suppress(OSError):
            netrc_exists = netrc_path.is_file()
        # we couldn't read the file (doesn't exist, permissions, etc.)
        if netrc_env or netrc_exists:
            # only warn if the environment wanted us to load it,
            # or it appears like the default file does actually exist
            client_logger.warning("Could not read .netrc file: %s", e)

    return None
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
    """Immutable (proxy URL, credentials) pair resolved from the environment."""

    # Proxy endpoint URL (credentials already stripped by the caller).
    proxy: URL
    # Credentials for the proxy, or None when the proxy is unauthenticated.
    proxy_auth: Optional[BasicAuth]
|
| 241 |
+
|
| 242 |
+
|
| 243 |
+
def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> "BasicAuth":
    """
    Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``.

    :raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no
        entry is found for the ``host``.
    """
    if netrc_obj is None:
        raise LookupError("No .netrc file found")

    entry = netrc_obj.authenticators(host)
    if entry is None:
        raise LookupError(f"No entry for {host!s} found in the `.netrc` file.")
    login, account, password = entry

    # TODO(PY311): username = login or account
    # Up to python 3.10, account could be None if not specified,
    # and login will be empty string if not specified. From 3.11,
    # login and account will be empty string if not specified.
    username = login if (login or account is None) else account

    # TODO(PY311): password will always be a string from 3.11 on;
    # normalize a missing password to the empty string until then.
    return BasicAuth(username, password or "")
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
def proxies_from_env() -> Dict[str, ProxyInfo]:
    """Collect proxy configuration from the process environment.

    Returns a mapping of URL scheme ("http", "ws", ...) to ProxyInfo.
    HTTPS/WSS proxies are not supported and are skipped with a warning.
    """
    # urllib's getproxies() reads the *_proxy environment variables.
    proxy_urls = {
        k: URL(v)
        for k, v in getproxies().items()
        if k in ("http", "https", "ws", "wss")
    }
    netrc_obj = netrc_from_env()
    # Split embedded user:password credentials out of each proxy URL.
    stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
    for proto, val in stripped.items():
        proxy, auth = val
        if proxy.scheme in ("https", "wss"):
            client_logger.warning(
                "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
            )
            continue
        # Fall back to .netrc credentials when the URL carried none.
        if netrc_obj and auth is None:
            if proxy.host is not None:
                try:
                    auth = basicauth_from_netrc(netrc_obj, proxy.host)
                except LookupError:
                    auth = None
        ret[proto] = ProxyInfo(proxy, auth)
    return ret
|
| 296 |
+
|
| 297 |
+
|
| 298 |
+
def get_env_proxy_for_url(url: "URL") -> "Tuple[URL, Optional[BasicAuth]]":
    """Get a permitted proxy for the given URL from the env."""
    host = url.host
    # Respect no_proxy / platform bypass rules for this host.
    if host is not None and proxy_bypass(host):
        raise LookupError(f"Proxying is disallowed for `{host!r}`")

    env_proxies = proxies_from_env()
    try:
        proxy_info = env_proxies[url.scheme]
    except KeyError:
        raise LookupError(f"No proxies found for `{url!s}` in the env")
    return proxy_info.proxy, proxy_info.proxy_auth
|
| 310 |
+
|
| 311 |
+
|
| 312 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
    """Parsed representation of a MIME type string (see parse_mimetype)."""

    # Major type, e.g. "text" in "text/html".
    type: str
    # Subtype, e.g. "html" in "text/html".
    subtype: str
    # Structured-syntax suffix, e.g. "xml" in "application/atom+xml".
    suffix: str
    # Remaining ";"-separated parameters such as charset.
    parameters: "MultiDictProxy[str]"
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> "MimeType":
    """Parses a MIME type into its components.

    mimetype is a MIME type string.

    Returns a MimeType object.

    Example:

    >>> parse_mimetype('text/html; charset=utf-8')
    MimeType(type='text', subtype='html', suffix='',
             parameters={'charset': 'utf-8'})

    """
    # Empty input maps to an all-empty MimeType.
    if not mimetype:
        return MimeType(
            type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
        )

    head, *raw_params = mimetype.split(";")

    # Collect ";key=value" parameters; values may be quoted.
    params: "MultiDict[str]" = MultiDict()
    for raw in raw_params:
        if not raw:
            continue
        key, _, value = raw.partition("=")
        params.add(key.lower().strip(), value.strip(' "'))

    fulltype = head.strip().lower()
    # A bare "*" means "anything": normalize it to "*/*".
    if fulltype == "*":
        fulltype = "*/*"

    mtype, _, remainder = fulltype.partition("/")
    stype, _, suffix = remainder.partition("+")

    return MimeType(
        type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
    )
|
| 358 |
+
|
| 359 |
+
|
| 360 |
+
def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
    """Best-effort filename derived from an object's ``name`` attribute.

    Returns *default* when there is no usable name (missing, empty,
    non-string, or a pseudo-name such as ``<stdin>``).
    """
    candidate = getattr(obj, "name", None)
    if not (candidate and isinstance(candidate, str)):
        return default
    # Pseudo-files expose names like "<stdin>" — not real filenames.
    if candidate.startswith("<") or candidate.endswith(">"):
        return default
    return Path(candidate).name
|
| 365 |
+
|
| 366 |
+
|
| 367 |
+
not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
|
| 368 |
+
QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}
|
| 369 |
+
|
| 370 |
+
|
| 371 |
+
def quoted_string(content: str) -> str:
    """Return 7-bit content as quoted-string.

    Format content into a quoted-string as defined in RFC5322 for
    Internet Message Format. Notice that this is not the 8-bit HTTP
    format, but the 7-bit email format. Content must be in usascii or
    a ValueError is raised.
    """
    # Inclusive subset check: the previous strict-superset comparison
    # (QCONTENT > set(content)) wrongly rejected a string that used every
    # single allowed QCONTENT character.
    if not set(content) <= QCONTENT:
        raise ValueError(f"bad content for quoted-string {content!r}")
    # Backslash-escape '"', '\' and other non-qtext characters.
    return not_qtext_re.sub(lambda x: "\\" + x.group(0), content)
|
| 382 |
+
|
| 383 |
+
|
| 384 |
+
def content_disposition_header(
    disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
) -> str:
    """Sets ``Content-Disposition`` header for MIME.

    This is the MIME payload Content-Disposition header from RFC 2183
    and RFC 7578 section 4.2, not the HTTP Content-Disposition from
    RFC 6266.

    disptype is a disposition type: inline, attachment, form-data.
    Should be valid extension token (see RFC 2183)

    quote_fields performs value quoting to 7-bit MIME headers
    according to RFC 7578. Set quote_fields to False if recipient
    can take 8-bit file names and field values.

    _charset specifies the charset to use when quote_fields is True.

    params is a dict with disposition params.

    Raises ValueError when disptype or a parameter name is not a valid
    RFC 2183 token.
    """
    if not disptype or not (TOKEN > set(disptype)):
        raise ValueError(f"bad content disposition type {disptype!r}")

    value = disptype
    if params:
        lparams = []
        for key, val in params.items():
            if not key or not (TOKEN > set(key)):
                raise ValueError(f"bad content disposition parameter {key!r}={val!r}")
            if quote_fields:
                if key.lower() == "filename":
                    # filename is always percent-encoded and double-quoted.
                    qval = quote(val, "", encoding=_charset)
                    lparams.append((key, '"%s"' % qval))
                else:
                    try:
                        # Prefer a plain RFC 5322 quoted-string when the
                        # value is 7-bit clean.
                        qval = quoted_string(val)
                    except ValueError:
                        # Fall back to RFC 2231 extended notation:
                        #   key*=charset''percent-encoded-value
                        qval = "".join(
                            (_charset, "''", quote(val, "", encoding=_charset))
                        )
                        lparams.append((key + "*", qval))
                    else:
                        lparams.append((key, '"%s"' % qval))
            else:
                # 8-bit mode: only escape backslashes and double quotes.
                qval = val.replace("\\", "\\\\").replace('"', '\\"')
                lparams.append((key, '"%s"' % qval))
        sparams = "; ".join("=".join(pair) for pair in lparams)
        value = "; ".join((value, sparams))
    return value
|
| 433 |
+
|
| 434 |
+
|
| 435 |
+
def is_ip_address(host: Optional[str]) -> bool:
    """Check if host looks like an IP Address.

    This check is only meant as a heuristic to ensure that
    a host is not a domain name.
    """
    if not host:
        return False
    # The host must contain a colon to be an IPv6 address.
    if ":" in host:
        return True
    # For a host to be an ipv4 address, it must be all numeric.
    return host.replace(".", "").isdigit()
|
| 446 |
+
|
| 447 |
+
|
| 448 |
+
# Cache for rfc822_formatted_time(): the epoch second the cached string
# was rendered for, and the rendered date string itself.  The format is
# re-rendered at most once per second.
_cached_current_datetime: Optional[int] = None
_cached_formatted_datetime = ""
|
| 450 |
+
|
| 451 |
+
|
| 452 |
+
def rfc822_formatted_time() -> str:
    """Return the current UTC time as an RFC 822 date string.

    The rendered string is cached per whole second, so repeated calls
    within the same second reuse the previous result.
    """
    global _cached_current_datetime
    global _cached_formatted_datetime

    now = int(time.time())
    if now == _cached_current_datetime:
        return _cached_formatted_datetime

    # Weekday and month names for HTTP date/time formatting;
    # always English!
    # Tuples are constants stored in codeobject!
    _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
    _monthname = (
        "",  # Dummy so we can use 1-based month numbers
        "Jan",
        "Feb",
        "Mar",
        "Apr",
        "May",
        "Jun",
        "Jul",
        "Aug",
        "Sep",
        "Oct",
        "Nov",
        "Dec",
    )

    year, month, day, hh, mm, ss, wd, *_rest = time.gmtime(now)
    _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
        _weekdayname[wd],
        day,
        _monthname[month],
        year,
        hh,
        mm,
        ss,
    )
    _cached_current_datetime = now
    return _cached_formatted_datetime
|
| 490 |
+
|
| 491 |
+
|
| 492 |
+
def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
|
| 493 |
+
ref, name = info
|
| 494 |
+
ob = ref()
|
| 495 |
+
if ob is not None:
|
| 496 |
+
with suppress(Exception):
|
| 497 |
+
getattr(ob, name)()
|
| 498 |
+
|
| 499 |
+
|
| 500 |
+
def weakref_handle(
    ob: object,
    name: str,
    timeout: float,
    loop: asyncio.AbstractEventLoop,
    timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
    """Schedule ``ob.name()`` after *timeout* seconds via a weak reference.

    Returns the TimerHandle, or None when *timeout* is absent or
    non-positive.  Deadlines at or beyond *timeout_ceil_threshold*
    are rounded up to a whole second so nearby timers can coalesce.
    """
    if timeout is None or timeout <= 0:
        return None
    deadline = loop.time() + timeout
    if timeout >= timeout_ceil_threshold:
        deadline = ceil(deadline)
    return loop.call_at(deadline, _weakref_handle, (weakref.ref(ob), name))
|
| 514 |
+
|
| 515 |
+
|
| 516 |
+
def call_later(
    cb: Callable[[], Any],
    timeout: float,
    loop: asyncio.AbstractEventLoop,
    timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
    """Schedule *cb* to run after *timeout* seconds on *loop*.

    Returns None without scheduling anything when *timeout* is absent
    or non-positive.
    """
    if timeout is not None and timeout > 0:
        when = calculate_timeout_when(loop.time(), timeout, timeout_ceil_threshold)
        return loop.call_at(when, cb)
    return None
|
| 527 |
+
|
| 528 |
+
|
| 529 |
+
def calculate_timeout_when(
    loop_time: float,
    timeout: float,
    timeout_ceiling_threshold: float,
) -> float:
    """Calculate when to execute a timeout.

    Long timeouts (strictly above the threshold) are rounded up to a
    whole second so the event loop can coalesce wakeups; short ones
    keep full sub-second precision.
    """
    deadline = loop_time + timeout
    return ceil(deadline) if timeout > timeout_ceiling_threshold else deadline
|
| 539 |
+
|
| 540 |
+
|
| 541 |
+
class TimeoutHandle:
    """Timeout handle: collects callbacks and fires them all on expiry."""

    __slots__ = ("_timeout", "_loop", "_ceil_threshold", "_callbacks")

    def __init__(
        self,
        loop: asyncio.AbstractEventLoop,
        timeout: Optional[float],
        ceil_threshold: float = 5,
    ) -> None:
        self._timeout = timeout
        self._loop = loop
        self._ceil_threshold = ceil_threshold
        # (callback, args, kwargs) triples run (best effort) when the
        # timeout expires.
        self._callbacks: List[
            Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]
        ] = []

    def register(
        self, callback: Callable[..., None], *args: Any, **kwargs: Any
    ) -> None:
        """Add *callback* (with its arguments) to run on expiry."""
        self._callbacks.append((callback, args, kwargs))

    def close(self) -> None:
        """Drop all registered callbacks; the handle becomes inert."""
        self._callbacks.clear()

    def start(self) -> Optional[asyncio.TimerHandle]:
        """Arm the timer; returns the TimerHandle, or None if no timeout."""
        timeout = self._timeout
        if timeout is None or timeout <= 0:
            return None
        deadline = self._loop.time() + timeout
        if timeout >= self._ceil_threshold:
            deadline = ceil(deadline)
        return self._loop.call_at(deadline, self.__call__)

    def timer(self) -> "BaseTimerContext":
        """Return a timer context manager bound to this handle."""
        if self._timeout is None or self._timeout <= 0:
            return TimerNoop()
        timer = TimerContext(self._loop)
        self.register(timer.timeout)
        return timer

    def __call__(self) -> None:
        # Fire-and-forget: one failing callback must not block the rest.
        for callback, args, kwargs in self._callbacks:
            with suppress(Exception):
                callback(*args, **kwargs)
        self._callbacks.clear()
|
| 591 |
+
|
| 592 |
+
|
| 593 |
+
class BaseTimerContext(ContextManager["BaseTimerContext"]):
    # Abstract base for TimerNoop / TimerContext: a context manager that
    # may enforce a timeout on the code inside it.

    __slots__ = ()

    def assert_timeout(self) -> None:
        """Raise TimeoutError if timeout has been exceeded."""
|
| 599 |
+
|
| 600 |
+
|
| 601 |
+
class TimerNoop(BaseTimerContext):
    """Timer context used when no timeout is configured: does nothing."""

    __slots__ = ()

    def __enter__(self) -> BaseTimerContext:
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Nothing to clean up; never suppresses exceptions.
        return None
|
| 615 |
+
|
| 616 |
+
|
| 617 |
+
class TimerContext(BaseTimerContext):
    """Low resolution timeout context manager"""

    __slots__ = ("_loop", "_tasks", "_cancelled", "_cancelling")

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        # Tasks currently inside this context; all cancelled when the
        # timer fires.
        self._tasks: List[asyncio.Task[Any]] = []
        # True once timeout() has fired.
        self._cancelled = False
        # Task.cancelling() depth recorded on __enter__ (3.11+ only).
        self._cancelling = 0

    def assert_timeout(self) -> None:
        """Raise TimeoutError if timer has already been cancelled."""
        if self._cancelled:
            raise asyncio.TimeoutError from None

    def __enter__(self) -> BaseTimerContext:
        task = asyncio.current_task(loop=self._loop)
        if task is None:
            raise RuntimeError("Timeout context manager should be used inside a task")

        if sys.version_info >= (3, 11):
            # Remember if the task was already cancelling
            # so when we __exit__ we can decide if we should
            # raise asyncio.TimeoutError or let the cancellation propagate
            self._cancelling = task.cancelling()

        if self._cancelled:
            # Timer already fired before we entered: fail immediately.
            raise asyncio.TimeoutError from None

        self._tasks.append(task)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Optional[bool]:
        enter_task: Optional[asyncio.Task[Any]] = None
        if self._tasks:
            enter_task = self._tasks.pop()

        if exc_type is asyncio.CancelledError and self._cancelled:
            assert enter_task is not None
            # The timeout was hit, and the task was cancelled
            # so we need to uncancel the last task that entered the context manager
            # since the cancellation should not leak out of the context manager
            if sys.version_info >= (3, 11):
                # If the task was already cancelling don't raise
                # asyncio.TimeoutError and instead return None
                # to allow the cancellation to propagate
                if enter_task.uncancel() > self._cancelling:
                    return None
            raise asyncio.TimeoutError from exc_val
        return None

    def timeout(self) -> None:
        """Fire the timeout: cancel every task inside the context (once)."""
        if not self._cancelled:
            for task in set(self._tasks):
                task.cancel()

            self._cancelled = True
|
| 680 |
+
|
| 681 |
+
|
| 682 |
+
def ceil_timeout(
    delay: Optional[float], ceil_threshold: float = 5
) -> async_timeout.Timeout:
    """Return an async_timeout context for *delay* seconds.

    Deadlines longer than *ceil_threshold* are rounded up to a whole
    loop second; a missing or non-positive delay yields an unbounded
    timeout.
    """
    if delay is None or delay <= 0:
        return async_timeout.timeout(None)

    deadline = asyncio.get_running_loop().time() + delay
    if delay > ceil_threshold:
        deadline = ceil(deadline)
    return async_timeout.timeout_at(deadline)
|
| 694 |
+
|
| 695 |
+
|
| 696 |
+
class HeadersMixin:
    """Mixin for handling headers."""

    ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])

    _headers: MultiMapping[str]
    _content_type: Optional[str] = None
    _content_dict: Optional[Dict[str, str]] = None
    _stored_content_type: Union[str, None, _SENTINEL] = sentinel

    def _parse_content_type(self, raw: Optional[str]) -> None:
        """Parse *raw* Content-Type into the cached type and param dict."""
        self._stored_content_type = raw
        if raw is None:
            # default value according to RFC 2616
            self._content_type = "application/octet-stream"
            self._content_dict = {}
            return
        msg = HeaderParser().parsestr("Content-Type: " + raw)
        self._content_type = msg.get_content_type()
        params = msg.get_params(())
        # First element is the content type itself; keep only parameters.
        self._content_dict = dict(params[1:])

    @property
    def content_type(self) -> str:
        """The value of content part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        assert self._content_type is not None
        return self._content_type

    @property
    def charset(self) -> Optional[str]:
        """The value of charset part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        assert self._content_dict is not None
        return self._content_dict.get("charset")

    @property
    def content_length(self) -> Optional[int]:
        """The value of Content-Length HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_LENGTH)
        return None if raw is None else int(raw)
|
| 741 |
+
|
| 742 |
+
|
| 743 |
+
def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
    """Set *result* on *fut* unless it is already done (no-op then)."""
    if fut.done():
        return
    fut.set_result(result)
|
| 746 |
+
|
| 747 |
+
|
| 748 |
+
# Sentinel default for set_exception()'s exc_cause parameter:
# distinguishes "no cause supplied" from an explicit None/exception.
_EXC_SENTINEL = BaseException()
|
| 749 |
+
|
| 750 |
+
|
| 751 |
+
class ErrorableProtocol(Protocol):
    """Structural type for objects that can receive an exception."""

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = ...,
    ) -> None: ...  # pragma: no cover
|
| 757 |
+
|
| 758 |
+
|
| 759 |
+
def set_exception(
    fut: "asyncio.Future[_T] | ErrorableProtocol",
    exc: BaseException,
    exc_cause: BaseException = _EXC_SENTINEL,
) -> None:
    """Set future exception.

    If the future is marked as complete, this function is a no-op.

    :param exc_cause: An exception that is a direct cause of ``exc``.
                      Only set if provided.
    """
    if asyncio.isfuture(fut) and fut.done():
        return

    # Attach the cause only when a real one was supplied and it is not
    # the exception itself (which would create a self-referential chain).
    if exc_cause is not _EXC_SENTINEL and exc_cause is not exc:
        exc.__cause__ = exc_cause

    fut.set_exception(exc)
|
| 780 |
+
|
| 781 |
+
|
| 782 |
+
@functools.total_ordering
class AppKey(Generic[_T]):
    """Keys for static typing support in Application."""

    __slots__ = ("_name", "_t", "__orig_class__")

    # This may be set by Python when instantiating with a generic type. We need to
    # support this, in order to support types that are not concrete classes,
    # like Iterable, which can't be passed as the second parameter to __init__.
    __orig_class__: Type[object]

    def __init__(self, name: str, t: Optional[Type[_T]] = None):
        # Prefix with module name to help deduplicate key names.
        # Walk up the call stack to find the defining module's frame.
        # NOTE(review): if no "<module>" frame exists, `module` stays
        # unbound and the concatenation below raises NameError — assumes
        # keys are always created at module level; confirm.
        frame = inspect.currentframe()
        while frame:
            if frame.f_code.co_name == "<module>":
                module: str = frame.f_globals["__name__"]
                break
            frame = frame.f_back

        self._name = module + "." + name
        self._t = t

    def __lt__(self, other: object) -> bool:
        if isinstance(other, AppKey):
            return self._name < other._name
        return True  # Order AppKey above other types.

    def __repr__(self) -> str:
        t = self._t
        if t is None:
            with suppress(AttributeError):
                # Set to type arg.
                t = get_args(self.__orig_class__)[0]

        if t is None:
            t_repr = "<<Unknown>>"
        elif isinstance(t, type):
            # Short name for builtins, fully-qualified otherwise.
            if t.__module__ == "builtins":
                t_repr = t.__qualname__
            else:
                t_repr = f"{t.__module__}.{t.__qualname__}"
        else:
            t_repr = repr(t)
        return f"<AppKey({self._name}, type={t_repr})>"
|
| 827 |
+
|
| 828 |
+
|
| 829 |
+
class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]):
    """Read-only view over a sequence of mappings; earlier maps win."""

    __slots__ = ("_maps",)

    def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None:
        self._maps = tuple(maps)

    def __init_subclass__(cls) -> None:
        raise TypeError(
            "Inheritance class {} from ChainMapProxy "
            "is forbidden".format(cls.__name__)
        )

    @overload  # type: ignore[override]
    def __getitem__(self, key: AppKey[_T]) -> _T: ...

    @overload
    def __getitem__(self, key: str) -> Any: ...

    def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any:
        # First mapping that knows the key wins.
        for layer in self._maps:
            try:
                return layer[key]
            except KeyError:
                continue
        raise KeyError(key)

    @overload  # type: ignore[override]
    def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]: ...

    @overload
    def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ...

    @overload
    def get(self, key: str, default: Any = ...) -> Any: ...

    def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any:
        try:
            return self[key]
        except KeyError:
            return default

    def __len__(self) -> int:
        # reuses stored hash values if possible
        return len(set().union(*self._maps))

    def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]:
        merged: Dict[Union[str, AppKey[Any]], Any] = {}
        for layer in reversed(self._maps):
            # reuses stored hash values if possible
            merged.update(layer)
        return iter(merged)

    def __contains__(self, key: object) -> bool:
        return any(key in layer for layer in self._maps)

    def __bool__(self) -> bool:
        return any(self._maps)

    def __repr__(self) -> str:
        content = ", ".join(map(repr, self._maps))
        return f"ChainMapProxy({content})"
|
| 890 |
+
|
| 891 |
+
|
| 892 |
+
# https://tools.ietf.org/html/rfc7232#section-2.3
# etagc characters: "!" plus visible ASCII except '"', plus obs-text
# bytes 0x80-0xFF.
_ETAGC = r"[!\x23-\x7E\x80-\xff]+"
_ETAGC_RE = re.compile(_ETAGC)
# A quoted entity-tag, optionally weak ("W/" prefix).
_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"'
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
# Comma-separated list of quoted etags; the trailing "(.)" alternative
# matches any leftover garbage, signalling an invalid header.
LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")

# Wildcard entity-tag matching any representation.
ETAG_ANY = "*"
|
| 900 |
+
|
| 901 |
+
|
| 902 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ETag:
    """Parsed entity-tag (RFC 7232): opaque value plus weakness flag."""

    # The unquoted opaque tag value.
    value: str
    # True when the tag carried the "W/" (weak) prefix.
    is_weak: bool = False
|
| 906 |
+
|
| 907 |
+
|
| 908 |
+
def validate_etag_value(value: str) -> None:
    """Raise ValueError unless *value* is '*' or a valid etagc sequence."""
    if value == ETAG_ANY:
        return
    if not _ETAGC_RE.fullmatch(value):
        raise ValueError(
            f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
        )
|
| 913 |
+
|
| 914 |
+
|
| 915 |
+
def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
    """Process a date string, return a datetime object"""
    if date_str is None:
        return None
    timetuple = parsedate(date_str)
    if timetuple is None:
        return None
    with suppress(ValueError):
        # Out-of-range components make the constructor raise ValueError.
        return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
    return None
|
| 923 |
+
|
| 924 |
+
|
| 925 |
+
@functools.lru_cache
def must_be_empty_body(method: str, code: int) -> bool:
    """Check if a request must return an empty body."""
    if code in EMPTY_BODY_STATUS_CODES:
        return True
    if method in EMPTY_BODY_METHODS:
        return True
    # A successful CONNECT switches to tunnelling and carries no body.
    return 200 <= code < 300 and method in hdrs.METH_CONNECT_ALL
|
| 933 |
+
|
| 934 |
+
|
| 935 |
+
def should_remove_content_length(method: str, code: int) -> bool:
    """Check if a Content-Length header should be removed.

    This should always be a subset of must_be_empty_body
    """
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4
    if code in EMPTY_BODY_STATUS_CODES:
        return True
    return 200 <= code < 300 and method in hdrs.METH_CONNECT_ALL
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/http.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
from http import HTTPStatus
|
| 3 |
+
from typing import Mapping, Tuple
|
| 4 |
+
|
| 5 |
+
from . import __version__
|
| 6 |
+
from .http_exceptions import HttpProcessingError as HttpProcessingError
|
| 7 |
+
from .http_parser import (
|
| 8 |
+
HeadersParser as HeadersParser,
|
| 9 |
+
HttpParser as HttpParser,
|
| 10 |
+
HttpRequestParser as HttpRequestParser,
|
| 11 |
+
HttpResponseParser as HttpResponseParser,
|
| 12 |
+
RawRequestMessage as RawRequestMessage,
|
| 13 |
+
RawResponseMessage as RawResponseMessage,
|
| 14 |
+
)
|
| 15 |
+
from .http_websocket import (
|
| 16 |
+
WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
|
| 17 |
+
WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
|
| 18 |
+
WS_KEY as WS_KEY,
|
| 19 |
+
WebSocketError as WebSocketError,
|
| 20 |
+
WebSocketReader as WebSocketReader,
|
| 21 |
+
WebSocketWriter as WebSocketWriter,
|
| 22 |
+
WSCloseCode as WSCloseCode,
|
| 23 |
+
WSMessage as WSMessage,
|
| 24 |
+
WSMsgType as WSMsgType,
|
| 25 |
+
ws_ext_gen as ws_ext_gen,
|
| 26 |
+
ws_ext_parse as ws_ext_parse,
|
| 27 |
+
)
|
| 28 |
+
from .http_writer import (
|
| 29 |
+
HttpVersion as HttpVersion,
|
| 30 |
+
HttpVersion10 as HttpVersion10,
|
| 31 |
+
HttpVersion11 as HttpVersion11,
|
| 32 |
+
StreamWriter as StreamWriter,
|
| 33 |
+
)
|
| 34 |
+
|
| 35 |
+
# Public re-export surface of the aiohttp.http package facade.
__all__ = (
    "HttpProcessingError",
    "RESPONSES",
    "SERVER_SOFTWARE",
    # .http_writer
    "StreamWriter",
    "HttpVersion",
    "HttpVersion10",
    "HttpVersion11",
    # .http_parser
    "HeadersParser",
    "HttpParser",
    "HttpRequestParser",
    "HttpResponseParser",
    "RawRequestMessage",
    "RawResponseMessage",
    # .http_websocket
    "WS_CLOSED_MESSAGE",
    "WS_CLOSING_MESSAGE",
    "WS_KEY",
    "WebSocketReader",
    "WebSocketWriter",
    "ws_ext_gen",
    "ws_ext_parse",
    "WSMessage",
    "WebSocketError",
    "WSMsgType",
    "WSCloseCode",
)


# Default Server header token, e.g. "Python/3.11 aiohttp/<version>".
SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
    sys.version_info, __version__
)

# Status code -> (phrase, description), built from http.HTTPStatus.
RESPONSES: Mapping[int, Tuple[str, str]] = {
    v: (v.phrase, v.description) for v in HTTPStatus.__members__.values()
}
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/http_exceptions.py
ADDED
|
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Low-level http related exceptions."""
|
| 2 |
+
|
| 3 |
+
from textwrap import indent
|
| 4 |
+
from typing import Optional, Union
|
| 5 |
+
|
| 6 |
+
from .typedefs import _CIMultiDict
|
| 7 |
+
|
| 8 |
+
__all__ = ("HttpProcessingError",)
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class HttpProcessingError(Exception):
    """HTTP error.

    Shortcut for raising HTTP errors with custom code, message and headers.

    code: HTTP Error code.
    message: (optional) Error message.
    headers: (optional) Headers to be sent in response, a list of pairs
    """

    # Class-level defaults; __init__ overrides them per instance.
    code = 0
    message = ""
    headers = None

    def __init__(
        self,
        *,
        code: Optional[int] = None,
        message: str = "",
        headers: Optional[_CIMultiDict] = None,
    ) -> None:
        if code is not None:
            self.code = code
        self.headers = headers
        self.message = message

    def __str__(self) -> str:
        # Indent the message so multi-line bodies read as a block.
        msg = indent(self.message, " ")
        return f"{self.code}, message:\n{msg}"

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>"
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class BadHttpMessage(HttpProcessingError):
    """Base 400-level error for malformed HTTP messages."""

    code = 400
    message = "Bad Request"

    def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
        super().__init__(message=message, headers=headers)
        # Keep args consistent with a plain Exception(message).
        self.args = (message,)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class HttpBadRequest(BadHttpMessage):
    """400 Bad Request (same defaults as BadHttpMessage)."""

    code = 400
    message = "Bad Request"
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class PayloadEncodingError(BadHttpMessage):
    """Base class for payload errors"""
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class ContentEncodingError(PayloadEncodingError):
    """Content encoding error."""
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class TransferEncodingError(PayloadEncodingError):
    """Transfer encoding error."""
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class ContentLengthError(PayloadEncodingError):
    """Not enough data to satisfy content length header."""
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
class LineTooLong(BadHttpMessage):
    """A protocol line exceeded the configured size limit."""

    def __init__(
        self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
    ) -> None:
        super().__init__(
            f"Got more than {limit} bytes ({actual_size}) when reading {line}."
        )
        # Preserve all three details for programmatic inspection.
        self.args = (line, limit, actual_size)
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
class InvalidHeader(BadHttpMessage):
    """A header line failed validation."""

    def __init__(self, hdr: Union[bytes, str]) -> None:
        # Decode bytes for the .hdr attribute; the message keeps the raw repr.
        if isinstance(hdr, bytes):
            hdr_s = hdr.decode(errors="backslashreplace")
        else:
            hdr_s = hdr
        super().__init__(f"Invalid HTTP header: {hdr!r}")
        self.hdr = hdr_s
        self.args = (hdr,)
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
class BadStatusLine(BadHttpMessage):
    """The request/status line was malformed."""

    def __init__(self, line: str = "", error: Optional[str] = None) -> None:
        if not isinstance(line, str):
            # Defensive: tolerate bytes or other types in the message.
            line = repr(line)
        super().__init__(error or f"Bad status line {line!r}")
        self.args = (line,)
        self.line = line
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
class BadHttpMethod(BadStatusLine):
    """Invalid HTTP method in status line."""

    def __init__(self, line: str = "", error: Optional[str] = None) -> None:
        # Reuse BadStatusLine with a method-specific default message.
        super().__init__(line, error or f"Bad HTTP method in status line {line!r}")
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
class InvalidURLError(BadHttpMessage):
    """Invalid URL in an HTTP message."""

    pass
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/http_parser.py
ADDED
|
@@ -0,0 +1,1046 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import asyncio
|
| 3 |
+
import re
|
| 4 |
+
import string
|
| 5 |
+
from contextlib import suppress
|
| 6 |
+
from enum import IntEnum
|
| 7 |
+
from typing import (
|
| 8 |
+
Any,
|
| 9 |
+
ClassVar,
|
| 10 |
+
Final,
|
| 11 |
+
Generic,
|
| 12 |
+
List,
|
| 13 |
+
Literal,
|
| 14 |
+
NamedTuple,
|
| 15 |
+
Optional,
|
| 16 |
+
Pattern,
|
| 17 |
+
Set,
|
| 18 |
+
Tuple,
|
| 19 |
+
Type,
|
| 20 |
+
TypeVar,
|
| 21 |
+
Union,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
from multidict import CIMultiDict, CIMultiDictProxy, istr
|
| 25 |
+
from yarl import URL
|
| 26 |
+
|
| 27 |
+
from . import hdrs
|
| 28 |
+
from .base_protocol import BaseProtocol
|
| 29 |
+
from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor
|
| 30 |
+
from .helpers import (
|
| 31 |
+
_EXC_SENTINEL,
|
| 32 |
+
DEBUG,
|
| 33 |
+
EMPTY_BODY_METHODS,
|
| 34 |
+
EMPTY_BODY_STATUS_CODES,
|
| 35 |
+
NO_EXTENSIONS,
|
| 36 |
+
BaseTimerContext,
|
| 37 |
+
set_exception,
|
| 38 |
+
)
|
| 39 |
+
from .http_exceptions import (
|
| 40 |
+
BadHttpMessage,
|
| 41 |
+
BadHttpMethod,
|
| 42 |
+
BadStatusLine,
|
| 43 |
+
ContentEncodingError,
|
| 44 |
+
ContentLengthError,
|
| 45 |
+
InvalidHeader,
|
| 46 |
+
InvalidURLError,
|
| 47 |
+
LineTooLong,
|
| 48 |
+
TransferEncodingError,
|
| 49 |
+
)
|
| 50 |
+
from .http_writer import HttpVersion, HttpVersion10
|
| 51 |
+
from .streams import EMPTY_PAYLOAD, StreamReader
|
| 52 |
+
from .typedefs import RawHeaders
|
| 53 |
+
|
| 54 |
+
__all__ = (
|
| 55 |
+
"HeadersParser",
|
| 56 |
+
"HttpParser",
|
| 57 |
+
"HttpRequestParser",
|
| 58 |
+
"HttpResponseParser",
|
| 59 |
+
"RawRequestMessage",
|
| 60 |
+
"RawResponseMessage",
|
| 61 |
+
)
|
| 62 |
+
|
| 63 |
+
_SEP = Literal[b"\r\n", b"\n"]
|
| 64 |
+
|
| 65 |
+
ASCIISET: Final[Set[str]] = set(string.printable)
|
| 66 |
+
|
| 67 |
+
# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview
|
| 68 |
+
# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens
|
| 69 |
+
#
|
| 70 |
+
# method = token
|
| 71 |
+
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
|
| 72 |
+
# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
|
| 73 |
+
# token = 1*tchar
|
| 74 |
+
_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~")
|
| 75 |
+
TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+")
|
| 76 |
+
VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII)
|
| 77 |
+
DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII)
|
| 78 |
+
HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+")
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class RawRequestMessage(NamedTuple):
|
| 82 |
+
method: str
|
| 83 |
+
path: str
|
| 84 |
+
version: HttpVersion
|
| 85 |
+
headers: "CIMultiDictProxy[str]"
|
| 86 |
+
raw_headers: RawHeaders
|
| 87 |
+
should_close: bool
|
| 88 |
+
compression: Optional[str]
|
| 89 |
+
upgrade: bool
|
| 90 |
+
chunked: bool
|
| 91 |
+
url: URL
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
class RawResponseMessage(NamedTuple):
|
| 95 |
+
version: HttpVersion
|
| 96 |
+
code: int
|
| 97 |
+
reason: str
|
| 98 |
+
headers: CIMultiDictProxy[str]
|
| 99 |
+
raw_headers: RawHeaders
|
| 100 |
+
should_close: bool
|
| 101 |
+
compression: Optional[str]
|
| 102 |
+
upgrade: bool
|
| 103 |
+
chunked: bool
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
class ParseState(IntEnum):
|
| 110 |
+
|
| 111 |
+
PARSE_NONE = 0
|
| 112 |
+
PARSE_LENGTH = 1
|
| 113 |
+
PARSE_CHUNKED = 2
|
| 114 |
+
PARSE_UNTIL_EOF = 3
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
class ChunkState(IntEnum):
|
| 118 |
+
PARSE_CHUNKED_SIZE = 0
|
| 119 |
+
PARSE_CHUNKED_CHUNK = 1
|
| 120 |
+
PARSE_CHUNKED_CHUNK_EOF = 2
|
| 121 |
+
PARSE_MAYBE_TRAILERS = 3
|
| 122 |
+
PARSE_TRAILERS = 4
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
class HeadersParser:
|
| 126 |
+
def __init__(
|
| 127 |
+
self,
|
| 128 |
+
max_line_size: int = 8190,
|
| 129 |
+
max_headers: int = 32768,
|
| 130 |
+
max_field_size: int = 8190,
|
| 131 |
+
lax: bool = False,
|
| 132 |
+
) -> None:
|
| 133 |
+
self.max_line_size = max_line_size
|
| 134 |
+
self.max_headers = max_headers
|
| 135 |
+
self.max_field_size = max_field_size
|
| 136 |
+
self._lax = lax
|
| 137 |
+
|
| 138 |
+
def parse_headers(
|
| 139 |
+
self, lines: List[bytes]
|
| 140 |
+
) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
|
| 141 |
+
headers: CIMultiDict[str] = CIMultiDict()
|
| 142 |
+
# note: "raw" does not mean inclusion of OWS before/after the field value
|
| 143 |
+
raw_headers = []
|
| 144 |
+
|
| 145 |
+
lines_idx = 1
|
| 146 |
+
line = lines[1]
|
| 147 |
+
line_count = len(lines)
|
| 148 |
+
|
| 149 |
+
while line:
|
| 150 |
+
# Parse initial header name : value pair.
|
| 151 |
+
try:
|
| 152 |
+
bname, bvalue = line.split(b":", 1)
|
| 153 |
+
except ValueError:
|
| 154 |
+
raise InvalidHeader(line) from None
|
| 155 |
+
|
| 156 |
+
if len(bname) == 0:
|
| 157 |
+
raise InvalidHeader(bname)
|
| 158 |
+
|
| 159 |
+
# https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
|
| 160 |
+
if {bname[0], bname[-1]} & {32, 9}: # {" ", "\t"}
|
| 161 |
+
raise InvalidHeader(line)
|
| 162 |
+
|
| 163 |
+
bvalue = bvalue.lstrip(b" \t")
|
| 164 |
+
if len(bname) > self.max_field_size:
|
| 165 |
+
raise LineTooLong(
|
| 166 |
+
"request header name {}".format(
|
| 167 |
+
bname.decode("utf8", "backslashreplace")
|
| 168 |
+
),
|
| 169 |
+
str(self.max_field_size),
|
| 170 |
+
str(len(bname)),
|
| 171 |
+
)
|
| 172 |
+
name = bname.decode("utf-8", "surrogateescape")
|
| 173 |
+
if not TOKENRE.fullmatch(name):
|
| 174 |
+
raise InvalidHeader(bname)
|
| 175 |
+
|
| 176 |
+
header_length = len(bvalue)
|
| 177 |
+
|
| 178 |
+
# next line
|
| 179 |
+
lines_idx += 1
|
| 180 |
+
line = lines[lines_idx]
|
| 181 |
+
|
| 182 |
+
# consume continuation lines
|
| 183 |
+
continuation = self._lax and line and line[0] in (32, 9) # (' ', '\t')
|
| 184 |
+
|
| 185 |
+
# Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding
|
| 186 |
+
if continuation:
|
| 187 |
+
bvalue_lst = [bvalue]
|
| 188 |
+
while continuation:
|
| 189 |
+
header_length += len(line)
|
| 190 |
+
if header_length > self.max_field_size:
|
| 191 |
+
raise LineTooLong(
|
| 192 |
+
"request header field {}".format(
|
| 193 |
+
bname.decode("utf8", "backslashreplace")
|
| 194 |
+
),
|
| 195 |
+
str(self.max_field_size),
|
| 196 |
+
str(header_length),
|
| 197 |
+
)
|
| 198 |
+
bvalue_lst.append(line)
|
| 199 |
+
|
| 200 |
+
# next line
|
| 201 |
+
lines_idx += 1
|
| 202 |
+
if lines_idx < line_count:
|
| 203 |
+
line = lines[lines_idx]
|
| 204 |
+
if line:
|
| 205 |
+
continuation = line[0] in (32, 9) # (' ', '\t')
|
| 206 |
+
else:
|
| 207 |
+
line = b""
|
| 208 |
+
break
|
| 209 |
+
bvalue = b"".join(bvalue_lst)
|
| 210 |
+
else:
|
| 211 |
+
if header_length > self.max_field_size:
|
| 212 |
+
raise LineTooLong(
|
| 213 |
+
"request header field {}".format(
|
| 214 |
+
bname.decode("utf8", "backslashreplace")
|
| 215 |
+
),
|
| 216 |
+
str(self.max_field_size),
|
| 217 |
+
str(header_length),
|
| 218 |
+
)
|
| 219 |
+
|
| 220 |
+
bvalue = bvalue.strip(b" \t")
|
| 221 |
+
value = bvalue.decode("utf-8", "surrogateescape")
|
| 222 |
+
|
| 223 |
+
# https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
|
| 224 |
+
if "\n" in value or "\r" in value or "\x00" in value:
|
| 225 |
+
raise InvalidHeader(bvalue)
|
| 226 |
+
|
| 227 |
+
headers.add(name, value)
|
| 228 |
+
raw_headers.append((bname, bvalue))
|
| 229 |
+
|
| 230 |
+
return (CIMultiDictProxy(headers), tuple(raw_headers))
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool:
|
| 234 |
+
"""Check if the upgrade header is supported."""
|
| 235 |
+
return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"}
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
class HttpParser(abc.ABC, Generic[_MsgT]):
|
| 239 |
+
lax: ClassVar[bool] = False
|
| 240 |
+
|
| 241 |
+
def __init__(
|
| 242 |
+
self,
|
| 243 |
+
protocol: Optional[BaseProtocol] = None,
|
| 244 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 245 |
+
limit: int = 2**16,
|
| 246 |
+
max_line_size: int = 8190,
|
| 247 |
+
max_headers: int = 32768,
|
| 248 |
+
max_field_size: int = 8190,
|
| 249 |
+
timer: Optional[BaseTimerContext] = None,
|
| 250 |
+
code: Optional[int] = None,
|
| 251 |
+
method: Optional[str] = None,
|
| 252 |
+
payload_exception: Optional[Type[BaseException]] = None,
|
| 253 |
+
response_with_body: bool = True,
|
| 254 |
+
read_until_eof: bool = False,
|
| 255 |
+
auto_decompress: bool = True,
|
| 256 |
+
) -> None:
|
| 257 |
+
self.protocol = protocol
|
| 258 |
+
self.loop = loop
|
| 259 |
+
self.max_line_size = max_line_size
|
| 260 |
+
self.max_headers = max_headers
|
| 261 |
+
self.max_field_size = max_field_size
|
| 262 |
+
self.timer = timer
|
| 263 |
+
self.code = code
|
| 264 |
+
self.method = method
|
| 265 |
+
self.payload_exception = payload_exception
|
| 266 |
+
self.response_with_body = response_with_body
|
| 267 |
+
self.read_until_eof = read_until_eof
|
| 268 |
+
|
| 269 |
+
self._lines: List[bytes] = []
|
| 270 |
+
self._tail = b""
|
| 271 |
+
self._upgraded = False
|
| 272 |
+
self._payload = None
|
| 273 |
+
self._payload_parser: Optional[HttpPayloadParser] = None
|
| 274 |
+
self._auto_decompress = auto_decompress
|
| 275 |
+
self._limit = limit
|
| 276 |
+
self._headers_parser = HeadersParser(
|
| 277 |
+
max_line_size, max_headers, max_field_size, self.lax
|
| 278 |
+
)
|
| 279 |
+
|
| 280 |
+
@abc.abstractmethod
|
| 281 |
+
def parse_message(self, lines: List[bytes]) -> _MsgT: ...
|
| 282 |
+
|
| 283 |
+
@abc.abstractmethod
|
| 284 |
+
def _is_chunked_te(self, te: str) -> bool: ...
|
| 285 |
+
|
| 286 |
+
def feed_eof(self) -> Optional[_MsgT]:
|
| 287 |
+
if self._payload_parser is not None:
|
| 288 |
+
self._payload_parser.feed_eof()
|
| 289 |
+
self._payload_parser = None
|
| 290 |
+
else:
|
| 291 |
+
# try to extract partial message
|
| 292 |
+
if self._tail:
|
| 293 |
+
self._lines.append(self._tail)
|
| 294 |
+
|
| 295 |
+
if self._lines:
|
| 296 |
+
if self._lines[-1] != "\r\n":
|
| 297 |
+
self._lines.append(b"")
|
| 298 |
+
with suppress(Exception):
|
| 299 |
+
return self.parse_message(self._lines)
|
| 300 |
+
return None
|
| 301 |
+
|
| 302 |
+
def feed_data(
|
| 303 |
+
self,
|
| 304 |
+
data: bytes,
|
| 305 |
+
SEP: _SEP = b"\r\n",
|
| 306 |
+
EMPTY: bytes = b"",
|
| 307 |
+
CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
|
| 308 |
+
METH_CONNECT: str = hdrs.METH_CONNECT,
|
| 309 |
+
SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
|
| 310 |
+
) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:
|
| 311 |
+
|
| 312 |
+
messages = []
|
| 313 |
+
|
| 314 |
+
if self._tail:
|
| 315 |
+
data, self._tail = self._tail + data, b""
|
| 316 |
+
|
| 317 |
+
data_len = len(data)
|
| 318 |
+
start_pos = 0
|
| 319 |
+
loop = self.loop
|
| 320 |
+
|
| 321 |
+
should_close = False
|
| 322 |
+
while start_pos < data_len:
|
| 323 |
+
|
| 324 |
+
# read HTTP message (request/response line + headers), \r\n\r\n
|
| 325 |
+
# and split by lines
|
| 326 |
+
if self._payload_parser is None and not self._upgraded:
|
| 327 |
+
pos = data.find(SEP, start_pos)
|
| 328 |
+
# consume \r\n
|
| 329 |
+
if pos == start_pos and not self._lines:
|
| 330 |
+
start_pos = pos + len(SEP)
|
| 331 |
+
continue
|
| 332 |
+
|
| 333 |
+
if pos >= start_pos:
|
| 334 |
+
if should_close:
|
| 335 |
+
raise BadHttpMessage("Data after `Connection: close`")
|
| 336 |
+
|
| 337 |
+
# line found
|
| 338 |
+
line = data[start_pos:pos]
|
| 339 |
+
if SEP == b"\n": # For lax response parsing
|
| 340 |
+
line = line.rstrip(b"\r")
|
| 341 |
+
self._lines.append(line)
|
| 342 |
+
start_pos = pos + len(SEP)
|
| 343 |
+
|
| 344 |
+
# \r\n\r\n found
|
| 345 |
+
if self._lines[-1] == EMPTY:
|
| 346 |
+
try:
|
| 347 |
+
msg: _MsgT = self.parse_message(self._lines)
|
| 348 |
+
finally:
|
| 349 |
+
self._lines.clear()
|
| 350 |
+
|
| 351 |
+
def get_content_length() -> Optional[int]:
|
| 352 |
+
# payload length
|
| 353 |
+
length_hdr = msg.headers.get(CONTENT_LENGTH)
|
| 354 |
+
if length_hdr is None:
|
| 355 |
+
return None
|
| 356 |
+
|
| 357 |
+
# Shouldn't allow +/- or other number formats.
|
| 358 |
+
# https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2
|
| 359 |
+
# msg.headers is already stripped of leading/trailing wsp
|
| 360 |
+
if not DIGITS.fullmatch(length_hdr):
|
| 361 |
+
raise InvalidHeader(CONTENT_LENGTH)
|
| 362 |
+
|
| 363 |
+
return int(length_hdr)
|
| 364 |
+
|
| 365 |
+
length = get_content_length()
|
| 366 |
+
# do not support old websocket spec
|
| 367 |
+
if SEC_WEBSOCKET_KEY1 in msg.headers:
|
| 368 |
+
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
|
| 369 |
+
|
| 370 |
+
self._upgraded = msg.upgrade and _is_supported_upgrade(
|
| 371 |
+
msg.headers
|
| 372 |
+
)
|
| 373 |
+
|
| 374 |
+
method = getattr(msg, "method", self.method)
|
| 375 |
+
# code is only present on responses
|
| 376 |
+
code = getattr(msg, "code", 0)
|
| 377 |
+
|
| 378 |
+
assert self.protocol is not None
|
| 379 |
+
# calculate payload
|
| 380 |
+
empty_body = code in EMPTY_BODY_STATUS_CODES or bool(
|
| 381 |
+
method and method in EMPTY_BODY_METHODS
|
| 382 |
+
)
|
| 383 |
+
if not empty_body and (
|
| 384 |
+
((length is not None and length > 0) or msg.chunked)
|
| 385 |
+
and not self._upgraded
|
| 386 |
+
):
|
| 387 |
+
payload = StreamReader(
|
| 388 |
+
self.protocol,
|
| 389 |
+
timer=self.timer,
|
| 390 |
+
loop=loop,
|
| 391 |
+
limit=self._limit,
|
| 392 |
+
)
|
| 393 |
+
payload_parser = HttpPayloadParser(
|
| 394 |
+
payload,
|
| 395 |
+
length=length,
|
| 396 |
+
chunked=msg.chunked,
|
| 397 |
+
method=method,
|
| 398 |
+
compression=msg.compression,
|
| 399 |
+
code=self.code,
|
| 400 |
+
response_with_body=self.response_with_body,
|
| 401 |
+
auto_decompress=self._auto_decompress,
|
| 402 |
+
lax=self.lax,
|
| 403 |
+
)
|
| 404 |
+
if not payload_parser.done:
|
| 405 |
+
self._payload_parser = payload_parser
|
| 406 |
+
elif method == METH_CONNECT:
|
| 407 |
+
assert isinstance(msg, RawRequestMessage)
|
| 408 |
+
payload = StreamReader(
|
| 409 |
+
self.protocol,
|
| 410 |
+
timer=self.timer,
|
| 411 |
+
loop=loop,
|
| 412 |
+
limit=self._limit,
|
| 413 |
+
)
|
| 414 |
+
self._upgraded = True
|
| 415 |
+
self._payload_parser = HttpPayloadParser(
|
| 416 |
+
payload,
|
| 417 |
+
method=msg.method,
|
| 418 |
+
compression=msg.compression,
|
| 419 |
+
auto_decompress=self._auto_decompress,
|
| 420 |
+
lax=self.lax,
|
| 421 |
+
)
|
| 422 |
+
elif not empty_body and length is None and self.read_until_eof:
|
| 423 |
+
payload = StreamReader(
|
| 424 |
+
self.protocol,
|
| 425 |
+
timer=self.timer,
|
| 426 |
+
loop=loop,
|
| 427 |
+
limit=self._limit,
|
| 428 |
+
)
|
| 429 |
+
payload_parser = HttpPayloadParser(
|
| 430 |
+
payload,
|
| 431 |
+
length=length,
|
| 432 |
+
chunked=msg.chunked,
|
| 433 |
+
method=method,
|
| 434 |
+
compression=msg.compression,
|
| 435 |
+
code=self.code,
|
| 436 |
+
response_with_body=self.response_with_body,
|
| 437 |
+
auto_decompress=self._auto_decompress,
|
| 438 |
+
lax=self.lax,
|
| 439 |
+
)
|
| 440 |
+
if not payload_parser.done:
|
| 441 |
+
self._payload_parser = payload_parser
|
| 442 |
+
else:
|
| 443 |
+
payload = EMPTY_PAYLOAD
|
| 444 |
+
|
| 445 |
+
messages.append((msg, payload))
|
| 446 |
+
should_close = msg.should_close
|
| 447 |
+
else:
|
| 448 |
+
self._tail = data[start_pos:]
|
| 449 |
+
data = EMPTY
|
| 450 |
+
break
|
| 451 |
+
|
| 452 |
+
# no parser, just store
|
| 453 |
+
elif self._payload_parser is None and self._upgraded:
|
| 454 |
+
assert not self._lines
|
| 455 |
+
break
|
| 456 |
+
|
| 457 |
+
# feed payload
|
| 458 |
+
elif data and start_pos < data_len:
|
| 459 |
+
assert not self._lines
|
| 460 |
+
assert self._payload_parser is not None
|
| 461 |
+
try:
|
| 462 |
+
eof, data = self._payload_parser.feed_data(data[start_pos:], SEP)
|
| 463 |
+
except BaseException as underlying_exc:
|
| 464 |
+
reraised_exc = underlying_exc
|
| 465 |
+
if self.payload_exception is not None:
|
| 466 |
+
reraised_exc = self.payload_exception(str(underlying_exc))
|
| 467 |
+
|
| 468 |
+
set_exception(
|
| 469 |
+
self._payload_parser.payload,
|
| 470 |
+
reraised_exc,
|
| 471 |
+
underlying_exc,
|
| 472 |
+
)
|
| 473 |
+
|
| 474 |
+
eof = True
|
| 475 |
+
data = b""
|
| 476 |
+
|
| 477 |
+
if eof:
|
| 478 |
+
start_pos = 0
|
| 479 |
+
data_len = len(data)
|
| 480 |
+
self._payload_parser = None
|
| 481 |
+
continue
|
| 482 |
+
else:
|
| 483 |
+
break
|
| 484 |
+
|
| 485 |
+
if data and start_pos < data_len:
|
| 486 |
+
data = data[start_pos:]
|
| 487 |
+
else:
|
| 488 |
+
data = EMPTY
|
| 489 |
+
|
| 490 |
+
return messages, self._upgraded, data
|
| 491 |
+
|
| 492 |
+
def parse_headers(
|
| 493 |
+
self, lines: List[bytes]
|
| 494 |
+
) -> Tuple[
|
| 495 |
+
"CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
|
| 496 |
+
]:
|
| 497 |
+
"""Parses RFC 5322 headers from a stream.
|
| 498 |
+
|
| 499 |
+
Line continuations are supported. Returns list of header name
|
| 500 |
+
and value pairs. Header name is in upper case.
|
| 501 |
+
"""
|
| 502 |
+
headers, raw_headers = self._headers_parser.parse_headers(lines)
|
| 503 |
+
close_conn = None
|
| 504 |
+
encoding = None
|
| 505 |
+
upgrade = False
|
| 506 |
+
chunked = False
|
| 507 |
+
|
| 508 |
+
# https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6
|
| 509 |
+
# https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf
|
| 510 |
+
singletons = (
|
| 511 |
+
hdrs.CONTENT_LENGTH,
|
| 512 |
+
hdrs.CONTENT_LOCATION,
|
| 513 |
+
hdrs.CONTENT_RANGE,
|
| 514 |
+
hdrs.CONTENT_TYPE,
|
| 515 |
+
hdrs.ETAG,
|
| 516 |
+
hdrs.HOST,
|
| 517 |
+
hdrs.MAX_FORWARDS,
|
| 518 |
+
hdrs.SERVER,
|
| 519 |
+
hdrs.TRANSFER_ENCODING,
|
| 520 |
+
hdrs.USER_AGENT,
|
| 521 |
+
)
|
| 522 |
+
bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None)
|
| 523 |
+
if bad_hdr is not None:
|
| 524 |
+
raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.")
|
| 525 |
+
|
| 526 |
+
# keep-alive
|
| 527 |
+
conn = headers.get(hdrs.CONNECTION)
|
| 528 |
+
if conn:
|
| 529 |
+
v = conn.lower()
|
| 530 |
+
if v == "close":
|
| 531 |
+
close_conn = True
|
| 532 |
+
elif v == "keep-alive":
|
| 533 |
+
close_conn = False
|
| 534 |
+
# https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols
|
| 535 |
+
elif v == "upgrade" and headers.get(hdrs.UPGRADE):
|
| 536 |
+
upgrade = True
|
| 537 |
+
|
| 538 |
+
# encoding
|
| 539 |
+
enc = headers.get(hdrs.CONTENT_ENCODING)
|
| 540 |
+
if enc:
|
| 541 |
+
enc = enc.lower()
|
| 542 |
+
if enc in ("gzip", "deflate", "br"):
|
| 543 |
+
encoding = enc
|
| 544 |
+
|
| 545 |
+
# chunking
|
| 546 |
+
te = headers.get(hdrs.TRANSFER_ENCODING)
|
| 547 |
+
if te is not None:
|
| 548 |
+
if self._is_chunked_te(te):
|
| 549 |
+
chunked = True
|
| 550 |
+
|
| 551 |
+
if hdrs.CONTENT_LENGTH in headers:
|
| 552 |
+
raise BadHttpMessage(
|
| 553 |
+
"Transfer-Encoding can't be present with Content-Length",
|
| 554 |
+
)
|
| 555 |
+
|
| 556 |
+
return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
|
| 557 |
+
|
| 558 |
+
def set_upgraded(self, val: bool) -> None:
|
| 559 |
+
"""Set connection upgraded (to websocket) mode.
|
| 560 |
+
|
| 561 |
+
:param bool val: new state.
|
| 562 |
+
"""
|
| 563 |
+
self._upgraded = val
|
| 564 |
+
|
| 565 |
+
|
| 566 |
+
class HttpRequestParser(HttpParser[RawRequestMessage]):
|
| 567 |
+
"""Read request status line.
|
| 568 |
+
|
| 569 |
+
Exception .http_exceptions.BadStatusLine
|
| 570 |
+
could be raised in case of any errors in status line.
|
| 571 |
+
Returns RawRequestMessage.
|
| 572 |
+
"""
|
| 573 |
+
|
| 574 |
+
def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
|
| 575 |
+
# request line
|
| 576 |
+
line = lines[0].decode("utf-8", "surrogateescape")
|
| 577 |
+
try:
|
| 578 |
+
method, path, version = line.split(" ", maxsplit=2)
|
| 579 |
+
except ValueError:
|
| 580 |
+
raise BadHttpMethod(line) from None
|
| 581 |
+
|
| 582 |
+
if len(path) > self.max_line_size:
|
| 583 |
+
raise LineTooLong(
|
| 584 |
+
"Status line is too long", str(self.max_line_size), str(len(path))
|
| 585 |
+
)
|
| 586 |
+
|
| 587 |
+
# method
|
| 588 |
+
if not TOKENRE.fullmatch(method):
|
| 589 |
+
raise BadHttpMethod(method)
|
| 590 |
+
|
| 591 |
+
# version
|
| 592 |
+
match = VERSRE.fullmatch(version)
|
| 593 |
+
if match is None:
|
| 594 |
+
raise BadStatusLine(line)
|
| 595 |
+
version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
|
| 596 |
+
|
| 597 |
+
if method == "CONNECT":
|
| 598 |
+
# authority-form,
|
| 599 |
+
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
|
| 600 |
+
url = URL.build(authority=path, encoded=True)
|
| 601 |
+
elif path.startswith("/"):
|
| 602 |
+
# origin-form,
|
| 603 |
+
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
|
| 604 |
+
path_part, _hash_separator, url_fragment = path.partition("#")
|
| 605 |
+
path_part, _question_mark_separator, qs_part = path_part.partition("?")
|
| 606 |
+
|
| 607 |
+
# NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
|
| 608 |
+
# NOTE: parser does, otherwise it results into the same
|
| 609 |
+
# NOTE: HTTP Request-Line input producing different
|
| 610 |
+
# NOTE: `yarl.URL()` objects
|
| 611 |
+
url = URL.build(
|
| 612 |
+
path=path_part,
|
| 613 |
+
query_string=qs_part,
|
| 614 |
+
fragment=url_fragment,
|
| 615 |
+
encoded=True,
|
| 616 |
+
)
|
| 617 |
+
elif path == "*" and method == "OPTIONS":
|
| 618 |
+
# asterisk-form,
|
| 619 |
+
url = URL(path, encoded=True)
|
| 620 |
+
else:
|
| 621 |
+
# absolute-form for proxy maybe,
|
| 622 |
+
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
|
| 623 |
+
url = URL(path, encoded=True)
|
| 624 |
+
if url.scheme == "":
|
| 625 |
+
# not absolute-form
|
| 626 |
+
raise InvalidURLError(
|
| 627 |
+
path.encode(errors="surrogateescape").decode("latin1")
|
| 628 |
+
)
|
| 629 |
+
|
| 630 |
+
# read headers
|
| 631 |
+
(
|
| 632 |
+
headers,
|
| 633 |
+
raw_headers,
|
| 634 |
+
close,
|
| 635 |
+
compression,
|
| 636 |
+
upgrade,
|
| 637 |
+
chunked,
|
| 638 |
+
) = self.parse_headers(lines)
|
| 639 |
+
|
| 640 |
+
if close is None: # then the headers weren't set in the request
|
| 641 |
+
if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close
|
| 642 |
+
close = True
|
| 643 |
+
else: # HTTP 1.1 must ask to close.
|
| 644 |
+
close = False
|
| 645 |
+
|
| 646 |
+
return RawRequestMessage(
|
| 647 |
+
method,
|
| 648 |
+
path,
|
| 649 |
+
version_o,
|
| 650 |
+
headers,
|
| 651 |
+
raw_headers,
|
| 652 |
+
close,
|
| 653 |
+
compression,
|
| 654 |
+
upgrade,
|
| 655 |
+
chunked,
|
| 656 |
+
url,
|
| 657 |
+
)
|
| 658 |
+
|
| 659 |
+
def _is_chunked_te(self, te: str) -> bool:
|
| 660 |
+
if te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked":
|
| 661 |
+
return True
|
| 662 |
+
# https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3
|
| 663 |
+
raise BadHttpMessage("Request has invalid `Transfer-Encoding`")
|
| 664 |
+
|
| 665 |
+
|
| 666 |
+
class HttpResponseParser(HttpParser[RawResponseMessage]):
|
| 667 |
+
"""Read response status line and headers.
|
| 668 |
+
|
| 669 |
+
BadStatusLine could be raised in case of any errors in status line.
|
| 670 |
+
Returns RawResponseMessage.
|
| 671 |
+
"""
|
| 672 |
+
|
| 673 |
+
# Lax mode should only be enabled on response parser.
|
| 674 |
+
lax = not DEBUG
|
| 675 |
+
|
| 676 |
+
def feed_data(
|
| 677 |
+
self,
|
| 678 |
+
data: bytes,
|
| 679 |
+
SEP: Optional[_SEP] = None,
|
| 680 |
+
*args: Any,
|
| 681 |
+
**kwargs: Any,
|
| 682 |
+
) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]:
|
| 683 |
+
if SEP is None:
|
| 684 |
+
SEP = b"\r\n" if DEBUG else b"\n"
|
| 685 |
+
return super().feed_data(data, SEP, *args, **kwargs)
|
| 686 |
+
|
| 687 |
+
def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
|
| 688 |
+
line = lines[0].decode("utf-8", "surrogateescape")
|
| 689 |
+
try:
|
| 690 |
+
version, status = line.split(maxsplit=1)
|
| 691 |
+
except ValueError:
|
| 692 |
+
raise BadStatusLine(line) from None
|
| 693 |
+
|
| 694 |
+
try:
|
| 695 |
+
status, reason = status.split(maxsplit=1)
|
| 696 |
+
except ValueError:
|
| 697 |
+
status = status.strip()
|
| 698 |
+
reason = ""
|
| 699 |
+
|
| 700 |
+
if len(reason) > self.max_line_size:
|
| 701 |
+
raise LineTooLong(
|
| 702 |
+
"Status line is too long", str(self.max_line_size), str(len(reason))
|
| 703 |
+
)
|
| 704 |
+
|
| 705 |
+
# version
|
| 706 |
+
match = VERSRE.fullmatch(version)
|
| 707 |
+
if match is None:
|
| 708 |
+
raise BadStatusLine(line)
|
| 709 |
+
version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
|
| 710 |
+
|
| 711 |
+
# The status code is a three-digit ASCII number, no padding
|
| 712 |
+
if len(status) != 3 or not DIGITS.fullmatch(status):
|
| 713 |
+
raise BadStatusLine(line)
|
| 714 |
+
status_i = int(status)
|
| 715 |
+
|
| 716 |
+
# read headers
|
| 717 |
+
(
|
| 718 |
+
headers,
|
| 719 |
+
raw_headers,
|
| 720 |
+
close,
|
| 721 |
+
compression,
|
| 722 |
+
upgrade,
|
| 723 |
+
chunked,
|
| 724 |
+
) = self.parse_headers(lines)
|
| 725 |
+
|
| 726 |
+
if close is None:
|
| 727 |
+
if version_o <= HttpVersion10:
|
| 728 |
+
close = True
|
| 729 |
+
# https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length
|
| 730 |
+
elif 100 <= status_i < 200 or status_i in {204, 304}:
|
| 731 |
+
close = False
|
| 732 |
+
elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers:
|
| 733 |
+
close = False
|
| 734 |
+
else:
|
| 735 |
+
# https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8
|
| 736 |
+
close = True
|
| 737 |
+
|
| 738 |
+
return RawResponseMessage(
|
| 739 |
+
version_o,
|
| 740 |
+
status_i,
|
| 741 |
+
reason.strip(),
|
| 742 |
+
headers,
|
| 743 |
+
raw_headers,
|
| 744 |
+
close,
|
| 745 |
+
compression,
|
| 746 |
+
upgrade,
|
| 747 |
+
chunked,
|
| 748 |
+
)
|
| 749 |
+
|
| 750 |
+
def _is_chunked_te(self, te: str) -> bool:
|
| 751 |
+
# https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2
|
| 752 |
+
return te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked"
|
| 753 |
+
|
| 754 |
+
|
| 755 |
+
class HttpPayloadParser:
|
| 756 |
+
def __init__(
|
| 757 |
+
self,
|
| 758 |
+
payload: StreamReader,
|
| 759 |
+
length: Optional[int] = None,
|
| 760 |
+
chunked: bool = False,
|
| 761 |
+
compression: Optional[str] = None,
|
| 762 |
+
code: Optional[int] = None,
|
| 763 |
+
method: Optional[str] = None,
|
| 764 |
+
response_with_body: bool = True,
|
| 765 |
+
auto_decompress: bool = True,
|
| 766 |
+
lax: bool = False,
|
| 767 |
+
) -> None:
|
| 768 |
+
self._length = 0
|
| 769 |
+
self._type = ParseState.PARSE_UNTIL_EOF
|
| 770 |
+
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
|
| 771 |
+
self._chunk_size = 0
|
| 772 |
+
self._chunk_tail = b""
|
| 773 |
+
self._auto_decompress = auto_decompress
|
| 774 |
+
self._lax = lax
|
| 775 |
+
self.done = False
|
| 776 |
+
|
| 777 |
+
# payload decompression wrapper
|
| 778 |
+
if response_with_body and compression and self._auto_decompress:
|
| 779 |
+
real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer(
|
| 780 |
+
payload, compression
|
| 781 |
+
)
|
| 782 |
+
else:
|
| 783 |
+
real_payload = payload
|
| 784 |
+
|
| 785 |
+
# payload parser
|
| 786 |
+
if not response_with_body:
|
| 787 |
+
# don't parse payload if it's not expected to be received
|
| 788 |
+
self._type = ParseState.PARSE_NONE
|
| 789 |
+
real_payload.feed_eof()
|
| 790 |
+
self.done = True
|
| 791 |
+
elif chunked:
|
| 792 |
+
self._type = ParseState.PARSE_CHUNKED
|
| 793 |
+
elif length is not None:
|
| 794 |
+
self._type = ParseState.PARSE_LENGTH
|
| 795 |
+
self._length = length
|
| 796 |
+
if self._length == 0:
|
| 797 |
+
real_payload.feed_eof()
|
| 798 |
+
self.done = True
|
| 799 |
+
|
| 800 |
+
self.payload = real_payload
|
| 801 |
+
|
| 802 |
+
def feed_eof(self) -> None:
|
| 803 |
+
if self._type == ParseState.PARSE_UNTIL_EOF:
|
| 804 |
+
self.payload.feed_eof()
|
| 805 |
+
elif self._type == ParseState.PARSE_LENGTH:
|
| 806 |
+
raise ContentLengthError(
|
| 807 |
+
"Not enough data for satisfy content length header."
|
| 808 |
+
)
|
| 809 |
+
elif self._type == ParseState.PARSE_CHUNKED:
|
| 810 |
+
raise TransferEncodingError(
|
| 811 |
+
"Not enough data for satisfy transfer length header."
|
| 812 |
+
)
|
| 813 |
+
|
| 814 |
+
    def feed_data(
        self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";"
    ) -> Tuple[bool, bytes]:
        """Feed raw body bytes into the payload parser state machine.

        Returns ``(done, tail)`` where ``done`` is True once the payload is
        complete and ``tail`` is any leftover bytes belonging to the next
        message on the connection.  ``SEP`` and ``CHUNK_EXT`` are passed as
        defaults so they are locals (fast lookups) in this hot path.
        """
        # Read specified amount of bytes
        if self._type == ParseState.PARSE_LENGTH:
            required = self._length
            chunk_len = len(chunk)

            if required >= chunk_len:
                # Entire chunk belongs to this body; may or may not finish it.
                self._length = required - chunk_len
                self.payload.feed_data(chunk, chunk_len)
                if self._length == 0:
                    self.payload.feed_eof()
                    return True, b""
            else:
                # Chunk contains the rest of this body plus the start of the
                # next message; return the excess as the tail.
                self._length = 0
                self.payload.feed_data(chunk[:required], required)
                self.payload.feed_eof()
                return True, chunk[required:]

        # Chunked transfer encoding parser
        elif self._type == ParseState.PARSE_CHUNKED:
            if self._chunk_tail:
                # Prepend bytes buffered from the previous incomplete feed.
                chunk = self._chunk_tail + chunk
                self._chunk_tail = b""

            while chunk:

                # read next chunk size
                if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        i = chunk.find(CHUNK_EXT, 0, pos)
                        if i >= 0:
                            size_b = chunk[:i]  # strip chunk-extensions
                            # Verify no LF in the chunk-extension
                            if b"\n" in (ext := chunk[i:pos]):
                                exc = BadHttpMessage(
                                    f"Unexpected LF in chunk-extension: {ext!r}"
                                )
                                set_exception(self.payload, exc)
                                raise exc
                        else:
                            size_b = chunk[:pos]

                        if self._lax:  # Allow whitespace in lax mode.
                            size_b = size_b.strip()

                        if not re.fullmatch(HEXDIGITS, size_b):
                            exc = TransferEncodingError(
                                chunk[:pos].decode("ascii", "surrogateescape")
                            )
                            set_exception(self.payload, exc)
                            raise exc
                        size = int(bytes(size_b), 16)

                        chunk = chunk[pos + len(SEP) :]
                        if size == 0:  # eof marker
                            self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                            if self._lax and chunk.startswith(b"\r"):
                                chunk = chunk[1:]
                        else:
                            self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
                            self._chunk_size = size
                            self.payload.begin_http_chunk_receiving()
                    else:
                        # Size line not complete yet; buffer and wait for more.
                        self._chunk_tail = chunk
                        return False, b""

                # read chunk and feed buffer
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
                    required = self._chunk_size
                    chunk_len = len(chunk)

                    if required > chunk_len:
                        self._chunk_size = required - chunk_len
                        self.payload.feed_data(chunk, chunk_len)
                        return False, b""
                    else:
                        self._chunk_size = 0
                        self.payload.feed_data(chunk[:required], required)
                        chunk = chunk[required:]
                        self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
                        self.payload.end_http_chunk_receiving()

                # toss the CRLF at the end of the chunk
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
                    if self._lax and chunk.startswith(b"\r"):
                        chunk = chunk[1:]
                    if chunk[: len(SEP)] == SEP:
                        chunk = chunk[len(SEP) :]
                        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # if stream does not contain trailer, after 0\r\n
                # we should get another \r\n otherwise
                # trailers needs to be skipped until \r\n\r\n
                if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
                    head = chunk[: len(SEP)]
                    if head == SEP:
                        # end of stream
                        self.payload.feed_eof()
                        return True, chunk[len(SEP) :]
                    # Both CR and LF, or only LF may not be received yet. It is
                    # expected that CRLF or LF will be shown at the very first
                    # byte next time, otherwise trailers should come. The last
                    # CRLF which marks the end of response might not be
                    # contained in the same TCP segment which delivered the
                    # size indicator.
                    if not head:
                        return False, b""
                    if head == SEP[:1]:
                        self._chunk_tail = head
                        return False, b""
                    self._chunk = ChunkState.PARSE_TRAILERS

                # read and discard trailer up to the CRLF terminator
                if self._chunk == ChunkState.PARSE_TRAILERS:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        chunk = chunk[pos + len(SEP) :]
                        self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                    else:
                        self._chunk_tail = chunk
                        return False, b""

        # Read all bytes until eof
        elif self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_data(chunk, len(chunk))

        return False, b""
+
|
| 948 |
+
|
| 949 |
+
class DeflateBuffer:
    """DeflateStream decompress stream and feed data into specified stream."""

    decompressor: Any

    def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
        # Downstream reader that receives the decompressed bytes.
        self.out = out
        # Total number of *compressed* bytes fed so far.
        self.size = 0
        self.encoding = encoding
        # Becomes True after the first chunk has been decoded; used to
        # detect non-RFC-1950 "deflate" streams on the very first byte.
        self._started_decoding = False

        self.decompressor: Union[BrotliDecompressor, ZLibDecompressor]
        if encoding == "br":
            if not HAS_BROTLI:  # pragma: no cover
                raise ContentEncodingError(
                    "Can not decode content-encoding: brotli (br). "
                    "Please install `Brotli`"
                )
            self.decompressor = BrotliDecompressor()
        else:
            self.decompressor = ZLibDecompressor(encoding=encoding)

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        # Propagate the failure to the wrapped output stream.
        set_exception(self.out, exc, exc_cause)

    def feed_data(self, chunk: bytes, size: int) -> None:
        """Decompress *chunk* and forward the result to the output stream."""
        if not size:
            return

        self.size += size

        # RFC1950
        # bits 0..3 = CM = 0b1000 = 8 = "deflate"
        # bits 4..7 = CINFO = 1..7 = windows size.
        if (
            not self._started_decoding
            and self.encoding == "deflate"
            and chunk[0] & 0xF != 8
        ):
            # Change the decoder to decompress incorrectly compressed data
            # Actually we should issue a warning about non-RFC-compliant data.
            self.decompressor = ZLibDecompressor(
                encoding=self.encoding, suppress_deflate_header=True
            )

        try:
            chunk = self.decompressor.decompress_sync(chunk)
        except Exception:
            raise ContentEncodingError(
                "Can not decode content-encoding: %s" % self.encoding
            )

        self._started_decoding = True

        if chunk:
            self.out.feed_data(chunk, len(chunk))

    def feed_eof(self) -> None:
        """Flush the decompressor and finish the output stream."""
        chunk = self.decompressor.flush()

        if chunk or self.size > 0:
            self.out.feed_data(chunk, len(chunk))
            # A deflate stream that never reached its end marker is truncated.
            if self.encoding == "deflate" and not self.decompressor.eof:
                raise ContentEncodingError("deflate")

        self.out.feed_eof()

    def begin_http_chunk_receiving(self) -> None:
        # Pass chunk boundaries through to the wrapped stream.
        self.out.begin_http_chunk_receiving()

    def end_http_chunk_receiving(self) -> None:
        self.out.end_http_chunk_receiving()
|
| 1026 |
+
|
| 1027 |
+
# Pure-Python implementations are always kept reachable under the *Py names,
# even when the C extension below replaces the public ones.
HttpRequestParserPy = HttpRequestParser
HttpResponseParserPy = HttpResponseParser
RawRequestMessagePy = RawRequestMessage
RawResponseMessagePy = RawResponseMessage

try:
    if not NO_EXTENSIONS:
        # Prefer the C-accelerated parser when extensions are enabled.
        from ._http_parser import (  # type: ignore[import-not-found,no-redef]
            HttpRequestParser,
            HttpResponseParser,
            RawRequestMessage,
            RawResponseMessage,
        )

        HttpRequestParserC = HttpRequestParser
        HttpResponseParserC = HttpResponseParser
        RawRequestMessageC = RawRequestMessage
        RawResponseMessageC = RawResponseMessage
except ImportError:  # pragma: no cover
    pass
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/http_writer.py
ADDED
|
@@ -0,0 +1,234 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Http related parsers and protocol."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import zlib
|
| 5 |
+
from typing import ( # noqa
|
| 6 |
+
Any,
|
| 7 |
+
Awaitable,
|
| 8 |
+
Callable,
|
| 9 |
+
Iterable,
|
| 10 |
+
List,
|
| 11 |
+
NamedTuple,
|
| 12 |
+
Optional,
|
| 13 |
+
Union,
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
from multidict import CIMultiDict
|
| 17 |
+
|
| 18 |
+
from .abc import AbstractStreamWriter
|
| 19 |
+
from .base_protocol import BaseProtocol
|
| 20 |
+
from .client_exceptions import ClientConnectionResetError
|
| 21 |
+
from .compression_utils import ZLibCompressor
|
| 22 |
+
from .helpers import NO_EXTENSIONS
|
| 23 |
+
|
| 24 |
+
__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class HttpVersion(NamedTuple):
    """HTTP protocol version as a (major, minor) pair, e.g. (1, 1)."""

    major: int
    minor: int


HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)


# Optional callbacks invoked as each body chunk / the header block is sent.
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]
|
| 39 |
+
|
| 40 |
+
class StreamWriter(AbstractStreamWriter):
    """Writes an HTTP message body to a transport.

    Supports optional chunked transfer encoding, optional zlib/deflate
    compression, and Content-Length enforcement via ``length``.
    """

    # Remaining Content-Length budget; None means unlimited.
    length: Optional[int] = None
    # True once chunked transfer encoding is enabled.
    chunked: bool = False
    # True after write_eof()/set_eof(); further writes are no-ops.
    _eof: bool = False
    # Compressor installed by enable_compression(), if any.
    _compress: Optional[ZLibCompressor] = None

    def __init__(
        self,
        protocol: BaseProtocol,
        loop: asyncio.AbstractEventLoop,
        on_chunk_sent: _T_OnChunkSent = None,
        on_headers_sent: _T_OnHeadersSent = None,
    ) -> None:
        self._protocol = protocol
        self.loop = loop
        self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent
        self._on_headers_sent: _T_OnHeadersSent = on_headers_sent

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        """Underlying asyncio transport (None once the connection is lost)."""
        return self._protocol.transport

    @property
    def protocol(self) -> BaseProtocol:
        """Protocol instance that owns the transport."""
        return self._protocol

    def enable_chunking(self) -> None:
        """Switch the body to chunked transfer encoding."""
        self.chunked = True

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        """Compress the body with *encoding* before writing."""
        self._compress = ZLibCompressor(encoding=encoding, strategy=strategy)

    def _write(self, chunk: Union[bytes, bytearray, memoryview]) -> None:
        # Low-level single-buffer write; raises if the connection is gone.
        size = len(chunk)
        self.buffer_size += size
        self.output_size += size
        transport = self._protocol.transport
        if transport is None or transport.is_closing():
            raise ClientConnectionResetError("Cannot write to closing transport")
        transport.write(chunk)

    def _writelines(self, chunks: Iterable[bytes]) -> None:
        # Low-level multi-buffer write; joins once to avoid several syscalls.
        size = 0
        for chunk in chunks:
            size += len(chunk)
        self.buffer_size += size
        self.output_size += size
        transport = self._protocol.transport
        if transport is None or transport.is_closing():
            raise ClientConnectionResetError("Cannot write to closing transport")
        transport.write(b"".join(chunks))

    async def write(
        self,
        chunk: Union[bytes, bytearray, memoryview],
        *,
        drain: bool = True,
        LIMIT: int = 0x10000,
    ) -> None:
        """Writes chunk of data to a stream.

        write_eof() indicates end of stream.
        writer can't be used after write_eof() method being called.
        write() return drain future.
        """
        if self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if isinstance(chunk, memoryview):
            if chunk.nbytes != len(chunk):
                # just reshape it
                chunk = chunk.cast("c")

        if self._compress is not None:
            chunk = await self._compress.compress(chunk)
            if not chunk:
                return

        if self.length is not None:
            chunk_len = len(chunk)
            if self.length >= chunk_len:
                self.length = self.length - chunk_len
            else:
                # Truncate anything beyond the declared Content-Length.
                chunk = chunk[: self.length]
                self.length = 0
                if not chunk:
                    return

        if chunk:
            if self.chunked:
                # Frame as a single chunked-encoding chunk.
                self._writelines(
                    (f"{len(chunk):x}\r\n".encode("ascii"), chunk, b"\r\n")
                )
            else:
                self._write(chunk)

            if self.buffer_size > LIMIT and drain:
                self.buffer_size = 0
                await self.drain()

    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write request/response status and headers."""
        if self._on_headers_sent is not None:
            await self._on_headers_sent(headers)

        # status + headers
        buf = _serialize_headers(status_line, headers)
        self._write(buf)

    def set_eof(self) -> None:
        """Indicate that the message is complete."""
        self._eof = True

    async def write_eof(self, chunk: bytes = b"") -> None:
        """Finish the body, optionally writing a final *chunk* first."""
        if self._eof:
            return

        if chunk and self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if self._compress:
            # Flush the compressor; the final data may span two buffers
            # (compressed tail chunk + flush output).
            chunks: List[bytes] = []
            chunks_len = 0
            if chunk and (compressed_chunk := await self._compress.compress(chunk)):
                chunks_len = len(compressed_chunk)
                chunks.append(compressed_chunk)

            flush_chunk = self._compress.flush()
            chunks_len += len(flush_chunk)
            chunks.append(flush_chunk)
            assert chunks_len

            if self.chunked:
                chunk_len_pre = f"{chunks_len:x}\r\n".encode("ascii")
                self._writelines((chunk_len_pre, *chunks, b"\r\n0\r\n\r\n"))
            elif len(chunks) > 1:
                self._writelines(chunks)
            else:
                self._write(chunks[0])
        elif self.chunked:
            if chunk:
                chunk_len_pre = f"{len(chunk):x}\r\n".encode("ascii")
                self._writelines((chunk_len_pre, chunk, b"\r\n0\r\n\r\n"))
            else:
                # Terminating zero-length chunk.
                self._write(b"0\r\n\r\n")
        elif chunk:
            self._write(chunk)

        await self.drain()

        self._eof = True

    async def drain(self) -> None:
        """Flush the write buffer.

        The intended use is to write

        await w.write(data)
        await w.drain()
        """
        protocol = self._protocol
        if protocol.transport is not None and protocol._paused:
            await protocol._drain_helper()
+
|
| 209 |
+
|
| 210 |
+
def _safe_header(string: str) -> str:
|
| 211 |
+
if "\r" in string or "\n" in string:
|
| 212 |
+
raise ValueError(
|
| 213 |
+
"Newline or carriage return detected in headers. "
|
| 214 |
+
"Potential header injection attack."
|
| 215 |
+
)
|
| 216 |
+
return string
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
|
| 220 |
+
headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items())
|
| 221 |
+
line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n"
|
| 222 |
+
return line.encode("utf-8")
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
# Default to the pure-Python serializer; the C extension replaces it below
# when available and extensions are not disabled.
_serialize_headers = _py_serialize_headers

try:
    import aiohttp._http_writer as _http_writer  # type: ignore[import-not-found]

    _c_serialize_headers = _http_writer._serialize_headers
    if not NO_EXTENSIONS:
        _serialize_headers = _c_serialize_headers
except ImportError:
    pass
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/log.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging

# Per-subsystem loggers; configure handlers/levels on these names
# (or on the shared "aiohttp" parent) from application code.
access_logger = logging.getLogger("aiohttp.access")
client_logger = logging.getLogger("aiohttp.client")
internal_logger = logging.getLogger("aiohttp.internal")
server_logger = logging.getLogger("aiohttp.server")
web_logger = logging.getLogger("aiohttp.web")
ws_logger = logging.getLogger("aiohttp.websocket")
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/payload.py
ADDED
|
@@ -0,0 +1,519 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import enum
|
| 3 |
+
import io
|
| 4 |
+
import json
|
| 5 |
+
import mimetypes
|
| 6 |
+
import os
|
| 7 |
+
import sys
|
| 8 |
+
import warnings
|
| 9 |
+
from abc import ABC, abstractmethod
|
| 10 |
+
from itertools import chain
|
| 11 |
+
from typing import (
|
| 12 |
+
IO,
|
| 13 |
+
TYPE_CHECKING,
|
| 14 |
+
Any,
|
| 15 |
+
Dict,
|
| 16 |
+
Final,
|
| 17 |
+
Iterable,
|
| 18 |
+
Optional,
|
| 19 |
+
TextIO,
|
| 20 |
+
Tuple,
|
| 21 |
+
Type,
|
| 22 |
+
Union,
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
from multidict import CIMultiDict
|
| 26 |
+
|
| 27 |
+
from . import hdrs
|
| 28 |
+
from .abc import AbstractStreamWriter
|
| 29 |
+
from .helpers import (
|
| 30 |
+
_SENTINEL,
|
| 31 |
+
content_disposition_header,
|
| 32 |
+
guess_filename,
|
| 33 |
+
parse_mimetype,
|
| 34 |
+
sentinel,
|
| 35 |
+
)
|
| 36 |
+
from .streams import StreamReader
|
| 37 |
+
from .typedefs import JSONEncoder, _CIMultiDict
|
| 38 |
+
|
| 39 |
+
__all__ = (
|
| 40 |
+
"PAYLOAD_REGISTRY",
|
| 41 |
+
"get_payload",
|
| 42 |
+
"payload_type",
|
| 43 |
+
"Payload",
|
| 44 |
+
"BytesPayload",
|
| 45 |
+
"StringPayload",
|
| 46 |
+
"IOBasePayload",
|
| 47 |
+
"BytesIOPayload",
|
| 48 |
+
"BufferedReaderPayload",
|
| 49 |
+
"TextIOPayload",
|
| 50 |
+
"StringIOPayload",
|
| 51 |
+
"JsonPayload",
|
| 52 |
+
"AsyncIterablePayload",
|
| 53 |
+
)
|
| 54 |
+
|
| 55 |
+
TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB
|
| 56 |
+
|
| 57 |
+
if TYPE_CHECKING:
|
| 58 |
+
from typing import List
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class LookupError(Exception):
    # Raised by PayloadRegistry.get() when no factory matches the data.
    # NOTE(review): intentionally shadows the builtin LookupError name;
    # kept as-is because it is part of this module's public API.
    pass
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class Order(str, enum.Enum):
    """Lookup priority of a registered payload factory."""

    normal = "normal"
    try_first = "try_first"
    try_last = "try_last"
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
    """Build a Payload for *data* via the module-level registry."""
    return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def register_payload(
    factory: Type["Payload"], type: Any, *, order: Order = Order.normal
) -> None:
    """Register *factory* for values of *type* in the module-level registry."""
    PAYLOAD_REGISTRY.register(factory, type, order=order)
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class payload_type:
    """Class decorator registering the decorated Payload for a given type."""

    def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
        self.type = type
        self.order = order

    def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
        # Register then return the class unchanged so it stays usable directly.
        register_payload(factory, self.type, order=self.order)
        return factory
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
PayloadType = Type["Payload"]
|
| 92 |
+
_PayloadRegistryItem = Tuple[PayloadType, Any]
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
class PayloadRegistry:
    """Payload registry.

    note: we need zope.interface for more efficient adapter search
    """

    __slots__ = ("_first", "_normal", "_last", "_normal_lookup")

    def __init__(self) -> None:
        # Factories checked before / in / after the normal group.
        self._first: List[_PayloadRegistryItem] = []
        self._normal: List[_PayloadRegistryItem] = []
        self._last: List[_PayloadRegistryItem] = []
        # Exact-type fast path for the normal group (type -> factory).
        self._normal_lookup: Dict[Any, PayloadType] = {}

    def get(
        self,
        data: Any,
        *args: Any,
        _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
        **kwargs: Any,
    ) -> "Payload":
        """Return a Payload wrapping *data*, or raise LookupError.

        Resolution order: try_first factories, exact-type fast lookup,
        pass-through for existing Payload instances, then a linear
        isinstance scan over normal + try_last factories.
        """
        if self._first:
            for factory, type_ in self._first:
                if isinstance(data, type_):
                    return factory(data, *args, **kwargs)
        # Try the fast lookup first
        if lookup_factory := self._normal_lookup.get(type(data)):
            return lookup_factory(data, *args, **kwargs)
        # Bail early if its already a Payload
        if isinstance(data, Payload):
            return data
        # Fallback to the slower linear search
        for factory, type_ in _CHAIN(self._normal, self._last):
            if isinstance(data, type_):
                return factory(data, *args, **kwargs)
        raise LookupError()

    def register(
        self, factory: PayloadType, type: Any, *, order: Order = Order.normal
    ) -> None:
        """Register *factory* for *type* (a type or iterable of types)."""
        if order is Order.try_first:
            self._first.append((factory, type))
        elif order is Order.normal:
            self._normal.append((factory, type))
            # Also populate the exact-type fast path used by get().
            if isinstance(type, Iterable):
                for t in type:
                    self._normal_lookup[t] = factory
            else:
                self._normal_lookup[type] = factory
        elif order is Order.try_last:
            self._last.append((factory, type))
        else:
            raise ValueError(f"Unsupported order {order!r}")
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
class Payload(ABC):
    """Abstract base for request/response body payloads.

    Subclasses must implement decode() and the async write() used to
    stream the value to an AbstractStreamWriter.
    """

    # Content-Type used when none is given and none can be guessed.
    _default_content_type: str = "application/octet-stream"
    # Byte size of the payload, or None when unknown (e.g. streaming).
    _size: Optional[int] = None

    def __init__(
        self,
        value: Any,
        headers: Optional[
            Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
        ] = None,
        content_type: Union[str, None, _SENTINEL] = sentinel,
        filename: Optional[str] = None,
        encoding: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        self._encoding = encoding
        self._filename = filename
        self._headers: _CIMultiDict = CIMultiDict()
        self._value = value
        if content_type is not sentinel and content_type is not None:
            # Explicit content type wins.
            self._headers[hdrs.CONTENT_TYPE] = content_type
        elif self._filename is not None:
            # Guess the content type from the filename extension.
            if sys.version_info >= (3, 13):
                guesser = mimetypes.guess_file_type
            else:
                guesser = mimetypes.guess_type
            content_type = guesser(self._filename)[0]
            if content_type is None:
                content_type = self._default_content_type
            self._headers[hdrs.CONTENT_TYPE] = content_type
        else:
            self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
        if headers:
            # Caller-supplied headers may override/extend the defaults.
            self._headers.update(headers)

    @property
    def size(self) -> Optional[int]:
        """Size of the payload."""
        return self._size

    @property
    def filename(self) -> Optional[str]:
        """Filename of the payload."""
        return self._filename

    @property
    def headers(self) -> _CIMultiDict:
        """Custom item headers"""
        return self._headers

    @property
    def _binary_headers(self) -> bytes:
        # Headers rendered as a raw UTF-8 block terminated by a blank line.
        return (
            "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
                "utf-8"
            )
            + b"\r\n"
        )

    @property
    def encoding(self) -> Optional[str]:
        """Payload encoding"""
        return self._encoding

    @property
    def content_type(self) -> str:
        """Content type"""
        return self._headers[hdrs.CONTENT_TYPE]

    def set_content_disposition(
        self,
        disptype: str,
        quote_fields: bool = True,
        _charset: str = "utf-8",
        **params: Any,
    ) -> None:
        """Sets ``Content-Disposition`` header."""
        self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
            disptype, quote_fields=quote_fields, _charset=_charset, **params
        )

    @abstractmethod
    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Return string representation of the value.

        This is named decode() to allow compatibility with bytes objects.
        """

    @abstractmethod
    async def write(self, writer: AbstractStreamWriter) -> None:
        """Write payload.

        writer is an AbstractStreamWriter instance:
        """
|
| 247 |
+
class BytesPayload(Payload):
    """Payload over an in-memory bytes-like value (bytes/bytearray/memoryview)."""

    _value: bytes

    def __init__(
        self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any
    ) -> None:
        if "content_type" not in kwargs:
            kwargs["content_type"] = "application/octet-stream"

        super().__init__(value, *args, **kwargs)

        if isinstance(value, memoryview):
            self._size = value.nbytes
        elif isinstance(value, (bytes, bytearray)):
            self._size = len(value)
        else:
            raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")

        if self._size > TOO_LARGE_BYTES_BODY:
            # Warn: large raw bodies should be wrapped in BytesIO so they
            # can be streamed instead of blocking the event loop.
            kwargs = {"source": self}
            warnings.warn(
                "Sending a large body directly with raw bytes might"
                " lock the event loop. You should probably pass an "
                "io.BytesIO object instead",
                ResourceWarning,
                **kwargs,
            )

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Decode the stored bytes to text."""
        return self._value.decode(encoding, errors)

    async def write(self, writer: AbstractStreamWriter) -> None:
        """Write the stored bytes to *writer* in one call."""
        await writer.write(self._value)
|
| 280 |
+
|
| 281 |
+
|
| 282 |
+
class StringPayload(BytesPayload):
    """Payload for a text string, encoded to bytes up front."""

    def __init__(
        self,
        value: str,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        # Derive the missing half of (encoding, content_type) from the
        # one that was supplied; fall back to UTF-8 text/plain.
        if encoding is not None:
            real_encoding = encoding
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding
        elif content_type is not None:
            mimetype = parse_mimetype(content_type)
            real_encoding = mimetype.parameters.get("charset", "utf-8")
        else:
            real_encoding = "utf-8"
            content_type = "text/plain; charset=utf-8"

        super().__init__(
            value.encode(real_encoding),
            encoding=real_encoding,
            content_type=content_type,
            *args,
            **kwargs,
        )
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
class StringIOPayload(StringPayload):
    """Payload that drains an in-memory text stream (``io.StringIO``)."""

    def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
        # Read eagerly: the whole buffer is encoded once by StringPayload.
        text = value.read()
        super().__init__(text, *args, **kwargs)
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
class IOBasePayload(Payload):
    # Payload streamed from a (blocking) file-like object.
    _value: io.IOBase

    def __init__(
        self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
    ) -> None:
        """Wrap an open file-like object as a payload.

        When no explicit filename was passed, one is guessed from the IO
        object, and a Content-Disposition header is added (unless one is
        already present or *disposition* is None).
        """
        if "filename" not in kwargs:
            kwargs["filename"] = guess_filename(value)

        super().__init__(value, *args, **kwargs)

        if self._filename is not None and disposition is not None:
            if hdrs.CONTENT_DISPOSITION not in self.headers:
                self.set_content_disposition(disposition, filename=self._filename)

    async def write(self, writer: AbstractStreamWriter) -> None:
        """Stream the file to *writer* in 64 KiB chunks.

        Blocking read()/close() calls run in the default executor so the
        event loop is not stalled; the file is always closed afterwards,
        even on error.
        """
        loop = asyncio.get_event_loop()
        try:
            chunk = await loop.run_in_executor(None, self._value.read, 2**16)
            while chunk:
                await writer.write(chunk)
                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
        finally:
            await loop.run_in_executor(None, self._value.close)

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        # Reads all remaining lines; note this consumes the stream.
        return "".join(r.decode(encoding, errors) for r in self._value.readlines())
|
| 345 |
+
|
| 346 |
+
|
| 347 |
+
class TextIOPayload(IOBasePayload):
    # Payload streamed from a text-mode file; chunks are encoded on write.
    _value: io.TextIOBase

    def __init__(
        self,
        value: TextIO,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        """Wrap a text-mode IO object as a payload.

        Derives whichever of *encoding* / *content_type* is missing from
        the other; defaults to UTF-8 text/plain when neither is given.
        """
        if encoding is None:
            if content_type is None:
                encoding = "utf-8"
                content_type = "text/plain; charset=utf-8"
            else:
                mimetype = parse_mimetype(content_type)
                encoding = mimetype.parameters.get("charset", "utf-8")
        else:
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding

        super().__init__(
            value,
            content_type=content_type,
            encoding=encoding,
            *args,
            **kwargs,
        )

    @property
    def size(self) -> Optional[int]:
        """Bytes remaining per fstat, or None for non-file-backed objects.

        NOTE(review): fstat size minus the text-layer tell() is an
        approximation for multi-byte encodings — the byte count need not
        match what write() will actually emit.
        """
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except OSError:
            return None

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        # The stream is already text; the arguments are ignored here.
        return self._value.read()

    async def write(self, writer: AbstractStreamWriter) -> None:
        """Stream the file, encoding each chunk before writing.

        Uses self._encoding when set, otherwise the platform default
        (str.encode() with no argument). Blocking reads and the final
        close run in the default executor.
        """
        loop = asyncio.get_event_loop()
        try:
            chunk = await loop.run_in_executor(None, self._value.read, 2**16)
            while chunk:
                data = (
                    chunk.encode(encoding=self._encoding)
                    if self._encoding
                    else chunk.encode()
                )
                await writer.write(data)
                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
        finally:
            await loop.run_in_executor(None, self._value.close)
|
| 402 |
+
|
| 403 |
+
|
| 404 |
+
class BytesIOPayload(IOBasePayload):
    """Payload streaming from an in-memory binary buffer."""

    _value: io.BytesIO

    @property
    def size(self) -> int:
        """Number of bytes between the current position and EOF."""
        here = self._value.tell()
        total = self._value.seek(0, os.SEEK_END)
        # Restore the read position so streaming starts where it left off.
        self._value.seek(here)
        return total - here

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Decode everything from the current position onwards."""
        data = self._value.read()
        return data.decode(encoding, errors)
|
| 416 |
+
|
| 417 |
+
|
| 418 |
+
class BufferedReaderPayload(IOBasePayload):
    # Payload for buffered binary readers (io.BufferedReader/BufferedRandom).
    _value: io.BufferedIOBase

    @property
    def size(self) -> Optional[int]:
        """Bytes remaining (fstat size minus current offset), or None."""
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except (OSError, AttributeError):
            # data.fileno() is not supported, e.g.
            # io.BufferedReader(io.BytesIO(b'data'))
            # For some file-like objects (e.g. tarfile), the fileno() attribute may
            # not exist at all, and will instead raise an AttributeError.
            return None

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        # Consumes the stream from the current position.
        return self._value.read().decode(encoding, errors)
|
| 434 |
+
|
| 435 |
+
|
| 436 |
+
class JsonPayload(BytesPayload):
    """Payload that serializes a Python object to JSON bytes."""

    def __init__(
        self,
        value: Any,
        encoding: str = "utf-8",
        content_type: str = "application/json",
        dumps: JSONEncoder = json.dumps,
        *args: Any,
        **kwargs: Any,
    ) -> None:
        # Serialize eagerly; the encoded document becomes the raw body.
        body = dumps(value).encode(encoding)
        super().__init__(
            body,
            encoding=encoding,
            content_type=content_type,
            *args,
            **kwargs,
        )
|
| 454 |
+
|
| 455 |
+
|
| 456 |
+
if TYPE_CHECKING:
    from typing import AsyncIterable, AsyncIterator

    # For static analysis, parametrize the iterator aliases with bytes.
    _AsyncIterator = AsyncIterator[bytes]
    _AsyncIterable = AsyncIterable[bytes]
else:
    from collections.abc import AsyncIterable, AsyncIterator

    # At runtime the plain ABCs suffice (and support isinstance checks).
    _AsyncIterator = AsyncIterator
    _AsyncIterable = AsyncIterable
|
| 466 |
+
|
| 467 |
+
|
| 468 |
+
class AsyncIterablePayload(Payload):
    """Payload fed by an arbitrary async iterable of bytes chunks."""

    _iter: Optional[_AsyncIterator] = None
    _value: _AsyncIterable

    def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
        if not isinstance(value, AsyncIterable):
            raise TypeError(
                "value argument must support "
                "collections.abc.AsyncIterable interface, "
                "got {!r}".format(type(value))
            )

        if "content_type" not in kwargs:
            kwargs["content_type"] = "application/octet-stream"

        super().__init__(value, *args, **kwargs)

        # Obtain the iterator once; it is cleared after being drained.
        self._iter = value.__aiter__()

    async def write(self, writer: AbstractStreamWriter) -> None:
        """Drain the async iterator into *writer*, chunk by chunk."""
        if self._iter:
            try:
                # iter is not None check prevents rare cases
                # when the case iterable is used twice
                while True:
                    chunk = await self._iter.__anext__()
                    await writer.write(chunk)
            except StopAsyncIteration:
                self._iter = None

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        # An async stream cannot be decoded synchronously.
        raise TypeError("Unable to decode.")
|
| 501 |
+
|
| 502 |
+
|
| 503 |
+
class StreamReaderPayload(AsyncIterablePayload):
    """Payload adapter that drains a ``StreamReader``."""

    def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
        # iter_any() yields whatever data is available, as it arrives.
        super().__init__(value.iter_any(), *args, **kwargs)
|
| 506 |
+
|
| 507 |
+
|
| 508 |
+
# Global registry mapping Python types to their Payload adapters.
# Order matters: more specific IO types are registered before io.IOBase.
PAYLOAD_REGISTRY = PayloadRegistry()
PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
PAYLOAD_REGISTRY.register(StringPayload, str)
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
# try_last gives more specialized async iterables (presumably
# BodyPartReaderPayload — the original comment says "multidict", which
# looks wrong; verify) a chance to override this catch-all default.
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/py.typed
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
Marker
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/pytest_plugin.py
ADDED
|
@@ -0,0 +1,436 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import contextlib
|
| 3 |
+
import inspect
|
| 4 |
+
import warnings
|
| 5 |
+
from typing import (
|
| 6 |
+
Any,
|
| 7 |
+
Awaitable,
|
| 8 |
+
Callable,
|
| 9 |
+
Dict,
|
| 10 |
+
Iterator,
|
| 11 |
+
Optional,
|
| 12 |
+
Protocol,
|
| 13 |
+
Type,
|
| 14 |
+
Union,
|
| 15 |
+
overload,
|
| 16 |
+
)
|
| 17 |
+
|
| 18 |
+
import pytest
|
| 19 |
+
|
| 20 |
+
from .test_utils import (
|
| 21 |
+
BaseTestServer,
|
| 22 |
+
RawTestServer,
|
| 23 |
+
TestClient,
|
| 24 |
+
TestServer,
|
| 25 |
+
loop_context,
|
| 26 |
+
setup_test_loop,
|
| 27 |
+
teardown_test_loop,
|
| 28 |
+
unused_port as _unused_port,
|
| 29 |
+
)
|
| 30 |
+
from .web import Application, BaseRequest, Request
|
| 31 |
+
from .web_protocol import _RequestHandler
|
| 32 |
+
|
| 33 |
+
try:
|
| 34 |
+
import uvloop
|
| 35 |
+
except ImportError: # pragma: no cover
|
| 36 |
+
uvloop = None # type: ignore[assignment]
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class AiohttpClient(Protocol):
    """Callable returned by the ``aiohttp_client`` fixture.

    Accepts either an Application (a TestServer is created around it) or
    an existing BaseTestServer; extra kwargs are forwarded to TestClient.
    """

    @overload
    async def __call__(
        self,
        __param: Application,
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[Request, Application]: ...
    @overload
    async def __call__(
        self,
        __param: BaseTestServer,
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[BaseRequest, None]: ...
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
class AiohttpServer(Protocol):
    """Callable returned by the ``aiohttp_server`` fixture."""

    def __call__(
        self, app: Application, *, port: Optional[int] = None, **kwargs: Any
    ) -> Awaitable[TestServer]: ...
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
class AiohttpRawServer(Protocol):
    """Callable returned by the ``aiohttp_raw_server`` fixture."""

    def __call__(
        self, handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any
    ) -> Awaitable[RawTestServer]: ...
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def pytest_addoption(parser):  # type: ignore[no-untyped-def]
    """Register the aiohttp-specific command line options with pytest."""
    option_specs = [
        (
            "--aiohttp-fast",
            dict(
                action="store_true",
                default=False,
                help="run tests faster by disabling extra checks",
            ),
        ),
        (
            "--aiohttp-loop",
            dict(
                action="store",
                default="pyloop",
                help="run tests with specific loop: pyloop, uvloop or all",
            ),
        ),
        (
            "--aiohttp-enable-loop-debug",
            dict(
                action="store_true",
                default=False,
                help="enable event loop debug mode",
            ),
        ),
    ]
    for name, opts in option_specs:
        parser.addoption(name, **opts)
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def pytest_fixture_setup(fixturedef):  # type: ignore[no-untyped-def]
    """Set up pytest fixture.

    Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
    """
    func = fixturedef.func

    if inspect.isasyncgenfunction(func):
        # async generator fixture
        is_async_gen = True
    elif asyncio.iscoroutinefunction(func):
        # regular async fixture
        is_async_gen = False
    else:
        # not an async fixture, nothing to do
        return

    # The wrapper needs the pytest `request` object; inject it into the
    # fixture's argnames if the fixture itself did not ask for it, and
    # remember to strip it again before calling the real fixture.
    strip_request = False
    if "request" not in fixturedef.argnames:
        fixturedef.argnames += ("request",)
        strip_request = True

    def wrapper(*args, **kwargs):  # type: ignore[no-untyped-def]
        # Synchronous shim that drives the async fixture on the 'loop'
        # fixture's event loop.
        request = kwargs["request"]
        if strip_request:
            del kwargs["request"]

        # if neither the fixture nor the test use the 'loop' fixture,
        # 'getfixturevalue' will fail because the test is not parameterized
        # (this can be removed someday if 'loop' is no longer parameterized)
        if "loop" not in request.fixturenames:
            raise Exception(
                "Asynchronous fixtures must depend on the 'loop' fixture or "
                "be used in tests depending from it."
            )

        _loop = request.getfixturevalue("loop")

        if is_async_gen:
            # for async generators, we need to advance the generator once,
            # then advance it again in a finalizer
            gen = func(*args, **kwargs)

            def finalizer():  # type: ignore[no-untyped-def]
                try:
                    return _loop.run_until_complete(gen.__anext__())
                except StopAsyncIteration:
                    pass

            request.addfinalizer(finalizer)
            return _loop.run_until_complete(gen.__anext__())
        else:
            return _loop.run_until_complete(func(*args, **kwargs))

    # Replace the fixture function with the synchronous wrapper.
    fixturedef.func = wrapper
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
@pytest.fixture
def fast(request):  # type: ignore[no-untyped-def]
    """Return the value of the --aiohttp-fast config option."""
    return request.config.getoption("--aiohttp-fast")
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
@pytest.fixture
def loop_debug(request):  # type: ignore[no-untyped-def]
    """Return the value of the --aiohttp-enable-loop-debug config option."""
    return request.config.getoption("--aiohttp-enable-loop-debug")
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
@contextlib.contextmanager
def _runtime_warning_context():  # type: ignore[no-untyped-def]
    """Context manager which checks for RuntimeWarnings.

    This exists specifically to
    avoid "coroutine 'X' was never awaited" warnings being missed.

    If RuntimeWarnings occur in the context a RuntimeError is raised.
    """
    with warnings.catch_warnings(record=True) as _warnings:
        yield
        # Collect only RuntimeWarnings emitted while the block ran.
        runtime_warnings = [
            "{w.filename}:{w.lineno}:{w.message}".format(w=w)
            for w in _warnings
            if w.category == RuntimeWarning
        ]
        if runtime_warnings:
            count = len(runtime_warnings)
            raise RuntimeError(
                "{} Runtime Warning{},\n{}".format(
                    count, "" if count == 1 else "s", "\n".join(runtime_warnings)
                )
            )
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
@contextlib.contextmanager
def _passthrough_loop_context(loop, fast=False):  # type: ignore[no-untyped-def]
    """Passthrough loop context.

    Sets up and tears down a loop unless one is passed in via the loop
    argument when it's passed straight through.
    """
    if loop:
        # loop already exists, pass it straight through
        # (the caller keeps ownership; no teardown here)
        yield loop
    else:
        # this shadows loop_context's standard behavior
        loop = setup_test_loop()
        yield loop
        teardown_test_loop(loop, fast=fast)
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
def pytest_pycollect_makeitem(collector, name, obj):  # type: ignore[no-untyped-def]
    """Fix pytest collecting for coroutines."""
    # Only intercept names pytest would collect that are coroutine
    # functions; everything else falls through to default collection.
    if not collector.funcnamefilter(name):
        return None
    if not asyncio.iscoroutinefunction(obj):
        return None
    return list(collector._genfunctions(name, obj))
|
| 205 |
+
|
| 206 |
+
|
| 207 |
+
def pytest_pyfunc_call(pyfuncitem):  # type: ignore[no-untyped-def]
    """Run coroutines in an event loop instead of a normal function call."""
    fast = pyfuncitem.config.getoption("--aiohttp-fast")
    if asyncio.iscoroutinefunction(pyfuncitem.function):
        # Prefer a loop the test already requested (proactor_loop wins
        # over the generic loop fixture); otherwise a fresh one is made.
        existing_loop = pyfuncitem.funcargs.get(
            "proactor_loop"
        ) or pyfuncitem.funcargs.get("loop", None)
        with _runtime_warning_context():
            with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
                # Pass only the fixtures the test function actually declares.
                testargs = {
                    arg: pyfuncitem.funcargs[arg]
                    for arg in pyfuncitem._fixtureinfo.argnames
                }
                _loop.run_until_complete(pyfuncitem.obj(**testargs))

        # True tells pytest the call was handled here.
        return True
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
def pytest_generate_tests(metafunc):  # type: ignore[no-untyped-def]
    """Parametrize the ``loop_factory`` fixture from --aiohttp-loop.

    The option is a comma-separated list of loop names; a trailing "?"
    marks a loop as optional (silently skipped when unavailable instead
    of raising). "all" expands to "pyloop,uvloop?".
    """
    if "loop_factory" not in metafunc.fixturenames:
        return

    loops = metafunc.config.option.aiohttp_loop
    avail_factories: Dict[str, Type[asyncio.AbstractEventLoopPolicy]]
    avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}

    if uvloop is not None:  # pragma: no cover
        avail_factories["uvloop"] = uvloop.EventLoopPolicy

    if loops == "all":
        loops = "pyloop,uvloop?"

    factories = {}  # type: ignore[var-annotated]
    for name in loops.split(","):
        required = not name.endswith("?")
        name = name.strip(" ?")
        if name not in avail_factories:  # pragma: no cover
            if required:
                # Bug fix: report the loops that can actually be selected
                # (avail_factories), not the partially built `factories`
                # dict, which may be empty at this point.
                raise ValueError(
                    "Unknown loop '%s', available loops: %s"
                    % (name, list(avail_factories.keys()))
                )
            else:
                continue
        factories[name] = avail_factories[name]
    metafunc.parametrize(
        "loop_factory", list(factories.values()), ids=list(factories.keys())
    )
|
| 255 |
+
|
| 256 |
+
|
| 257 |
+
@pytest.fixture
def loop(loop_factory, fast, loop_debug):  # type: ignore[no-untyped-def]
    """Return an instance of the event loop."""
    # loop_factory is an event loop *policy* class (parametrized by
    # pytest_generate_tests); install it before creating the loop.
    policy = loop_factory()
    asyncio.set_event_loop_policy(policy)
    with loop_context(fast=fast) as _loop:
        if loop_debug:
            _loop.set_debug(True)  # pragma: no cover
        asyncio.set_event_loop(_loop)
        yield _loop
|
| 267 |
+
|
| 268 |
+
|
| 269 |
+
@pytest.fixture
def proactor_loop():  # type: ignore[no-untyped-def]
    """Provide a Windows proactor event loop (Windows-only policy)."""
    policy = asyncio.WindowsProactorEventLoopPolicy()  # type: ignore[attr-defined]
    asyncio.set_event_loop_policy(policy)

    with loop_context(policy.new_event_loop) as _loop:
        asyncio.set_event_loop(_loop)
        yield _loop
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
@pytest.fixture
def unused_port(aiohttp_unused_port: Callable[[], int]) -> Callable[[], int]:
    """Deprecated alias for the ``aiohttp_unused_port`` fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_unused_port fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_unused_port
|
| 287 |
+
|
| 288 |
+
|
| 289 |
+
@pytest.fixture
def aiohttp_unused_port() -> Callable[[], int]:
    """Return a port that is unused on the current host."""
    # The fixture hands out the helper itself, not a single port number.
    return _unused_port
|
| 293 |
+
|
| 294 |
+
|
| 295 |
+
@pytest.fixture
def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]:
    """Factory to create a TestServer instance, given an app.

    aiohttp_server(app, **kwargs)
    """
    # All servers created through the factory are tracked so they can be
    # closed together when the fixture is torn down.
    servers = []

    async def go(
        app: Application, *, port: Optional[int] = None, **kwargs: Any
    ) -> TestServer:
        server = TestServer(app, port=port)
        await server.start_server(loop=loop, **kwargs)
        servers.append(server)
        return server

    yield go

    async def finalize() -> None:
        while servers:
            await servers.pop().close()

    loop.run_until_complete(finalize())
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
@pytest.fixture
def test_server(aiohttp_server):  # type: ignore[no-untyped-def]  # pragma: no cover
    """Deprecated alias for the ``aiohttp_server`` fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_server
|
| 328 |
+
|
| 329 |
+
|
| 330 |
+
@pytest.fixture
def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]:
    """Factory to create a RawTestServer instance, given a web handler.

    aiohttp_raw_server(handler, **kwargs)
    """
    # Track every server so teardown can close them all.
    servers = []

    async def go(
        handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any
    ) -> RawTestServer:
        server = RawTestServer(handler, port=port)
        await server.start_server(loop=loop, **kwargs)
        servers.append(server)
        return server

    yield go

    async def finalize() -> None:
        while servers:
            await servers.pop().close()

    loop.run_until_complete(finalize())
|
| 353 |
+
|
| 354 |
+
|
| 355 |
+
@pytest.fixture
def raw_test_server(  # type: ignore[no-untyped-def]  # pragma: no cover
    aiohttp_raw_server,
):
    """Deprecated alias for the ``aiohttp_raw_server`` fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_raw_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_raw_server
|
| 365 |
+
|
| 366 |
+
|
| 367 |
+
@pytest.fixture
def aiohttp_client(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpClient]:
    """Factory to create a TestClient instance.

    aiohttp_client(app, **kwargs)
    aiohttp_client(server, **kwargs)
    aiohttp_client(raw_server, **kwargs)
    """
    # Clients created through the factory are tracked for teardown.
    clients = []

    @overload
    async def go(
        __param: Application,
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[Request, Application]: ...

    @overload
    async def go(
        __param: BaseTestServer,
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[BaseRequest, None]: ...

    async def go(
        __param: Union[Application, BaseTestServer],
        *args: Any,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[Any, Any]:
        # Legacy path: a bare callable is treated as an app/server
        # factory and invoked with the loop plus the given arguments.
        if isinstance(__param, Callable) and not isinstance(  # type: ignore[arg-type]
            __param, (Application, BaseTestServer)
        ):
            __param = __param(loop, *args, **kwargs)
            kwargs = {}
        else:
            assert not args, "args should be empty"

        if isinstance(__param, Application):
            server_kwargs = server_kwargs or {}
            server = TestServer(__param, loop=loop, **server_kwargs)
            client = TestClient(server, loop=loop, **kwargs)
        elif isinstance(__param, BaseTestServer):
            client = TestClient(__param, loop=loop, **kwargs)
        else:
            raise ValueError("Unknown argument type: %r" % type(__param))

        await client.start_server()
        clients.append(client)
        return client

    yield go

    async def finalize() -> None:
        while clients:
            await clients.pop().close()

    loop.run_until_complete(finalize())
|
| 427 |
+
|
| 428 |
+
|
| 429 |
+
@pytest.fixture
def test_client(aiohttp_client):  # type: ignore[no-untyped-def]  # pragma: no cover
    """Deprecated alias for the ``aiohttp_client`` fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_client fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_client
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/resolver.py
ADDED
|
@@ -0,0 +1,187 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import socket
|
| 3 |
+
from typing import Any, Dict, List, Optional, Tuple, Type, Union
|
| 4 |
+
|
| 5 |
+
from .abc import AbstractResolver, ResolveResult
|
| 6 |
+
|
| 7 |
+
__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
try:
    import aiodns

    # getaddrinfo() only exists in newer aiodns releases; its presence
    # decides whether AsyncResolver can be used as the default resolver.
    aiodns_default = hasattr(aiodns.DNSResolver, "getaddrinfo")
except ImportError:  # pragma: no cover
    aiodns = None  # type: ignore[assignment]
    aiodns_default = False


# Flags for handling already-numeric host/service strings without lookups.
_NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV
_NAME_SOCKET_FLAGS = socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class ThreadedResolver(AbstractResolver):
    """Threaded resolver.

    Uses an Executor for synchronous getaddrinfo() calls.
    concurrent.futures.ThreadPoolExecutor is used by default.
    """

    def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        # Fall back to the running loop when none is supplied explicitly.
        self._loop = loop or asyncio.get_running_loop()

    async def resolve(
        self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
    ) -> List[ResolveResult]:
        """Resolve *host*/*port* to a list of ResolveResult entries."""
        infos = await self._loop.getaddrinfo(
            host,
            port,
            type=socket.SOCK_STREAM,
            family=family,
            flags=socket.AI_ADDRCONFIG,
        )

        hosts: List[ResolveResult] = []
        for family, _, proto, _, address in infos:
            if family == socket.AF_INET6:
                if len(address) < 3:
                    # IPv6 is not supported by Python build,
                    # or IPv6 is not enabled in the host
                    continue
                if address[3]:
                    # This is essential for link-local IPv6 addresses.
                    # LL IPv6 is a VERY rare case. Strictly speaking, we should use
                    # getnameinfo() unconditionally, but performance makes sense.
                    resolved_host, _port = await self._loop.getnameinfo(
                        address, _NAME_SOCKET_FLAGS
                    )
                    port = int(_port)
                else:
                    resolved_host, port = address[:2]
            else:  # IPv4
                assert family == socket.AF_INET
                resolved_host, port = address  # type: ignore[misc]
            hosts.append(
                ResolveResult(
                    hostname=host,
                    host=resolved_host,
                    port=port,
                    family=family,
                    proto=proto,
                    flags=_NUMERIC_SOCKET_FLAGS,
                )
            )

        return hosts

    async def close(self) -> None:
        # No persistent resources to release for the threaded resolver.
        pass
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class AsyncResolver(AbstractResolver):
    """Use the `aiodns` package to make asynchronous DNS lookups"""

    def __init__(
        self,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        *args: Any,
        **kwargs: Any,
    ) -> None:
        if aiodns is None:
            raise RuntimeError("Resolver requires aiodns library")

        self._resolver = aiodns.DNSResolver(*args, **kwargs)

        if not hasattr(self._resolver, "gethostbyname"):
            # aiodns 1.1 is not available, fallback to DNSResolver.query
            self.resolve = self._resolve_with_query  # type: ignore

    async def resolve(
        self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
    ) -> List[ResolveResult]:
        """Resolve *host* with aiodns' getaddrinfo().

        Returns a non-empty list of ResolveResult entries or raises OSError.
        """
        try:
            resp = await self._resolver.getaddrinfo(
                host,
                port=port,
                type=socket.SOCK_STREAM,
                family=family,
                flags=socket.AI_ADDRCONFIG,
            )
        except aiodns.error.DNSError as exc:
            # aiodns DNSError args are (errno, message); the message lives at
            # index 1, so the guard must require at least TWO args.  The old
            # check (`>= 1`) raised IndexError for a one-element args tuple.
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(None, msg) from exc
        hosts: List[ResolveResult] = []
        for node in resp.nodes:
            address: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] = node.addr
            family = node.family
            if family == socket.AF_INET6:
                if len(address) > 3 and address[3]:
                    # This is essential for link-local IPv6 addresses.
                    # LL IPv6 is a VERY rare case. Strictly speaking, we should use
                    # getnameinfo() unconditionally, but performance makes sense.
                    result = await self._resolver.getnameinfo(
                        (address[0].decode("ascii"), *address[1:]),
                        _NAME_SOCKET_FLAGS,
                    )
                    resolved_host = result.node
                else:
                    resolved_host = address[0].decode("ascii")
                    port = address[1]
            else:  # IPv4
                assert family == socket.AF_INET
                resolved_host = address[0].decode("ascii")
                port = address[1]
            hosts.append(
                ResolveResult(
                    hostname=host,
                    host=resolved_host,
                    port=port,
                    family=family,
                    proto=0,
                    flags=_NUMERIC_SOCKET_FLAGS,
                )
            )

        if not hosts:
            raise OSError(None, "DNS lookup failed")

        return hosts

    async def _resolve_with_query(
        self, host: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        """Fallback path for aiodns < 1.1 using raw A/AAAA queries."""
        if family == socket.AF_INET6:
            qtype = "AAAA"
        else:
            qtype = "A"

        try:
            resp = await self._resolver.query(host, qtype)
        except aiodns.error.DNSError as exc:
            # Same off-by-one fix as in resolve(): message is args[1].
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(None, msg) from exc

        hosts = []
        for rr in resp:
            hosts.append(
                {
                    "hostname": host,
                    "host": rr.host,
                    "port": port,
                    "family": family,
                    "proto": 0,
                    "flags": socket.AI_NUMERICHOST,
                }
            )

        if not hosts:
            raise OSError(None, "DNS lookup failed")

        return hosts

    async def close(self) -> None:
        """Cancel any outstanding aiodns queries."""
        self._resolver.cancel()
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
# Type alias for the class object chosen as the default resolver.
_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]]
# Prefer the c-ares based AsyncResolver when aiodns is importable and usable.
DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/streams.py
ADDED
|
@@ -0,0 +1,726 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import collections
|
| 3 |
+
import warnings
|
| 4 |
+
from typing import (
|
| 5 |
+
Awaitable,
|
| 6 |
+
Callable,
|
| 7 |
+
Deque,
|
| 8 |
+
Final,
|
| 9 |
+
Generic,
|
| 10 |
+
List,
|
| 11 |
+
Optional,
|
| 12 |
+
Tuple,
|
| 13 |
+
TypeVar,
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
from .base_protocol import BaseProtocol
|
| 17 |
+
from .helpers import (
|
| 18 |
+
_EXC_SENTINEL,
|
| 19 |
+
BaseTimerContext,
|
| 20 |
+
TimerNoop,
|
| 21 |
+
set_exception,
|
| 22 |
+
set_result,
|
| 23 |
+
)
|
| 24 |
+
from .log import internal_logger
|
| 25 |
+
|
| 26 |
+
# Public API of this module.
__all__ = (
    "EMPTY_PAYLOAD",
    "EofStream",
    "StreamReader",
    "DataQueue",
)

# Generic item type carried by DataQueue / AsyncStreamIterator.
_T = TypeVar("_T")
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class EofStream(Exception):
    """Raised by read operations (e.g. DataQueue.read) to signal end of stream."""
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class AsyncStreamIterator(Generic[_T]):
    """Async iterator adapter over a zero-argument read coroutine.

    Iteration ends when the read coroutine raises ``EofStream`` or
    produces an empty bytes object.
    """

    __slots__ = ("read_func",)

    def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None:
        # Awaited once per step to obtain the next item.
        self.read_func = read_func

    def __aiter__(self) -> "AsyncStreamIterator[_T]":
        return self

    async def __anext__(self) -> _T:
        try:
            item = await self.read_func()
        except EofStream:
            # Normalize the EOF exception to the empty-read sentinel.
            item = b""
        if item == b"":
            raise StopAsyncIteration
        return item
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class ChunkTupleAsyncStreamIterator:
    """Async iterator over ``StreamReader.readchunk()`` results.

    Yields ``(data, end_of_http_chunk)`` pairs until the stream emits the
    terminal ``(b"", False)`` pair.
    """

    __slots__ = ("_stream",)

    def __init__(self, stream: "StreamReader") -> None:
        self._stream = stream

    def __aiter__(self) -> "ChunkTupleAsyncStreamIterator":
        return self

    async def __anext__(self) -> Tuple[bytes, bool]:
        pair = await self._stream.readchunk()
        if pair == (b"", False):
            # (b"", False) is readchunk()'s end-of-stream marker.
            raise StopAsyncIteration
        return pair
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
class AsyncStreamReaderMixin:
    """Mixin supplying ``async for`` iteration helpers.

    The concrete class is expected to provide ``readline``, ``read``,
    ``readany`` and ``readchunk``.
    """

    __slots__ = ()

    def __aiter__(self) -> AsyncStreamIterator[bytes]:
        # Default iteration style is line by line.
        return AsyncStreamIterator(self.readline)  # type: ignore[attr-defined]

    def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
        """Iterate over the stream in pieces of at most *n* bytes."""

        def read_chunk() -> Awaitable[bytes]:
            return self.read(n)  # type: ignore[attr-defined]

        return AsyncStreamIterator(read_chunk)

    def iter_any(self) -> AsyncStreamIterator[bytes]:
        """Yield whatever data is available as soon as it is received."""
        return AsyncStreamIterator(self.readany)  # type: ignore[attr-defined]

    def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
        """Iterate over (bytes, end_of_http_chunk) pairs as received.

        Each pair has the shape returned by StreamReader.readchunk().
        """
        return ChunkTupleAsyncStreamIterator(self)  # type: ignore[arg-type]
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
class StreamReader(AsyncStreamReaderMixin):
    """An enhancement of asyncio.StreamReader.

    Supports asynchronous iteration by line, chunk or as available::

        async for line in reader:
            ...
        async for chunk in reader.iter_chunked(1024):
            ...
        async for slice in reader.iter_any():
            ...

    """

    __slots__ = (
        "_protocol",
        "_low_water",
        "_high_water",
        "_loop",
        "_size",
        "_cursor",
        "_http_chunk_splits",
        "_buffer",
        "_buffer_offset",
        "_eof",
        "_waiter",
        "_eof_waiter",
        "_exception",
        "_timer",
        "_eof_callbacks",
        "_eof_counter",
        "total_bytes",
    )

    def __init__(
        self,
        protocol: BaseProtocol,
        limit: int,
        *,
        timer: Optional[BaseTimerContext] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        # Transport protocol; pause_reading()/resume_reading() implement
        # flow control between _low_water and _high_water (= 2 * limit).
        self._protocol = protocol
        self._low_water = limit
        self._high_water = limit * 2
        if loop is None:
            loop = asyncio.get_event_loop()
        self._loop = loop
        # Number of buffered-but-unread bytes.
        self._size = 0
        # Logical read position: total bytes already handed to the reader.
        self._cursor = 0
        # When HTTP chunked encoding is active: logical offsets of chunk ends.
        self._http_chunk_splits: Optional[List[int]] = None
        self._buffer: Deque[bytes] = collections.deque()
        # Offset into self._buffer[0] of the first unread byte.
        self._buffer_offset = 0
        self._eof = False
        # Future a single in-flight read coroutine parks on (see _wait()).
        self._waiter: Optional[asyncio.Future[None]] = None
        self._eof_waiter: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        self._timer = TimerNoop() if timer is None else timer
        self._eof_callbacks: List[Callable[[], None]] = []
        self._eof_counter = 0
        # Total bytes ever fed, including already-consumed ones.
        self.total_bytes = 0

    def __repr__(self) -> str:
        info = [self.__class__.__name__]
        if self._size:
            info.append("%d bytes" % self._size)
        if self._eof:
            info.append("eof")
        if self._low_water != 2**16:  # default limit
            info.append("low=%d high=%d" % (self._low_water, self._high_water))
        if self._waiter:
            info.append("w=%r" % self._waiter)
        if self._exception:
            info.append("e=%r" % self._exception)
        return "<%s>" % " ".join(info)

    def get_read_buffer_limits(self) -> Tuple[int, int]:
        """Return the (low, high) flow-control water marks."""
        return (self._low_water, self._high_water)

    def exception(self) -> Optional[BaseException]:
        """Return the stored exception, if any, without raising it."""
        return self._exception

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        """Store *exc* and wake any pending read/eof waiters with it."""
        self._exception = exc
        # EOF callbacks are dropped: the stream ends in error, not EOF.
        self._eof_callbacks.clear()

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc, exc_cause)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_exception(waiter, exc, exc_cause)

    def on_eof(self, callback: Callable[[], None]) -> None:
        """Register *callback* to run at EOF (immediately if already at EOF)."""
        if self._eof:
            try:
                callback()
            except Exception:
                internal_logger.exception("Exception in eof callback")
        else:
            self._eof_callbacks.append(callback)

    def feed_eof(self) -> None:
        """Mark the stream as finished and wake all waiters/callbacks."""
        self._eof = True

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_result(waiter, None)

        if self._protocol._reading_paused:
            self._protocol.resume_reading()

        for cb in self._eof_callbacks:
            try:
                cb()
            except Exception:
                internal_logger.exception("Exception in eof callback")

        self._eof_callbacks.clear()

    def is_eof(self) -> bool:
        """Return True if 'feed_eof' was called."""
        return self._eof

    def at_eof(self) -> bool:
        """Return True if the buffer is empty and 'feed_eof' was called."""
        return self._eof and not self._buffer

    async def wait_eof(self) -> None:
        """Block until feed_eof() is called (returns at once if already EOF)."""
        if self._eof:
            return

        assert self._eof_waiter is None
        self._eof_waiter = self._loop.create_future()
        try:
            await self._eof_waiter
        finally:
            self._eof_waiter = None

    def unread_data(self, data: bytes) -> None:
        """rollback reading some data from stream, inserting it to buffer head."""
        warnings.warn(
            "unread_data() is deprecated "
            "and will be removed in future releases (#3260)",
            DeprecationWarning,
            stacklevel=2,
        )
        if not data:
            return

        # Normalize the head chunk so data can be prepended cleanly.
        if self._buffer_offset:
            self._buffer[0] = self._buffer[0][self._buffer_offset :]
            self._buffer_offset = 0
        self._size += len(data)
        # Logical position moves backwards by the unread amount.
        self._cursor -= len(data)
        self._buffer.appendleft(data)
        self._eof_counter = 0

    # TODO: size is ignored, remove the param later
    def feed_data(self, data: bytes, size: int = 0) -> None:
        """Append *data* to the buffer and wake a pending reader."""
        assert not self._eof, "feed_data after feed_eof"

        if not data:
            return

        data_len = len(data)
        self._size += data_len
        self._buffer.append(data)
        self.total_bytes += data_len

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        # Backpressure: stop the transport once the high water mark is hit.
        if self._size > self._high_water and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    def begin_http_chunk_receiving(self) -> None:
        """Switch on HTTP chunk boundary tracking (must precede any data)."""
        if self._http_chunk_splits is None:
            if self.total_bytes:
                raise RuntimeError(
                    "Called begin_http_chunk_receiving when some data was already fed"
                )
            self._http_chunk_splits = []

    def end_http_chunk_receiving(self) -> None:
        """Record the end of the current HTTP chunk at the current position."""
        if self._http_chunk_splits is None:
            # NOTE(review): the message names "end_chunk_receiving" /
            # "begin_chunk_receiving" while the methods are
            # end_http_chunk_receiving / begin_http_chunk_receiving.
            raise RuntimeError(
                "Called end_chunk_receiving without calling "
                "begin_chunk_receiving first"
            )

        # self._http_chunk_splits contains logical byte offsets from start of
        # the body transfer. Each offset is the offset of the end of a chunk.
        # "Logical" means bytes, accessible for a user.
        # If no chunks containing logical data were received, current position
        # is definitely zero.
        pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0

        if self.total_bytes == pos:
            # We should not add empty chunks here. So we check for that.
            # Note, when chunked + gzip is used, we can receive a chunk
            # of compressed data, but that data may not be enough for gzip FSM
            # to yield any uncompressed data. That's why current position may
            # not change after receiving a chunk.
            return

        self._http_chunk_splits.append(self.total_bytes)

        # wake up readchunk when end of http chunk received
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    async def _wait(self, func_name: str) -> None:
        """Park the (single) reader until feed_data()/feed_eof() wakes it."""
        if not self._protocol.connected:
            raise RuntimeError("Connection closed.")

        # StreamReader uses a future to link the protocol feed_data() method
        # to a read coroutine. Running two read coroutines at the same time
        # would have an unexpected behaviour. It would not possible to know
        # which coroutine would get the next data.
        if self._waiter is not None:
            raise RuntimeError(
                "%s() called while another coroutine is "
                "already waiting for incoming data" % func_name
            )

        waiter = self._waiter = self._loop.create_future()
        try:
            with self._timer:
                await waiter
        finally:
            self._waiter = None

    async def readline(self) -> bytes:
        """Read one line, where a "line" ends with b"\\n" (or at EOF)."""
        return await self.readuntil()

    async def readuntil(self, separator: bytes = b"\n") -> bytes:
        """Read up to and including *separator*, or to EOF if never found."""
        seplen = len(separator)
        if seplen == 0:
            raise ValueError("Separator should be at least one-byte string")

        if self._exception is not None:
            raise self._exception

        chunk = b""
        chunk_size = 0
        not_enough = True

        while not_enough:
            while self._buffer and not_enough:
                offset = self._buffer_offset
                # ichar is 1-based position just past the separator start,
                # or 0 when the separator is not in the head chunk.
                ichar = self._buffer[0].find(separator, offset) + 1
                # Read from current offset to found separator or to the end.
                data = self._read_nowait_chunk(
                    ichar - offset + seplen - 1 if ichar else -1
                )
                chunk += data
                chunk_size += len(data)
                if ichar:
                    not_enough = False

                if chunk_size > self._high_water:
                    raise ValueError("Chunk too big")

            if self._eof:
                break

            if not_enough:
                await self._wait("readuntil")

        return chunk

    async def read(self, n: int = -1) -> bytes:
        """Read up to *n* bytes (everything until EOF when n < 0)."""
        if self._exception is not None:
            raise self._exception

        # migration problem; with DataQueue you have to catch
        # EofStream exception, so common way is to run payload.read() inside
        # infinite loop. what can cause real infinite loop with StreamReader
        # lets keep this code one major release.
        if __debug__:
            if self._eof and not self._buffer:
                self._eof_counter = getattr(self, "_eof_counter", 0) + 1
                if self._eof_counter > 5:
                    internal_logger.warning(
                        "Multiple access to StreamReader in eof state, "
                        "might be infinite loop.",
                        stack_info=True,
                    )

        if not n:
            return b""

        if n < 0:
            # This used to just loop creating a new waiter hoping to
            # collect everything in self._buffer, but that would
            # deadlock if the subprocess sends more than self.limit
            # bytes. So just call self.readany() until EOF.
            blocks = []
            while True:
                block = await self.readany()
                if not block:
                    break
                blocks.append(block)
            return b"".join(blocks)

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("read")

        return self._read_nowait(n)

    async def readany(self) -> bytes:
        """Read whatever is buffered, waiting only if nothing is available."""
        if self._exception is not None:
            raise self._exception

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("readany")

        return self._read_nowait(-1)

    async def readchunk(self) -> Tuple[bytes, bool]:
        """Returns a tuple of (data, end_of_http_chunk).

        When chunked transfer
        encoding is used, end_of_http_chunk is a boolean indicating if the end
        of the data corresponds to the end of a HTTP chunk , otherwise it is
        always False.
        """
        while True:
            if self._exception is not None:
                raise self._exception

            while self._http_chunk_splits:
                pos = self._http_chunk_splits.pop(0)
                if pos == self._cursor:
                    return (b"", True)
                if pos > self._cursor:
                    return (self._read_nowait(pos - self._cursor), True)
                internal_logger.warning(
                    "Skipping HTTP chunk end due to data "
                    "consumption beyond chunk boundary"
                )

            if self._buffer:
                return (self._read_nowait_chunk(-1), False)
                # return (self._read_nowait(-1), False)

            if self._eof:
                # Special case for signifying EOF.
                # (b'', True) is not a final return value actually.
                return (b"", False)

            await self._wait("readchunk")

    async def readexactly(self, n: int) -> bytes:
        """Read exactly *n* bytes or raise IncompleteReadError at EOF."""
        if self._exception is not None:
            raise self._exception

        blocks: List[bytes] = []
        while n > 0:
            block = await self.read(n)
            if not block:
                partial = b"".join(blocks)
                raise asyncio.IncompleteReadError(partial, len(partial) + n)
            blocks.append(block)
            n -= len(block)

        return b"".join(blocks)

    def read_nowait(self, n: int = -1) -> bytes:
        """Synchronously read up to *n* buffered bytes (no waiting)."""
        # default was changed to be consistent with .read(-1)
        #
        # I believe the most users don't know about the method and
        # they are not affected.
        if self._exception is not None:
            raise self._exception

        if self._waiter and not self._waiter.done():
            raise RuntimeError(
                "Called while some coroutine is waiting for incoming data."
            )

        return self._read_nowait(n)

    def _read_nowait_chunk(self, n: int) -> bytes:
        """Consume up to *n* bytes from the head buffer chunk (-1 = all)."""
        first_buffer = self._buffer[0]
        offset = self._buffer_offset
        if n != -1 and len(first_buffer) - offset > n:
            # Partial consumption: advance the offset, keep the chunk queued.
            data = first_buffer[offset : offset + n]
            self._buffer_offset += n

        elif offset:
            self._buffer.popleft()
            data = first_buffer[offset:]
            self._buffer_offset = 0

        else:
            data = self._buffer.popleft()

        data_len = len(data)
        self._size -= data_len
        self._cursor += data_len

        chunk_splits = self._http_chunk_splits
        # Prevent memory leak: drop useless chunk splits
        while chunk_splits and chunk_splits[0] < self._cursor:
            chunk_splits.pop(0)

        # Flow control: restart the transport below the low water mark.
        if self._size < self._low_water and self._protocol._reading_paused:
            self._protocol.resume_reading()
        return data

    def _read_nowait(self, n: int) -> bytes:
        """Read not more than n bytes, or whole buffer if n == -1"""
        self._timer.assert_timeout()

        chunks = []
        while self._buffer:
            chunk = self._read_nowait_chunk(n)
            chunks.append(chunk)
            if n != -1:
                n -= len(chunk)
                if n == 0:
                    break

        return b"".join(chunks) if chunks else b""
|
| 550 |
+
|
| 551 |
+
|
| 552 |
+
class EmptyStreamReader(StreamReader):  # lgtm [py/missing-call-to-init]
    """Stream reader that is permanently empty and at EOF.

    StreamReader.__init__ is intentionally NOT called: no protocol, loop or
    buffer state is needed because every read returns b"" immediately.
    """

    __slots__ = ("_read_eof_chunk",)

    def __init__(self) -> None:
        # Whether readchunk() already produced its first (b"", False) pair.
        self._read_eof_chunk = False

    def __repr__(self) -> str:
        return "<%s>" % self.__class__.__name__

    def exception(self) -> Optional[BaseException]:
        # Never carries an exception.
        return None

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        # Exceptions are discarded: there is no pending reader to notify.
        pass

    def on_eof(self, callback: Callable[[], None]) -> None:
        # Already at EOF, so the callback runs immediately.
        try:
            callback()
        except Exception:
            internal_logger.exception("Exception in eof callback")

    def feed_eof(self) -> None:
        pass

    def is_eof(self) -> bool:
        return True

    def at_eof(self) -> bool:
        return True

    async def wait_eof(self) -> None:
        return

    def feed_data(self, data: bytes, n: int = 0) -> None:
        # Fed data is silently discarded.
        pass

    async def readline(self) -> bytes:
        return b""

    async def read(self, n: int = -1) -> bytes:
        return b""

    # TODO add async def readuntil

    async def readany(self) -> bytes:
        return b""

    async def readchunk(self) -> Tuple[bytes, bool]:
        # First call returns the terminal (b"", False) marker (stops
        # iter_chunks()); later calls return (b"", True).
        if not self._read_eof_chunk:
            self._read_eof_chunk = True
            return (b"", False)

        return (b"", True)

    async def readexactly(self, n: int) -> bytes:
        # Zero bytes available, so any positive request is incomplete.
        raise asyncio.IncompleteReadError(b"", n)

    def read_nowait(self, n: int = -1) -> bytes:
        return b""
|
| 616 |
+
|
| 617 |
+
|
| 618 |
+
# Shared singleton used wherever a response/request has no body.
EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader()
|
| 619 |
+
|
| 620 |
+
|
| 621 |
+
class DataQueue(Generic[_T]):
    """DataQueue is a general-purpose blocking queue with one reader."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._eof = False
        # Future the single read() coroutine parks on while the queue is empty.
        self._waiter: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        # FIFO of (item, size) pairs; size is used by flow-control subclasses.
        self._buffer: Deque[Tuple[_T, int]] = collections.deque()

    def __len__(self) -> int:
        return len(self._buffer)

    def is_eof(self) -> bool:
        """True once feed_eof() has been called."""
        return self._eof

    def at_eof(self) -> bool:
        """True when EOF was signalled and every buffered item was consumed."""
        return not self._buffer and self._eof

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        """Terminate the queue with *exc*, waking a parked reader."""
        self._eof = True
        self._exception = exc
        pending = self._waiter
        if pending is not None:
            self._waiter = None
            set_exception(pending, exc, exc_cause)

    def feed_data(self, data: _T, size: int = 0) -> None:
        """Enqueue one item and wake a parked reader, if any."""
        self._buffer.append((data, size))
        pending = self._waiter
        if pending is not None:
            self._waiter = None
            set_result(pending, None)

    def feed_eof(self) -> None:
        """Signal that no more items will arrive."""
        self._eof = True
        pending = self._waiter
        if pending is not None:
            self._waiter = None
            set_result(pending, None)

    async def read(self) -> _T:
        """Return the next item; raise EofStream when drained after EOF."""
        if not self._buffer and not self._eof:
            assert not self._waiter
            self._waiter = self._loop.create_future()
            try:
                await self._waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                # The wakeup never came; clear the slot for the next reader.
                self._waiter = None
                raise
        if self._buffer:
            item, _ = self._buffer.popleft()
            return item
        if self._exception is not None:
            raise self._exception
        raise EofStream

    def __aiter__(self) -> AsyncStreamIterator[_T]:
        return AsyncStreamIterator(self.read)
|
| 684 |
+
|
| 685 |
+
|
| 686 |
+
class FlowControlDataQueue(DataQueue[_T]):
    """FlowControlDataQueue resumes and pauses an underlying stream.

    It is a destination for parsed data.

    This class is deprecated and will be removed in version 4.0.
    """

    def __init__(
        self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
    ) -> None:
        super().__init__(loop=loop)
        # Total size of buffered items, per the size hints passed to feed_data.
        self._size = 0
        self._protocol = protocol
        # Reading is paused once buffered size exceeds twice the limit.
        self._limit = limit * 2

    def feed_data(self, data: _T, size: int = 0) -> None:
        """Enqueue an item and pause the transport above the size limit."""
        super().feed_data(data, size)
        self._size += size

        if self._size > self._limit and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    async def read(self) -> _T:
        """Dequeue an item, resuming the transport when back under the limit."""
        if not self._buffer and not self._eof:
            assert not self._waiter
            self._waiter = self._loop.create_future()
            try:
                await self._waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._waiter = None
                raise
        if self._buffer:
            item, item_size = self._buffer.popleft()
            self._size -= item_size
            if self._size < self._limit and self._protocol._reading_paused:
                self._protocol.resume_reading()
            return item
        if self._exception is not None:
            raise self._exception
        raise EofStream
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/tcp_helpers.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Helper methods to tune a TCP connection"""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import socket
|
| 5 |
+
from contextlib import suppress
|
| 6 |
+
from typing import Optional # noqa
|
| 7 |
+
|
| 8 |
+
__all__ = ("tcp_keepalive", "tcp_nodelay")
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
if hasattr(socket, "SO_KEEPALIVE"):

    def tcp_keepalive(transport: asyncio.Transport) -> None:
        """Enable TCP keep-alive probes on the transport's socket, if any."""
        sock = transport.get_extra_info("socket")
        if sock is None:
            return
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)

else:

    def tcp_keepalive(transport: asyncio.Transport) -> None:  # pragma: no cover
        """No-op on platforms without SO_KEEPALIVE."""
        pass
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None:
    """Set or clear TCP_NODELAY (Nagle's algorithm) on the transport socket."""
    sock = transport.get_extra_info("socket")

    # Nothing to do without an underlying socket, or for non-IP families
    # (e.g. UNIX domain sockets have no TCP layer).
    if sock is None or sock.family not in (socket.AF_INET, socket.AF_INET6):
        return

    # socket may be closed already, on windows OSError get raised
    with suppress(OSError):
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, bool(value))
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/test_utils.py
ADDED
|
@@ -0,0 +1,770 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utilities shared by tests."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import contextlib
|
| 5 |
+
import gc
|
| 6 |
+
import inspect
|
| 7 |
+
import ipaddress
|
| 8 |
+
import os
|
| 9 |
+
import socket
|
| 10 |
+
import sys
|
| 11 |
+
import warnings
|
| 12 |
+
from abc import ABC, abstractmethod
|
| 13 |
+
from types import TracebackType
|
| 14 |
+
from typing import (
|
| 15 |
+
TYPE_CHECKING,
|
| 16 |
+
Any,
|
| 17 |
+
Callable,
|
| 18 |
+
Generic,
|
| 19 |
+
Iterator,
|
| 20 |
+
List,
|
| 21 |
+
Optional,
|
| 22 |
+
Type,
|
| 23 |
+
TypeVar,
|
| 24 |
+
cast,
|
| 25 |
+
overload,
|
| 26 |
+
)
|
| 27 |
+
from unittest import IsolatedAsyncioTestCase, mock
|
| 28 |
+
|
| 29 |
+
from aiosignal import Signal
|
| 30 |
+
from multidict import CIMultiDict, CIMultiDictProxy
|
| 31 |
+
from yarl import URL
|
| 32 |
+
|
| 33 |
+
import aiohttp
|
| 34 |
+
from aiohttp.client import (
|
| 35 |
+
_RequestContextManager,
|
| 36 |
+
_RequestOptions,
|
| 37 |
+
_WSRequestContextManager,
|
| 38 |
+
)
|
| 39 |
+
|
| 40 |
+
from . import ClientSession, hdrs
|
| 41 |
+
from .abc import AbstractCookieJar
|
| 42 |
+
from .client_reqrep import ClientResponse
|
| 43 |
+
from .client_ws import ClientWebSocketResponse
|
| 44 |
+
from .helpers import sentinel
|
| 45 |
+
from .http import HttpVersion, RawRequestMessage
|
| 46 |
+
from .streams import EMPTY_PAYLOAD, StreamReader
|
| 47 |
+
from .typedefs import StrOrURL
|
| 48 |
+
from .web import (
|
| 49 |
+
Application,
|
| 50 |
+
AppRunner,
|
| 51 |
+
BaseRequest,
|
| 52 |
+
BaseRunner,
|
| 53 |
+
Request,
|
| 54 |
+
Server,
|
| 55 |
+
ServerRunner,
|
| 56 |
+
SockSite,
|
| 57 |
+
UrlMappingMatchInfo,
|
| 58 |
+
)
|
| 59 |
+
from .web_protocol import _RequestHandler
|
| 60 |
+
|
| 61 |
+
if TYPE_CHECKING:
|
| 62 |
+
from ssl import SSLContext
|
| 63 |
+
else:
|
| 64 |
+
SSLContext = None
|
| 65 |
+
|
| 66 |
+
if sys.version_info >= (3, 11) and TYPE_CHECKING:
|
| 67 |
+
from typing import Unpack
|
| 68 |
+
|
| 69 |
+
if sys.version_info >= (3, 11):
|
| 70 |
+
from typing import Self
|
| 71 |
+
else:
|
| 72 |
+
Self = Any
|
| 73 |
+
|
| 74 |
+
_ApplicationNone = TypeVar("_ApplicationNone", Application, None)
|
| 75 |
+
_Request = TypeVar("_Request", bound=BaseRequest)
|
| 76 |
+
|
| 77 |
+
REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def get_unused_port_socket(
    host: str, family: socket.AddressFamily = socket.AF_INET
) -> socket.socket:
    """Return a TCP socket bound to an ephemeral (OS-chosen) port on *host*."""
    # Port 0 asks the OS to pick a free port.
    return get_port_socket(host, 0, family)
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def get_port_socket(
    host: str, port: int, family: socket.AddressFamily
) -> socket.socket:
    """Create a TCP socket for *family* bound to ``(host, port)``."""
    sock = socket.socket(family, socket.SOCK_STREAM)
    if REUSE_ADDRESS:
        # Windows has different semantics for SO_REUSEADDR,
        # so don't set it. Ref:
        # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind((host, port))
    return sock
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def unused_port() -> int:
    """Return a port that is unused on the current host."""
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        # Binding to port 0 lets the OS pick a currently-free port.
        sock.bind(("127.0.0.1", 0))
        port: int = sock.getsockname()[1]
    return port
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
class BaseTestServer(ABC):
    """Base class for test servers: binds a socket and runs an aiohttp runner.

    Subclasses supply the runner via _make_runner(); this class owns the
    socket/site lifecycle and URL construction.
    """

    # Prevent pytest from collecting this class as a test case.
    __test__ = False

    def __init__(
        self,
        *,
        scheme: str = "",
        loop: Optional[asyncio.AbstractEventLoop] = None,
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        skip_url_asserts: bool = False,
        socket_factory: Callable[
            [str, int, socket.AddressFamily], socket.socket
        ] = get_port_socket,
        **kwargs: Any,
    ) -> None:
        self._loop = loop
        self.runner: Optional[BaseRunner] = None
        self._root: Optional[URL] = None  # base URL, set by start_server()
        self.host = host
        self.port = port  # None/0 means "pick an ephemeral port"
        self._closed = False
        self.scheme = scheme  # "" means "derive from ssl kwarg at start"
        self.skip_url_asserts = skip_url_asserts
        self.socket_factory = socket_factory

    async def start_server(
        self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
    ) -> None:
        """Bind a socket and start serving; idempotent once started."""
        if self.runner:
            return
        self._loop = loop
        self._ssl = kwargs.pop("ssl", None)
        self.runner = await self._make_runner(handler_cancellation=True, **kwargs)
        await self.runner.setup()
        if not self.port:
            self.port = 0
        absolute_host = self.host
        try:
            version = ipaddress.ip_address(self.host).version
        except ValueError:
            # Hostname rather than a literal IP; treat like IPv4 for URL form.
            version = 4
        if version == 6:
            # IPv6 literals must be bracketed inside a URL authority.
            absolute_host = f"[{self.host}]"
        family = socket.AF_INET6 if version == 6 else socket.AF_INET
        _sock = self.socket_factory(self.host, self.port, family)
        # Re-read the address: the OS may have assigned an ephemeral port.
        self.host, self.port = _sock.getsockname()[:2]
        site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
        await site.start()
        server = site._server
        assert server is not None
        sockets = server.sockets  # type: ignore[attr-defined]
        assert sockets is not None
        self.port = sockets[0].getsockname()[1]
        if not self.scheme:
            self.scheme = "https" if self._ssl else "http"
        self._root = URL(f"{self.scheme}://{absolute_host}:{self.port}")

    @abstractmethod  # pragma: no cover
    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        """Create the runner to serve with; implemented by subclasses."""
        pass

    def make_url(self, path: StrOrURL) -> URL:
        """Join *path* onto the server's root URL."""
        assert self._root is not None
        url = URL(path)
        if not self.skip_url_asserts:
            assert not url.absolute
            return self._root.join(url)
        else:
            # Caller opted out of URL sanity checks: naive string concat.
            return URL(str(self._root) + str(path))

    @property
    def started(self) -> bool:
        # True once start_server() has created the runner.
        return self.runner is not None

    @property
    def closed(self) -> bool:
        return self._closed

    @property
    def handler(self) -> Server:
        # for backward compatibility
        # web.Server instance
        runner = self.runner
        assert runner is not None
        assert runner.server is not None
        return runner.server

    async def close(self) -> None:
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run when the object is garbage collected, and on
        exit when used as a context manager.

        """
        if self.started and not self.closed:
            assert self.runner is not None
            await self.runner.cleanup()
            self._root = None
            self.port = None
            self._closed = True

    def __enter__(self) -> None:
        # Synchronous 'with' is a usage error; the server is async-only.
        raise TypeError("Use async with instead")

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in pair with __enter__ but never executed
        pass  # pragma: no cover

    async def __aenter__(self) -> "BaseTestServer":
        await self.start_server(loop=self._loop)
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        await self.close()
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
class TestServer(BaseTestServer):
    """Test server that serves a web.Application via an AppRunner."""

    def __init__(
        self,
        app: Application,
        *,
        scheme: str = "",
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        **kwargs: Any,
    ):
        # Store the app before BaseTestServer.__init__ so it's available
        # once start_server() calls _make_runner().
        self.app = app
        super().__init__(scheme=scheme, host=host, port=port, **kwargs)

    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        """Wrap the application in an AppRunner for BaseTestServer."""
        return AppRunner(self.app, **kwargs)
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
class RawTestServer(BaseTestServer):
    """Test server that serves a bare request handler (no Application)."""

    def __init__(
        self,
        handler: _RequestHandler,
        *,
        scheme: str = "",
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        **kwargs: Any,
    ) -> None:
        self._handler = handler
        super().__init__(scheme=scheme, host=host, port=port, **kwargs)

    async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner:
        # NOTE: `debug` is forwarded to both the low-level Server and the
        # ServerRunner, matching historical behavior.
        srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs)
        return ServerRunner(srv, debug=debug, **kwargs)
|
| 271 |
+
|
| 272 |
+
|
| 273 |
+
class TestClient(Generic[_Request, _ApplicationNone]):
    """
    A test client implementation.

    To write functional tests for aiohttp based servers.

    """

    # Prevent pytest from collecting this class as a test case.
    __test__ = False

    @overload
    def __init__(
        self: "TestClient[Request, Application]",
        server: TestServer,
        *,
        cookie_jar: Optional[AbstractCookieJar] = None,
        **kwargs: Any,
    ) -> None: ...
    @overload
    def __init__(
        self: "TestClient[_Request, None]",
        server: BaseTestServer,
        *,
        cookie_jar: Optional[AbstractCookieJar] = None,
        **kwargs: Any,
    ) -> None: ...
    def __init__(
        self,
        server: BaseTestServer,
        *,
        cookie_jar: Optional[AbstractCookieJar] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        **kwargs: Any,
    ) -> None:
        if not isinstance(server, BaseTestServer):
            raise TypeError(
                "server must be TestServer instance, found type: %r" % type(server)
            )
        self._server = server
        self._loop = loop
        if cookie_jar is None:
            # unsafe=True so cookies work against bare-IP test hosts.
            cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
        self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
        # Tests should see connection errors, not silent retries.
        self._session._retry_connection = False
        self._closed = False
        # Track responses/websockets so close() can release them all.
        self._responses: List[ClientResponse] = []
        self._websockets: List[ClientWebSocketResponse] = []

    async def start_server(self) -> None:
        """Start the wrapped test server (idempotent)."""
        await self._server.start_server(loop=self._loop)

    @property
    def host(self) -> str:
        return self._server.host

    @property
    def port(self) -> Optional[int]:
        return self._server.port

    @property
    def server(self) -> BaseTestServer:
        return self._server

    @property
    def app(self) -> _ApplicationNone:
        # None when wrapping a RawTestServer (which has no .app).
        return getattr(self._server, "app", None)  # type: ignore[return-value]

    @property
    def session(self) -> ClientSession:
        """An internal aiohttp.ClientSession.

        Unlike the methods on the TestClient, client session requests
        do not automatically include the host in the url queried, and
        will require an absolute path to the resource.

        """
        return self._session

    def make_url(self, path: StrOrURL) -> URL:
        """Join *path* onto the test server's root URL."""
        return self._server.make_url(path)

    async def _request(
        self, method: str, path: StrOrURL, **kwargs: Any
    ) -> ClientResponse:
        resp = await self._session.request(method, self.make_url(path), **kwargs)
        # save it to close later
        self._responses.append(resp)
        return resp

    # Typed signatures (Unpack of _RequestOptions) are only visible to the
    # type checker on 3.11+; at runtime the `else` branch is always used.
    if sys.version_info >= (3, 11) and TYPE_CHECKING:

        def request(
            self, method: str, path: StrOrURL, **kwargs: Unpack[_RequestOptions]
        ) -> _RequestContextManager: ...

        def get(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def options(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def head(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def post(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def put(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def patch(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def delete(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

    else:

        def request(
            self, method: str, path: StrOrURL, **kwargs: Any
        ) -> _RequestContextManager:
            """Routes a request to tested http server.

            The interface is identical to aiohttp.ClientSession.request,
            except the loop kwarg is overridden by the instance used by the
            test server.

            """
            return _RequestContextManager(self._request(method, path, **kwargs))

        def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP GET request."""
            return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))

        def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP POST request."""
            return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))

        def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP OPTIONS request."""
            return _RequestContextManager(
                self._request(hdrs.METH_OPTIONS, path, **kwargs)
            )

        def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP HEAD request."""
            return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))

        def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP PUT request."""
            return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))

        def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP PATCH request."""
            return _RequestContextManager(
                self._request(hdrs.METH_PATCH, path, **kwargs)
            )

        def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP DELETE request."""
            return _RequestContextManager(
                self._request(hdrs.METH_DELETE, path, **kwargs)
            )

    def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager:
        """Initiate websocket connection.

        The api corresponds to aiohttp.ClientSession.ws_connect.

        """
        return _WSRequestContextManager(self._ws_connect(path, **kwargs))

    async def _ws_connect(
        self, path: StrOrURL, **kwargs: Any
    ) -> ClientWebSocketResponse:
        ws = await self._session.ws_connect(self.make_url(path), **kwargs)
        # Track so close() can shut the websocket down.
        self._websockets.append(ws)
        return ws

    async def close(self) -> None:
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run on exit when used as a(n) (asynchronous)
        context manager.

        """
        if not self._closed:
            for resp in self._responses:
                resp.close()
            for ws in self._websockets:
                await ws.close()
            await self._session.close()
            await self._server.close()
            self._closed = True

    def __enter__(self) -> None:
        # Synchronous 'with' is a usage error; the client is async-only.
        raise TypeError("Use async with instead")

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in pair with __enter__ but never executed
        pass  # pragma: no cover

    async def __aenter__(self) -> Self:
        await self.start_server()
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        await self.close()
|
| 517 |
+
|
| 518 |
+
|
| 519 |
+
class AioHTTPTestCase(IsolatedAsyncioTestCase):
    """A base class to allow for unittest web applications using aiohttp.

    Provides the following:

    * self.client (aiohttp.test_utils.TestClient): an aiohttp test client.
    * self.loop (asyncio.BaseEventLoop): the event loop in which the
        application and server are running.
    * self.app (aiohttp.web.Application): the application returned by
        self.get_application()

    Note that the TestClient's methods are asynchronous: you have to
    execute function on the test client using asynchronous methods.
    """

    async def get_application(self) -> Application:
        """Get application.

        This method should be overridden
        to return the aiohttp.web.Application
        object to test.
        """
        # Falls back to the legacy synchronous hook for old subclasses.
        return self.get_app()

    def get_app(self) -> Application:
        """Obsolete method used to constructing web application.

        Use .get_application() coroutine instead.
        """
        raise RuntimeError("Did you forget to define get_application()?")

    async def asyncSetUp(self) -> None:
        # IsolatedAsyncioTestCase hook: capture the loop then delegate.
        self.loop = asyncio.get_running_loop()
        return await self.setUpAsync()

    async def setUpAsync(self) -> None:
        """Build app, server and client fixtures, then start serving."""
        self.app = await self.get_application()
        self.server = await self.get_server(self.app)
        self.client = await self.get_client(self.server)

        await self.client.start_server()

    async def asyncTearDown(self) -> None:
        return await self.tearDownAsync()

    async def tearDownAsync(self) -> None:
        # Closing the client also closes its wrapped server.
        await self.client.close()

    async def get_server(self, app: Application) -> TestServer:
        """Return a TestServer instance."""
        return TestServer(app, loop=self.loop)

    async def get_client(self, server: TestServer) -> TestClient[Request, Application]:
        """Return a TestClient instance."""
        return TestClient(server, loop=self.loop)
|
| 574 |
+
|
| 575 |
+
|
| 576 |
+
def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
    """Deprecated no-op decorator for AioHTTPTestCase test methods.

    Kept only for backwards compatibility; it warns and returns the
    function unchanged.
    """
    warnings.warn(
        "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+",
        DeprecationWarning,
        stacklevel=2,
    )
    return func
|
| 588 |
+
|
| 589 |
+
|
| 590 |
+
_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]
|
| 591 |
+
|
| 592 |
+
|
| 593 |
+
@contextlib.contextmanager
def loop_context(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
) -> Iterator[asyncio.AbstractEventLoop]:
    """A contextmanager that creates an event_loop, for test purposes.

    Handles the creation and cleanup of a test loop.
    """
    loop = setup_test_loop(loop_factory)
    yield loop
    # NOTE: the yield is not wrapped in try/finally, so teardown is
    # skipped when the with-body raises and the loop is left open.
    teardown_test_loop(loop, fast=fast)
|
| 604 |
+
|
| 605 |
+
|
| 606 |
+
def setup_test_loop(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
) -> asyncio.AbstractEventLoop:
    """Create a fresh event loop, install it as current, and return it.

    The caller should also call teardown_test_loop,
    once they are done with the loop.
    """
    new_loop = loop_factory()
    asyncio.set_event_loop(new_loop)
    return new_loop
|
| 617 |
+
|
| 618 |
+
|
| 619 |
+
def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
    """Tear down and close an event_loop created by setup_test_loop."""
    if not loop.is_closed():
        # Let the loop drain pending callbacks before closing it.
        loop.call_soon(loop.stop)
        loop.run_forever()
        loop.close()

    if not fast:
        # Force finalizers to run so leaked resources surface in tests.
        gc.collect()

    asyncio.set_event_loop(None)
|
| 631 |
+
|
| 632 |
+
|
| 633 |
+
def _create_app_mock() -> mock.MagicMock:
    """Return a MagicMock standing in for a web.Application.

    Supports dict-style item access backed by a private dict, and a
    frozen on_response_prepare signal, which is what response.prepare()
    touches during tests.
    """

    def get_dict(app: Any, key: str) -> Any:
        return app.__app_dict[key]

    def set_dict(app: Any, key: str, value: Any) -> None:
        app.__app_dict[key] = value

    app = mock.MagicMock(spec=Application)
    # Dunder lookups bypass instance attributes, so __getitem__/__setitem__
    # must be installed explicitly on the mock.
    app.__app_dict = {}
    app.__getitem__ = get_dict
    app.__setitem__ = set_dict

    app._debug = False
    app.on_response_prepare = Signal(app)
    app.on_response_prepare.freeze()
    return app
|
| 649 |
+
|
| 650 |
+
|
| 651 |
+
def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock:
|
| 652 |
+
transport = mock.Mock()
|
| 653 |
+
|
| 654 |
+
def get_extra_info(key: str) -> Optional[SSLContext]:
|
| 655 |
+
if key == "sslcontext":
|
| 656 |
+
return sslcontext
|
| 657 |
+
else:
|
| 658 |
+
return None
|
| 659 |
+
|
| 660 |
+
transport.get_extra_info.side_effect = get_extra_info
|
| 661 |
+
return transport
|
| 662 |
+
|
| 663 |
+
|
| 664 |
+
def make_mocked_request(
    method: str,
    path: str,
    headers: Any = None,
    *,
    match_info: Any = sentinel,
    version: HttpVersion = HttpVersion(1, 1),
    closing: bool = False,
    app: Any = None,
    writer: Any = sentinel,
    protocol: Any = sentinel,
    transport: Any = sentinel,
    payload: StreamReader = EMPTY_PAYLOAD,
    sslcontext: Optional[SSLContext] = None,
    client_max_size: int = 1024**2,
    loop: Any = ...,
) -> Request:
    """Creates mocked web.Request testing purposes.

    Useful in unit tests, when spinning full web server is overkill or
    specific conditions and errors are hard to trigger.

    Every keyword defaulting to ``sentinel`` is replaced with a suitable
    mock when not supplied; ``app``, ``protocol``, ``transport`` and
    ``writer`` may be passed explicitly to observe interactions.
    """
    task = mock.Mock()
    if loop is ...:
        # no loop passed, try to get the current one if
        # its is running as we need a real loop to create
        # executor jobs to be able to do testing
        # with a real executor
        try:
            loop = asyncio.get_running_loop()
        except RuntimeError:
            # No running loop: fall back to a mock whose create_future()
            # returns a plain (non-awaitable) placeholder.
            loop = mock.Mock()
            loop.create_future.return_value = ()

    if version < HttpVersion(1, 1):
        # HTTP/1.0 and earlier do not keep the connection alive by default.
        closing = True

    if headers:
        # Normalize to the immutable case-insensitive proxy Request expects,
        # and mirror it as the raw (bytes, bytes) header tuples.
        headers = CIMultiDictProxy(CIMultiDict(headers))
        raw_hdrs = tuple(
            (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
        )
    else:
        headers = CIMultiDictProxy(CIMultiDict())
        raw_hdrs = ()

    # Chunked transfer is derived from the headers, not passed separately.
    chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()

    message = RawRequestMessage(
        method,
        path,
        version,
        headers,
        raw_hdrs,
        closing,
        None,
        False,
        chunked,
        URL(path),
    )
    if app is None:
        app = _create_app_mock()

    if transport is sentinel:
        transport = _create_transport(sslcontext)

    if protocol is sentinel:
        protocol = mock.Mock()
        protocol.transport = transport

    if writer is sentinel:
        # All writer coroutines resolve to None so awaiting them is a no-op.
        writer = mock.Mock()
        writer.write_headers = make_mocked_coro(None)
        writer.write = make_mocked_coro(None)
        writer.write_eof = make_mocked_coro(None)
        writer.drain = make_mocked_coro(None)
        writer.transport = transport

    # Wire the (possibly caller-supplied) protocol to the final transport
    # and writer, even when protocol was passed in explicitly.
    protocol.transport = transport
    protocol.writer = writer

    req = Request(
        message, payload, protocol, writer, task, loop, client_max_size=client_max_size
    )

    # Attach match info so req.match_info and req.app resolve during tests.
    match_info = UrlMappingMatchInfo(
        {} if match_info is sentinel else match_info, mock.Mock()
    )
    match_info.add_app(app)
    req._match_info = match_info

    return req
|
| 756 |
+
|
| 757 |
+
|
| 758 |
+
def make_mocked_coro(
    return_value: Any = sentinel, raise_exception: Any = sentinel
) -> Any:
    """Create a mock wrapping a coroutine function.

    Awaiting the mock raises *raise_exception* when given; otherwise it
    yields *return_value* (awaiting it first if it is itself awaitable,
    in which case the call resolves to None).
    """

    async def coro_stub(*args: Any, **kwargs: Any) -> Any:
        if raise_exception is not sentinel:
            raise raise_exception
        if inspect.isawaitable(return_value):
            await return_value
            return None
        return return_value

    return mock.Mock(wraps=coro_stub)
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/typedefs.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
import os
|
| 3 |
+
from typing import (
|
| 4 |
+
TYPE_CHECKING,
|
| 5 |
+
Any,
|
| 6 |
+
Awaitable,
|
| 7 |
+
Callable,
|
| 8 |
+
Iterable,
|
| 9 |
+
Mapping,
|
| 10 |
+
Protocol,
|
| 11 |
+
Tuple,
|
| 12 |
+
Union,
|
| 13 |
+
)
|
| 14 |
+
|
| 15 |
+
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
|
| 16 |
+
from yarl import URL, Query as _Query
|
| 17 |
+
|
| 18 |
+
# Re-export yarl's query type under aiohttp's public name.
Query = _Query

# Default JSON (de)serializers used wherever aiohttp accepts JSON hooks.
DEFAULT_JSON_ENCODER = json.dumps
DEFAULT_JSON_DECODER = json.loads

if TYPE_CHECKING:
    # Under a type checker the multidict classes are generic; pin them to str.
    _CIMultiDict = CIMultiDict[str]
    _CIMultiDictProxy = CIMultiDictProxy[str]
    _MultiDict = MultiDict[str]
    _MultiDictProxy = MultiDictProxy[str]
    from http.cookies import BaseCookie, Morsel

    from .web import Request, StreamResponse
else:
    # At runtime the plain (unsubscripted) classes are used.
    _CIMultiDict = CIMultiDict
    _CIMultiDictProxy = CIMultiDictProxy
    _MultiDict = MultiDict
    _MultiDictProxy = MultiDictProxy

Byteish = Union[bytes, bytearray, memoryview]
JSONEncoder = Callable[[Any], str]
JSONDecoder = Callable[[str], Any]
# Any mapping or (name, value) iterable accepted where HTTP headers go.
LooseHeaders = Union[
    Mapping[str, str],
    Mapping[istr, str],
    _CIMultiDict,
    _CIMultiDictProxy,
    Iterable[Tuple[Union[str, istr], str]],
]
# Headers as received off the wire: undecoded (name, value) byte pairs.
RawHeaders = Tuple[Tuple[bytes, bytes], ...]
StrOrURL = Union[str, URL]

# The various shapes callers may pass where cookies are accepted.
LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
LooseCookiesIterables = Iterable[
    Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
]
LooseCookies = Union[
    LooseCookiesMappings,
    LooseCookiesIterables,
    "BaseCookie[str]",
]

# Signature of a plain aiohttp request handler.
Handler = Callable[["Request"], Awaitable["StreamResponse"]]
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class Middleware(Protocol):
    """Structural type for aiohttp middleware callables.

    A middleware receives the request and the next handler in the chain,
    and returns an awaitable resolving to the response.
    """

    def __call__(
        self, request: "Request", handler: Handler
    ) -> Awaitable["StreamResponse"]: ...
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
# Anything accepted where a filesystem path is expected.
PathLike = Union[str, "os.PathLike[str]"]
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/web.py
ADDED
|
@@ -0,0 +1,605 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
import socket
|
| 5 |
+
import sys
|
| 6 |
+
import warnings
|
| 7 |
+
from argparse import ArgumentParser
|
| 8 |
+
from collections.abc import Iterable
|
| 9 |
+
from contextlib import suppress
|
| 10 |
+
from importlib import import_module
|
| 11 |
+
from typing import (
|
| 12 |
+
TYPE_CHECKING,
|
| 13 |
+
Any,
|
| 14 |
+
Awaitable,
|
| 15 |
+
Callable,
|
| 16 |
+
Iterable as TypingIterable,
|
| 17 |
+
List,
|
| 18 |
+
Optional,
|
| 19 |
+
Set,
|
| 20 |
+
Type,
|
| 21 |
+
Union,
|
| 22 |
+
cast,
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
from .abc import AbstractAccessLogger
|
| 26 |
+
from .helpers import AppKey as AppKey
|
| 27 |
+
from .log import access_logger
|
| 28 |
+
from .typedefs import PathLike
|
| 29 |
+
from .web_app import Application as Application, CleanupError as CleanupError
|
| 30 |
+
from .web_exceptions import (
|
| 31 |
+
HTTPAccepted as HTTPAccepted,
|
| 32 |
+
HTTPBadGateway as HTTPBadGateway,
|
| 33 |
+
HTTPBadRequest as HTTPBadRequest,
|
| 34 |
+
HTTPClientError as HTTPClientError,
|
| 35 |
+
HTTPConflict as HTTPConflict,
|
| 36 |
+
HTTPCreated as HTTPCreated,
|
| 37 |
+
HTTPError as HTTPError,
|
| 38 |
+
HTTPException as HTTPException,
|
| 39 |
+
HTTPExpectationFailed as HTTPExpectationFailed,
|
| 40 |
+
HTTPFailedDependency as HTTPFailedDependency,
|
| 41 |
+
HTTPForbidden as HTTPForbidden,
|
| 42 |
+
HTTPFound as HTTPFound,
|
| 43 |
+
HTTPGatewayTimeout as HTTPGatewayTimeout,
|
| 44 |
+
HTTPGone as HTTPGone,
|
| 45 |
+
HTTPInsufficientStorage as HTTPInsufficientStorage,
|
| 46 |
+
HTTPInternalServerError as HTTPInternalServerError,
|
| 47 |
+
HTTPLengthRequired as HTTPLengthRequired,
|
| 48 |
+
HTTPMethodNotAllowed as HTTPMethodNotAllowed,
|
| 49 |
+
HTTPMisdirectedRequest as HTTPMisdirectedRequest,
|
| 50 |
+
HTTPMove as HTTPMove,
|
| 51 |
+
HTTPMovedPermanently as HTTPMovedPermanently,
|
| 52 |
+
HTTPMultipleChoices as HTTPMultipleChoices,
|
| 53 |
+
HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
|
| 54 |
+
HTTPNoContent as HTTPNoContent,
|
| 55 |
+
HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
|
| 56 |
+
HTTPNotAcceptable as HTTPNotAcceptable,
|
| 57 |
+
HTTPNotExtended as HTTPNotExtended,
|
| 58 |
+
HTTPNotFound as HTTPNotFound,
|
| 59 |
+
HTTPNotImplemented as HTTPNotImplemented,
|
| 60 |
+
HTTPNotModified as HTTPNotModified,
|
| 61 |
+
HTTPOk as HTTPOk,
|
| 62 |
+
HTTPPartialContent as HTTPPartialContent,
|
| 63 |
+
HTTPPaymentRequired as HTTPPaymentRequired,
|
| 64 |
+
HTTPPermanentRedirect as HTTPPermanentRedirect,
|
| 65 |
+
HTTPPreconditionFailed as HTTPPreconditionFailed,
|
| 66 |
+
HTTPPreconditionRequired as HTTPPreconditionRequired,
|
| 67 |
+
HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
|
| 68 |
+
HTTPRedirection as HTTPRedirection,
|
| 69 |
+
HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
|
| 70 |
+
HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
|
| 71 |
+
HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
|
| 72 |
+
HTTPRequestTimeout as HTTPRequestTimeout,
|
| 73 |
+
HTTPRequestURITooLong as HTTPRequestURITooLong,
|
| 74 |
+
HTTPResetContent as HTTPResetContent,
|
| 75 |
+
HTTPSeeOther as HTTPSeeOther,
|
| 76 |
+
HTTPServerError as HTTPServerError,
|
| 77 |
+
HTTPServiceUnavailable as HTTPServiceUnavailable,
|
| 78 |
+
HTTPSuccessful as HTTPSuccessful,
|
| 79 |
+
HTTPTemporaryRedirect as HTTPTemporaryRedirect,
|
| 80 |
+
HTTPTooManyRequests as HTTPTooManyRequests,
|
| 81 |
+
HTTPUnauthorized as HTTPUnauthorized,
|
| 82 |
+
HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
|
| 83 |
+
HTTPUnprocessableEntity as HTTPUnprocessableEntity,
|
| 84 |
+
HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
|
| 85 |
+
HTTPUpgradeRequired as HTTPUpgradeRequired,
|
| 86 |
+
HTTPUseProxy as HTTPUseProxy,
|
| 87 |
+
HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
|
| 88 |
+
HTTPVersionNotSupported as HTTPVersionNotSupported,
|
| 89 |
+
NotAppKeyWarning as NotAppKeyWarning,
|
| 90 |
+
)
|
| 91 |
+
from .web_fileresponse import FileResponse as FileResponse
|
| 92 |
+
from .web_log import AccessLogger
|
| 93 |
+
from .web_middlewares import (
|
| 94 |
+
middleware as middleware,
|
| 95 |
+
normalize_path_middleware as normalize_path_middleware,
|
| 96 |
+
)
|
| 97 |
+
from .web_protocol import (
|
| 98 |
+
PayloadAccessError as PayloadAccessError,
|
| 99 |
+
RequestHandler as RequestHandler,
|
| 100 |
+
RequestPayloadError as RequestPayloadError,
|
| 101 |
+
)
|
| 102 |
+
from .web_request import (
|
| 103 |
+
BaseRequest as BaseRequest,
|
| 104 |
+
FileField as FileField,
|
| 105 |
+
Request as Request,
|
| 106 |
+
)
|
| 107 |
+
from .web_response import (
|
| 108 |
+
ContentCoding as ContentCoding,
|
| 109 |
+
Response as Response,
|
| 110 |
+
StreamResponse as StreamResponse,
|
| 111 |
+
json_response as json_response,
|
| 112 |
+
)
|
| 113 |
+
from .web_routedef import (
|
| 114 |
+
AbstractRouteDef as AbstractRouteDef,
|
| 115 |
+
RouteDef as RouteDef,
|
| 116 |
+
RouteTableDef as RouteTableDef,
|
| 117 |
+
StaticDef as StaticDef,
|
| 118 |
+
delete as delete,
|
| 119 |
+
get as get,
|
| 120 |
+
head as head,
|
| 121 |
+
options as options,
|
| 122 |
+
patch as patch,
|
| 123 |
+
post as post,
|
| 124 |
+
put as put,
|
| 125 |
+
route as route,
|
| 126 |
+
static as static,
|
| 127 |
+
view as view,
|
| 128 |
+
)
|
| 129 |
+
from .web_runner import (
|
| 130 |
+
AppRunner as AppRunner,
|
| 131 |
+
BaseRunner as BaseRunner,
|
| 132 |
+
BaseSite as BaseSite,
|
| 133 |
+
GracefulExit as GracefulExit,
|
| 134 |
+
NamedPipeSite as NamedPipeSite,
|
| 135 |
+
ServerRunner as ServerRunner,
|
| 136 |
+
SockSite as SockSite,
|
| 137 |
+
TCPSite as TCPSite,
|
| 138 |
+
UnixSite as UnixSite,
|
| 139 |
+
)
|
| 140 |
+
from .web_server import Server as Server
|
| 141 |
+
from .web_urldispatcher import (
|
| 142 |
+
AbstractResource as AbstractResource,
|
| 143 |
+
AbstractRoute as AbstractRoute,
|
| 144 |
+
DynamicResource as DynamicResource,
|
| 145 |
+
PlainResource as PlainResource,
|
| 146 |
+
PrefixedSubAppResource as PrefixedSubAppResource,
|
| 147 |
+
Resource as Resource,
|
| 148 |
+
ResourceRoute as ResourceRoute,
|
| 149 |
+
StaticResource as StaticResource,
|
| 150 |
+
UrlDispatcher as UrlDispatcher,
|
| 151 |
+
UrlMappingMatchInfo as UrlMappingMatchInfo,
|
| 152 |
+
View as View,
|
| 153 |
+
)
|
| 154 |
+
from .web_ws import (
|
| 155 |
+
WebSocketReady as WebSocketReady,
|
| 156 |
+
WebSocketResponse as WebSocketResponse,
|
| 157 |
+
WSMsgType as WSMsgType,
|
| 158 |
+
)
|
| 159 |
+
|
| 160 |
+
__all__ = (
|
| 161 |
+
# web_app
|
| 162 |
+
"AppKey",
|
| 163 |
+
"Application",
|
| 164 |
+
"CleanupError",
|
| 165 |
+
# web_exceptions
|
| 166 |
+
"NotAppKeyWarning",
|
| 167 |
+
"HTTPAccepted",
|
| 168 |
+
"HTTPBadGateway",
|
| 169 |
+
"HTTPBadRequest",
|
| 170 |
+
"HTTPClientError",
|
| 171 |
+
"HTTPConflict",
|
| 172 |
+
"HTTPCreated",
|
| 173 |
+
"HTTPError",
|
| 174 |
+
"HTTPException",
|
| 175 |
+
"HTTPExpectationFailed",
|
| 176 |
+
"HTTPFailedDependency",
|
| 177 |
+
"HTTPForbidden",
|
| 178 |
+
"HTTPFound",
|
| 179 |
+
"HTTPGatewayTimeout",
|
| 180 |
+
"HTTPGone",
|
| 181 |
+
"HTTPInsufficientStorage",
|
| 182 |
+
"HTTPInternalServerError",
|
| 183 |
+
"HTTPLengthRequired",
|
| 184 |
+
"HTTPMethodNotAllowed",
|
| 185 |
+
"HTTPMisdirectedRequest",
|
| 186 |
+
"HTTPMove",
|
| 187 |
+
"HTTPMovedPermanently",
|
| 188 |
+
"HTTPMultipleChoices",
|
| 189 |
+
"HTTPNetworkAuthenticationRequired",
|
| 190 |
+
"HTTPNoContent",
|
| 191 |
+
"HTTPNonAuthoritativeInformation",
|
| 192 |
+
"HTTPNotAcceptable",
|
| 193 |
+
"HTTPNotExtended",
|
| 194 |
+
"HTTPNotFound",
|
| 195 |
+
"HTTPNotImplemented",
|
| 196 |
+
"HTTPNotModified",
|
| 197 |
+
"HTTPOk",
|
| 198 |
+
"HTTPPartialContent",
|
| 199 |
+
"HTTPPaymentRequired",
|
| 200 |
+
"HTTPPermanentRedirect",
|
| 201 |
+
"HTTPPreconditionFailed",
|
| 202 |
+
"HTTPPreconditionRequired",
|
| 203 |
+
"HTTPProxyAuthenticationRequired",
|
| 204 |
+
"HTTPRedirection",
|
| 205 |
+
"HTTPRequestEntityTooLarge",
|
| 206 |
+
"HTTPRequestHeaderFieldsTooLarge",
|
| 207 |
+
"HTTPRequestRangeNotSatisfiable",
|
| 208 |
+
"HTTPRequestTimeout",
|
| 209 |
+
"HTTPRequestURITooLong",
|
| 210 |
+
"HTTPResetContent",
|
| 211 |
+
"HTTPSeeOther",
|
| 212 |
+
"HTTPServerError",
|
| 213 |
+
"HTTPServiceUnavailable",
|
| 214 |
+
"HTTPSuccessful",
|
| 215 |
+
"HTTPTemporaryRedirect",
|
| 216 |
+
"HTTPTooManyRequests",
|
| 217 |
+
"HTTPUnauthorized",
|
| 218 |
+
"HTTPUnavailableForLegalReasons",
|
| 219 |
+
"HTTPUnprocessableEntity",
|
| 220 |
+
"HTTPUnsupportedMediaType",
|
| 221 |
+
"HTTPUpgradeRequired",
|
| 222 |
+
"HTTPUseProxy",
|
| 223 |
+
"HTTPVariantAlsoNegotiates",
|
| 224 |
+
"HTTPVersionNotSupported",
|
| 225 |
+
# web_fileresponse
|
| 226 |
+
"FileResponse",
|
| 227 |
+
# web_middlewares
|
| 228 |
+
"middleware",
|
| 229 |
+
"normalize_path_middleware",
|
| 230 |
+
# web_protocol
|
| 231 |
+
"PayloadAccessError",
|
| 232 |
+
"RequestHandler",
|
| 233 |
+
"RequestPayloadError",
|
| 234 |
+
# web_request
|
| 235 |
+
"BaseRequest",
|
| 236 |
+
"FileField",
|
| 237 |
+
"Request",
|
| 238 |
+
# web_response
|
| 239 |
+
"ContentCoding",
|
| 240 |
+
"Response",
|
| 241 |
+
"StreamResponse",
|
| 242 |
+
"json_response",
|
| 243 |
+
# web_routedef
|
| 244 |
+
"AbstractRouteDef",
|
| 245 |
+
"RouteDef",
|
| 246 |
+
"RouteTableDef",
|
| 247 |
+
"StaticDef",
|
| 248 |
+
"delete",
|
| 249 |
+
"get",
|
| 250 |
+
"head",
|
| 251 |
+
"options",
|
| 252 |
+
"patch",
|
| 253 |
+
"post",
|
| 254 |
+
"put",
|
| 255 |
+
"route",
|
| 256 |
+
"static",
|
| 257 |
+
"view",
|
| 258 |
+
# web_runner
|
| 259 |
+
"AppRunner",
|
| 260 |
+
"BaseRunner",
|
| 261 |
+
"BaseSite",
|
| 262 |
+
"GracefulExit",
|
| 263 |
+
"ServerRunner",
|
| 264 |
+
"SockSite",
|
| 265 |
+
"TCPSite",
|
| 266 |
+
"UnixSite",
|
| 267 |
+
"NamedPipeSite",
|
| 268 |
+
# web_server
|
| 269 |
+
"Server",
|
| 270 |
+
# web_urldispatcher
|
| 271 |
+
"AbstractResource",
|
| 272 |
+
"AbstractRoute",
|
| 273 |
+
"DynamicResource",
|
| 274 |
+
"PlainResource",
|
| 275 |
+
"PrefixedSubAppResource",
|
| 276 |
+
"Resource",
|
| 277 |
+
"ResourceRoute",
|
| 278 |
+
"StaticResource",
|
| 279 |
+
"UrlDispatcher",
|
| 280 |
+
"UrlMappingMatchInfo",
|
| 281 |
+
"View",
|
| 282 |
+
# web_ws
|
| 283 |
+
"WebSocketReady",
|
| 284 |
+
"WebSocketResponse",
|
| 285 |
+
"WSMsgType",
|
| 286 |
+
# web
|
| 287 |
+
"run_app",
|
| 288 |
+
)
|
| 289 |
+
|
| 290 |
+
|
| 291 |
+
if TYPE_CHECKING:
    from ssl import SSLContext
else:
    try:
        from ssl import SSLContext
    except ImportError:  # pragma: no cover
        # ssl can be absent in trimmed-down builds; a plain object keeps
        # the annotations and isinstance checks below working.
        SSLContext = object  # type: ignore[misc,assignment]

# Only display warning when using -Wdefault, -We, -X dev or similar.
warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True)

# Iterable of hostnames accepted by run_app()'s ``host`` parameter.
HostSequence = TypingIterable[str]
|
| 303 |
+
|
| 304 |
+
|
| 305 |
+
async def _run_app(
    app: Union[Application, Awaitable[Application]],
    *,
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Union[PathLike, TypingIterable[PathLike], None] = None,
    sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Optional[Callable[..., None]] = print,
    backlog: int = 128,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    access_log: Optional[logging.Logger] = access_logger,
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
    handler_cancellation: bool = False,
) -> None:
    """Set up an AppRunner, start one site per requested endpoint, then
    sleep forever; the runner is always cleaned up on exit.

    ``host``/``path``/``sock`` each accept a single value or an iterable
    of values; a TCP site on ``port`` is created when nothing else is
    requested. ``print`` may be None to suppress the startup banner.
    """
    # An internal function to actually do all dirty job for application running
    if asyncio.iscoroutine(app):
        app = await app

    app = cast(Application, app)

    runner = AppRunner(
        app,
        handle_signals=handle_signals,
        access_log_class=access_log_class,
        access_log_format=access_log_format,
        access_log=access_log,
        keepalive_timeout=keepalive_timeout,
        shutdown_timeout=shutdown_timeout,
        handler_cancellation=handler_cancellation,
    )

    await runner.setup()

    sites: List[BaseSite] = []

    try:
        if host is not None:
            # A single host string (or bytes-like) vs. an iterable of hosts.
            if isinstance(host, (str, bytes, bytearray, memoryview)):
                sites.append(
                    TCPSite(
                        runner,
                        host,
                        port,
                        ssl_context=ssl_context,
                        backlog=backlog,
                        reuse_address=reuse_address,
                        reuse_port=reuse_port,
                    )
                )
            else:
                for h in host:
                    sites.append(
                        TCPSite(
                            runner,
                            h,
                            port,
                            ssl_context=ssl_context,
                            backlog=backlog,
                            reuse_address=reuse_address,
                            reuse_port=reuse_port,
                        )
                    )
        # Precedence: (path is None and sock is None) or port is not None.
        # I.e. default to a TCP site when nothing else was requested, or
        # when an explicit port was given alongside path/sock.
        elif path is None and sock is None or port is not None:
            sites.append(
                TCPSite(
                    runner,
                    port=port,
                    ssl_context=ssl_context,
                    backlog=backlog,
                    reuse_address=reuse_address,
                    reuse_port=reuse_port,
                )
            )

        if path is not None:
            # A single filesystem path vs. an iterable of paths.
            if isinstance(path, (str, os.PathLike)):
                sites.append(
                    UnixSite(
                        runner,
                        path,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for p in path:
                    sites.append(
                        UnixSite(
                            runner,
                            p,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )

        if sock is not None:
            # A single pre-bound socket vs. an iterable of sockets.
            if not isinstance(sock, Iterable):
                sites.append(
                    SockSite(
                        runner,
                        sock,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for s in sock:
                    sites.append(
                        SockSite(
                            runner,
                            s,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )
        for site in sites:
            await site.start()

        if print:  # pragma: no branch
            names = sorted(str(s.name) for s in runner.sites)
            print(
                "======== Running on {} ========\n"
                "(Press CTRL+C to quit)".format(", ".join(names))
            )

        # sleep forever by 1 hour intervals,
        while True:
            await asyncio.sleep(3600)
    finally:
        await runner.cleanup()
|
| 441 |
+
|
| 442 |
+
|
| 443 |
+
def _cancel_tasks(
|
| 444 |
+
to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop
|
| 445 |
+
) -> None:
|
| 446 |
+
if not to_cancel:
|
| 447 |
+
return
|
| 448 |
+
|
| 449 |
+
for task in to_cancel:
|
| 450 |
+
task.cancel()
|
| 451 |
+
|
| 452 |
+
loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))
|
| 453 |
+
|
| 454 |
+
for task in to_cancel:
|
| 455 |
+
if task.cancelled():
|
| 456 |
+
continue
|
| 457 |
+
if task.exception() is not None:
|
| 458 |
+
loop.call_exception_handler(
|
| 459 |
+
{
|
| 460 |
+
"message": "unhandled exception during asyncio.run() shutdown",
|
| 461 |
+
"exception": task.exception(),
|
| 462 |
+
"task": task,
|
| 463 |
+
}
|
| 464 |
+
)
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
def run_app(
    app: Union[Application, Awaitable[Application]],
    *,
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Union[PathLike, TypingIterable[PathLike], None] = None,
    sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Optional[Callable[..., None]] = print,
    backlog: int = 128,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    access_log: Optional[logging.Logger] = access_logger,
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
    handler_cancellation: bool = False,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
    """Run an app locally"""
    # NOTE: a caller-supplied loop is also closed in the finally block
    # below, same as a freshly created one.
    if loop is None:
        loop = asyncio.new_event_loop()

    # Configure if and only if in debugging mode and using the default logger
    if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
        if access_log.level == logging.NOTSET:
            access_log.setLevel(logging.DEBUG)
        if not access_log.hasHandlers():
            access_log.addHandler(logging.StreamHandler())

    # All serving work happens inside this single task so that cancelling
    # it triggers _run_app's finally-cleanup.
    main_task = loop.create_task(
        _run_app(
            app,
            host=host,
            port=port,
            path=path,
            sock=sock,
            shutdown_timeout=shutdown_timeout,
            keepalive_timeout=keepalive_timeout,
            ssl_context=ssl_context,
            print=print,
            backlog=backlog,
            access_log_class=access_log_class,
            access_log_format=access_log_format,
            access_log=access_log,
            handle_signals=handle_signals,
            reuse_address=reuse_address,
            reuse_port=reuse_port,
            handler_cancellation=handler_cancellation,
        )
    )

    try:
        asyncio.set_event_loop(loop)
        loop.run_until_complete(main_task)
    except (GracefulExit, KeyboardInterrupt):  # pragma: no cover
        # Normal shutdown paths (SIGINT or an explicit GracefulExit).
        pass
    finally:
        try:
            # Cancel the serving task and wait for its cleanup to finish.
            main_task.cancel()
            with suppress(asyncio.CancelledError):
                loop.run_until_complete(main_task)
        finally:
            # Mirror asyncio.run(): cancel stragglers, flush async
            # generators, then close the loop.
            _cancel_tasks(asyncio.all_tasks(loop), loop)
            loop.run_until_complete(loop.shutdown_asyncgens())
            loop.close()
|
| 535 |
+
|
| 536 |
+
|
| 537 |
+
def main(argv: List[str]) -> None:
    """CLI entry point: import an ``module:function`` app factory and serve it.

    The factory is called with the unparsed extra argv and must return an
    Application. Exits via argparse's error/exit helpers on bad input.
    """
    arg_parser = ArgumentParser(
        description="aiohttp.web Application server", prog="aiohttp.web"
    )
    arg_parser.add_argument(
        "entry_func",
        help=(
            "Callable returning the `aiohttp.web.Application` instance to "
            "run. Should be specified in the 'module:function' syntax."
        ),
        metavar="entry-func",
    )
    arg_parser.add_argument(
        "-H",
        "--hostname",
        help="TCP/IP hostname to serve on (default: localhost)",
        default=None,
    )
    arg_parser.add_argument(
        "-P",
        "--port",
        help="TCP/IP port to serve on (default: %(default)r)",
        type=int,
        default=8080,
    )
    arg_parser.add_argument(
        "-U",
        "--path",
        help="Unix file system path to serve on. Can be combined with hostname "
        "to serve on both Unix and TCP.",
    )
    # Unknown arguments are forwarded to the entry function, not rejected.
    args, extra_argv = arg_parser.parse_known_args(argv)

    # Import logic
    mod_str, _, func_str = args.entry_func.partition(":")
    if not func_str or not mod_str:
        arg_parser.error("'entry-func' not in 'module:function' syntax")
    if mod_str.startswith("."):
        arg_parser.error("relative module names not supported")
    try:
        module = import_module(mod_str)
    except ImportError as ex:
        # arg_parser.error() raises SystemExit, so flow never continues here.
        arg_parser.error(f"unable to import {mod_str}: {ex}")
    try:
        func = getattr(module, func_str)
    except AttributeError:
        arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}")

    # Compatibility logic
    if args.path is not None and not hasattr(socket, "AF_UNIX"):
        arg_parser.error(
            "file system paths not supported by your operating environment"
        )

    logging.basicConfig(level=logging.DEBUG)

    # Serve only on the Unix socket when a path is given without a hostname.
    if args.path and args.hostname is None:
        host = port = None
    else:
        host = args.hostname or "localhost"
        port = args.port

    app = func(extra_argv)
    run_app(app, host=host, port=port, path=args.path)
    arg_parser.exit(message="Stopped\n")
|
| 602 |
+
|
| 603 |
+
|
| 604 |
+
# Allow invocation as a script / via ``python -m``.
if __name__ == "__main__":  # pragma: no branch
    main(sys.argv[1:])  # pragma: no cover
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/web_fileresponse.py
ADDED
|
@@ -0,0 +1,418 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import io
|
| 3 |
+
import os
|
| 4 |
+
import pathlib
|
| 5 |
+
import sys
|
| 6 |
+
from contextlib import suppress
|
| 7 |
+
from enum import Enum, auto
|
| 8 |
+
from mimetypes import MimeTypes
|
| 9 |
+
from stat import S_ISREG
|
| 10 |
+
from types import MappingProxyType
|
| 11 |
+
from typing import ( # noqa
|
| 12 |
+
IO,
|
| 13 |
+
TYPE_CHECKING,
|
| 14 |
+
Any,
|
| 15 |
+
Awaitable,
|
| 16 |
+
Callable,
|
| 17 |
+
Final,
|
| 18 |
+
Iterator,
|
| 19 |
+
List,
|
| 20 |
+
Optional,
|
| 21 |
+
Set,
|
| 22 |
+
Tuple,
|
| 23 |
+
Union,
|
| 24 |
+
cast,
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
from . import hdrs
|
| 28 |
+
from .abc import AbstractStreamWriter
|
| 29 |
+
from .helpers import ETAG_ANY, ETag, must_be_empty_body
|
| 30 |
+
from .typedefs import LooseHeaders, PathLike
|
| 31 |
+
from .web_exceptions import (
|
| 32 |
+
HTTPForbidden,
|
| 33 |
+
HTTPNotFound,
|
| 34 |
+
HTTPNotModified,
|
| 35 |
+
HTTPPartialContent,
|
| 36 |
+
HTTPPreconditionFailed,
|
| 37 |
+
HTTPRequestRangeNotSatisfiable,
|
| 38 |
+
)
|
| 39 |
+
from .web_response import StreamResponse
|
| 40 |
+
|
| 41 |
+
__all__ = ("FileResponse",)
|
| 42 |
+
|
| 43 |
+
if TYPE_CHECKING:
|
| 44 |
+
from .web_request import BaseRequest
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE"))
|
| 51 |
+
|
| 52 |
+
CONTENT_TYPES: Final[MimeTypes] = MimeTypes()
|
| 53 |
+
|
| 54 |
+
# File extension to IANA encodings map that will be checked in the order defined.
|
| 55 |
+
ENCODING_EXTENSIONS = MappingProxyType(
|
| 56 |
+
{ext: CONTENT_TYPES.encodings_map[ext] for ext in (".br", ".gz")}
|
| 57 |
+
)
|
| 58 |
+
|
| 59 |
+
FALLBACK_CONTENT_TYPE = "application/octet-stream"
|
| 60 |
+
|
| 61 |
+
# Provide additional MIME type/extension pairs to be recognized.
|
| 62 |
+
# https://en.wikipedia.org/wiki/List_of_archive_formats#Compression_only
|
| 63 |
+
ADDITIONAL_CONTENT_TYPES = MappingProxyType(
|
| 64 |
+
{
|
| 65 |
+
"application/gzip": ".gz",
|
| 66 |
+
"application/x-brotli": ".br",
|
| 67 |
+
"application/x-bzip2": ".bz2",
|
| 68 |
+
"application/x-compress": ".Z",
|
| 69 |
+
"application/x-xz": ".xz",
|
| 70 |
+
}
|
| 71 |
+
)
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class _FileResponseResult(Enum):
|
| 75 |
+
"""The result of the file response."""
|
| 76 |
+
|
| 77 |
+
SEND_FILE = auto() # Ie a regular file to send
|
| 78 |
+
NOT_ACCEPTABLE = auto() # Ie a socket, or non-regular file
|
| 79 |
+
PRE_CONDITION_FAILED = auto() # Ie If-Match or If-None-Match failed
|
| 80 |
+
NOT_MODIFIED = auto() # 304 Not Modified
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
# Add custom pairs and clear the encodings map so guess_type ignores them.
|
| 84 |
+
CONTENT_TYPES.encodings_map.clear()
|
| 85 |
+
for content_type, extension in ADDITIONAL_CONTENT_TYPES.items():
|
| 86 |
+
CONTENT_TYPES.add_type(content_type, extension) # type: ignore[attr-defined]
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
_CLOSE_FUTURES: Set[asyncio.Future[None]] = set()
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
class FileResponse(StreamResponse):
|
| 93 |
+
"""A response object can be used to send files."""
|
| 94 |
+
|
| 95 |
+
def __init__(
|
| 96 |
+
self,
|
| 97 |
+
path: PathLike,
|
| 98 |
+
chunk_size: int = 256 * 1024,
|
| 99 |
+
status: int = 200,
|
| 100 |
+
reason: Optional[str] = None,
|
| 101 |
+
headers: Optional[LooseHeaders] = None,
|
| 102 |
+
) -> None:
|
| 103 |
+
super().__init__(status=status, reason=reason, headers=headers)
|
| 104 |
+
|
| 105 |
+
self._path = pathlib.Path(path)
|
| 106 |
+
self._chunk_size = chunk_size
|
| 107 |
+
|
| 108 |
+
def _seek_and_read(self, fobj: IO[Any], offset: int, chunk_size: int) -> bytes:
|
| 109 |
+
fobj.seek(offset)
|
| 110 |
+
return fobj.read(chunk_size) # type: ignore[no-any-return]
|
| 111 |
+
|
| 112 |
+
async def _sendfile_fallback(
|
| 113 |
+
self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int
|
| 114 |
+
) -> AbstractStreamWriter:
|
| 115 |
+
# To keep memory usage low,fobj is transferred in chunks
|
| 116 |
+
# controlled by the constructor's chunk_size argument.
|
| 117 |
+
|
| 118 |
+
chunk_size = self._chunk_size
|
| 119 |
+
loop = asyncio.get_event_loop()
|
| 120 |
+
chunk = await loop.run_in_executor(
|
| 121 |
+
None, self._seek_and_read, fobj, offset, chunk_size
|
| 122 |
+
)
|
| 123 |
+
while chunk:
|
| 124 |
+
await writer.write(chunk)
|
| 125 |
+
count = count - chunk_size
|
| 126 |
+
if count <= 0:
|
| 127 |
+
break
|
| 128 |
+
chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count))
|
| 129 |
+
|
| 130 |
+
await writer.drain()
|
| 131 |
+
return writer
|
| 132 |
+
|
| 133 |
+
async def _sendfile(
|
| 134 |
+
self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int
|
| 135 |
+
) -> AbstractStreamWriter:
|
| 136 |
+
writer = await super().prepare(request)
|
| 137 |
+
assert writer is not None
|
| 138 |
+
|
| 139 |
+
if NOSENDFILE or self.compression:
|
| 140 |
+
return await self._sendfile_fallback(writer, fobj, offset, count)
|
| 141 |
+
|
| 142 |
+
loop = request._loop
|
| 143 |
+
transport = request.transport
|
| 144 |
+
assert transport is not None
|
| 145 |
+
|
| 146 |
+
try:
|
| 147 |
+
await loop.sendfile(transport, fobj, offset, count)
|
| 148 |
+
except NotImplementedError:
|
| 149 |
+
return await self._sendfile_fallback(writer, fobj, offset, count)
|
| 150 |
+
|
| 151 |
+
await super().write_eof()
|
| 152 |
+
return writer
|
| 153 |
+
|
| 154 |
+
@staticmethod
|
| 155 |
+
def _etag_match(etag_value: str, etags: Tuple[ETag, ...], *, weak: bool) -> bool:
|
| 156 |
+
if len(etags) == 1 and etags[0].value == ETAG_ANY:
|
| 157 |
+
return True
|
| 158 |
+
return any(
|
| 159 |
+
etag.value == etag_value for etag in etags if weak or not etag.is_weak
|
| 160 |
+
)
|
| 161 |
+
|
| 162 |
+
async def _not_modified(
|
| 163 |
+
self, request: "BaseRequest", etag_value: str, last_modified: float
|
| 164 |
+
) -> Optional[AbstractStreamWriter]:
|
| 165 |
+
self.set_status(HTTPNotModified.status_code)
|
| 166 |
+
self._length_check = False
|
| 167 |
+
self.etag = etag_value # type: ignore[assignment]
|
| 168 |
+
self.last_modified = last_modified # type: ignore[assignment]
|
| 169 |
+
# Delete any Content-Length headers provided by user. HTTP 304
|
| 170 |
+
# should always have empty response body
|
| 171 |
+
return await super().prepare(request)
|
| 172 |
+
|
| 173 |
+
async def _precondition_failed(
|
| 174 |
+
self, request: "BaseRequest"
|
| 175 |
+
) -> Optional[AbstractStreamWriter]:
|
| 176 |
+
self.set_status(HTTPPreconditionFailed.status_code)
|
| 177 |
+
self.content_length = 0
|
| 178 |
+
return await super().prepare(request)
|
| 179 |
+
|
| 180 |
+
def _make_response(
|
| 181 |
+
self, request: "BaseRequest", accept_encoding: str
|
| 182 |
+
) -> Tuple[
|
| 183 |
+
_FileResponseResult, Optional[io.BufferedReader], os.stat_result, Optional[str]
|
| 184 |
+
]:
|
| 185 |
+
"""Return the response result, io object, stat result, and encoding.
|
| 186 |
+
|
| 187 |
+
If an uncompressed file is returned, the encoding is set to
|
| 188 |
+
:py:data:`None`.
|
| 189 |
+
|
| 190 |
+
This method should be called from a thread executor
|
| 191 |
+
since it calls os.stat which may block.
|
| 192 |
+
"""
|
| 193 |
+
file_path, st, file_encoding = self._get_file_path_stat_encoding(
|
| 194 |
+
accept_encoding
|
| 195 |
+
)
|
| 196 |
+
if not file_path:
|
| 197 |
+
return _FileResponseResult.NOT_ACCEPTABLE, None, st, None
|
| 198 |
+
|
| 199 |
+
etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
|
| 200 |
+
|
| 201 |
+
# https://www.rfc-editor.org/rfc/rfc9110#section-13.1.1-2
|
| 202 |
+
if (ifmatch := request.if_match) is not None and not self._etag_match(
|
| 203 |
+
etag_value, ifmatch, weak=False
|
| 204 |
+
):
|
| 205 |
+
return _FileResponseResult.PRE_CONDITION_FAILED, None, st, file_encoding
|
| 206 |
+
|
| 207 |
+
if (
|
| 208 |
+
(unmodsince := request.if_unmodified_since) is not None
|
| 209 |
+
and ifmatch is None
|
| 210 |
+
and st.st_mtime > unmodsince.timestamp()
|
| 211 |
+
):
|
| 212 |
+
return _FileResponseResult.PRE_CONDITION_FAILED, None, st, file_encoding
|
| 213 |
+
|
| 214 |
+
# https://www.rfc-editor.org/rfc/rfc9110#section-13.1.2-2
|
| 215 |
+
if (ifnonematch := request.if_none_match) is not None and self._etag_match(
|
| 216 |
+
etag_value, ifnonematch, weak=True
|
| 217 |
+
):
|
| 218 |
+
return _FileResponseResult.NOT_MODIFIED, None, st, file_encoding
|
| 219 |
+
|
| 220 |
+
if (
|
| 221 |
+
(modsince := request.if_modified_since) is not None
|
| 222 |
+
and ifnonematch is None
|
| 223 |
+
and st.st_mtime <= modsince.timestamp()
|
| 224 |
+
):
|
| 225 |
+
return _FileResponseResult.NOT_MODIFIED, None, st, file_encoding
|
| 226 |
+
|
| 227 |
+
fobj = file_path.open("rb")
|
| 228 |
+
with suppress(OSError):
|
| 229 |
+
# fstat() may not be available on all platforms
|
| 230 |
+
# Once we open the file, we want the fstat() to ensure
|
| 231 |
+
# the file has not changed between the first stat()
|
| 232 |
+
# and the open().
|
| 233 |
+
st = os.stat(fobj.fileno())
|
| 234 |
+
return _FileResponseResult.SEND_FILE, fobj, st, file_encoding
|
| 235 |
+
|
| 236 |
+
def _get_file_path_stat_encoding(
|
| 237 |
+
self, accept_encoding: str
|
| 238 |
+
) -> Tuple[Optional[pathlib.Path], os.stat_result, Optional[str]]:
|
| 239 |
+
file_path = self._path
|
| 240 |
+
for file_extension, file_encoding in ENCODING_EXTENSIONS.items():
|
| 241 |
+
if file_encoding not in accept_encoding:
|
| 242 |
+
continue
|
| 243 |
+
|
| 244 |
+
compressed_path = file_path.with_suffix(file_path.suffix + file_extension)
|
| 245 |
+
with suppress(OSError):
|
| 246 |
+
# Do not follow symlinks and ignore any non-regular files.
|
| 247 |
+
st = compressed_path.lstat()
|
| 248 |
+
if S_ISREG(st.st_mode):
|
| 249 |
+
return compressed_path, st, file_encoding
|
| 250 |
+
|
| 251 |
+
# Fallback to the uncompressed file
|
| 252 |
+
st = file_path.stat()
|
| 253 |
+
return file_path if S_ISREG(st.st_mode) else None, st, None
|
| 254 |
+
|
| 255 |
+
async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
|
| 256 |
+
loop = asyncio.get_running_loop()
|
| 257 |
+
# Encoding comparisons should be case-insensitive
|
| 258 |
+
# https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
|
| 259 |
+
accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
|
| 260 |
+
try:
|
| 261 |
+
response_result, fobj, st, file_encoding = await loop.run_in_executor(
|
| 262 |
+
None, self._make_response, request, accept_encoding
|
| 263 |
+
)
|
| 264 |
+
except PermissionError:
|
| 265 |
+
self.set_status(HTTPForbidden.status_code)
|
| 266 |
+
return await super().prepare(request)
|
| 267 |
+
except OSError:
|
| 268 |
+
# Most likely to be FileNotFoundError or OSError for circular
|
| 269 |
+
# symlinks in python >= 3.13, so respond with 404.
|
| 270 |
+
self.set_status(HTTPNotFound.status_code)
|
| 271 |
+
return await super().prepare(request)
|
| 272 |
+
|
| 273 |
+
# Forbid special files like sockets, pipes, devices, etc.
|
| 274 |
+
if response_result is _FileResponseResult.NOT_ACCEPTABLE:
|
| 275 |
+
self.set_status(HTTPForbidden.status_code)
|
| 276 |
+
return await super().prepare(request)
|
| 277 |
+
|
| 278 |
+
if response_result is _FileResponseResult.PRE_CONDITION_FAILED:
|
| 279 |
+
return await self._precondition_failed(request)
|
| 280 |
+
|
| 281 |
+
if response_result is _FileResponseResult.NOT_MODIFIED:
|
| 282 |
+
etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
|
| 283 |
+
last_modified = st.st_mtime
|
| 284 |
+
return await self._not_modified(request, etag_value, last_modified)
|
| 285 |
+
|
| 286 |
+
assert fobj is not None
|
| 287 |
+
try:
|
| 288 |
+
return await self._prepare_open_file(request, fobj, st, file_encoding)
|
| 289 |
+
finally:
|
| 290 |
+
# We do not await here because we do not want to wait
|
| 291 |
+
# for the executor to finish before returning the response
|
| 292 |
+
# so the connection can begin servicing another request
|
| 293 |
+
# as soon as possible.
|
| 294 |
+
close_future = loop.run_in_executor(None, fobj.close)
|
| 295 |
+
# Hold a strong reference to the future to prevent it from being
|
| 296 |
+
# garbage collected before it completes.
|
| 297 |
+
_CLOSE_FUTURES.add(close_future)
|
| 298 |
+
close_future.add_done_callback(_CLOSE_FUTURES.remove)
|
| 299 |
+
|
| 300 |
+
async def _prepare_open_file(
|
| 301 |
+
self,
|
| 302 |
+
request: "BaseRequest",
|
| 303 |
+
fobj: io.BufferedReader,
|
| 304 |
+
st: os.stat_result,
|
| 305 |
+
file_encoding: Optional[str],
|
| 306 |
+
) -> Optional[AbstractStreamWriter]:
|
| 307 |
+
status = self._status
|
| 308 |
+
file_size: int = st.st_size
|
| 309 |
+
file_mtime: float = st.st_mtime
|
| 310 |
+
count: int = file_size
|
| 311 |
+
start: Optional[int] = None
|
| 312 |
+
|
| 313 |
+
if (ifrange := request.if_range) is None or file_mtime <= ifrange.timestamp():
|
| 314 |
+
# If-Range header check:
|
| 315 |
+
# condition = cached date >= last modification date
|
| 316 |
+
# return 206 if True else 200.
|
| 317 |
+
# if False:
|
| 318 |
+
# Range header would not be processed, return 200
|
| 319 |
+
# if True but Range header missing
|
| 320 |
+
# return 200
|
| 321 |
+
try:
|
| 322 |
+
rng = request.http_range
|
| 323 |
+
start = rng.start
|
| 324 |
+
end: Optional[int] = rng.stop
|
| 325 |
+
except ValueError:
|
| 326 |
+
# https://tools.ietf.org/html/rfc7233:
|
| 327 |
+
# A server generating a 416 (Range Not Satisfiable) response to
|
| 328 |
+
# a byte-range request SHOULD send a Content-Range header field
|
| 329 |
+
# with an unsatisfied-range value.
|
| 330 |
+
# The complete-length in a 416 response indicates the current
|
| 331 |
+
# length of the selected representation.
|
| 332 |
+
#
|
| 333 |
+
# Will do the same below. Many servers ignore this and do not
|
| 334 |
+
# send a Content-Range header with HTTP 416
|
| 335 |
+
self._headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
|
| 336 |
+
self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
|
| 337 |
+
return await super().prepare(request)
|
| 338 |
+
|
| 339 |
+
# If a range request has been made, convert start, end slice
|
| 340 |
+
# notation into file pointer offset and count
|
| 341 |
+
if start is not None:
|
| 342 |
+
if start < 0 and end is None: # return tail of file
|
| 343 |
+
start += file_size
|
| 344 |
+
if start < 0:
|
| 345 |
+
# if Range:bytes=-1000 in request header but file size
|
| 346 |
+
# is only 200, there would be trouble without this
|
| 347 |
+
start = 0
|
| 348 |
+
count = file_size - start
|
| 349 |
+
else:
|
| 350 |
+
# rfc7233:If the last-byte-pos value is
|
| 351 |
+
# absent, or if the value is greater than or equal to
|
| 352 |
+
# the current length of the representation data,
|
| 353 |
+
# the byte range is interpreted as the remainder
|
| 354 |
+
# of the representation (i.e., the server replaces the
|
| 355 |
+
# value of last-byte-pos with a value that is one less than
|
| 356 |
+
# the current length of the selected representation).
|
| 357 |
+
count = (
|
| 358 |
+
min(end if end is not None else file_size, file_size) - start
|
| 359 |
+
)
|
| 360 |
+
|
| 361 |
+
if start >= file_size:
|
| 362 |
+
# HTTP 416 should be returned in this case.
|
| 363 |
+
#
|
| 364 |
+
# According to https://tools.ietf.org/html/rfc7233:
|
| 365 |
+
# If a valid byte-range-set includes at least one
|
| 366 |
+
# byte-range-spec with a first-byte-pos that is less than
|
| 367 |
+
# the current length of the representation, or at least one
|
| 368 |
+
# suffix-byte-range-spec with a non-zero suffix-length,
|
| 369 |
+
# then the byte-range-set is satisfiable. Otherwise, the
|
| 370 |
+
# byte-range-set is unsatisfiable.
|
| 371 |
+
self._headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
|
| 372 |
+
self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
|
| 373 |
+
return await super().prepare(request)
|
| 374 |
+
|
| 375 |
+
status = HTTPPartialContent.status_code
|
| 376 |
+
# Even though you are sending the whole file, you should still
|
| 377 |
+
# return a HTTP 206 for a Range request.
|
| 378 |
+
self.set_status(status)
|
| 379 |
+
|
| 380 |
+
# If the Content-Type header is not already set, guess it based on the
|
| 381 |
+
# extension of the request path. The encoding returned by guess_type
|
| 382 |
+
# can be ignored since the map was cleared above.
|
| 383 |
+
if hdrs.CONTENT_TYPE not in self._headers:
|
| 384 |
+
if sys.version_info >= (3, 13):
|
| 385 |
+
guesser = CONTENT_TYPES.guess_file_type
|
| 386 |
+
else:
|
| 387 |
+
guesser = CONTENT_TYPES.guess_type
|
| 388 |
+
self.content_type = guesser(self._path)[0] or FALLBACK_CONTENT_TYPE
|
| 389 |
+
|
| 390 |
+
if file_encoding:
|
| 391 |
+
self._headers[hdrs.CONTENT_ENCODING] = file_encoding
|
| 392 |
+
self._headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING
|
| 393 |
+
# Disable compression if we are already sending
|
| 394 |
+
# a compressed file since we don't want to double
|
| 395 |
+
# compress.
|
| 396 |
+
self._compression = False
|
| 397 |
+
|
| 398 |
+
self.etag = f"{st.st_mtime_ns:x}-{st.st_size:x}" # type: ignore[assignment]
|
| 399 |
+
self.last_modified = file_mtime # type: ignore[assignment]
|
| 400 |
+
self.content_length = count
|
| 401 |
+
|
| 402 |
+
self._headers[hdrs.ACCEPT_RANGES] = "bytes"
|
| 403 |
+
|
| 404 |
+
if status == HTTPPartialContent.status_code:
|
| 405 |
+
real_start = start
|
| 406 |
+
assert real_start is not None
|
| 407 |
+
self._headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format(
|
| 408 |
+
real_start, real_start + count - 1, file_size
|
| 409 |
+
)
|
| 410 |
+
|
| 411 |
+
# If we are sending 0 bytes calling sendfile() will throw a ValueError
|
| 412 |
+
if count == 0 or must_be_empty_body(request.method, status):
|
| 413 |
+
return await super().prepare(request)
|
| 414 |
+
|
| 415 |
+
# be aware that start could be None or int=0 here.
|
| 416 |
+
offset = start or 0
|
| 417 |
+
|
| 418 |
+
return await self._sendfile(request, fobj, offset, count)
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/web_protocol.py
ADDED
|
@@ -0,0 +1,746 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import asyncio.streams
|
| 3 |
+
import sys
|
| 4 |
+
import traceback
|
| 5 |
+
import warnings
|
| 6 |
+
from collections import deque
|
| 7 |
+
from contextlib import suppress
|
| 8 |
+
from html import escape as html_escape
|
| 9 |
+
from http import HTTPStatus
|
| 10 |
+
from logging import Logger
|
| 11 |
+
from typing import (
|
| 12 |
+
TYPE_CHECKING,
|
| 13 |
+
Any,
|
| 14 |
+
Awaitable,
|
| 15 |
+
Callable,
|
| 16 |
+
Deque,
|
| 17 |
+
Optional,
|
| 18 |
+
Sequence,
|
| 19 |
+
Tuple,
|
| 20 |
+
Type,
|
| 21 |
+
Union,
|
| 22 |
+
cast,
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
import attr
|
| 26 |
+
import yarl
|
| 27 |
+
|
| 28 |
+
from .abc import AbstractAccessLogger, AbstractStreamWriter
|
| 29 |
+
from .base_protocol import BaseProtocol
|
| 30 |
+
from .helpers import ceil_timeout
|
| 31 |
+
from .http import (
|
| 32 |
+
HttpProcessingError,
|
| 33 |
+
HttpRequestParser,
|
| 34 |
+
HttpVersion10,
|
| 35 |
+
RawRequestMessage,
|
| 36 |
+
StreamWriter,
|
| 37 |
+
)
|
| 38 |
+
from .http_exceptions import BadHttpMethod
|
| 39 |
+
from .log import access_logger, server_logger
|
| 40 |
+
from .streams import EMPTY_PAYLOAD, StreamReader
|
| 41 |
+
from .tcp_helpers import tcp_keepalive
|
| 42 |
+
from .web_exceptions import HTTPException, HTTPInternalServerError
|
| 43 |
+
from .web_log import AccessLogger
|
| 44 |
+
from .web_request import BaseRequest
|
| 45 |
+
from .web_response import Response, StreamResponse
|
| 46 |
+
|
| 47 |
+
__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")
|
| 48 |
+
|
| 49 |
+
if TYPE_CHECKING:
|
| 50 |
+
from .web_server import Server
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
_RequestFactory = Callable[
|
| 54 |
+
[
|
| 55 |
+
RawRequestMessage,
|
| 56 |
+
StreamReader,
|
| 57 |
+
"RequestHandler",
|
| 58 |
+
AbstractStreamWriter,
|
| 59 |
+
"asyncio.Task[None]",
|
| 60 |
+
],
|
| 61 |
+
BaseRequest,
|
| 62 |
+
]
|
| 63 |
+
|
| 64 |
+
_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]
|
| 65 |
+
|
| 66 |
+
ERROR = RawRequestMessage(
|
| 67 |
+
"UNKNOWN",
|
| 68 |
+
"/",
|
| 69 |
+
HttpVersion10,
|
| 70 |
+
{}, # type: ignore[arg-type]
|
| 71 |
+
{}, # type: ignore[arg-type]
|
| 72 |
+
True,
|
| 73 |
+
None,
|
| 74 |
+
False,
|
| 75 |
+
False,
|
| 76 |
+
yarl.URL("/"),
|
| 77 |
+
)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
class RequestPayloadError(Exception):
|
| 81 |
+
"""Payload parsing error."""
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
class PayloadAccessError(Exception):
|
| 85 |
+
"""Payload was accessed after response was sent."""
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
_PAYLOAD_ACCESS_ERROR = PayloadAccessError()
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
|
| 92 |
+
class _ErrInfo:
|
| 93 |
+
status: int
|
| 94 |
+
exc: BaseException
|
| 95 |
+
message: str
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader]
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
class RequestHandler(BaseProtocol):
|
| 102 |
+
"""HTTP protocol implementation.
|
| 103 |
+
|
| 104 |
+
RequestHandler handles incoming HTTP request. It reads request line,
|
| 105 |
+
request headers and request payload and calls handle_request() method.
|
| 106 |
+
By default it always returns with 404 response.
|
| 107 |
+
|
| 108 |
+
RequestHandler handles errors in incoming request, like bad
|
| 109 |
+
status line, bad headers or incomplete payload. If any error occurs,
|
| 110 |
+
connection gets closed.
|
| 111 |
+
|
| 112 |
+
keepalive_timeout -- number of seconds before closing
|
| 113 |
+
keep-alive connection
|
| 114 |
+
|
| 115 |
+
tcp_keepalive -- TCP keep-alive is on, default is on
|
| 116 |
+
|
| 117 |
+
debug -- enable debug mode
|
| 118 |
+
|
| 119 |
+
logger -- custom logger object
|
| 120 |
+
|
| 121 |
+
access_log_class -- custom class for access_logger
|
| 122 |
+
|
| 123 |
+
access_log -- custom logging object
|
| 124 |
+
|
| 125 |
+
access_log_format -- access log format string
|
| 126 |
+
|
| 127 |
+
loop -- Optional event loop
|
| 128 |
+
|
| 129 |
+
max_line_size -- Optional maximum header line size
|
| 130 |
+
|
| 131 |
+
max_field_size -- Optional maximum header field size
|
| 132 |
+
|
| 133 |
+
max_headers -- Optional maximum header size
|
| 134 |
+
|
| 135 |
+
timeout_ceil_threshold -- Optional value to specify
|
| 136 |
+
threshold to ceil() timeout
|
| 137 |
+
values
|
| 138 |
+
|
| 139 |
+
"""
|
| 140 |
+
|
| 141 |
+
__slots__ = (
|
| 142 |
+
"_request_count",
|
| 143 |
+
"_keepalive",
|
| 144 |
+
"_manager",
|
| 145 |
+
"_request_handler",
|
| 146 |
+
"_request_factory",
|
| 147 |
+
"_tcp_keepalive",
|
| 148 |
+
"_next_keepalive_close_time",
|
| 149 |
+
"_keepalive_handle",
|
| 150 |
+
"_keepalive_timeout",
|
| 151 |
+
"_lingering_time",
|
| 152 |
+
"_messages",
|
| 153 |
+
"_message_tail",
|
| 154 |
+
"_handler_waiter",
|
| 155 |
+
"_waiter",
|
| 156 |
+
"_task_handler",
|
| 157 |
+
"_upgrade",
|
| 158 |
+
"_payload_parser",
|
| 159 |
+
"_request_parser",
|
| 160 |
+
"_reading_paused",
|
| 161 |
+
"logger",
|
| 162 |
+
"debug",
|
| 163 |
+
"access_log",
|
| 164 |
+
"access_logger",
|
| 165 |
+
"_close",
|
| 166 |
+
"_force_close",
|
| 167 |
+
"_current_request",
|
| 168 |
+
"_timeout_ceil_threshold",
|
| 169 |
+
"_request_in_progress",
|
| 170 |
+
)
|
| 171 |
+
|
| 172 |
+
def __init__(
    self,
    manager: "Server",
    *,
    loop: asyncio.AbstractEventLoop,
    # Default should be high enough that it's likely longer than a reverse proxy.
    keepalive_timeout: float = 3630,
    tcp_keepalive: bool = True,
    logger: Logger = server_logger,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log: Logger = access_logger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    debug: bool = False,
    max_line_size: int = 8190,
    max_headers: int = 32768,
    max_field_size: int = 8190,
    lingering_time: float = 10.0,
    read_bufsize: int = 2**16,
    auto_decompress: bool = True,
    timeout_ceil_threshold: float = 5,
):
    """Initialize the per-connection handler.

    ``manager`` is the owning Server, which supplies the request
    factory and the user request handler.
    """
    super().__init__(loop)

    # _request_count is the number of requests processed with the same connection.
    self._request_count = 0
    self._keepalive = False
    self._current_request: Optional[BaseRequest] = None
    self._manager: Optional[Server] = manager
    self._request_handler: Optional[_RequestHandler] = manager.request_handler
    self._request_factory: Optional[_RequestFactory] = manager.request_factory

    self._tcp_keepalive = tcp_keepalive
    # placeholder to be replaced on keepalive timeout setup
    self._next_keepalive_close_time = 0.0
    self._keepalive_handle: Optional[asyncio.Handle] = None
    self._keepalive_timeout = keepalive_timeout
    self._lingering_time = float(lingering_time)

    # Parsed (message, payload) pairs waiting for the handler loop.
    self._messages: Deque[_MsgType] = deque()
    self._message_tail = b""

    self._waiter: Optional[asyncio.Future[None]] = None
    self._handler_waiter: Optional[asyncio.Future[None]] = None
    self._task_handler: Optional[asyncio.Task[None]] = None

    self._upgrade = False
    self._payload_parser: Any = None
    self._request_parser: Optional[HttpRequestParser] = HttpRequestParser(
        self,
        loop,
        read_bufsize,
        max_line_size=max_line_size,
        max_field_size=max_field_size,
        max_headers=max_headers,
        payload_exception=RequestPayloadError,
        auto_decompress=auto_decompress,
    )

    # Keep the default of 5 if the provided value is not convertible
    # to float (e.g. None).
    self._timeout_ceil_threshold: float = 5
    try:
        self._timeout_ceil_threshold = float(timeout_ceil_threshold)
    except (TypeError, ValueError):
        pass

    self.logger = logger
    self.debug = debug
    self.access_log = access_log
    if access_log:
        self.access_logger: Optional[AbstractAccessLogger] = access_log_class(
            access_log, access_log_format
        )
    else:
        self.access_logger = None

    self._close = False
    self._force_close = False
    self._request_in_progress = False
|
| 249 |
+
|
| 250 |
+
def __repr__(self) -> str:
    """Show the class name and whether a transport is still attached."""
    state = "disconnected" if self.transport is None else "connected"
    return f"<{self.__class__.__name__} {state}>"
|
| 255 |
+
|
| 256 |
+
@property
def keepalive_timeout(self) -> float:
    """Seconds of idleness allowed before a keep-alive connection closes."""
    return self._keepalive_timeout
|
| 259 |
+
|
| 260 |
+
async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
    """Do worker process exit preparations.

    We need to clean up everything and stop accepting requests.
    It is especially important for keep-alive connections.
    """
    self._force_close = True

    if self._keepalive_handle is not None:
        self._keepalive_handle.cancel()

    # Wait for graceful handler completion
    if self._request_in_progress:
        # The future is only created when we are shutting
        # down while the handler is still processing a request
        # to avoid creating a future for every request.
        self._handler_waiter = self._loop.create_future()
        try:
            async with ceil_timeout(timeout):
                await self._handler_waiter
        except (asyncio.CancelledError, asyncio.TimeoutError):
            self._handler_waiter = None
            # On 3.11+, task.cancelling() distinguishes an external
            # cancellation (must propagate) from the timeout's own
            # internal one (swallowed here).
            if (
                sys.version_info >= (3, 11)
                and (task := asyncio.current_task())
                and task.cancelling()
            ):
                raise
    # Then cancel handler and wait
    try:
        async with ceil_timeout(timeout):
            if self._current_request is not None:
                self._current_request._cancel(asyncio.CancelledError())

            if self._task_handler is not None and not self._task_handler.done():
                # shield() so waiting here does not forward our own
                # cancellation into the handler task.
                await asyncio.shield(self._task_handler)
    except (asyncio.CancelledError, asyncio.TimeoutError):
        if (
            sys.version_info >= (3, 11)
            and (task := asyncio.current_task())
            and task.cancelling()
        ):
            raise

    # force-close non-idle handler
    if self._task_handler is not None:
        self._task_handler.cancel()

    self.force_close()
|
| 309 |
+
|
| 310 |
+
def connection_made(self, transport: asyncio.BaseTransport) -> None:
    """Accept a new connection: enable TCP keep-alive if configured,
    notify the manager, and spawn the request-processing task."""
    super().connection_made(transport)

    real_transport = cast(asyncio.Transport, transport)
    if self._tcp_keepalive:
        tcp_keepalive(real_transport)

    assert self._manager is not None
    self._manager.connection_made(self, real_transport)

    loop = self._loop
    if sys.version_info >= (3, 12):
        # eager_start (3.12+) runs the coroutine synchronously up to its
        # first suspension point, saving a loop iteration per connection.
        task = asyncio.Task(self.start(), loop=loop, eager_start=True)
    else:
        task = loop.create_task(self.start())
    self._task_handler = task
|
| 326 |
+
|
| 327 |
+
def connection_lost(self, exc: Optional[BaseException]) -> None:
    """Tear down state when the transport drops.

    Cancels the in-flight request (and, when the server opts in, the
    handler task) and drops references so the protocol can be GC'd.
    """
    if self._manager is None:
        # Already torn down; ignore repeated callbacks.
        return
    self._manager.connection_lost(self, exc)

    # Grab value before setting _manager to None.
    handler_cancellation = self._manager.handler_cancellation

    self.force_close()
    super().connection_lost(exc)
    self._manager = None
    self._request_factory = None
    self._request_handler = None
    self._request_parser = None

    if self._keepalive_handle is not None:
        self._keepalive_handle.cancel()

    if self._current_request is not None:
        if exc is None:
            exc = ConnectionResetError("Connection lost")
        self._current_request._cancel(exc)

    if handler_cancellation and self._task_handler is not None:
        self._task_handler.cancel()

    self._task_handler = None

    if self._payload_parser is not None:
        self._payload_parser.feed_eof()
        self._payload_parser = None
|
| 358 |
+
|
| 359 |
+
def set_parser(self, parser: Any) -> None:
    """Install a payload parser (actual type is WebReader) and flush
    any buffered tail bytes into it."""
    assert self._payload_parser is None

    self._payload_parser = parser

    pending = self._message_tail
    if pending:
        self._payload_parser.feed_data(pending)
        self._message_tail = b""
|
| 368 |
+
|
| 369 |
+
def eof_received(self) -> None:
    """Intentionally ignore EOF from the peer."""
|
| 371 |
+
|
| 372 |
+
def data_received(self, data: bytes) -> None:
    """Feed raw bytes from the transport into the HTTP machinery.

    Three states: normal request parsing, raw buffering while a
    protocol upgrade is pending, or feeding an installed payload
    parser directly.
    """
    if self._force_close or self._close:
        return
    # parse http messages
    messages: Sequence[_MsgType]
    if self._payload_parser is None and not self._upgrade:
        assert self._request_parser is not None
        try:
            messages, upgraded, tail = self._request_parser.feed_data(data)
        except HttpProcessingError as exc:
            # Turn a malformed request into a synthetic 400 message so
            # the regular handler loop produces the error response.
            messages = [
                (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD)
            ]
            upgraded = False
            tail = b""

        for msg, payload in messages or ():
            self._request_count += 1
            self._messages.append((msg, payload))

        waiter = self._waiter
        if messages and waiter is not None and not waiter.done():
            # don't set result twice
            waiter.set_result(None)

        self._upgrade = upgraded
        if upgraded and tail:
            self._message_tail = tail

    # no parser, just store
    elif self._payload_parser is None and self._upgrade and data:
        self._message_tail += data

    # feed payload
    elif data:
        eof, tail = self._payload_parser.feed_data(data)
        if eof:
            self.close()
|
| 410 |
+
|
| 411 |
+
def keep_alive(self, val: bool) -> None:
    """Set keep-alive connection mode.

    :param bool val: new state.
    """
    self._keepalive = val
    handle = self._keepalive_handle
    if handle:
        handle.cancel()
        self._keepalive_handle = None
|
| 420 |
+
|
| 421 |
+
def close(self) -> None:
    """Close connection.

    Stop accepting new pipelining messages and close
    connection when handlers done processing messages.
    """
    self._close = True
    waiter = self._waiter
    if waiter:
        waiter.cancel()
|
| 430 |
+
|
| 431 |
+
def force_close(self) -> None:
    """Forcefully close connection: cancel the idle waiter and drop
    the transport immediately."""
    self._force_close = True
    waiter = self._waiter
    if waiter:
        waiter.cancel()
    transport = self.transport
    if transport is not None:
        transport.close()
        self.transport = None
|
| 439 |
+
|
| 440 |
+
def log_access(
    self, request: BaseRequest, response: StreamResponse, time: float
) -> None:
    """Emit an access-log entry when an enabled access logger exists.

    ``time`` is the loop timestamp taken when handling started; the
    elapsed duration is what gets logged.
    """
    if self.access_logger is not None and self.access_logger.enabled:
        self.access_logger.log(request, response, self._loop.time() - time)
|
| 445 |
+
|
| 446 |
+
def log_debug(self, *args: Any, **kw: Any) -> None:
    """Forward to ``logger.debug`` only when debug mode is enabled."""
    if self.debug:
        self.logger.debug(*args, **kw)
|
| 449 |
+
|
| 450 |
+
def log_exception(self, *args: Any, **kw: Any) -> None:
    """Unconditionally log an exception via the configured logger."""
    self.logger.exception(*args, **kw)
|
| 452 |
+
|
| 453 |
+
def _process_keepalive(self) -> None:
    """Timer callback: close the connection once its keep-alive
    deadline has passed and the handler loop is idle."""
    self._keepalive_handle = None
    if self._force_close or not self._keepalive:
        return

    loop = self._loop
    now = loop.time()
    close_time = self._next_keepalive_close_time
    if now < close_time:
        # Keep alive close check fired too early, reschedule
        self._keepalive_handle = loop.call_at(close_time, self._process_keepalive)
        return

    # handler in idle state
    if self._waiter and not self._waiter.done():
        self.force_close()
|
| 469 |
+
|
| 470 |
+
async def _handle_request(
    self,
    request: BaseRequest,
    start_time: float,
    request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]],
) -> Tuple[StreamResponse, bool]:
    """Run one request through the user handler and finish the response.

    Returns ``(response, reset)`` where ``reset`` is True when the
    client disconnected prematurely.  Timeouts map to 504, other
    handler failures to 500; CancelledError always propagates.
    """
    self._request_in_progress = True
    try:
        try:
            self._current_request = request
            resp = await request_handler(request)
        finally:
            self._current_request = None
    except HTTPException as exc:
        # HTTP exceptions double as responses.
        resp = exc
        resp, reset = await self.finish_response(request, resp, start_time)
    except asyncio.CancelledError:
        raise
    except asyncio.TimeoutError as exc:
        self.log_debug("Request handler timed out.", exc_info=exc)
        resp = self.handle_error(request, 504)
        resp, reset = await self.finish_response(request, resp, start_time)
    except Exception as exc:
        resp = self.handle_error(request, 500, exc)
        resp, reset = await self.finish_response(request, resp, start_time)
    else:
        # Deprecation warning (See #2415)
        if getattr(resp, "__http_exception__", False):
            warnings.warn(
                "returning HTTPException object is deprecated "
                "(#2415) and will be removed, "
                "please raise the exception instead",
                DeprecationWarning,
            )

        resp, reset = await self.finish_response(request, resp, start_time)
    finally:
        self._request_in_progress = False
        if self._handler_waiter is not None:
            # Unblock shutdown() waiting for this request to complete.
            self._handler_waiter.set_result(None)

    return resp, reset
|
| 512 |
+
|
| 513 |
+
async def start(self) -> None:
    """Process incoming request.

    It reads request line, request headers and request payload, then
    calls handle_request() method. Subclass has to override
    handle_request(). start() handles various exceptions in request
    or response handling. Connection is being closed always unless
    keep_alive(True) specified.
    """
    loop = self._loop
    handler = asyncio.current_task(loop)
    assert handler is not None
    manager = self._manager
    assert manager is not None
    keepalive_timeout = self._keepalive_timeout
    resp = None
    assert self._request_factory is not None
    assert self._request_handler is not None

    while not self._force_close:
        if not self._messages:
            try:
                # wait for next request
                self._waiter = loop.create_future()
                await self._waiter
            finally:
                self._waiter = None

        message, payload = self._messages.popleft()

        start = loop.time()

        manager.requests_count += 1
        writer = StreamWriter(self, loop)
        if isinstance(message, _ErrInfo):
            # make request_factory work
            request_handler = self._make_error_handler(message)
            message = ERROR
        else:
            request_handler = self._request_handler

        request = self._request_factory(message, payload, self, writer, handler)
        try:
            # a new task is used for copy context vars (#3406)
            coro = self._handle_request(request, start, request_handler)
            if sys.version_info >= (3, 12):
                task = asyncio.Task(coro, loop=loop, eager_start=True)
            else:
                task = loop.create_task(coro)
            try:
                resp, reset = await task
            except ConnectionError:
                self.log_debug("Ignored premature client disconnection")
                break

            # Drop the processed task from asyncio.Task.all_tasks() early
            del task
            if reset:
                self.log_debug("Ignored premature client disconnection 2")
                break

            # notify server about keep-alive
            self._keepalive = bool(resp.keep_alive)

            # check payload
            if not payload.is_eof():
                lingering_time = self._lingering_time
                if not self._force_close and lingering_time:
                    # Drain the unread payload for up to lingering_time
                    # seconds so the connection can be reused.
                    self.log_debug(
                        "Start lingering close timer for %s sec.", lingering_time
                    )

                    now = loop.time()
                    end_t = now + lingering_time

                    try:
                        while not payload.is_eof() and now < end_t:
                            async with ceil_timeout(end_t - now):
                                # read and ignore
                                await payload.readany()
                            now = loop.time()
                    except (asyncio.CancelledError, asyncio.TimeoutError):
                        # Re-raise only for an external cancellation
                        # (3.11+ cancelling() check).
                        if (
                            sys.version_info >= (3, 11)
                            and (t := asyncio.current_task())
                            and t.cancelling()
                        ):
                            raise

                # if payload still uncompleted
                if not payload.is_eof() and not self._force_close:
                    self.log_debug("Uncompleted request.")
                    self.close()

                payload.set_exception(_PAYLOAD_ACCESS_ERROR)

        except asyncio.CancelledError:
            self.log_debug("Ignored premature client disconnection")
            raise
        except Exception as exc:
            self.log_exception("Unhandled exception", exc_info=exc)
            self.force_close()
        finally:
            if self.transport is None and resp is not None:
                self.log_debug("Ignored premature client disconnection.")
            elif not self._force_close:
                if self._keepalive and not self._close:
                    # start keep-alive timer
                    if keepalive_timeout is not None:
                        now = loop.time()
                        close_time = now + keepalive_timeout
                        self._next_keepalive_close_time = close_time
                        if self._keepalive_handle is None:
                            self._keepalive_handle = loop.call_at(
                                close_time, self._process_keepalive
                            )
                else:
                    break

    # remove handler, close transport if no handlers left
    if not self._force_close:
        self._task_handler = None
        if self.transport is not None:
            self.transport.close()
|
| 637 |
+
|
| 638 |
+
async def finish_response(
    self, request: BaseRequest, resp: StreamResponse, start_time: float
) -> Tuple[StreamResponse, bool]:
    """Prepare the response and write_eof, then log access.

    This has to
    be called within the context of any exception so the access logger
    can get exception information. Returns True if the client disconnects
    prematurely.
    """
    request._finish()
    if self._request_parser is not None:
        # Leave any upgrade state and replay bytes that were buffered
        # while upgraded back through the request parser.
        self._request_parser.set_upgraded(False)
        self._upgrade = False
        if self._message_tail:
            self._request_parser.feed_data(self._message_tail)
            self._message_tail = b""
    try:
        prepare_meth = resp.prepare
    except AttributeError:
        # Handler returned None or a non-response object: log it and
        # substitute a 500 response.
        if resp is None:
            self.log_exception("Missing return statement on request handler")
        else:
            self.log_exception(
                "Web-handler should return a response instance, "
                "got {!r}".format(resp)
            )
        exc = HTTPInternalServerError()
        resp = Response(
            status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers
        )
        prepare_meth = resp.prepare
    try:
        await prepare_meth(request)
        await resp.write_eof()
    except ConnectionError:
        self.log_access(request, resp, start_time)
        return resp, True

    self.log_access(request, resp, start_time)
    return resp, False
|
| 679 |
+
|
| 680 |
+
def handle_error(
    self,
    request: BaseRequest,
    status: int = 500,
    exc: Optional[BaseException] = None,
    message: Optional[str] = None,
) -> StreamResponse:
    """Handle errors.

    Returns HTTP response with specific status code. Logs additional
    information. It always closes current connection.
    """
    if self._request_count == 1 and isinstance(exc, BadHttpMethod):
        # BadHttpMethod is common when a client sends non-HTTP
        # or encrypted traffic to an HTTP port. This is expected
        # to happen when connected to the public internet so we log
        # it at the debug level as to not fill logs with noise.
        self.logger.debug("Error handling request", exc_info=exc)
    else:
        self.log_exception("Error handling request", exc_info=exc)

    # some data already got sent, connection is broken
    if request.writer.output_size > 0:
        raise ConnectionError(
            "Response is sent already, cannot send another response "
            "with the error message"
        )

    ct = "text/plain"
    if status == HTTPStatus.INTERNAL_SERVER_ERROR:
        title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)
        msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
        tb = None
        if self.debug:
            # Include a traceback only in debug mode; suppress any
            # failure while formatting it.
            with suppress(Exception):
                tb = traceback.format_exc()

        if "text/html" in request.headers.get("Accept", ""):
            if tb:
                tb = html_escape(tb)
                msg = f"<h2>Traceback:</h2>\n<pre>{tb}</pre>"
            message = (
                "<html><head>"
                "<title>{title}</title>"
                "</head><body>\n<h1>{title}</h1>"
                "\n{msg}\n</body></html>\n"
            ).format(title=title, msg=msg)
            ct = "text/html"
        else:
            if tb:
                msg = tb
            message = title + "\n\n" + msg

    resp = Response(status=status, text=message, content_type=ct)
    # Error responses always close the connection.
    resp.force_close()

    return resp
|
| 737 |
+
|
| 738 |
+
def _make_error_handler(
    self, err_info: _ErrInfo
) -> Callable[[BaseRequest], Awaitable[StreamResponse]]:
    """Wrap a parse error so it flows through the normal handler path
    and is rendered by handle_error()."""

    async def handler(request: BaseRequest) -> StreamResponse:
        return self.handle_error(
            request, err_info.status, err_info.exc, err_info.message
        )

    return handler
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/web_routedef.py
ADDED
|
@@ -0,0 +1,214 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import os # noqa
|
| 3 |
+
from typing import (
|
| 4 |
+
TYPE_CHECKING,
|
| 5 |
+
Any,
|
| 6 |
+
Callable,
|
| 7 |
+
Dict,
|
| 8 |
+
Iterator,
|
| 9 |
+
List,
|
| 10 |
+
Optional,
|
| 11 |
+
Sequence,
|
| 12 |
+
Type,
|
| 13 |
+
Union,
|
| 14 |
+
overload,
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
import attr
|
| 18 |
+
|
| 19 |
+
from . import hdrs
|
| 20 |
+
from .abc import AbstractView
|
| 21 |
+
from .typedefs import Handler, PathLike
|
| 22 |
+
|
| 23 |
+
if TYPE_CHECKING:
|
| 24 |
+
from .web_request import Request
|
| 25 |
+
from .web_response import StreamResponse
|
| 26 |
+
from .web_urldispatcher import AbstractRoute, UrlDispatcher
|
| 27 |
+
else:
|
| 28 |
+
Request = StreamResponse = UrlDispatcher = AbstractRoute = None
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
# Public API of this module: route-definition classes and the
# module-level factory helpers.
__all__ = (
    "AbstractRouteDef",
    "RouteDef",
    "StaticDef",
    "RouteTableDef",
    "head",
    "options",
    "get",
    "post",
    "patch",
    "put",
    "delete",
    "route",
    "view",
    "static",
)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class AbstractRouteDef(abc.ABC):
    """Interface for route definitions that can register themselves
    with a UrlDispatcher and return the routes they created."""

    @abc.abstractmethod
    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        pass  # pragma: no cover
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
# A handler is either a plain callable handler or an AbstractView subclass.
_HandlerType = Union[Type[AbstractView], Handler]
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class RouteDef(AbstractRouteDef):
    """Immutable definition of one route: HTTP method, path, handler
    and extra keyword arguments for the router."""

    method: str
    path: str
    handler: _HandlerType
    kwargs: Dict[str, Any]

    def __repr__(self) -> str:
        extras = "".join(
            f", {name}={value!r}" for name, value in sorted(self.kwargs.items())
        )
        return (
            f"<RouteDef {self.method} {self.path} "
            f"-> {self.handler.__name__!r}{extras}>"
        )

    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        """Register on *router*, preferring the specialised
        ``add_<method>`` helper when the method is a known one."""
        if self.method not in hdrs.METH_ALL:
            return [
                router.add_route(self.method, self.path, self.handler, **self.kwargs)
            ]
        add_method = getattr(router, "add_" + self.method.lower())
        return [add_method(self.path, self.handler, **self.kwargs)]
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class StaticDef(AbstractRouteDef):
    """Immutable definition of a static-files mount: URL prefix, local
    path and extra keyword arguments for ``add_static``."""

    prefix: str
    path: PathLike
    kwargs: Dict[str, Any]

    def __repr__(self) -> str:
        extras = "".join(
            f", {name}={value!r}" for name, value in sorted(self.kwargs.items())
        )
        return f"<StaticDef {self.prefix} -> {self.path}{extras}>"

    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        """Register the static resource and return its individual routes."""
        resource = router.add_static(self.prefix, self.path, **self.kwargs)
        return list(resource.get_info().get("routes", {}).values())
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Build a RouteDef for *method* on *path* served by *handler*."""
    return RouteDef(method, path, handler, kwargs)
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(hdrs.METH_HEAD, ...)``."""
    return route(hdrs.METH_HEAD, path, handler, **kwargs)
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(hdrs.METH_OPTIONS, ...)``."""
    return route(hdrs.METH_OPTIONS, path, handler, **kwargs)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def get(
    path: str,
    handler: _HandlerType,
    *,
    name: Optional[str] = None,
    allow_head: bool = True,
    **kwargs: Any,
) -> RouteDef:
    """Shortcut for ``route(hdrs.METH_GET, ...)``; *name* and
    *allow_head* are passed through to the router as keyword-only
    options."""
    return route(
        hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs
    )
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(hdrs.METH_POST, ...)``."""
    return route(hdrs.METH_POST, path, handler, **kwargs)
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(hdrs.METH_PUT, ...)``."""
    return route(hdrs.METH_PUT, path, handler, **kwargs)
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(hdrs.METH_PATCH, ...)``."""
    return route(hdrs.METH_PATCH, path, handler, **kwargs)
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(hdrs.METH_DELETE, ...)``."""
    return route(hdrs.METH_DELETE, path, handler, **kwargs)
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:
    """Route any HTTP method (``METH_ANY``) on *path* to a class-based view."""
    return route(hdrs.METH_ANY, path, handler, **kwargs)
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef:
    """Build a StaticDef serving files under *path* at URL *prefix*."""
    return StaticDef(prefix, path, kwargs)
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
# Decorator type used by RouteTableDef: takes a handler, returns it unchanged.
_Deco = Callable[[_HandlerType], _HandlerType]
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
class RouteTableDef(Sequence[AbstractRouteDef]):
    """Route definition table"""

    def __init__(self) -> None:
        # Ordered list of collected route definitions.
        self._items: List[AbstractRouteDef] = []

    def __repr__(self) -> str:
        return f"<RouteTableDef count={len(self._items)}>"

    @overload
    def __getitem__(self, index: int) -> AbstractRouteDef: ...

    @overload
    def __getitem__(self, index: slice) -> List[AbstractRouteDef]: ...

    def __getitem__(self, index):  # type: ignore[no-untyped-def]
        return self._items[index]

    def __iter__(self) -> Iterator[AbstractRouteDef]:
        return iter(self._items)

    def __len__(self) -> int:
        return len(self._items)

    def __contains__(self, item: object) -> bool:
        return item in self._items

    def route(self, method: str, path: str, **kwargs: Any) -> _Deco:
        """Return a decorator that records *method*/*path* for the
        decorated handler and returns the handler unchanged."""

        def inner(handler: _HandlerType) -> _HandlerType:
            self._items.append(RouteDef(method, path, handler, kwargs))
            return handler

        return inner

    def head(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for a HEAD route."""
        return self.route(hdrs.METH_HEAD, path, **kwargs)

    def get(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for a GET route."""
        return self.route(hdrs.METH_GET, path, **kwargs)

    def post(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for a POST route."""
        return self.route(hdrs.METH_POST, path, **kwargs)

    def put(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for a PUT route."""
        return self.route(hdrs.METH_PUT, path, **kwargs)

    def patch(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for a PATCH route."""
        return self.route(hdrs.METH_PATCH, path, **kwargs)

    def delete(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for a DELETE route."""
        return self.route(hdrs.METH_DELETE, path, **kwargs)

    def options(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for an OPTIONS route."""
        return self.route(hdrs.METH_OPTIONS, path, **kwargs)

    def view(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut routing any method to a class-based view."""
        return self.route(hdrs.METH_ANY, path, **kwargs)

    def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None:
        """Record a static-files mount (not a decorator)."""
        self._items.append(StaticDef(prefix, path, kwargs))
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/web_runner.py
ADDED
|
@@ -0,0 +1,399 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import signal
|
| 3 |
+
import socket
|
| 4 |
+
import warnings
|
| 5 |
+
from abc import ABC, abstractmethod
|
| 6 |
+
from typing import TYPE_CHECKING, Any, List, Optional, Set
|
| 7 |
+
|
| 8 |
+
from yarl import URL
|
| 9 |
+
|
| 10 |
+
from .typedefs import PathLike
|
| 11 |
+
from .web_app import Application
|
| 12 |
+
from .web_server import Server
|
| 13 |
+
|
| 14 |
+
if TYPE_CHECKING:
|
| 15 |
+
from ssl import SSLContext
|
| 16 |
+
else:
|
| 17 |
+
try:
|
| 18 |
+
from ssl import SSLContext
|
| 19 |
+
except ImportError: # pragma: no cover
|
| 20 |
+
SSLContext = object # type: ignore[misc,assignment]
|
| 21 |
+
|
| 22 |
+
__all__ = (
|
| 23 |
+
"BaseSite",
|
| 24 |
+
"TCPSite",
|
| 25 |
+
"UnixSite",
|
| 26 |
+
"NamedPipeSite",
|
| 27 |
+
"SockSite",
|
| 28 |
+
"BaseRunner",
|
| 29 |
+
"AppRunner",
|
| 30 |
+
"ServerRunner",
|
| 31 |
+
"GracefulExit",
|
| 32 |
+
)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class GracefulExit(SystemExit):
    """SystemExit variant raised from signal handlers to stop the server.

    Carries a default exit status of 1 so an externally-triggered stop is
    distinguishable from a normal exit.
    """

    code = 1
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def _raise_graceful_exit() -> None:
    """Signal-handler callback: abort the running loop via GracefulExit."""
    raise GracefulExit()
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class BaseSite(ABC):
    """Abstract base for all listening sites attached to a runner.

    A site binds the runner's request-handling server to one concrete
    endpoint (TCP port, Unix socket, Windows named pipe, or a pre-made
    socket object).  Subclasses implement :meth:`start` and :attr:`name`.
    """

    __slots__ = ("_runner", "_ssl_context", "_backlog", "_server")

    def __init__(
        self,
        runner: "BaseRunner",
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        if runner.server is None:
            raise RuntimeError("Call runner.setup() before making a site")
        if shutdown_timeout != 60.0:
            # Deprecated per-site timeout: warn and copy it onto the runner,
            # which now owns the shutdown-timeout setting.
            msg = "shutdown_timeout should be set on BaseRunner"
            warnings.warn(msg, DeprecationWarning, stacklevel=2)
            runner._shutdown_timeout = shutdown_timeout
        self._runner = runner
        self._ssl_context = ssl_context
        self._backlog = backlog
        # Populated by subclasses in start(); None until the site is started.
        self._server: Optional[asyncio.AbstractServer] = None

    @property
    @abstractmethod
    def name(self) -> str:
        """Human-readable identifier (typically a URL) for this endpoint."""
        pass  # pragma: no cover

    @abstractmethod
    async def start(self) -> None:
        # Subclasses must call super().start() first so the site is
        # registered with the runner before the asyncio server is created.
        self._runner._reg_site(self)

    async def stop(self) -> None:
        """Stop accepting new connections and unregister from the runner."""
        self._runner._check_site(self)
        if self._server is not None:  # Maybe not started yet
            self._server.close()

        self._runner._unreg_site(self)
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
class TCPSite(BaseSite):
    """Site that listens on a TCP host/port pair."""

    __slots__ = ("_host", "_port", "_reuse_address", "_reuse_port")

    def __init__(
        self,
        runner: "BaseRunner",
        host: Optional[str] = None,
        port: Optional[int] = None,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
        reuse_address: Optional[bool] = None,
        reuse_port: Optional[bool] = None,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._host = host
        # Default port follows the scheme: 8443 for TLS, 8080 for plain HTTP.
        self._port = port if port is not None else (8443 if self._ssl_context else 8080)
        self._reuse_address = reuse_address
        self._reuse_port = reuse_port

    @property
    def name(self) -> str:
        """URL of the listening endpoint, e.g. ``http://0.0.0.0:8080``."""
        scheme = "https" if self._ssl_context else "http"
        return str(
            URL.build(scheme=scheme, host=self._host or "0.0.0.0", port=self._port)
        )

    async def start(self) -> None:
        """Register with the runner and begin serving on the TCP endpoint."""
        await super().start()
        loop = asyncio.get_event_loop()
        handler_factory = self._runner.server
        assert handler_factory is not None
        self._server = await loop.create_server(
            handler_factory,
            self._host,
            self._port,
            ssl=self._ssl_context,
            backlog=self._backlog,
            reuse_address=self._reuse_address,
            reuse_port=self._reuse_port,
        )
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
class UnixSite(BaseSite):
    """Site that listens on a Unix domain socket at *path*."""

    __slots__ = ("_path",)

    def __init__(
        self,
        runner: "BaseRunner",
        path: PathLike,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._path = path

    @property
    def name(self) -> str:
        """Pseudo-URL describing the Unix-socket endpoint."""
        scheme = "https" if self._ssl_context else "http"
        return f"{scheme}://unix:{self._path}:"

    async def start(self) -> None:
        """Register with the runner and begin serving on the Unix socket."""
        await super().start()
        loop = asyncio.get_event_loop()
        handler_factory = self._runner.server
        assert handler_factory is not None
        self._server = await loop.create_unix_server(
            handler_factory,
            self._path,
            ssl=self._ssl_context,
            backlog=self._backlog,
        )
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
class NamedPipeSite(BaseSite):
    """Site bound to a Windows named pipe (proactor event loop only)."""

    __slots__ = ("_path",)

    def __init__(
        self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0
    ) -> None:
        loop = asyncio.get_event_loop()
        # start_serving_pipe is only provided by the Windows proactor loop;
        # fail early with a clear message on any other loop implementation.
        if not isinstance(
            loop, asyncio.ProactorEventLoop  # type: ignore[attr-defined]
        ):
            raise RuntimeError(
                "Named Pipes only available in proactor loop under windows"
            )
        super().__init__(runner, shutdown_timeout=shutdown_timeout)
        self._path = path

    @property
    def name(self) -> str:
        # The pipe path itself is used as the site's display name.
        return self._path

    async def start(self) -> None:
        """Register with the runner and begin serving on the named pipe."""
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        # start_serving_pipe returns a list of pipe servers; keep the first.
        _server = await loop.start_serving_pipe(  # type: ignore[attr-defined]
            server, self._path
        )
        self._server = _server[0]
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
class SockSite(BaseSite):
    """Site that serves on an already-created (and bound) socket object."""

    __slots__ = ("_sock", "_name")

    def __init__(
        self,
        runner: "BaseRunner",
        sock: socket.socket,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._sock = sock
        # Precompute the display name; its shape depends on the socket family.
        scheme = "https" if self._ssl_context else "http"
        if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX:
            self._name = f"{scheme}://unix:{sock.getsockname()}:"
        else:
            host, port = sock.getsockname()[:2]
            self._name = str(URL.build(scheme=scheme, host=host, port=port))

    @property
    def name(self) -> str:
        """Human-readable URL of the wrapped socket."""
        return self._name

    async def start(self) -> None:
        """Register with the runner and begin serving on the given socket."""
        await super().start()
        loop = asyncio.get_event_loop()
        handler_factory = self._runner.server
        assert handler_factory is not None
        self._server = await loop.create_server(
            handler_factory,
            sock=self._sock,
            ssl=self._ssl_context,
            backlog=self._backlog,
        )
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
class BaseRunner(ABC):
    """Base class for runners: owns the low-level server and its sites.

    Lifecycle: :meth:`setup` creates the server (and optionally installs
    signal handlers); sites are then started against it; :meth:`cleanup`
    stops every site and shuts the server down gracefully.
    """

    __slots__ = ("_handle_signals", "_kwargs", "_server", "_sites", "_shutdown_timeout")

    def __init__(
        self,
        *,
        handle_signals: bool = False,
        shutdown_timeout: float = 60.0,
        **kwargs: Any,
    ) -> None:
        # When True, setup() installs SIGINT/SIGTERM handlers on the loop.
        self._handle_signals = handle_signals
        # Extra keyword arguments forwarded to the concrete server factory.
        self._kwargs = kwargs
        self._server: Optional[Server] = None
        self._sites: List[BaseSite] = []
        self._shutdown_timeout = shutdown_timeout

    @property
    def server(self) -> Optional[Server]:
        """The server created by setup(), or None before setup."""
        return self._server

    @property
    def addresses(self) -> List[Any]:
        """Socket addresses of all listening sockets of started sites."""
        ret: List[Any] = []
        for site in self._sites:
            server = site._server
            if server is not None:
                sockets = server.sockets  # type: ignore[attr-defined]
                if sockets is not None:
                    for sock in sockets:
                        ret.append(sock.getsockname())
        return ret

    @property
    def sites(self) -> Set[BaseSite]:
        """Snapshot of the currently registered sites."""
        return set(self._sites)

    async def setup(self) -> None:
        """Create the server; optionally hook SIGINT/SIGTERM for shutdown."""
        loop = asyncio.get_event_loop()

        if self._handle_signals:
            try:
                loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit)
                loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit)
            except NotImplementedError:  # pragma: no cover
                # add_signal_handler is not implemented on Windows
                pass

        self._server = await self._make_server()

    @abstractmethod
    async def shutdown(self) -> None:
        """Call any shutdown hooks to help server close gracefully."""

    async def cleanup(self) -> None:
        """Stop all sites, shut the server down, remove signal handlers."""
        # The loop over sites is intentional, an exception on gather()
        # leaves self._sites in unpredictable state.
        # The loop guaranties that a site is either deleted on success or
        # still present on failure
        for site in list(self._sites):
            await site.stop()

        if self._server:  # If setup succeeded
            # Yield to event loop to ensure incoming requests prior to stopping the sites
            # have all started to be handled before we proceed to close idle connections.
            await asyncio.sleep(0)
            self._server.pre_shutdown()
            await self.shutdown()
            await self._server.shutdown(self._shutdown_timeout)
            await self._cleanup_server()

        self._server = None
        if self._handle_signals:
            loop = asyncio.get_running_loop()
            try:
                loop.remove_signal_handler(signal.SIGINT)
                loop.remove_signal_handler(signal.SIGTERM)
            except NotImplementedError:  # pragma: no cover
                # remove_signal_handler is not implemented on Windows
                pass

    @abstractmethod
    async def _make_server(self) -> Server:
        # Concrete runners produce the Server instance here.
        pass  # pragma: no cover

    @abstractmethod
    async def _cleanup_server(self) -> None:
        # Concrete runners release server-side resources here.
        pass  # pragma: no cover

    def _reg_site(self, site: BaseSite) -> None:
        # Called by BaseSite.start(); a site may only be registered once.
        if site in self._sites:
            raise RuntimeError(f"Site {site} is already registered in runner {self}")
        self._sites.append(site)

    def _check_site(self, site: BaseSite) -> None:
        # Sanity check used by BaseSite.stop().
        if site not in self._sites:
            raise RuntimeError(f"Site {site} is not registered in runner {self}")

    def _unreg_site(self, site: BaseSite) -> None:
        if site not in self._sites:
            raise RuntimeError(f"Site {site} is not registered in runner {self}")
        self._sites.remove(site)
|
| 343 |
+
|
| 344 |
+
|
| 345 |
+
class ServerRunner(BaseRunner):
    """Low-level web server runner"""

    __slots__ = ("_web_server",)

    def __init__(
        self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any
    ) -> None:
        super().__init__(handle_signals=handle_signals, **kwargs)
        self._web_server = web_server

    async def shutdown(self) -> None:
        """No-op: a bare Server has no application shutdown hooks."""

    async def _make_server(self) -> Server:
        # The caller supplied a ready-made Server; simply hand it back.
        return self._web_server

    async def _cleanup_server(self) -> None:
        """No-op: the caller owns the Server and its resources."""
|
| 364 |
+
|
| 365 |
+
|
| 366 |
+
class AppRunner(BaseRunner):
    """Web Application runner"""

    __slots__ = ("_app",)

    def __init__(
        self, app: Application, *, handle_signals: bool = False, **kwargs: Any
    ) -> None:
        super().__init__(handle_signals=handle_signals, **kwargs)
        if not isinstance(app, Application):
            raise TypeError(
                "The first argument should be web.Application "
                "instance, got {!r}".format(app)
            )
        self._app = app

    @property
    def app(self) -> Application:
        """The wrapped :class:`Application` instance."""
        return self._app

    async def shutdown(self) -> None:
        """Run the application's on-shutdown callbacks."""
        await self._app.shutdown()

    async def _make_server(self) -> Server:
        # Bind the app to the loop, run startup hooks, then freeze it so no
        # further routes/middlewares can be added while serving.
        loop = asyncio.get_event_loop()
        self._app._set_loop(loop)
        self._app.on_startup.freeze()
        await self._app.startup()
        self._app.freeze()
        return self._app._make_handler(loop=loop, **self._kwargs)

    async def _cleanup_server(self) -> None:
        """Run the application's cleanup callbacks."""
        await self._app.cleanup()
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/web_server.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Low level HTTP server."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
from typing import Any, Awaitable, Callable, Dict, List, Optional # noqa
|
| 5 |
+
|
| 6 |
+
from .abc import AbstractStreamWriter
|
| 7 |
+
from .http_parser import RawRequestMessage
|
| 8 |
+
from .streams import StreamReader
|
| 9 |
+
from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler
|
| 10 |
+
from .web_request import BaseRequest
|
| 11 |
+
|
| 12 |
+
__all__ = ("Server",)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class Server:
    """Protocol factory for the low-level HTTP server.

    Calling an instance produces a RequestHandler protocol; the server
    tracks every live connection so it can close or drain them on shutdown.
    """

    def __init__(
        self,
        handler: _RequestHandler,
        *,
        request_factory: Optional[_RequestFactory] = None,
        handler_cancellation: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        **kwargs: Any,
    ) -> None:
        self._loop = loop or asyncio.get_running_loop()
        # Maps each live protocol instance to its transport.
        self._connections: Dict[RequestHandler, asyncio.Transport] = {}
        # Extra keyword arguments forwarded to each RequestHandler.
        self._kwargs = kwargs
        # requests_count is the number of requests being processed by the server
        # for the lifetime of the server.
        self.requests_count = 0
        self.request_handler = handler
        self.request_factory = request_factory or self._make_request
        self.handler_cancellation = handler_cancellation

    @property
    def connections(self) -> List[RequestHandler]:
        # Snapshot list so callers can iterate while connections change.
        return list(self._connections.keys())

    def connection_made(
        self, handler: RequestHandler, transport: asyncio.Transport
    ) -> None:
        """Track a newly established connection."""
        self._connections[handler] = transport

    def connection_lost(
        self, handler: RequestHandler, exc: Optional[BaseException] = None
    ) -> None:
        """Forget a closed connection, deferring while a request is in flight."""
        if handler in self._connections:
            if handler._task_handler:
                # A request task is still running; drop the entry only once
                # that task completes.
                handler._task_handler.add_done_callback(
                    lambda f: self._connections.pop(handler, None)
                )
            else:
                del self._connections[handler]

    def _make_request(
        self,
        message: RawRequestMessage,
        payload: StreamReader,
        protocol: RequestHandler,
        writer: AbstractStreamWriter,
        task: "asyncio.Task[None]",
    ) -> BaseRequest:
        # Default request factory used when none was supplied.
        return BaseRequest(message, payload, protocol, writer, task, self._loop)

    def pre_shutdown(self) -> None:
        # Close idle connections before the graceful-shutdown phase begins.
        for conn in self._connections:
            conn.close()

    async def shutdown(self, timeout: Optional[float] = None) -> None:
        """Gracefully shut down every tracked connection, then forget them."""
        coros = (conn.shutdown(timeout) for conn in self._connections)
        await asyncio.gather(*coros)
        self._connections.clear()

    def __call__(self) -> RequestHandler:
        """Protocol-factory entry point used by loop.create_server()."""
        try:
            return RequestHandler(self, loop=self._loop, **self._kwargs)
        except TypeError:
            # Failsafe creation: remove all custom handler_args
            kwargs = {
                k: v
                for k, v in self._kwargs.items()
                if k in ["debug", "access_log_class"]
            }
            return RequestHandler(self, loop=self._loop, **kwargs)
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/worker.py
ADDED
|
@@ -0,0 +1,252 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Async gunicorn worker for aiohttp.web"""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import os
|
| 5 |
+
import re
|
| 6 |
+
import signal
|
| 7 |
+
import sys
|
| 8 |
+
from types import FrameType
|
| 9 |
+
from typing import TYPE_CHECKING, Any, Optional
|
| 10 |
+
|
| 11 |
+
from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat
|
| 12 |
+
from gunicorn.workers import base
|
| 13 |
+
|
| 14 |
+
from aiohttp import web
|
| 15 |
+
|
| 16 |
+
from .helpers import set_result
|
| 17 |
+
from .web_app import Application
|
| 18 |
+
from .web_log import AccessLogger
|
| 19 |
+
|
| 20 |
+
if TYPE_CHECKING:
|
| 21 |
+
import ssl
|
| 22 |
+
|
| 23 |
+
SSLContext = ssl.SSLContext
|
| 24 |
+
else:
|
| 25 |
+
try:
|
| 26 |
+
import ssl
|
| 27 |
+
|
| 28 |
+
SSLContext = ssl.SSLContext
|
| 29 |
+
except ImportError: # pragma: no cover
|
| 30 |
+
ssl = None # type: ignore[assignment]
|
| 31 |
+
SSLContext = object # type: ignore[misc,assignment]
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker")
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class GunicornWebWorker(base.Worker):  # type: ignore[misc,no-any-unimported]
    """Gunicorn worker that serves an aiohttp web.Application on asyncio.

    The gunicorn master forks this worker; it creates a fresh event loop,
    builds (or receives) an AppRunner, binds the inherited sockets, and
    spins until told to stop or a restart condition is met.
    """

    DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT
    DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default

    def __init__(self, *args: Any, **kw: Any) -> None:  # pragma: no cover
        super().__init__(*args, **kw)

        self._task: Optional[asyncio.Task[None]] = None
        self.exit_code = 0
        self._notify_waiter: Optional[asyncio.Future[bool]] = None

    def init_process(self) -> None:
        """Replace the inherited event loop with a fresh one after fork."""
        # create new event_loop after fork
        asyncio.get_event_loop().close()

        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        super().init_process()

    def run(self) -> None:
        """Drive the worker's main coroutine to completion and exit."""
        self._task = self.loop.create_task(self._run())

        try:  # ignore all finalization problems
            self.loop.run_until_complete(self._task)
        except Exception:
            self.log.exception("Exception in gunicorn worker")
        self.loop.run_until_complete(self.loop.shutdown_asyncgens())
        self.loop.close()

        sys.exit(self.exit_code)

    async def _run(self) -> None:
        """Set up the runner and sites, then loop until shutdown.

        Raises RuntimeError when self.wsgi is neither an Application nor an
        async factory returning an Application/AppRunner.
        """
        runner = None
        if isinstance(self.wsgi, Application):
            app = self.wsgi
        elif asyncio.iscoroutinefunction(self.wsgi):
            wsgi = await self.wsgi()
            if isinstance(wsgi, web.AppRunner):
                runner = wsgi
                app = runner.app
            else:
                app = wsgi
        else:
            raise RuntimeError(
                "wsgi app should be either Application or "
                "async function returning Application, got {}".format(self.wsgi)
            )

        if runner is None:
            access_log = self.log.access_log if self.cfg.accesslog else None
            runner = web.AppRunner(
                app,
                logger=self.log,
                keepalive_timeout=self.cfg.keepalive,
                access_log=access_log,
                access_log_format=self._get_valid_log_format(
                    self.cfg.access_log_format
                ),
                # Leave a safety margin below gunicorn's graceful timeout.
                shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
            )
        await runner.setup()

        ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None

        # NOTE: the original code had a no-op `runner = runner` here; removed.
        server = runner.server
        assert server is not None
        for sock in self.sockets:
            site = web.SockSite(
                runner,
                sock,
                ssl_context=ctx,
            )
            await site.start()

        # If our parent changed then we shut down.
        pid = os.getpid()
        try:
            while self.alive:  # type: ignore[has-type]
                self.notify()

                cnt = server.requests_count
                if self.max_requests and cnt > self.max_requests:
                    self.alive = False
                    self.log.info("Max requests, shutting down: %s", self)

                elif pid == os.getpid() and self.ppid != os.getppid():
                    self.alive = False
                    self.log.info("Parent changed, shutting down: %s", self)
                else:
                    await self._wait_next_notify()
        except BaseException:
            # Swallow anything (incl. cancellation) so cleanup always runs.
            pass

        await runner.cleanup()

    def _wait_next_notify(self) -> "asyncio.Future[bool]":
        """Return a future resolved by the next notify tick (~1s)."""
        self._notify_waiter_done()

        loop = self.loop
        assert loop is not None
        self._notify_waiter = waiter = loop.create_future()
        self.loop.call_later(1.0, self._notify_waiter_done, waiter)

        return waiter

    def _notify_waiter_done(
        self, waiter: Optional["asyncio.Future[bool]"] = None
    ) -> None:
        """Resolve *waiter* (or the current waiter) and clear the slot."""
        if waiter is None:
            waiter = self._notify_waiter
        if waiter is not None:
            set_result(waiter, True)

        if waiter is self._notify_waiter:
            self._notify_waiter = None

    def init_signals(self) -> None:
        # Set up signals through the event loop API.

        self.loop.add_signal_handler(
            signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None
        )

        self.loop.add_signal_handler(
            signal.SIGTERM, self.handle_exit, signal.SIGTERM, None
        )

        self.loop.add_signal_handler(
            signal.SIGINT, self.handle_quit, signal.SIGINT, None
        )

        self.loop.add_signal_handler(
            signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None
        )

        self.loop.add_signal_handler(
            signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None
        )

        self.loop.add_signal_handler(
            signal.SIGABRT, self.handle_abort, signal.SIGABRT, None
        )

        # Don't let SIGTERM and SIGUSR1 disturb active requests
        # by interrupting system calls
        signal.siginterrupt(signal.SIGTERM, False)
        signal.siginterrupt(signal.SIGUSR1, False)
        # Reset signals so Gunicorn doesn't swallow subprocess return codes
        # See: https://github.com/aio-libs/aiohttp/issues/6130

    def handle_quit(self, sig: int, frame: Optional[FrameType]) -> None:
        """Graceful stop: clear the alive flag and wake the run loop."""
        self.alive = False

        # worker_int callback
        self.cfg.worker_int(self)

        # wakeup closing process
        self._notify_waiter_done()

    def handle_abort(self, sig: int, frame: Optional[FrameType]) -> None:
        """Forced stop: mark failure exit code and terminate immediately."""
        self.alive = False
        self.exit_code = 1
        self.cfg.worker_abort(self)
        sys.exit(1)

    @staticmethod
    def _create_ssl_context(cfg: Any) -> "SSLContext":
        """Creates SSLContext instance for usage in asyncio.create_server.

        See ssl.SSLSocket.__init__ for more details.
        """
        if ssl is None:  # pragma: no cover
            raise RuntimeError("SSL is not supported.")

        ctx = ssl.SSLContext(cfg.ssl_version)
        ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
        ctx.verify_mode = cfg.cert_reqs
        if cfg.ca_certs:
            ctx.load_verify_locations(cfg.ca_certs)
        if cfg.ciphers:
            ctx.set_ciphers(cfg.ciphers)
        return ctx

    def _get_valid_log_format(self, source_format: str) -> str:
        """Map gunicorn's default format to aiohttp's; reject %(name)s style."""
        if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT:
            return self.DEFAULT_AIOHTTP_LOG_FORMAT
        elif re.search(r"%\([^\)]+\)", source_format):
            raise ValueError(
                "Gunicorn's style options in form of `%(name)s` are not "
                "supported for the log formatting. Please use aiohttp's "
                "format specification to configure access log formatting: "
                "http://docs.aiohttp.org/en/stable/logging.html"
                "#format-specification"
            )
        else:
            return source_format
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
class GunicornUVLoopWebWorker(GunicornWebWorker):
    """GunicornWebWorker variant running on the uvloop event loop."""

    def init_process(self) -> None:
        import uvloop

        # Close any existing event loop before setting a
        # new policy.
        asyncio.get_event_loop().close()

        # Install uvloop's policy so every subsequent asyncio.get_event_loop()
        # call creates a uvloop event loop instance.
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())

        super().init_process()
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiosignal-1.3.2.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiosignal-1.3.2.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,201 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Apache License
|
| 2 |
+
Version 2.0, January 2004
|
| 3 |
+
http://www.apache.org/licenses/
|
| 4 |
+
|
| 5 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
| 6 |
+
|
| 7 |
+
1. Definitions.
|
| 8 |
+
|
| 9 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
| 10 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
| 11 |
+
|
| 12 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
| 13 |
+
the copyright owner that is granting the License.
|
| 14 |
+
|
| 15 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
| 16 |
+
other entities that control, are controlled by, or are under common
|
| 17 |
+
control with that entity. For the purposes of this definition,
|
| 18 |
+
"control" means (i) the power, direct or indirect, to cause the
|
| 19 |
+
direction or management of such entity, whether by contract or
|
| 20 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
| 21 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
| 22 |
+
|
| 23 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
| 24 |
+
exercising permissions granted by this License.
|
| 25 |
+
|
| 26 |
+
"Source" form shall mean the preferred form for making modifications,
|
| 27 |
+
including but not limited to software source code, documentation
|
| 28 |
+
source, and configuration files.
|
| 29 |
+
|
| 30 |
+
"Object" form shall mean any form resulting from mechanical
|
| 31 |
+
transformation or translation of a Source form, including but
|
| 32 |
+
not limited to compiled object code, generated documentation,
|
| 33 |
+
and conversions to other media types.
|
| 34 |
+
|
| 35 |
+
"Work" shall mean the work of authorship, whether in Source or
|
| 36 |
+
Object form, made available under the License, as indicated by a
|
| 37 |
+
copyright notice that is included in or attached to the work
|
| 38 |
+
(an example is provided in the Appendix below).
|
| 39 |
+
|
| 40 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
| 41 |
+
form, that is based on (or derived from) the Work and for which the
|
| 42 |
+
editorial revisions, annotations, elaborations, or other modifications
|
| 43 |
+
represent, as a whole, an original work of authorship. For the purposes
|
| 44 |
+
of this License, Derivative Works shall not include works that remain
|
| 45 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
| 46 |
+
the Work and Derivative Works thereof.
|
| 47 |
+
|
| 48 |
+
"Contribution" shall mean any work of authorship, including
|
| 49 |
+
the original version of the Work and any modifications or additions
|
| 50 |
+
to that Work or Derivative Works thereof, that is intentionally
|
| 51 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
| 52 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
| 53 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
| 54 |
+
means any form of electronic, verbal, or written communication sent
|
| 55 |
+
to the Licensor or its representatives, including but not limited to
|
| 56 |
+
communication on electronic mailing lists, source code control systems,
|
| 57 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
| 58 |
+
Licensor for the purpose of discussing and improving the Work, but
|
| 59 |
+
excluding communication that is conspicuously marked or otherwise
|
| 60 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
| 61 |
+
|
| 62 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
| 63 |
+
on behalf of whom a Contribution has been received by Licensor and
|
| 64 |
+
subsequently incorporated within the Work.
|
| 65 |
+
|
| 66 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
| 67 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 68 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 69 |
+
copyright license to reproduce, prepare Derivative Works of,
|
| 70 |
+
publicly display, publicly perform, sublicense, and distribute the
|
| 71 |
+
Work and such Derivative Works in Source or Object form.
|
| 72 |
+
|
| 73 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
| 74 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 75 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 76 |
+
(except as stated in this section) patent license to make, have made,
|
| 77 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
| 78 |
+
where such license applies only to those patent claims licensable
|
| 79 |
+
by such Contributor that are necessarily infringed by their
|
| 80 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
| 81 |
+
with the Work to which such Contribution(s) was submitted. If You
|
| 82 |
+
institute patent litigation against any entity (including a
|
| 83 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
| 84 |
+
or a Contribution incorporated within the Work constitutes direct
|
| 85 |
+
or contributory patent infringement, then any patent licenses
|
| 86 |
+
granted to You under this License for that Work shall terminate
|
| 87 |
+
as of the date such litigation is filed.
|
| 88 |
+
|
| 89 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
| 90 |
+
Work or Derivative Works thereof in any medium, with or without
|
| 91 |
+
modifications, and in Source or Object form, provided that You
|
| 92 |
+
meet the following conditions:
|
| 93 |
+
|
| 94 |
+
(a) You must give any other recipients of the Work or
|
| 95 |
+
Derivative Works a copy of this License; and
|
| 96 |
+
|
| 97 |
+
(b) You must cause any modified files to carry prominent notices
|
| 98 |
+
stating that You changed the files; and
|
| 99 |
+
|
| 100 |
+
(c) You must retain, in the Source form of any Derivative Works
|
| 101 |
+
that You distribute, all copyright, patent, trademark, and
|
| 102 |
+
attribution notices from the Source form of the Work,
|
| 103 |
+
excluding those notices that do not pertain to any part of
|
| 104 |
+
the Derivative Works; and
|
| 105 |
+
|
| 106 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
| 107 |
+
distribution, then any Derivative Works that You distribute must
|
| 108 |
+
include a readable copy of the attribution notices contained
|
| 109 |
+
within such NOTICE file, excluding those notices that do not
|
| 110 |
+
pertain to any part of the Derivative Works, in at least one
|
| 111 |
+
of the following places: within a NOTICE text file distributed
|
| 112 |
+
as part of the Derivative Works; within the Source form or
|
| 113 |
+
documentation, if provided along with the Derivative Works; or,
|
| 114 |
+
within a display generated by the Derivative Works, if and
|
| 115 |
+
wherever such third-party notices normally appear. The contents
|
| 116 |
+
of the NOTICE file are for informational purposes only and
|
| 117 |
+
do not modify the License. You may add Your own attribution
|
| 118 |
+
notices within Derivative Works that You distribute, alongside
|
| 119 |
+
or as an addendum to the NOTICE text from the Work, provided
|
| 120 |
+
that such additional attribution notices cannot be construed
|
| 121 |
+
as modifying the License.
|
| 122 |
+
|
| 123 |
+
You may add Your own copyright statement to Your modifications and
|
| 124 |
+
may provide additional or different license terms and conditions
|
| 125 |
+
for use, reproduction, or distribution of Your modifications, or
|
| 126 |
+
for any such Derivative Works as a whole, provided Your use,
|
| 127 |
+
reproduction, and distribution of the Work otherwise complies with
|
| 128 |
+
the conditions stated in this License.
|
| 129 |
+
|
| 130 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
| 131 |
+
any Contribution intentionally submitted for inclusion in the Work
|
| 132 |
+
by You to the Licensor shall be under the terms and conditions of
|
| 133 |
+
this License, without any additional terms or conditions.
|
| 134 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
| 135 |
+
the terms of any separate license agreement you may have executed
|
| 136 |
+
with Licensor regarding such Contributions.
|
| 137 |
+
|
| 138 |
+
6. Trademarks. This License does not grant permission to use the trade
|
| 139 |
+
names, trademarks, service marks, or product names of the Licensor,
|
| 140 |
+
except as required for reasonable and customary use in describing the
|
| 141 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
| 142 |
+
|
| 143 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
| 144 |
+
agreed to in writing, Licensor provides the Work (and each
|
| 145 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
| 146 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
| 147 |
+
implied, including, without limitation, any warranties or conditions
|
| 148 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
| 149 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
| 150 |
+
appropriateness of using or redistributing the Work and assume any
|
| 151 |
+
risks associated with Your exercise of permissions under this License.
|
| 152 |
+
|
| 153 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
| 154 |
+
whether in tort (including negligence), contract, or otherwise,
|
| 155 |
+
unless required by applicable law (such as deliberate and grossly
|
| 156 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
| 157 |
+
liable to You for damages, including any direct, indirect, special,
|
| 158 |
+
incidental, or consequential damages of any character arising as a
|
| 159 |
+
result of this License or out of the use or inability to use the
|
| 160 |
+
Work (including but not limited to damages for loss of goodwill,
|
| 161 |
+
work stoppage, computer failure or malfunction, or any and all
|
| 162 |
+
other commercial damages or losses), even if such Contributor
|
| 163 |
+
has been advised of the possibility of such damages.
|
| 164 |
+
|
| 165 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
| 166 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
| 167 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
| 168 |
+
or other liability obligations and/or rights consistent with this
|
| 169 |
+
License. However, in accepting such obligations, You may act only
|
| 170 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
| 171 |
+
of any other Contributor, and only if You agree to indemnify,
|
| 172 |
+
defend, and hold each Contributor harmless for any liability
|
| 173 |
+
incurred by, or claims asserted against, such Contributor by reason
|
| 174 |
+
of your accepting any such warranty or additional liability.
|
| 175 |
+
|
| 176 |
+
END OF TERMS AND CONDITIONS
|
| 177 |
+
|
| 178 |
+
APPENDIX: How to apply the Apache License to your work.
|
| 179 |
+
|
| 180 |
+
To apply the Apache License to your work, attach the following
|
| 181 |
+
boilerplate notice, with the fields enclosed by brackets "{}"
|
| 182 |
+
replaced with your own identifying information. (Don't include
|
| 183 |
+
the brackets!) The text should be enclosed in the appropriate
|
| 184 |
+
comment syntax for the file format. We also recommend that a
|
| 185 |
+
file or class name and description of purpose be included on the
|
| 186 |
+
same "printed page" as the copyright notice for easier
|
| 187 |
+
identification within third-party archives.
|
| 188 |
+
|
| 189 |
+
Copyright 2013-2019 Nikolay Kim and Andrew Svetlov
|
| 190 |
+
|
| 191 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 192 |
+
you may not use this file except in compliance with the License.
|
| 193 |
+
You may obtain a copy of the License at
|
| 194 |
+
|
| 195 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 196 |
+
|
| 197 |
+
Unless required by applicable law or agreed to in writing, software
|
| 198 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 199 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 200 |
+
See the License for the specific language governing permissions and
|
| 201 |
+
limitations under the License.
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiosignal-1.3.2.dist-info/METADATA
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: aiosignal
|
| 3 |
+
Version: 1.3.2
|
| 4 |
+
Summary: aiosignal: a list of registered asynchronous callbacks
|
| 5 |
+
Home-page: https://github.com/aio-libs/aiosignal
|
| 6 |
+
Maintainer: aiohttp team <team@aiohttp.org>
|
| 7 |
+
Maintainer-email: team@aiohttp.org
|
| 8 |
+
License: Apache 2.0
|
| 9 |
+
Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
|
| 10 |
+
Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiosignal/actions
|
| 11 |
+
Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiosignal
|
| 12 |
+
Project-URL: Docs: RTD, https://docs.aiosignal.org
|
| 13 |
+
Project-URL: GitHub: issues, https://github.com/aio-libs/aiosignal/issues
|
| 14 |
+
Project-URL: GitHub: repo, https://github.com/aio-libs/aiosignal
|
| 15 |
+
Classifier: License :: OSI Approved :: Apache Software License
|
| 16 |
+
Classifier: Intended Audience :: Developers
|
| 17 |
+
Classifier: Programming Language :: Python
|
| 18 |
+
Classifier: Programming Language :: Python :: 3
|
| 19 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 20 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 21 |
+
Classifier: Operating System :: POSIX
|
| 22 |
+
Classifier: Operating System :: MacOS :: MacOS X
|
| 23 |
+
Classifier: Operating System :: Microsoft :: Windows
|
| 24 |
+
Classifier: Framework :: AsyncIO
|
| 25 |
+
Requires-Python: >=3.9
|
| 26 |
+
Description-Content-Type: text/x-rst
|
| 27 |
+
License-File: LICENSE
|
| 28 |
+
Requires-Dist: frozenlist>=1.1.0
|
| 29 |
+
|
| 30 |
+
=========
|
| 31 |
+
aiosignal
|
| 32 |
+
=========
|
| 33 |
+
|
| 34 |
+
.. image:: https://github.com/aio-libs/aiosignal/workflows/CI/badge.svg
|
| 35 |
+
:target: https://github.com/aio-libs/aiosignal/actions?query=workflow%3ACI
|
| 36 |
+
:alt: GitHub status for master branch
|
| 37 |
+
|
| 38 |
+
.. image:: https://codecov.io/gh/aio-libs/aiosignal/branch/master/graph/badge.svg
|
| 39 |
+
:target: https://codecov.io/gh/aio-libs/aiosignal
|
| 40 |
+
:alt: codecov.io status for master branch
|
| 41 |
+
|
| 42 |
+
.. image:: https://badge.fury.io/py/aiosignal.svg
|
| 43 |
+
:target: https://pypi.org/project/aiosignal
|
| 44 |
+
:alt: Latest PyPI package version
|
| 45 |
+
|
| 46 |
+
.. image:: https://readthedocs.org/projects/aiosignal/badge/?version=latest
|
| 47 |
+
:target: https://aiosignal.readthedocs.io/
|
| 48 |
+
:alt: Latest Read The Docs
|
| 49 |
+
|
| 50 |
+
.. image:: https://img.shields.io/discourse/topics?server=https%3A%2F%2Faio-libs.discourse.group%2F
|
| 51 |
+
:target: https://aio-libs.discourse.group/
|
| 52 |
+
:alt: Discourse group for io-libs
|
| 53 |
+
|
| 54 |
+
.. image:: https://badges.gitter.im/Join%20Chat.svg
|
| 55 |
+
:target: https://gitter.im/aio-libs/Lobby
|
| 56 |
+
:alt: Chat on Gitter
|
| 57 |
+
|
| 58 |
+
Introduction
|
| 59 |
+
============
|
| 60 |
+
|
| 61 |
+
A project to manage callbacks in `asyncio` projects.
|
| 62 |
+
|
| 63 |
+
``Signal`` is a list of registered asynchronous callbacks.
|
| 64 |
+
|
| 65 |
+
The signal's life-cycle has two stages: after creation its content
|
| 66 |
+
could be filled by using standard list operations: ``sig.append()``
|
| 67 |
+
etc.
|
| 68 |
+
|
| 69 |
+
After you call ``sig.freeze()`` the signal is *frozen*: adding, removing
|
| 70 |
+
and dropping callbacks is forbidden.
|
| 71 |
+
|
| 72 |
+
The only available operation is calling the previously registered
|
| 73 |
+
callbacks by using ``await sig.send(data)``.
|
| 74 |
+
|
| 75 |
+
For concrete usage examples see the `Signals
|
| 76 |
+
<https://docs.aiohttp.org/en/stable/web_advanced.html#aiohttp-web-signals>
|
| 77 |
+
section of the `Web Server Advanced
|
| 78 |
+
<https://docs.aiohttp.org/en/stable/web_advanced.html>` chapter of the `aiohttp
|
| 79 |
+
documentation`_.
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
Installation
|
| 83 |
+
------------
|
| 84 |
+
|
| 85 |
+
::
|
| 86 |
+
|
| 87 |
+
$ pip install aiosignal
|
| 88 |
+
|
| 89 |
+
The library requires Python 3.8 or newer.
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
Documentation
|
| 93 |
+
=============
|
| 94 |
+
|
| 95 |
+
https://aiosignal.readthedocs.io/
|
| 96 |
+
|
| 97 |
+
Communication channels
|
| 98 |
+
======================
|
| 99 |
+
|
| 100 |
+
*gitter chat* https://gitter.im/aio-libs/Lobby
|
| 101 |
+
|
| 102 |
+
Requirements
|
| 103 |
+
============
|
| 104 |
+
|
| 105 |
+
- Python >= 3.8
|
| 106 |
+
- frozenlist >= 1.0.0
|
| 107 |
+
|
| 108 |
+
License
|
| 109 |
+
=======
|
| 110 |
+
|
| 111 |
+
``aiosignal`` is offered under the Apache 2 license.
|
| 112 |
+
|
| 113 |
+
Source code
|
| 114 |
+
===========
|
| 115 |
+
|
| 116 |
+
The project is hosted on GitHub_
|
| 117 |
+
|
| 118 |
+
Please file an issue in the `bug tracker
|
| 119 |
+
<https://github.com/aio-libs/aiosignal/issues>`_ if you have found a bug
|
| 120 |
+
or have some suggestions to improve the library.
|
| 121 |
+
|
| 122 |
+
.. _GitHub: https://github.com/aio-libs/aiosignal
|
| 123 |
+
.. _aiohttp documentation: https://docs.aiohttp.org/
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiosignal-1.3.2.dist-info/RECORD
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
aiosignal-1.3.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
aiosignal-1.3.2.dist-info/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332
|
| 3 |
+
aiosignal-1.3.2.dist-info/METADATA,sha256=TeI_xgZ191qgx37rviEnpMWC0QnYsg_j9EGVivNqqjc,3753
|
| 4 |
+
aiosignal-1.3.2.dist-info/RECORD,,
|
| 5 |
+
aiosignal-1.3.2.dist-info/WHEEL,sha256=pxeNX5JdtCe58PUSYP9upmc7jdRPgvT0Gm9kb1SHlVw,109
|
| 6 |
+
aiosignal-1.3.2.dist-info/top_level.txt,sha256=z45aNOKGDdrI1roqZY3BGXQ22kJFPHBmVdwtLYLtXC0,10
|
| 7 |
+
aiosignal/__init__.py,sha256=1oIrRl6kNpqFh32e7HfMFbMV_35v8sqJJFfnuKgmtEU,867
|
| 8 |
+
aiosignal/__init__.pyi,sha256=xeCddYSS8fZAkz8S4HuKSR2IDe3N7RW_LKcXDPPA1Xk,311
|
| 9 |
+
aiosignal/__pycache__/__init__.cpython-311.pyc,,
|
| 10 |
+
aiosignal/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiosignal-1.3.2.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: setuptools (75.6.0)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py2-none-any
|
| 5 |
+
Tag: py3-none-any
|
| 6 |
+
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiosignal-1.3.2.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
aiosignal
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/attr/__init__.py
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# SPDX-License-Identifier: MIT
|
| 2 |
+
|
| 3 |
+
"""
|
| 4 |
+
Classes Without Boilerplate
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from functools import partial
|
| 8 |
+
from typing import Callable, Literal, Protocol
|
| 9 |
+
|
| 10 |
+
from . import converters, exceptions, filters, setters, validators
|
| 11 |
+
from ._cmp import cmp_using
|
| 12 |
+
from ._config import get_run_validators, set_run_validators
|
| 13 |
+
from ._funcs import asdict, assoc, astuple, has, resolve_types
|
| 14 |
+
from ._make import (
|
| 15 |
+
NOTHING,
|
| 16 |
+
Attribute,
|
| 17 |
+
Converter,
|
| 18 |
+
Factory,
|
| 19 |
+
_Nothing,
|
| 20 |
+
attrib,
|
| 21 |
+
attrs,
|
| 22 |
+
evolve,
|
| 23 |
+
fields,
|
| 24 |
+
fields_dict,
|
| 25 |
+
make_class,
|
| 26 |
+
validate,
|
| 27 |
+
)
|
| 28 |
+
from ._next_gen import define, field, frozen, mutable
|
| 29 |
+
from ._version_info import VersionInfo
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
s = attributes = attrs
|
| 33 |
+
ib = attr = attrib
|
| 34 |
+
dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class AttrsInstance(Protocol):
|
| 38 |
+
pass
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
NothingType = Literal[_Nothing.NOTHING]
|
| 42 |
+
|
| 43 |
+
__all__ = [
|
| 44 |
+
"NOTHING",
|
| 45 |
+
"Attribute",
|
| 46 |
+
"AttrsInstance",
|
| 47 |
+
"Converter",
|
| 48 |
+
"Factory",
|
| 49 |
+
"NothingType",
|
| 50 |
+
"asdict",
|
| 51 |
+
"assoc",
|
| 52 |
+
"astuple",
|
| 53 |
+
"attr",
|
| 54 |
+
"attrib",
|
| 55 |
+
"attributes",
|
| 56 |
+
"attrs",
|
| 57 |
+
"cmp_using",
|
| 58 |
+
"converters",
|
| 59 |
+
"define",
|
| 60 |
+
"evolve",
|
| 61 |
+
"exceptions",
|
| 62 |
+
"field",
|
| 63 |
+
"fields",
|
| 64 |
+
"fields_dict",
|
| 65 |
+
"filters",
|
| 66 |
+
"frozen",
|
| 67 |
+
"get_run_validators",
|
| 68 |
+
"has",
|
| 69 |
+
"ib",
|
| 70 |
+
"make_class",
|
| 71 |
+
"mutable",
|
| 72 |
+
"resolve_types",
|
| 73 |
+
"s",
|
| 74 |
+
"set_run_validators",
|
| 75 |
+
"setters",
|
| 76 |
+
"validate",
|
| 77 |
+
"validators",
|
| 78 |
+
]
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def _make_getattr(mod_name: str) -> Callable:
|
| 82 |
+
"""
|
| 83 |
+
Create a metadata proxy for packaging information that uses *mod_name* in
|
| 84 |
+
its warnings and errors.
|
| 85 |
+
"""
|
| 86 |
+
|
| 87 |
+
def __getattr__(name: str) -> str:
|
| 88 |
+
if name not in ("__version__", "__version_info__"):
|
| 89 |
+
msg = f"module {mod_name} has no attribute {name}"
|
| 90 |
+
raise AttributeError(msg)
|
| 91 |
+
|
| 92 |
+
from importlib.metadata import metadata
|
| 93 |
+
|
| 94 |
+
meta = metadata("attrs")
|
| 95 |
+
|
| 96 |
+
if name == "__version_info__":
|
| 97 |
+
return VersionInfo._from_version_string(meta["version"])
|
| 98 |
+
|
| 99 |
+
return meta["version"]
|
| 100 |
+
|
| 101 |
+
return __getattr__
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
__getattr__ = _make_getattr(__name__)
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/attr/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (3.15 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/attr/__pycache__/_cmp.cpython-311.pyc
ADDED
|
Binary file (5.43 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/attr/__pycache__/_compat.cpython-311.pyc
ADDED
|
Binary file (3.64 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/attr/__pycache__/_config.cpython-311.pyc
ADDED
|
Binary file (1.21 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/ray/_private/runtime_env/agent/thirdparty_files/attr/__pycache__/_funcs.cpython-311.pyc
ADDED
|
Binary file (15.3 kB). View file
|
|
|