ZTWHHH committed on
Commit
d22b2b9
·
verified ·
1 Parent(s): 5777f35

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. deepseek/lib/python3.10/site-packages/aiohttp/.hash/hdrs.py.hash +1 -0
  2. deepseek/lib/python3.10/site-packages/aiohttp/__init__.py +264 -0
  3. deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/client.cpython-310.pyc +0 -0
  4. deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/compression_utils.cpython-310.pyc +0 -0
  5. deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/connector.cpython-310.pyc +0 -0
  6. deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/cookiejar.cpython-310.pyc +0 -0
  7. deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/hdrs.cpython-310.pyc +0 -0
  8. deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/helpers.cpython-310.pyc +0 -0
  9. deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-310.pyc +0 -0
  10. deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/resolver.cpython-310.pyc +0 -0
  11. deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc +0 -0
  12. deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web.cpython-310.pyc +0 -0
  13. deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_exceptions.cpython-310.pyc +0 -0
  14. deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc +0 -0
  15. deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_routedef.cpython-310.pyc +0 -0
  16. deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_server.cpython-310.pyc +0 -0
  17. deepseek/lib/python3.10/site-packages/aiohttp/_headers.pxi +83 -0
  18. deepseek/lib/python3.10/site-packages/aiohttp/_http_writer.pyx +162 -0
  19. deepseek/lib/python3.10/site-packages/aiohttp/abc.py +247 -0
  20. deepseek/lib/python3.10/site-packages/aiohttp/base_protocol.py +100 -0
  21. deepseek/lib/python3.10/site-packages/aiohttp/client.py +1574 -0
  22. deepseek/lib/python3.10/site-packages/aiohttp/client_exceptions.py +417 -0
  23. deepseek/lib/python3.10/site-packages/aiohttp/client_proto.py +307 -0
  24. deepseek/lib/python3.10/site-packages/aiohttp/compression_utils.py +173 -0
  25. deepseek/lib/python3.10/site-packages/aiohttp/connector.py +1646 -0
  26. deepseek/lib/python3.10/site-packages/aiohttp/cookiejar.py +487 -0
  27. deepseek/lib/python3.10/site-packages/aiohttp/formdata.py +182 -0
  28. deepseek/lib/python3.10/site-packages/aiohttp/helpers.py +944 -0
  29. deepseek/lib/python3.10/site-packages/aiohttp/http.py +72 -0
  30. deepseek/lib/python3.10/site-packages/aiohttp/http_exceptions.py +112 -0
  31. deepseek/lib/python3.10/site-packages/aiohttp/http_parser.py +1046 -0
  32. deepseek/lib/python3.10/site-packages/aiohttp/http_websocket.py +36 -0
  33. deepseek/lib/python3.10/site-packages/aiohttp/multipart.py +1071 -0
  34. deepseek/lib/python3.10/site-packages/aiohttp/payload.py +519 -0
  35. deepseek/lib/python3.10/site-packages/aiohttp/payload_streamer.py +78 -0
  36. deepseek/lib/python3.10/site-packages/aiohttp/py.typed +1 -0
  37. deepseek/lib/python3.10/site-packages/aiohttp/pytest_plugin.py +436 -0
  38. deepseek/lib/python3.10/site-packages/aiohttp/streams.py +723 -0
  39. deepseek/lib/python3.10/site-packages/aiohttp/test_utils.py +770 -0
  40. deepseek/lib/python3.10/site-packages/aiohttp/typedefs.py +69 -0
  41. deepseek/lib/python3.10/site-packages/aiohttp/web.py +601 -0
  42. deepseek/lib/python3.10/site-packages/aiohttp/web_fileresponse.py +418 -0
  43. deepseek/lib/python3.10/site-packages/aiohttp/web_middlewares.py +121 -0
  44. deepseek/lib/python3.10/site-packages/aiohttp/web_protocol.py +746 -0
  45. deepseek/lib/python3.10/site-packages/aiohttp/web_request.py +916 -0
  46. deepseek/lib/python3.10/site-packages/aiohttp/web_response.py +840 -0
  47. deepseek/lib/python3.10/site-packages/aiohttp/web_urldispatcher.py +1301 -0
  48. deepseek/lib/python3.10/site-packages/aiohttp/worker.py +247 -0
  49. deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_check.cpython-310.pyc +0 -0
  50. deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_dataclasses.cpython-310.pyc +0 -0
deepseek/lib/python3.10/site-packages/aiohttp/.hash/hdrs.py.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ dab8f933203eeb245d60f856e542a45b888d5a110094620e4811f90f816628d1 /home/runner/work/aiohttp/aiohttp/aiohttp/hdrs.py
deepseek/lib/python3.10/site-packages/aiohttp/__init__.py ADDED
@@ -0,0 +1,264 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ __version__ = "3.11.10"
2
+
3
+ from typing import TYPE_CHECKING, Tuple
4
+
5
+ from . import hdrs as hdrs
6
+ from .client import (
7
+ BaseConnector,
8
+ ClientConnectionError,
9
+ ClientConnectionResetError,
10
+ ClientConnectorCertificateError,
11
+ ClientConnectorDNSError,
12
+ ClientConnectorError,
13
+ ClientConnectorSSLError,
14
+ ClientError,
15
+ ClientHttpProxyError,
16
+ ClientOSError,
17
+ ClientPayloadError,
18
+ ClientProxyConnectionError,
19
+ ClientRequest,
20
+ ClientResponse,
21
+ ClientResponseError,
22
+ ClientSession,
23
+ ClientSSLError,
24
+ ClientTimeout,
25
+ ClientWebSocketResponse,
26
+ ClientWSTimeout,
27
+ ConnectionTimeoutError,
28
+ ContentTypeError,
29
+ Fingerprint,
30
+ InvalidURL,
31
+ InvalidUrlClientError,
32
+ InvalidUrlRedirectClientError,
33
+ NamedPipeConnector,
34
+ NonHttpUrlClientError,
35
+ NonHttpUrlRedirectClientError,
36
+ RedirectClientError,
37
+ RequestInfo,
38
+ ServerConnectionError,
39
+ ServerDisconnectedError,
40
+ ServerFingerprintMismatch,
41
+ ServerTimeoutError,
42
+ SocketTimeoutError,
43
+ TCPConnector,
44
+ TooManyRedirects,
45
+ UnixConnector,
46
+ WSMessageTypeError,
47
+ WSServerHandshakeError,
48
+ request,
49
+ )
50
+ from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
51
+ from .formdata import FormData as FormData
52
+ from .helpers import BasicAuth, ChainMapProxy, ETag
53
+ from .http import (
54
+ HttpVersion as HttpVersion,
55
+ HttpVersion10 as HttpVersion10,
56
+ HttpVersion11 as HttpVersion11,
57
+ WebSocketError as WebSocketError,
58
+ WSCloseCode as WSCloseCode,
59
+ WSMessage as WSMessage,
60
+ WSMsgType as WSMsgType,
61
+ )
62
+ from .multipart import (
63
+ BadContentDispositionHeader as BadContentDispositionHeader,
64
+ BadContentDispositionParam as BadContentDispositionParam,
65
+ BodyPartReader as BodyPartReader,
66
+ MultipartReader as MultipartReader,
67
+ MultipartWriter as MultipartWriter,
68
+ content_disposition_filename as content_disposition_filename,
69
+ parse_content_disposition as parse_content_disposition,
70
+ )
71
+ from .payload import (
72
+ PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
73
+ AsyncIterablePayload as AsyncIterablePayload,
74
+ BufferedReaderPayload as BufferedReaderPayload,
75
+ BytesIOPayload as BytesIOPayload,
76
+ BytesPayload as BytesPayload,
77
+ IOBasePayload as IOBasePayload,
78
+ JsonPayload as JsonPayload,
79
+ Payload as Payload,
80
+ StringIOPayload as StringIOPayload,
81
+ StringPayload as StringPayload,
82
+ TextIOPayload as TextIOPayload,
83
+ get_payload as get_payload,
84
+ payload_type as payload_type,
85
+ )
86
+ from .payload_streamer import streamer as streamer
87
+ from .resolver import (
88
+ AsyncResolver as AsyncResolver,
89
+ DefaultResolver as DefaultResolver,
90
+ ThreadedResolver as ThreadedResolver,
91
+ )
92
+ from .streams import (
93
+ EMPTY_PAYLOAD as EMPTY_PAYLOAD,
94
+ DataQueue as DataQueue,
95
+ EofStream as EofStream,
96
+ FlowControlDataQueue as FlowControlDataQueue,
97
+ StreamReader as StreamReader,
98
+ )
99
+ from .tracing import (
100
+ TraceConfig as TraceConfig,
101
+ TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
102
+ TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
103
+ TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
104
+ TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
105
+ TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
106
+ TraceDnsCacheHitParams as TraceDnsCacheHitParams,
107
+ TraceDnsCacheMissParams as TraceDnsCacheMissParams,
108
+ TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
109
+ TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
110
+ TraceRequestChunkSentParams as TraceRequestChunkSentParams,
111
+ TraceRequestEndParams as TraceRequestEndParams,
112
+ TraceRequestExceptionParams as TraceRequestExceptionParams,
113
+ TraceRequestHeadersSentParams as TraceRequestHeadersSentParams,
114
+ TraceRequestRedirectParams as TraceRequestRedirectParams,
115
+ TraceRequestStartParams as TraceRequestStartParams,
116
+ TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
117
+ )
118
+
119
+ if TYPE_CHECKING:
120
+ # At runtime these are lazy-loaded at the bottom of the file.
121
+ from .worker import (
122
+ GunicornUVLoopWebWorker as GunicornUVLoopWebWorker,
123
+ GunicornWebWorker as GunicornWebWorker,
124
+ )
125
+
126
+ __all__: Tuple[str, ...] = (
127
+ "hdrs",
128
+ # client
129
+ "BaseConnector",
130
+ "ClientConnectionError",
131
+ "ClientConnectionResetError",
132
+ "ClientConnectorCertificateError",
133
+ "ClientConnectorDNSError",
134
+ "ClientConnectorError",
135
+ "ClientConnectorSSLError",
136
+ "ClientError",
137
+ "ClientHttpProxyError",
138
+ "ClientOSError",
139
+ "ClientPayloadError",
140
+ "ClientProxyConnectionError",
141
+ "ClientResponse",
142
+ "ClientRequest",
143
+ "ClientResponseError",
144
+ "ClientSSLError",
145
+ "ClientSession",
146
+ "ClientTimeout",
147
+ "ClientWebSocketResponse",
148
+ "ClientWSTimeout",
149
+ "ConnectionTimeoutError",
150
+ "ContentTypeError",
151
+ "Fingerprint",
152
+ "FlowControlDataQueue",
153
+ "InvalidURL",
154
+ "InvalidUrlClientError",
155
+ "InvalidUrlRedirectClientError",
156
+ "NonHttpUrlClientError",
157
+ "NonHttpUrlRedirectClientError",
158
+ "RedirectClientError",
159
+ "RequestInfo",
160
+ "ServerConnectionError",
161
+ "ServerDisconnectedError",
162
+ "ServerFingerprintMismatch",
163
+ "ServerTimeoutError",
164
+ "SocketTimeoutError",
165
+ "TCPConnector",
166
+ "TooManyRedirects",
167
+ "UnixConnector",
168
+ "NamedPipeConnector",
169
+ "WSServerHandshakeError",
170
+ "request",
171
+ # cookiejar
172
+ "CookieJar",
173
+ "DummyCookieJar",
174
+ # formdata
175
+ "FormData",
176
+ # helpers
177
+ "BasicAuth",
178
+ "ChainMapProxy",
179
+ "ETag",
180
+ # http
181
+ "HttpVersion",
182
+ "HttpVersion10",
183
+ "HttpVersion11",
184
+ "WSMsgType",
185
+ "WSCloseCode",
186
+ "WSMessage",
187
+ "WebSocketError",
188
+ # multipart
189
+ "BadContentDispositionHeader",
190
+ "BadContentDispositionParam",
191
+ "BodyPartReader",
192
+ "MultipartReader",
193
+ "MultipartWriter",
194
+ "content_disposition_filename",
195
+ "parse_content_disposition",
196
+ # payload
197
+ "AsyncIterablePayload",
198
+ "BufferedReaderPayload",
199
+ "BytesIOPayload",
200
+ "BytesPayload",
201
+ "IOBasePayload",
202
+ "JsonPayload",
203
+ "PAYLOAD_REGISTRY",
204
+ "Payload",
205
+ "StringIOPayload",
206
+ "StringPayload",
207
+ "TextIOPayload",
208
+ "get_payload",
209
+ "payload_type",
210
+ # payload_streamer
211
+ "streamer",
212
+ # resolver
213
+ "AsyncResolver",
214
+ "DefaultResolver",
215
+ "ThreadedResolver",
216
+ # streams
217
+ "DataQueue",
218
+ "EMPTY_PAYLOAD",
219
+ "EofStream",
220
+ "StreamReader",
221
+ # tracing
222
+ "TraceConfig",
223
+ "TraceConnectionCreateEndParams",
224
+ "TraceConnectionCreateStartParams",
225
+ "TraceConnectionQueuedEndParams",
226
+ "TraceConnectionQueuedStartParams",
227
+ "TraceConnectionReuseconnParams",
228
+ "TraceDnsCacheHitParams",
229
+ "TraceDnsCacheMissParams",
230
+ "TraceDnsResolveHostEndParams",
231
+ "TraceDnsResolveHostStartParams",
232
+ "TraceRequestChunkSentParams",
233
+ "TraceRequestEndParams",
234
+ "TraceRequestExceptionParams",
235
+ "TraceRequestHeadersSentParams",
236
+ "TraceRequestRedirectParams",
237
+ "TraceRequestStartParams",
238
+ "TraceResponseChunkReceivedParams",
239
+ # workers (imported lazily with __getattr__)
240
+ "GunicornUVLoopWebWorker",
241
+ "GunicornWebWorker",
242
+ "WSMessageTypeError",
243
+ )
244
+
245
+
246
def __dir__() -> Tuple[str, ...]:
    """Return the module's public names plus ``__doc__`` for dir()."""
    return (*__all__, "__doc__")
248
+
249
+
250
+ def __getattr__(name: str) -> object:
251
+ global GunicornUVLoopWebWorker, GunicornWebWorker
252
+
253
+ # Importing gunicorn takes a long time (>100ms), so only import if actually needed.
254
+ if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"):
255
+ try:
256
+ from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw
257
+ except ImportError:
258
+ return None
259
+
260
+ GunicornUVLoopWebWorker = guv # type: ignore[misc]
261
+ GunicornWebWorker = gw # type: ignore[misc]
262
+ return guv if name == "GunicornUVLoopWebWorker" else gw
263
+
264
+ raise AttributeError(f"module {__name__} has no attribute {name}")
deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/client.cpython-310.pyc ADDED
Binary file (34.6 kB). View file
 
deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/compression_utils.cpython-310.pyc ADDED
Binary file (5.81 kB). View file
 
deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/connector.cpython-310.pyc ADDED
Binary file (40.4 kB). View file
 
deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/cookiejar.cpython-310.pyc ADDED
Binary file (12 kB). View file
 
deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/hdrs.cpython-310.pyc ADDED
Binary file (5.46 kB). View file
 
deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/helpers.cpython-310.pyc ADDED
Binary file (27.4 kB). View file
 
deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-310.pyc ADDED
Binary file (11.5 kB). View file
 
deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/resolver.cpython-310.pyc ADDED
Binary file (4.45 kB). View file
 
deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc ADDED
Binary file (19.3 kB). View file
 
deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web.cpython-310.pyc ADDED
Binary file (10.9 kB). View file
 
deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_exceptions.cpython-310.pyc ADDED
Binary file (11.6 kB). View file
 
deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc ADDED
Binary file (3.84 kB). View file
 
deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_routedef.cpython-310.pyc ADDED
Binary file (7.65 kB). View file
 
deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_server.cpython-310.pyc ADDED
Binary file (3.52 kB). View file
 
deepseek/lib/python3.10/site-packages/aiohttp/_headers.pxi ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # The file is autogenerated from aiohttp/hdrs.py
2
+ # Run ./tools/gen.py to update it after the origin changing.
3
+
4
+ from . import hdrs
5
+ cdef tuple headers = (
6
+ hdrs.ACCEPT,
7
+ hdrs.ACCEPT_CHARSET,
8
+ hdrs.ACCEPT_ENCODING,
9
+ hdrs.ACCEPT_LANGUAGE,
10
+ hdrs.ACCEPT_RANGES,
11
+ hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
12
+ hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
13
+ hdrs.ACCESS_CONTROL_ALLOW_METHODS,
14
+ hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
15
+ hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
16
+ hdrs.ACCESS_CONTROL_MAX_AGE,
17
+ hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
18
+ hdrs.ACCESS_CONTROL_REQUEST_METHOD,
19
+ hdrs.AGE,
20
+ hdrs.ALLOW,
21
+ hdrs.AUTHORIZATION,
22
+ hdrs.CACHE_CONTROL,
23
+ hdrs.CONNECTION,
24
+ hdrs.CONTENT_DISPOSITION,
25
+ hdrs.CONTENT_ENCODING,
26
+ hdrs.CONTENT_LANGUAGE,
27
+ hdrs.CONTENT_LENGTH,
28
+ hdrs.CONTENT_LOCATION,
29
+ hdrs.CONTENT_MD5,
30
+ hdrs.CONTENT_RANGE,
31
+ hdrs.CONTENT_TRANSFER_ENCODING,
32
+ hdrs.CONTENT_TYPE,
33
+ hdrs.COOKIE,
34
+ hdrs.DATE,
35
+ hdrs.DESTINATION,
36
+ hdrs.DIGEST,
37
+ hdrs.ETAG,
38
+ hdrs.EXPECT,
39
+ hdrs.EXPIRES,
40
+ hdrs.FORWARDED,
41
+ hdrs.FROM,
42
+ hdrs.HOST,
43
+ hdrs.IF_MATCH,
44
+ hdrs.IF_MODIFIED_SINCE,
45
+ hdrs.IF_NONE_MATCH,
46
+ hdrs.IF_RANGE,
47
+ hdrs.IF_UNMODIFIED_SINCE,
48
+ hdrs.KEEP_ALIVE,
49
+ hdrs.LAST_EVENT_ID,
50
+ hdrs.LAST_MODIFIED,
51
+ hdrs.LINK,
52
+ hdrs.LOCATION,
53
+ hdrs.MAX_FORWARDS,
54
+ hdrs.ORIGIN,
55
+ hdrs.PRAGMA,
56
+ hdrs.PROXY_AUTHENTICATE,
57
+ hdrs.PROXY_AUTHORIZATION,
58
+ hdrs.RANGE,
59
+ hdrs.REFERER,
60
+ hdrs.RETRY_AFTER,
61
+ hdrs.SEC_WEBSOCKET_ACCEPT,
62
+ hdrs.SEC_WEBSOCKET_EXTENSIONS,
63
+ hdrs.SEC_WEBSOCKET_KEY,
64
+ hdrs.SEC_WEBSOCKET_KEY1,
65
+ hdrs.SEC_WEBSOCKET_PROTOCOL,
66
+ hdrs.SEC_WEBSOCKET_VERSION,
67
+ hdrs.SERVER,
68
+ hdrs.SET_COOKIE,
69
+ hdrs.TE,
70
+ hdrs.TRAILER,
71
+ hdrs.TRANSFER_ENCODING,
72
+ hdrs.URI,
73
+ hdrs.UPGRADE,
74
+ hdrs.USER_AGENT,
75
+ hdrs.VARY,
76
+ hdrs.VIA,
77
+ hdrs.WWW_AUTHENTICATE,
78
+ hdrs.WANT_DIGEST,
79
+ hdrs.WARNING,
80
+ hdrs.X_FORWARDED_FOR,
81
+ hdrs.X_FORWARDED_HOST,
82
+ hdrs.X_FORWARDED_PROTO,
83
+ )
deepseek/lib/python3.10/site-packages/aiohttp/_http_writer.pyx ADDED
@@ -0,0 +1,162 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from cpython.bytes cimport PyBytes_FromStringAndSize
2
+ from cpython.exc cimport PyErr_NoMemory
3
+ from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
4
+ from cpython.object cimport PyObject_Str
5
+ from libc.stdint cimport uint8_t, uint64_t
6
+ from libc.string cimport memcpy
7
+
8
+ from multidict import istr
9
+
10
+ DEF BUF_SIZE = 16 * 1024 # 16KiB
11
+ cdef char BUFFER[BUF_SIZE]
12
+
13
+ cdef object _istr = istr
14
+
15
+
16
+ # ----------------- writer ---------------------------
17
+
18
# Growable output buffer: `buf` points either at the shared static BUFFER or
# at a heap block, `size` is its capacity, `pos` the bytes written so far.
cdef struct Writer:
    char *buf
    Py_ssize_t size
    Py_ssize_t pos
22
+
23
+
24
cdef inline void _init_writer(Writer* writer):
    # Start on the module-level static buffer; _write_byte moves to the heap
    # only if the serialized output overflows BUF_SIZE.
    writer.buf = &BUFFER[0]
    writer.size = BUF_SIZE
    writer.pos = 0
28
+
29
+
30
cdef inline void _release_writer(Writer* writer):
    # Free only heap memory; the static BUFFER must never be released.
    if writer.buf != BUFFER:
        PyMem_Free(writer.buf)
33
+
34
+
35
cdef inline int _write_byte(Writer* writer, uint8_t ch):
    # Append one byte, growing the buffer in BUF_SIZE increments.
    # Returns 0 on success; on allocation failure sets MemoryError via
    # PyErr_NoMemory() and returns -1 (callers re-raise it with bare `raise`).
    cdef char * buf
    cdef Py_ssize_t size

    if writer.pos == writer.size:
        # reallocate
        size = writer.size + BUF_SIZE
        if writer.buf == BUFFER:
            # First overflow: migrate off the static buffer onto the heap
            # (the static buffer cannot be realloc'd).
            buf = <char*>PyMem_Malloc(size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
            memcpy(buf, writer.buf, writer.size)
        else:
            buf = <char*>PyMem_Realloc(writer.buf, size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
        writer.buf = buf
        writer.size = size
    writer.buf[writer.pos] = <char>ch
    writer.pos += 1
    return 0
58
+
59
+
60
cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
    # Encode one code point as UTF-8 into the writer.
    # Returns 0 on success (invalid code points are silently dropped),
    # -1 on allocation failure inside _write_byte.
    cdef uint64_t utf = <uint64_t> symbol

    if utf < 0x80:
        # 1-byte sequence (ASCII).
        return _write_byte(writer, <uint8_t>utf)
    elif utf < 0x800:
        # 2-byte sequence: 110xxxxx 10xxxxxx.
        if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif 0xD800 <= utf <= 0xDFFF:
        # surrogate code point — not encodable in UTF-8, ignored
        return 0
    elif utf < 0x10000:
        # 3-byte sequence: 1110xxxx 10xxxxxx 10xxxxxx.
        if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
            return -1
        if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif utf > 0x10FFFF:
        # symbol is too large to be a Unicode code point; ignored
        return 0
    else:
        # 4-byte sequence: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx.
        if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
91
+
92
+
93
cdef inline int _write_str(Writer* writer, str s):
    # UTF-8-encode the whole string into the writer; -1 on allocation failure.
    # NOTE(review): falls off the end on success — Cython implicitly returns 0
    # for a C int return type, matching the 0-success convention; confirm.
    cdef Py_UCS4 ch
    for ch in s:
        if _write_utf8(writer, ch) < 0:
            return -1
98
+
99
+
100
+ # --------------- _serialize_headers ----------------------
101
+
102
cdef str to_str(object s):
    # Normalize a header key/value to `str`: fast paths for exact str and
    # multidict's istr, coercion for other str subclasses, TypeError otherwise.
    if type(s) is str:
        return <str>s
    elif type(s) is _istr:
        return PyObject_Str(s)
    elif not isinstance(s, str):
        raise TypeError("Cannot serialize non-str key {!r}".format(s))
    else:
        return str(s)
111
+
112
+
113
+
114
def _serialize_headers(str status_line, headers):
    # Render "<status line>\r\n<key>: <val>\r\n...\r\n\r\n" as bytes.
    # Each bare `raise` re-raises the MemoryError that _write_byte/_write_str
    # set via PyErr_NoMemory() when returning -1.
    cdef Writer writer
    cdef object key
    cdef object val
    cdef bytes ret  # NOTE(review): declared but unused in this function.
    cdef str key_str
    cdef str val_str

    _init_writer(&writer)

    try:
        if _write_str(&writer, status_line) < 0:
            raise
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        for key, val in headers.items():
            key_str = to_str(key)
            val_str = to_str(val)

            # Reject embedded CR/LF: they would allow header/response
            # splitting attacks via attacker-controlled values.
            if "\r" in key_str or "\n" in key_str or "\r" in val_str or "\n" in val_str:
                raise ValueError(
                    "Newline or carriage return character detected in HTTP status message or "
                    "header. This is a potential security issue."
                )

            if _write_str(&writer, key_str) < 0:
                raise
            if _write_byte(&writer, b':') < 0:
                raise
            if _write_byte(&writer, b' ') < 0:
                raise
            if _write_str(&writer, val_str) < 0:
                raise
            if _write_byte(&writer, b'\r') < 0:
                raise
            if _write_byte(&writer, b'\n') < 0:
                raise

        # Blank line terminating the header block.
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        return PyBytes_FromStringAndSize(writer.buf, writer.pos)
    finally:
        _release_writer(&writer)
deepseek/lib/python3.10/site-packages/aiohttp/abc.py ADDED
@@ -0,0 +1,247 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import logging
3
+ import socket
4
+ import zlib
5
+ from abc import ABC, abstractmethod
6
+ from collections.abc import Sized
7
+ from http.cookies import BaseCookie, Morsel
8
+ from typing import (
9
+ TYPE_CHECKING,
10
+ Any,
11
+ Awaitable,
12
+ Callable,
13
+ Dict,
14
+ Generator,
15
+ Iterable,
16
+ List,
17
+ Optional,
18
+ Tuple,
19
+ TypedDict,
20
+ )
21
+
22
+ from multidict import CIMultiDict
23
+ from yarl import URL
24
+
25
+ from .typedefs import LooseCookies
26
+
27
+ if TYPE_CHECKING:
28
+ from .web_app import Application
29
+ from .web_exceptions import HTTPException
30
+ from .web_request import BaseRequest, Request
31
+ from .web_response import StreamResponse
32
+ else:
33
+ BaseRequest = Request = Application = StreamResponse = None
34
+ HTTPException = None
35
+
36
+
37
class AbstractRouter(ABC):
    """Abstract URL router: resolves a request to an AbstractMatchInfo."""

    def __init__(self) -> None:
        # Routers start mutable; freeze() flips this permanently.
        self._frozen = False

    def post_init(self, app: Application) -> None:
        """Post init stage.

        Not an abstract method for sake of backward compatibility,
        but if the router wants to be aware of the application
        it can override this.
        """

    @property
    def frozen(self) -> bool:
        return self._frozen

    def freeze(self) -> None:
        """Freeze router."""
        self._frozen = True

    @abstractmethod
    async def resolve(self, request: Request) -> "AbstractMatchInfo":
        """Return MATCH_INFO for given request"""
60
+
61
+
62
class AbstractMatchInfo(ABC):
    """Result of route resolution: the handler plus introspection data."""

    __slots__ = ()

    @property  # pragma: no branch
    @abstractmethod
    def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
        """Execute matched request handler"""

    @property
    @abstractmethod
    def expect_handler(
        self,
    ) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]:
        """Expect handler for 100-continue processing"""

    @property  # pragma: no branch
    @abstractmethod
    def http_exception(self) -> Optional[HTTPException]:
        """HTTPException instance raised on router's resolving, or None"""

    @abstractmethod  # pragma: no branch
    def get_info(self) -> Dict[str, Any]:
        """Return a dict with additional info useful for introspection"""

    @property  # pragma: no branch
    @abstractmethod
    def apps(self) -> Tuple[Application, ...]:
        """Stack of nested applications.

        Top level application is left-most element.

        """

    @abstractmethod
    def add_app(self, app: Application) -> None:
        """Add application to the nested apps stack."""

    @abstractmethod
    def freeze(self) -> None:
        """Freeze the match info.

        The method is called after route resolution.

        After the call .add_app() is forbidden.

        """
109
+
110
+
111
class AbstractView(ABC):
    """Abstract class based view."""

    def __init__(self, request: Request) -> None:
        self._request = request

    @property
    def request(self) -> Request:
        """Request instance."""
        return self._request

    @abstractmethod
    def __await__(self) -> Generator[Any, None, StreamResponse]:
        """Execute the view handler."""
        # Awaiting the view instance itself runs the handler and yields
        # the response.
125
+
126
+
127
class ResolveResult(TypedDict):
    """Resolve result.

    This is the result returned from an AbstractResolver's
    resolve method.

    :param hostname: The hostname that was provided.
    :param host: The IP address that was resolved.
    :param port: The port that was resolved.
    :param family: The address family that was resolved.
    :param proto: The protocol that was resolved.
    :param flags: The flags that were resolved.
    """

    # NOTE(review): fields appear to mirror socket.getaddrinfo() results —
    # confirm against resolver implementations.
    hostname: str
    host: str
    port: int
    family: int
    proto: int
    flags: int
147
+
148
+
149
class AbstractResolver(ABC):
    """Abstract DNS resolver."""

    @abstractmethod
    async def resolve(
        self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
    ) -> List[ResolveResult]:
        """Return IP address for given hostname"""
        # `family` defaults to IPv4; port is forwarded into the results.

    @abstractmethod
    async def close(self) -> None:
        """Release resolver"""
161
+
162
+
163
if TYPE_CHECKING:
    IterableBase = Iterable[Morsel[str]]
else:
    # Morsel is not subscriptable at runtime, so fall back to the plain ABC.
    IterableBase = Iterable


# Predicate deciding which cookies AbstractCookieJar.clear() removes.
ClearCookiePredicate = Callable[["Morsel[str]"], bool]
170
+
171
+
172
class AbstractCookieJar(Sized, IterableBase):
    """Abstract Cookie Jar."""

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        # Falls back to the running loop, so without an explicit `loop` the
        # jar must be created from within async context.
        self._loop = loop or asyncio.get_running_loop()

    @abstractmethod
    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        """Clear all cookies if no predicate is passed."""

    @abstractmethod
    def clear_domain(self, domain: str) -> None:
        """Clear all cookies for domain and all subdomains."""

    @abstractmethod
    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Update cookies."""

    @abstractmethod
    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        """Return the jar's cookies filtered by their attributes."""
193
+
194
+
195
class AbstractStreamWriter(ABC):
    """Abstract stream writer."""

    # Bytes currently buffered / total bytes emitted so far.
    buffer_size: int = 0
    output_size: int = 0
    # NOTE(review): None appears to mean "unknown/unbounded length" (e.g.
    # chunked transfer) — confirm against concrete writers.
    length: Optional[int] = 0

    @abstractmethod
    async def write(self, chunk: bytes) -> None:
        """Write chunk into stream."""

    @abstractmethod
    async def write_eof(self, chunk: bytes = b"") -> None:
        """Write last chunk."""

    @abstractmethod
    async def drain(self) -> None:
        """Flush the write buffer."""

    @abstractmethod
    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        """Enable HTTP body compression"""

    @abstractmethod
    def enable_chunking(self) -> None:
        """Enable HTTP chunked mode"""

    @abstractmethod
    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write HTTP headers"""
229
+
230
+
231
class AbstractAccessLogger(ABC):
    """Abstract writer to access log."""

    __slots__ = ("logger", "log_format")

    def __init__(self, logger: logging.Logger, log_format: str) -> None:
        self.logger = logger
        self.log_format = log_format

    @abstractmethod
    def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
        """Emit log to logger."""

    @property
    def enabled(self) -> bool:
        """Check if logger is enabled."""
        # Subclasses may override to let the server skip log formatting
        # entirely when logging is off.
        return True
deepseek/lib/python3.10/site-packages/aiohttp/base_protocol.py ADDED
@@ -0,0 +1,100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ from typing import Optional, cast
3
+
4
+ from .client_exceptions import ClientConnectionResetError
5
+ from .helpers import set_exception
6
+ from .tcp_helpers import tcp_nodelay
7
+
8
+
9
class BaseProtocol(asyncio.Protocol):
    """Shared asyncio protocol base implementing write flow control.

    Tracks the transport, read/write pause state, and a single waiter
    future that ``_drain_helper`` awaits while the transport's write
    buffer is over its high-water mark.
    """

    __slots__ = (
        "_loop",
        "_paused",
        "_drain_waiter",
        # NOTE(review): "_connection_lost" slot is declared but never
        # assigned in this class — presumably used by subclasses; confirm.
        "_connection_lost",
        "_reading_paused",
        "transport",
    )

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop: asyncio.AbstractEventLoop = loop
        self._paused = False
        # Future resolved by resume_writing()/connection_lost() to wake
        # tasks blocked in _drain_helper().
        self._drain_waiter: Optional[asyncio.Future[None]] = None
        self._reading_paused = False

        self.transport: Optional[asyncio.Transport] = None

    @property
    def connected(self) -> bool:
        """Return True if the connection is open."""
        return self.transport is not None

    @property
    def writing_paused(self) -> bool:
        return self._paused

    def pause_writing(self) -> None:
        # Called by the transport when its write buffer crosses the
        # high-water mark.
        assert not self._paused
        self._paused = True

    def resume_writing(self) -> None:
        assert self._paused
        self._paused = False

        # Wake a writer parked in _drain_helper(), if any.
        waiter = self._drain_waiter
        if waiter is not None:
            self._drain_waiter = None
            if not waiter.done():
                waiter.set_result(None)

    def pause_reading(self) -> None:
        if not self._reading_paused and self.transport is not None:
            try:
                self.transport.pause_reading()
            except (AttributeError, NotImplementedError, RuntimeError):
                # Some transports don't support pausing; still record the
                # requested state.
                pass
            self._reading_paused = True

    def resume_reading(self) -> None:
        if self._reading_paused and self.transport is not None:
            try:
                self.transport.resume_reading()
            except (AttributeError, NotImplementedError, RuntimeError):
                pass
            self._reading_paused = False

    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        tr = cast(asyncio.Transport, transport)
        # Disable Nagle's algorithm to reduce latency for small writes.
        tcp_nodelay(tr, True)
        self.transport = tr

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        # Wake up the writer if currently paused.
        self.transport = None
        if not self._paused:
            return
        waiter = self._drain_waiter
        if waiter is None:
            return
        self._drain_waiter = None
        if waiter.done():
            return
        if exc is None:
            waiter.set_result(None)
        else:
            # Fail the waiter with a ConnectionError chained to the
            # original transport exception.
            set_exception(
                waiter,
                ConnectionError("Connection lost"),
                exc,
            )

    async def _drain_helper(self) -> None:
        if self.transport is None:
            raise ClientConnectionResetError("Connection lost")
        if not self._paused:
            return
        # Reuse one shared waiter for all concurrent drainers.
        waiter = self._drain_waiter
        if waiter is None:
            waiter = self._loop.create_future()
            self._drain_waiter = waiter
        # Shield so cancelling one drain caller doesn't cancel the shared
        # waiter other tasks may be awaiting.
        await asyncio.shield(waiter)
deepseek/lib/python3.10/site-packages/aiohttp/client.py ADDED
@@ -0,0 +1,1574 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """HTTP Client for asyncio."""
2
+
3
+ import asyncio
4
+ import base64
5
+ import hashlib
6
+ import json
7
+ import os
8
+ import sys
9
+ import traceback
10
+ import warnings
11
+ from contextlib import suppress
12
+ from types import TracebackType
13
+ from typing import (
14
+ TYPE_CHECKING,
15
+ Any,
16
+ Awaitable,
17
+ Callable,
18
+ Coroutine,
19
+ Final,
20
+ FrozenSet,
21
+ Generator,
22
+ Generic,
23
+ Iterable,
24
+ List,
25
+ Mapping,
26
+ Optional,
27
+ Set,
28
+ Tuple,
29
+ Type,
30
+ TypedDict,
31
+ TypeVar,
32
+ Union,
33
+ )
34
+
35
+ import attr
36
+ from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr
37
+ from yarl import URL
38
+
39
+ from . import hdrs, http, payload
40
+ from ._websocket.reader import WebSocketDataQueue
41
+ from .abc import AbstractCookieJar
42
+ from .client_exceptions import (
43
+ ClientConnectionError,
44
+ ClientConnectionResetError,
45
+ ClientConnectorCertificateError,
46
+ ClientConnectorDNSError,
47
+ ClientConnectorError,
48
+ ClientConnectorSSLError,
49
+ ClientError,
50
+ ClientHttpProxyError,
51
+ ClientOSError,
52
+ ClientPayloadError,
53
+ ClientProxyConnectionError,
54
+ ClientResponseError,
55
+ ClientSSLError,
56
+ ConnectionTimeoutError,
57
+ ContentTypeError,
58
+ InvalidURL,
59
+ InvalidUrlClientError,
60
+ InvalidUrlRedirectClientError,
61
+ NonHttpUrlClientError,
62
+ NonHttpUrlRedirectClientError,
63
+ RedirectClientError,
64
+ ServerConnectionError,
65
+ ServerDisconnectedError,
66
+ ServerFingerprintMismatch,
67
+ ServerTimeoutError,
68
+ SocketTimeoutError,
69
+ TooManyRedirects,
70
+ WSMessageTypeError,
71
+ WSServerHandshakeError,
72
+ )
73
+ from .client_reqrep import (
74
+ ClientRequest as ClientRequest,
75
+ ClientResponse as ClientResponse,
76
+ Fingerprint as Fingerprint,
77
+ RequestInfo as RequestInfo,
78
+ _merge_ssl_params,
79
+ )
80
+ from .client_ws import (
81
+ DEFAULT_WS_CLIENT_TIMEOUT,
82
+ ClientWebSocketResponse as ClientWebSocketResponse,
83
+ ClientWSTimeout as ClientWSTimeout,
84
+ )
85
+ from .connector import (
86
+ HTTP_AND_EMPTY_SCHEMA_SET,
87
+ BaseConnector as BaseConnector,
88
+ NamedPipeConnector as NamedPipeConnector,
89
+ TCPConnector as TCPConnector,
90
+ UnixConnector as UnixConnector,
91
+ )
92
+ from .cookiejar import CookieJar
93
+ from .helpers import (
94
+ _SENTINEL,
95
+ DEBUG,
96
+ EMPTY_BODY_METHODS,
97
+ BasicAuth,
98
+ TimeoutHandle,
99
+ get_env_proxy_for_url,
100
+ sentinel,
101
+ strip_auth_from_url,
102
+ )
103
+ from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
104
+ from .http_websocket import WSHandshakeError, ws_ext_gen, ws_ext_parse
105
+ from .tracing import Trace, TraceConfig
106
+ from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, Query, StrOrURL
107
+
108
+ __all__ = (
109
+ # client_exceptions
110
+ "ClientConnectionError",
111
+ "ClientConnectionResetError",
112
+ "ClientConnectorCertificateError",
113
+ "ClientConnectorDNSError",
114
+ "ClientConnectorError",
115
+ "ClientConnectorSSLError",
116
+ "ClientError",
117
+ "ClientHttpProxyError",
118
+ "ClientOSError",
119
+ "ClientPayloadError",
120
+ "ClientProxyConnectionError",
121
+ "ClientResponseError",
122
+ "ClientSSLError",
123
+ "ConnectionTimeoutError",
124
+ "ContentTypeError",
125
+ "InvalidURL",
126
+ "InvalidUrlClientError",
127
+ "RedirectClientError",
128
+ "NonHttpUrlClientError",
129
+ "InvalidUrlRedirectClientError",
130
+ "NonHttpUrlRedirectClientError",
131
+ "ServerConnectionError",
132
+ "ServerDisconnectedError",
133
+ "ServerFingerprintMismatch",
134
+ "ServerTimeoutError",
135
+ "SocketTimeoutError",
136
+ "TooManyRedirects",
137
+ "WSServerHandshakeError",
138
+ # client_reqrep
139
+ "ClientRequest",
140
+ "ClientResponse",
141
+ "Fingerprint",
142
+ "RequestInfo",
143
+ # connector
144
+ "BaseConnector",
145
+ "TCPConnector",
146
+ "UnixConnector",
147
+ "NamedPipeConnector",
148
+ # client_ws
149
+ "ClientWebSocketResponse",
150
+ # client
151
+ "ClientSession",
152
+ "ClientTimeout",
153
+ "ClientWSTimeout",
154
+ "request",
155
+ "WSMessageTypeError",
156
+ )
157
+
158
+
159
+ if TYPE_CHECKING:
160
+ from ssl import SSLContext
161
+ else:
162
+ SSLContext = None
163
+
164
+ if sys.version_info >= (3, 11) and TYPE_CHECKING:
165
+ from typing import Unpack
166
+
167
+
168
+ class _RequestOptions(TypedDict, total=False):
169
+ params: Query
170
+ data: Any
171
+ json: Any
172
+ cookies: Union[LooseCookies, None]
173
+ headers: Union[LooseHeaders, None]
174
+ skip_auto_headers: Union[Iterable[str], None]
175
+ auth: Union[BasicAuth, None]
176
+ allow_redirects: bool
177
+ max_redirects: int
178
+ compress: Union[str, bool, None]
179
+ chunked: Union[bool, None]
180
+ expect100: bool
181
+ raise_for_status: Union[None, bool, Callable[[ClientResponse], Awaitable[None]]]
182
+ read_until_eof: bool
183
+ proxy: Union[StrOrURL, None]
184
+ proxy_auth: Union[BasicAuth, None]
185
+ timeout: "Union[ClientTimeout, _SENTINEL, None]"
186
+ ssl: Union[SSLContext, bool, Fingerprint]
187
+ server_hostname: Union[str, None]
188
+ proxy_headers: Union[LooseHeaders, None]
189
+ trace_request_ctx: Union[Mapping[str, Any], None]
190
+ read_bufsize: Union[int, None]
191
+ auto_decompress: Union[bool, None]
192
+ max_line_size: Union[int, None]
193
+ max_field_size: Union[int, None]
194
+
195
+
196
+ @attr.s(auto_attribs=True, frozen=True, slots=True)
197
+ class ClientTimeout:
198
+ total: Optional[float] = None
199
+ connect: Optional[float] = None
200
+ sock_read: Optional[float] = None
201
+ sock_connect: Optional[float] = None
202
+ ceil_threshold: float = 5
203
+
204
+ # pool_queue_timeout: Optional[float] = None
205
+ # dns_resolution_timeout: Optional[float] = None
206
+ # socket_connect_timeout: Optional[float] = None
207
+ # connection_acquiring_timeout: Optional[float] = None
208
+ # new_connection_timeout: Optional[float] = None
209
+ # http_header_timeout: Optional[float] = None
210
+ # response_body_timeout: Optional[float] = None
211
+
212
+ # to create a timeout specific for a single request, either
213
+ # - create a completely new one to overwrite the default
214
+ # - or use http://www.attrs.org/en/stable/api.html#attr.evolve
215
+ # to overwrite the defaults
216
+
217
+
218
+ # 5 Minute default read timeout
219
+ DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60, sock_connect=30)
220
+
221
+ # https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2
222
+ IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"})
223
+
224
+ _RetType = TypeVar("_RetType", ClientResponse, ClientWebSocketResponse)
225
+ _CharsetResolver = Callable[[ClientResponse, bytes], str]
226
+
227
+
228
+ class ClientSession:
229
+ """First-class interface for making HTTP requests."""
230
+
231
+ ATTRS = frozenset(
232
+ [
233
+ "_base_url",
234
+ "_base_url_origin",
235
+ "_source_traceback",
236
+ "_connector",
237
+ "_loop",
238
+ "_cookie_jar",
239
+ "_connector_owner",
240
+ "_default_auth",
241
+ "_version",
242
+ "_json_serialize",
243
+ "_requote_redirect_url",
244
+ "_timeout",
245
+ "_raise_for_status",
246
+ "_auto_decompress",
247
+ "_trust_env",
248
+ "_default_headers",
249
+ "_skip_auto_headers",
250
+ "_request_class",
251
+ "_response_class",
252
+ "_ws_response_class",
253
+ "_trace_configs",
254
+ "_read_bufsize",
255
+ "_max_line_size",
256
+ "_max_field_size",
257
+ "_resolve_charset",
258
+ "_default_proxy",
259
+ "_default_proxy_auth",
260
+ "_retry_connection",
261
+ "requote_redirect_url",
262
+ ]
263
+ )
264
+
265
+ _source_traceback: Optional[traceback.StackSummary] = None
266
+ _connector: Optional[BaseConnector] = None
267
+
268
+ def __init__(
269
+ self,
270
+ base_url: Optional[StrOrURL] = None,
271
+ *,
272
+ connector: Optional[BaseConnector] = None,
273
+ loop: Optional[asyncio.AbstractEventLoop] = None,
274
+ cookies: Optional[LooseCookies] = None,
275
+ headers: Optional[LooseHeaders] = None,
276
+ proxy: Optional[StrOrURL] = None,
277
+ proxy_auth: Optional[BasicAuth] = None,
278
+ skip_auto_headers: Optional[Iterable[str]] = None,
279
+ auth: Optional[BasicAuth] = None,
280
+ json_serialize: JSONEncoder = json.dumps,
281
+ request_class: Type[ClientRequest] = ClientRequest,
282
+ response_class: Type[ClientResponse] = ClientResponse,
283
+ ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,
284
+ version: HttpVersion = http.HttpVersion11,
285
+ cookie_jar: Optional[AbstractCookieJar] = None,
286
+ connector_owner: bool = True,
287
+ raise_for_status: Union[
288
+ bool, Callable[[ClientResponse], Awaitable[None]]
289
+ ] = False,
290
+ read_timeout: Union[float, _SENTINEL] = sentinel,
291
+ conn_timeout: Optional[float] = None,
292
+ timeout: Union[object, ClientTimeout] = sentinel,
293
+ auto_decompress: bool = True,
294
+ trust_env: bool = False,
295
+ requote_redirect_url: bool = True,
296
+ trace_configs: Optional[List[TraceConfig]] = None,
297
+ read_bufsize: int = 2**16,
298
+ max_line_size: int = 8190,
299
+ max_field_size: int = 8190,
300
+ fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8",
301
+ ) -> None:
302
+ # We initialise _connector to None immediately, as it's referenced in __del__()
303
+ # and could cause issues if an exception occurs during initialisation.
304
+ self._connector: Optional[BaseConnector] = None
305
+
306
+ if loop is None:
307
+ if connector is not None:
308
+ loop = connector._loop
309
+
310
+ loop = loop or asyncio.get_running_loop()
311
+
312
+ if base_url is None or isinstance(base_url, URL):
313
+ self._base_url: Optional[URL] = base_url
314
+ self._base_url_origin = None if base_url is None else base_url.origin()
315
+ else:
316
+ self._base_url = URL(base_url)
317
+ self._base_url_origin = self._base_url.origin()
318
+ assert self._base_url.absolute, "Only absolute URLs are supported"
319
+ if self._base_url is not None and not self._base_url.path.endswith("/"):
320
+ raise ValueError("base_url must have a trailing '/'")
321
+
322
+ if timeout is sentinel or timeout is None:
323
+ self._timeout = DEFAULT_TIMEOUT
324
+ if read_timeout is not sentinel:
325
+ warnings.warn(
326
+ "read_timeout is deprecated, use timeout argument instead",
327
+ DeprecationWarning,
328
+ stacklevel=2,
329
+ )
330
+ self._timeout = attr.evolve(self._timeout, total=read_timeout)
331
+ if conn_timeout is not None:
332
+ self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
333
+ warnings.warn(
334
+ "conn_timeout is deprecated, use timeout argument instead",
335
+ DeprecationWarning,
336
+ stacklevel=2,
337
+ )
338
+ else:
339
+ if not isinstance(timeout, ClientTimeout):
340
+ raise ValueError(
341
+ f"timeout parameter cannot be of {type(timeout)} type, "
342
+ "please use 'timeout=ClientTimeout(...)'",
343
+ )
344
+ self._timeout = timeout
345
+ if read_timeout is not sentinel:
346
+ raise ValueError(
347
+ "read_timeout and timeout parameters "
348
+ "conflict, please setup "
349
+ "timeout.read"
350
+ )
351
+ if conn_timeout is not None:
352
+ raise ValueError(
353
+ "conn_timeout and timeout parameters "
354
+ "conflict, please setup "
355
+ "timeout.connect"
356
+ )
357
+
358
+ if connector is None:
359
+ connector = TCPConnector(loop=loop)
360
+
361
+ if connector._loop is not loop:
362
+ raise RuntimeError("Session and connector has to use same event loop")
363
+
364
+ self._loop = loop
365
+
366
+ if loop.get_debug():
367
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
368
+
369
+ if cookie_jar is None:
370
+ cookie_jar = CookieJar(loop=loop)
371
+ self._cookie_jar = cookie_jar
372
+
373
+ if cookies:
374
+ self._cookie_jar.update_cookies(cookies)
375
+
376
+ self._connector = connector
377
+ self._connector_owner = connector_owner
378
+ self._default_auth = auth
379
+ self._version = version
380
+ self._json_serialize = json_serialize
381
+ self._raise_for_status = raise_for_status
382
+ self._auto_decompress = auto_decompress
383
+ self._trust_env = trust_env
384
+ self._requote_redirect_url = requote_redirect_url
385
+ self._read_bufsize = read_bufsize
386
+ self._max_line_size = max_line_size
387
+ self._max_field_size = max_field_size
388
+
389
+ # Convert to list of tuples
390
+ if headers:
391
+ real_headers: CIMultiDict[str] = CIMultiDict(headers)
392
+ else:
393
+ real_headers = CIMultiDict()
394
+ self._default_headers: CIMultiDict[str] = real_headers
395
+ if skip_auto_headers is not None:
396
+ self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
397
+ else:
398
+ self._skip_auto_headers = frozenset()
399
+
400
+ self._request_class = request_class
401
+ self._response_class = response_class
402
+ self._ws_response_class = ws_response_class
403
+
404
+ self._trace_configs = trace_configs or []
405
+ for trace_config in self._trace_configs:
406
+ trace_config.freeze()
407
+
408
+ self._resolve_charset = fallback_charset_resolver
409
+
410
+ self._default_proxy = proxy
411
+ self._default_proxy_auth = proxy_auth
412
+ self._retry_connection: bool = True
413
+
414
+ def __init_subclass__(cls: Type["ClientSession"]) -> None:
415
+ warnings.warn(
416
+ "Inheritance class {} from ClientSession "
417
+ "is discouraged".format(cls.__name__),
418
+ DeprecationWarning,
419
+ stacklevel=2,
420
+ )
421
+
422
+ if DEBUG:
423
+
424
+ def __setattr__(self, name: str, val: Any) -> None:
425
+ if name not in self.ATTRS:
426
+ warnings.warn(
427
+ "Setting custom ClientSession.{} attribute "
428
+ "is discouraged".format(name),
429
+ DeprecationWarning,
430
+ stacklevel=2,
431
+ )
432
+ super().__setattr__(name, val)
433
+
434
+ def __del__(self, _warnings: Any = warnings) -> None:
435
+ if not self.closed:
436
+ kwargs = {"source": self}
437
+ _warnings.warn(
438
+ f"Unclosed client session {self!r}", ResourceWarning, **kwargs
439
+ )
440
+ context = {"client_session": self, "message": "Unclosed client session"}
441
+ if self._source_traceback is not None:
442
+ context["source_traceback"] = self._source_traceback
443
+ self._loop.call_exception_handler(context)
444
+
445
+ if sys.version_info >= (3, 11) and TYPE_CHECKING:
446
+
447
+ def request(
448
+ self,
449
+ method: str,
450
+ url: StrOrURL,
451
+ **kwargs: Unpack[_RequestOptions],
452
+ ) -> "_RequestContextManager": ...
453
+
454
+ else:
455
+
456
+ def request(
457
+ self, method: str, url: StrOrURL, **kwargs: Any
458
+ ) -> "_RequestContextManager":
459
+ """Perform HTTP request."""
460
+ return _RequestContextManager(self._request(method, url, **kwargs))
461
+
462
+ def _build_url(self, str_or_url: StrOrURL) -> URL:
463
+ url = URL(str_or_url)
464
+ if self._base_url is None:
465
+ return url
466
+ else:
467
+ assert not url.absolute
468
+ return self._base_url.join(url)
469
+
470
+ async def _request(
471
+ self,
472
+ method: str,
473
+ str_or_url: StrOrURL,
474
+ *,
475
+ params: Query = None,
476
+ data: Any = None,
477
+ json: Any = None,
478
+ cookies: Optional[LooseCookies] = None,
479
+ headers: Optional[LooseHeaders] = None,
480
+ skip_auto_headers: Optional[Iterable[str]] = None,
481
+ auth: Optional[BasicAuth] = None,
482
+ allow_redirects: bool = True,
483
+ max_redirects: int = 10,
484
+ compress: Union[str, bool, None] = None,
485
+ chunked: Optional[bool] = None,
486
+ expect100: bool = False,
487
+ raise_for_status: Union[
488
+ None, bool, Callable[[ClientResponse], Awaitable[None]]
489
+ ] = None,
490
+ read_until_eof: bool = True,
491
+ proxy: Optional[StrOrURL] = None,
492
+ proxy_auth: Optional[BasicAuth] = None,
493
+ timeout: Union[ClientTimeout, _SENTINEL] = sentinel,
494
+ verify_ssl: Optional[bool] = None,
495
+ fingerprint: Optional[bytes] = None,
496
+ ssl_context: Optional[SSLContext] = None,
497
+ ssl: Union[SSLContext, bool, Fingerprint] = True,
498
+ server_hostname: Optional[str] = None,
499
+ proxy_headers: Optional[LooseHeaders] = None,
500
+ trace_request_ctx: Optional[Mapping[str, Any]] = None,
501
+ read_bufsize: Optional[int] = None,
502
+ auto_decompress: Optional[bool] = None,
503
+ max_line_size: Optional[int] = None,
504
+ max_field_size: Optional[int] = None,
505
+ ) -> ClientResponse:
506
+
507
+ # NOTE: timeout clamps existing connect and read timeouts. We cannot
508
+ # set the default to None because we need to detect if the user wants
509
+ # to use the existing timeouts by setting timeout to None.
510
+
511
+ if self.closed:
512
+ raise RuntimeError("Session is closed")
513
+
514
+ ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
515
+
516
+ if data is not None and json is not None:
517
+ raise ValueError(
518
+ "data and json parameters can not be used at the same time"
519
+ )
520
+ elif json is not None:
521
+ data = payload.JsonPayload(json, dumps=self._json_serialize)
522
+
523
+ if not isinstance(chunked, bool) and chunked is not None:
524
+ warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
525
+
526
+ redirects = 0
527
+ history: List[ClientResponse] = []
528
+ version = self._version
529
+ params = params or {}
530
+
531
+ # Merge with default headers and transform to CIMultiDict
532
+ headers = self._prepare_headers(headers)
533
+
534
+ try:
535
+ url = self._build_url(str_or_url)
536
+ except ValueError as e:
537
+ raise InvalidUrlClientError(str_or_url) from e
538
+
539
+ assert self._connector is not None
540
+ if url.scheme not in self._connector.allowed_protocol_schema_set:
541
+ raise NonHttpUrlClientError(url)
542
+
543
+ skip_headers: Optional[Iterable[istr]]
544
+ if skip_auto_headers is not None:
545
+ skip_headers = {
546
+ istr(i) for i in skip_auto_headers
547
+ } | self._skip_auto_headers
548
+ elif self._skip_auto_headers:
549
+ skip_headers = self._skip_auto_headers
550
+ else:
551
+ skip_headers = None
552
+
553
+ if proxy is None:
554
+ proxy = self._default_proxy
555
+ if proxy_auth is None:
556
+ proxy_auth = self._default_proxy_auth
557
+
558
+ if proxy is None:
559
+ proxy_headers = None
560
+ else:
561
+ proxy_headers = self._prepare_headers(proxy_headers)
562
+ try:
563
+ proxy = URL(proxy)
564
+ except ValueError as e:
565
+ raise InvalidURL(proxy) from e
566
+
567
+ if timeout is sentinel:
568
+ real_timeout: ClientTimeout = self._timeout
569
+ else:
570
+ if not isinstance(timeout, ClientTimeout):
571
+ real_timeout = ClientTimeout(total=timeout)
572
+ else:
573
+ real_timeout = timeout
574
+ # timeout is cumulative for all request operations
575
+ # (request, redirects, responses, data consuming)
576
+ tm = TimeoutHandle(
577
+ self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold
578
+ )
579
+ handle = tm.start()
580
+
581
+ if read_bufsize is None:
582
+ read_bufsize = self._read_bufsize
583
+
584
+ if auto_decompress is None:
585
+ auto_decompress = self._auto_decompress
586
+
587
+ if max_line_size is None:
588
+ max_line_size = self._max_line_size
589
+
590
+ if max_field_size is None:
591
+ max_field_size = self._max_field_size
592
+
593
+ traces = [
594
+ Trace(
595
+ self,
596
+ trace_config,
597
+ trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx),
598
+ )
599
+ for trace_config in self._trace_configs
600
+ ]
601
+
602
+ for trace in traces:
603
+ await trace.send_request_start(method, url.update_query(params), headers)
604
+
605
+ timer = tm.timer()
606
+ try:
607
+ with timer:
608
+ # https://www.rfc-editor.org/rfc/rfc9112.html#name-retrying-requests
609
+ retry_persistent_connection = (
610
+ self._retry_connection and method in IDEMPOTENT_METHODS
611
+ )
612
+ while True:
613
+ url, auth_from_url = strip_auth_from_url(url)
614
+ if not url.raw_host:
615
+ # NOTE: Bail early, otherwise, causes `InvalidURL` through
616
+ # NOTE: `self._request_class()` below.
617
+ err_exc_cls = (
618
+ InvalidUrlRedirectClientError
619
+ if redirects
620
+ else InvalidUrlClientError
621
+ )
622
+ raise err_exc_cls(url)
623
+ # If `auth` was passed for an already authenticated URL,
624
+ # disallow only if this is the initial URL; this is to avoid issues
625
+ # with sketchy redirects that are not the caller's responsibility
626
+ if not history and (auth and auth_from_url):
627
+ raise ValueError(
628
+ "Cannot combine AUTH argument with "
629
+ "credentials encoded in URL"
630
+ )
631
+
632
+ # Override the auth with the one from the URL only if we
633
+ # have no auth, or if we got an auth from a redirect URL
634
+ if auth is None or (history and auth_from_url is not None):
635
+ auth = auth_from_url
636
+
637
+ if (
638
+ auth is None
639
+ and self._default_auth
640
+ and (
641
+ not self._base_url or self._base_url_origin == url.origin()
642
+ )
643
+ ):
644
+ auth = self._default_auth
645
+ # It would be confusing if we support explicit
646
+ # Authorization header with auth argument
647
+ if (
648
+ headers is not None
649
+ and auth is not None
650
+ and hdrs.AUTHORIZATION in headers
651
+ ):
652
+ raise ValueError(
653
+ "Cannot combine AUTHORIZATION header "
654
+ "with AUTH argument or credentials "
655
+ "encoded in URL"
656
+ )
657
+
658
+ all_cookies = self._cookie_jar.filter_cookies(url)
659
+
660
+ if cookies is not None:
661
+ tmp_cookie_jar = CookieJar()
662
+ tmp_cookie_jar.update_cookies(cookies)
663
+ req_cookies = tmp_cookie_jar.filter_cookies(url)
664
+ if req_cookies:
665
+ all_cookies.load(req_cookies)
666
+
667
+ if proxy is not None:
668
+ proxy = URL(proxy)
669
+ elif self._trust_env:
670
+ with suppress(LookupError):
671
+ proxy, proxy_auth = get_env_proxy_for_url(url)
672
+
673
+ req = self._request_class(
674
+ method,
675
+ url,
676
+ params=params,
677
+ headers=headers,
678
+ skip_auto_headers=skip_headers,
679
+ data=data,
680
+ cookies=all_cookies,
681
+ auth=auth,
682
+ version=version,
683
+ compress=compress,
684
+ chunked=chunked,
685
+ expect100=expect100,
686
+ loop=self._loop,
687
+ response_class=self._response_class,
688
+ proxy=proxy,
689
+ proxy_auth=proxy_auth,
690
+ timer=timer,
691
+ session=self,
692
+ ssl=ssl if ssl is not None else True,
693
+ server_hostname=server_hostname,
694
+ proxy_headers=proxy_headers,
695
+ traces=traces,
696
+ trust_env=self.trust_env,
697
+ )
698
+
699
+ # connection timeout
700
+ try:
701
+ conn = await self._connector.connect(
702
+ req, traces=traces, timeout=real_timeout
703
+ )
704
+ except asyncio.TimeoutError as exc:
705
+ raise ConnectionTimeoutError(
706
+ f"Connection timeout to host {url}"
707
+ ) from exc
708
+
709
+ assert conn.transport is not None
710
+
711
+ assert conn.protocol is not None
712
+ conn.protocol.set_response_params(
713
+ timer=timer,
714
+ skip_payload=method in EMPTY_BODY_METHODS,
715
+ read_until_eof=read_until_eof,
716
+ auto_decompress=auto_decompress,
717
+ read_timeout=real_timeout.sock_read,
718
+ read_bufsize=read_bufsize,
719
+ timeout_ceil_threshold=self._connector._timeout_ceil_threshold,
720
+ max_line_size=max_line_size,
721
+ max_field_size=max_field_size,
722
+ )
723
+
724
+ try:
725
+ try:
726
+ resp = await req.send(conn)
727
+ try:
728
+ await resp.start(conn)
729
+ except BaseException:
730
+ resp.close()
731
+ raise
732
+ except BaseException:
733
+ conn.close()
734
+ raise
735
+ except (ClientOSError, ServerDisconnectedError):
736
+ if retry_persistent_connection:
737
+ retry_persistent_connection = False
738
+ continue
739
+ raise
740
+ except ClientError:
741
+ raise
742
+ except OSError as exc:
743
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
744
+ raise
745
+ raise ClientOSError(*exc.args) from exc
746
+
747
+ if cookies := resp._cookies:
748
+ self._cookie_jar.update_cookies(cookies, resp.url)
749
+
750
+ # redirects
751
+ if resp.status in (301, 302, 303, 307, 308) and allow_redirects:
752
+
753
+ for trace in traces:
754
+ await trace.send_request_redirect(
755
+ method, url.update_query(params), headers, resp
756
+ )
757
+
758
+ redirects += 1
759
+ history.append(resp)
760
+ if max_redirects and redirects >= max_redirects:
761
+ resp.close()
762
+ raise TooManyRedirects(
763
+ history[0].request_info, tuple(history)
764
+ )
765
+
766
+ # For 301 and 302, mimic IE, now changed in RFC
767
+ # https://github.com/kennethreitz/requests/pull/269
768
+ if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (
769
+ resp.status in (301, 302) and resp.method == hdrs.METH_POST
770
+ ):
771
+ method = hdrs.METH_GET
772
+ data = None
773
+ if headers.get(hdrs.CONTENT_LENGTH):
774
+ headers.pop(hdrs.CONTENT_LENGTH)
775
+
776
+ r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(
777
+ hdrs.URI
778
+ )
779
+ if r_url is None:
780
+ # see github.com/aio-libs/aiohttp/issues/2022
781
+ break
782
+ else:
783
+ # reading from correct redirection
784
+ # response is forbidden
785
+ resp.release()
786
+
787
+ try:
788
+ parsed_redirect_url = URL(
789
+ r_url, encoded=not self._requote_redirect_url
790
+ )
791
+ except ValueError as e:
792
+ raise InvalidUrlRedirectClientError(
793
+ r_url,
794
+ "Server attempted redirecting to a location that does not look like a URL",
795
+ ) from e
796
+
797
+ scheme = parsed_redirect_url.scheme
798
+ if scheme not in HTTP_AND_EMPTY_SCHEMA_SET:
799
+ resp.close()
800
+ raise NonHttpUrlRedirectClientError(r_url)
801
+ elif not scheme:
802
+ parsed_redirect_url = url.join(parsed_redirect_url)
803
+
804
+ try:
805
+ redirect_origin = parsed_redirect_url.origin()
806
+ except ValueError as origin_val_err:
807
+ raise InvalidUrlRedirectClientError(
808
+ parsed_redirect_url,
809
+ "Invalid redirect URL origin",
810
+ ) from origin_val_err
811
+
812
+ if url.origin() != redirect_origin:
813
+ auth = None
814
+ headers.pop(hdrs.AUTHORIZATION, None)
815
+
816
+ url = parsed_redirect_url
817
+ params = {}
818
+ resp.release()
819
+ continue
820
+
821
+ break
822
+
823
+ # check response status
824
+ if raise_for_status is None:
825
+ raise_for_status = self._raise_for_status
826
+
827
+ if raise_for_status is None:
828
+ pass
829
+ elif callable(raise_for_status):
830
+ await raise_for_status(resp)
831
+ elif raise_for_status:
832
+ resp.raise_for_status()
833
+
834
+ # register connection
835
+ if handle is not None:
836
+ if resp.connection is not None:
837
+ resp.connection.add_callback(handle.cancel)
838
+ else:
839
+ handle.cancel()
840
+
841
+ resp._history = tuple(history)
842
+
843
+ for trace in traces:
844
+ await trace.send_request_end(
845
+ method, url.update_query(params), headers, resp
846
+ )
847
+ return resp
848
+
849
+ except BaseException as e:
850
+ # cleanup timer
851
+ tm.close()
852
+ if handle:
853
+ handle.cancel()
854
+ handle = None
855
+
856
+ for trace in traces:
857
+ await trace.send_request_exception(
858
+ method, url.update_query(params), headers, e
859
+ )
860
+ raise
861
+
862
+ def ws_connect(
863
+ self,
864
+ url: StrOrURL,
865
+ *,
866
+ method: str = hdrs.METH_GET,
867
+ protocols: Iterable[str] = (),
868
+ timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel,
869
+ receive_timeout: Optional[float] = None,
870
+ autoclose: bool = True,
871
+ autoping: bool = True,
872
+ heartbeat: Optional[float] = None,
873
+ auth: Optional[BasicAuth] = None,
874
+ origin: Optional[str] = None,
875
+ params: Query = None,
876
+ headers: Optional[LooseHeaders] = None,
877
+ proxy: Optional[StrOrURL] = None,
878
+ proxy_auth: Optional[BasicAuth] = None,
879
+ ssl: Union[SSLContext, bool, Fingerprint] = True,
880
+ verify_ssl: Optional[bool] = None,
881
+ fingerprint: Optional[bytes] = None,
882
+ ssl_context: Optional[SSLContext] = None,
883
+ server_hostname: Optional[str] = None,
884
+ proxy_headers: Optional[LooseHeaders] = None,
885
+ compress: int = 0,
886
+ max_msg_size: int = 4 * 1024 * 1024,
887
+ ) -> "_WSRequestContextManager":
888
+ """Initiate websocket connection."""
889
+ return _WSRequestContextManager(
890
+ self._ws_connect(
891
+ url,
892
+ method=method,
893
+ protocols=protocols,
894
+ timeout=timeout,
895
+ receive_timeout=receive_timeout,
896
+ autoclose=autoclose,
897
+ autoping=autoping,
898
+ heartbeat=heartbeat,
899
+ auth=auth,
900
+ origin=origin,
901
+ params=params,
902
+ headers=headers,
903
+ proxy=proxy,
904
+ proxy_auth=proxy_auth,
905
+ ssl=ssl,
906
+ verify_ssl=verify_ssl,
907
+ fingerprint=fingerprint,
908
+ ssl_context=ssl_context,
909
+ server_hostname=server_hostname,
910
+ proxy_headers=proxy_headers,
911
+ compress=compress,
912
+ max_msg_size=max_msg_size,
913
+ )
914
+ )
915
+
916
+ async def _ws_connect(
917
+ self,
918
+ url: StrOrURL,
919
+ *,
920
+ method: str = hdrs.METH_GET,
921
+ protocols: Iterable[str] = (),
922
+ timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel,
923
+ receive_timeout: Optional[float] = None,
924
+ autoclose: bool = True,
925
+ autoping: bool = True,
926
+ heartbeat: Optional[float] = None,
927
+ auth: Optional[BasicAuth] = None,
928
+ origin: Optional[str] = None,
929
+ params: Query = None,
930
+ headers: Optional[LooseHeaders] = None,
931
+ proxy: Optional[StrOrURL] = None,
932
+ proxy_auth: Optional[BasicAuth] = None,
933
+ ssl: Union[SSLContext, bool, Fingerprint] = True,
934
+ verify_ssl: Optional[bool] = None,
935
+ fingerprint: Optional[bytes] = None,
936
+ ssl_context: Optional[SSLContext] = None,
937
+ server_hostname: Optional[str] = None,
938
+ proxy_headers: Optional[LooseHeaders] = None,
939
+ compress: int = 0,
940
+ max_msg_size: int = 4 * 1024 * 1024,
941
+ ) -> ClientWebSocketResponse:
942
+ if timeout is not sentinel:
943
+ if isinstance(timeout, ClientWSTimeout):
944
+ ws_timeout = timeout
945
+ else:
946
+ warnings.warn(
947
+ "parameter 'timeout' of type 'float' "
948
+ "is deprecated, please use "
949
+ "'timeout=ClientWSTimeout(ws_close=...)'",
950
+ DeprecationWarning,
951
+ stacklevel=2,
952
+ )
953
+ ws_timeout = ClientWSTimeout(ws_close=timeout)
954
+ else:
955
+ ws_timeout = DEFAULT_WS_CLIENT_TIMEOUT
956
+ if receive_timeout is not None:
957
+ warnings.warn(
958
+ "float parameter 'receive_timeout' "
959
+ "is deprecated, please use parameter "
960
+ "'timeout=ClientWSTimeout(ws_receive=...)'",
961
+ DeprecationWarning,
962
+ stacklevel=2,
963
+ )
964
+ ws_timeout = attr.evolve(ws_timeout, ws_receive=receive_timeout)
965
+
966
+ if headers is None:
967
+ real_headers: CIMultiDict[str] = CIMultiDict()
968
+ else:
969
+ real_headers = CIMultiDict(headers)
970
+
971
+ default_headers = {
972
+ hdrs.UPGRADE: "websocket",
973
+ hdrs.CONNECTION: "Upgrade",
974
+ hdrs.SEC_WEBSOCKET_VERSION: "13",
975
+ }
976
+
977
+ for key, value in default_headers.items():
978
+ real_headers.setdefault(key, value)
979
+
980
+ sec_key = base64.b64encode(os.urandom(16))
981
+ real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()
982
+
983
+ if protocols:
984
+ real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols)
985
+ if origin is not None:
986
+ real_headers[hdrs.ORIGIN] = origin
987
+ if compress:
988
+ extstr = ws_ext_gen(compress=compress)
989
+ real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr
990
+
991
+ # For the sake of backward compatibility, if user passes in None, convert it to True
992
+ if ssl is None:
993
+ warnings.warn(
994
+ "ssl=None is deprecated, please use ssl=True",
995
+ DeprecationWarning,
996
+ stacklevel=2,
997
+ )
998
+ ssl = True
999
+ ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
1000
+
1001
+ # send request
1002
+ resp = await self.request(
1003
+ method,
1004
+ url,
1005
+ params=params,
1006
+ headers=real_headers,
1007
+ read_until_eof=False,
1008
+ auth=auth,
1009
+ proxy=proxy,
1010
+ proxy_auth=proxy_auth,
1011
+ ssl=ssl,
1012
+ server_hostname=server_hostname,
1013
+ proxy_headers=proxy_headers,
1014
+ )
1015
+
1016
+ try:
1017
+ # check handshake
1018
+ if resp.status != 101:
1019
+ raise WSServerHandshakeError(
1020
+ resp.request_info,
1021
+ resp.history,
1022
+ message="Invalid response status",
1023
+ status=resp.status,
1024
+ headers=resp.headers,
1025
+ )
1026
+
1027
+ if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket":
1028
+ raise WSServerHandshakeError(
1029
+ resp.request_info,
1030
+ resp.history,
1031
+ message="Invalid upgrade header",
1032
+ status=resp.status,
1033
+ headers=resp.headers,
1034
+ )
1035
+
1036
+ if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade":
1037
+ raise WSServerHandshakeError(
1038
+ resp.request_info,
1039
+ resp.history,
1040
+ message="Invalid connection header",
1041
+ status=resp.status,
1042
+ headers=resp.headers,
1043
+ )
1044
+
1045
+ # key calculation
1046
+ r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "")
1047
+ match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()
1048
+ if r_key != match:
1049
+ raise WSServerHandshakeError(
1050
+ resp.request_info,
1051
+ resp.history,
1052
+ message="Invalid challenge response",
1053
+ status=resp.status,
1054
+ headers=resp.headers,
1055
+ )
1056
+
1057
+ # websocket protocol
1058
+ protocol = None
1059
+ if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
1060
+ resp_protocols = [
1061
+ proto.strip()
1062
+ for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
1063
+ ]
1064
+
1065
+ for proto in resp_protocols:
1066
+ if proto in protocols:
1067
+ protocol = proto
1068
+ break
1069
+
1070
+ # websocket compress
1071
+ notakeover = False
1072
+ if compress:
1073
+ compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
1074
+ if compress_hdrs:
1075
+ try:
1076
+ compress, notakeover = ws_ext_parse(compress_hdrs)
1077
+ except WSHandshakeError as exc:
1078
+ raise WSServerHandshakeError(
1079
+ resp.request_info,
1080
+ resp.history,
1081
+ message=exc.args[0],
1082
+ status=resp.status,
1083
+ headers=resp.headers,
1084
+ ) from exc
1085
+ else:
1086
+ compress = 0
1087
+ notakeover = False
1088
+
1089
+ conn = resp.connection
1090
+ assert conn is not None
1091
+ conn_proto = conn.protocol
1092
+ assert conn_proto is not None
1093
+
1094
+ # For WS connection the read_timeout must be either receive_timeout or greater
1095
+ # None == no timeout, i.e. infinite timeout, so None is the max timeout possible
1096
+ if ws_timeout.ws_receive is None:
1097
+ # Reset regardless
1098
+ conn_proto.read_timeout = None
1099
+ elif conn_proto.read_timeout is not None:
1100
+ conn_proto.read_timeout = max(
1101
+ ws_timeout.ws_receive, conn_proto.read_timeout
1102
+ )
1103
+
1104
+ transport = conn.transport
1105
+ assert transport is not None
1106
+ reader = WebSocketDataQueue(conn_proto, 2**16, loop=self._loop)
1107
+ conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
1108
+ writer = WebSocketWriter(
1109
+ conn_proto,
1110
+ transport,
1111
+ use_mask=True,
1112
+ compress=compress,
1113
+ notakeover=notakeover,
1114
+ )
1115
+ except BaseException:
1116
+ resp.close()
1117
+ raise
1118
+ else:
1119
+ return self._ws_response_class(
1120
+ reader,
1121
+ writer,
1122
+ protocol,
1123
+ resp,
1124
+ ws_timeout,
1125
+ autoclose,
1126
+ autoping,
1127
+ self._loop,
1128
+ heartbeat=heartbeat,
1129
+ compress=compress,
1130
+ client_notakeover=notakeover,
1131
+ )
1132
+
1133
+ def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]":
1134
+ """Add default headers and transform it to CIMultiDict"""
1135
+ # Convert headers to MultiDict
1136
+ result = CIMultiDict(self._default_headers)
1137
+ if headers:
1138
+ if not isinstance(headers, (MultiDictProxy, MultiDict)):
1139
+ headers = CIMultiDict(headers)
1140
+ added_names: Set[str] = set()
1141
+ for key, value in headers.items():
1142
+ if key in added_names:
1143
+ result.add(key, value)
1144
+ else:
1145
+ result[key] = value
1146
+ added_names.add(key)
1147
+ return result
1148
+
1149
+ if sys.version_info >= (3, 11) and TYPE_CHECKING:
1150
+
1151
+ def get(
1152
+ self,
1153
+ url: StrOrURL,
1154
+ **kwargs: Unpack[_RequestOptions],
1155
+ ) -> "_RequestContextManager": ...
1156
+
1157
+ def options(
1158
+ self,
1159
+ url: StrOrURL,
1160
+ **kwargs: Unpack[_RequestOptions],
1161
+ ) -> "_RequestContextManager": ...
1162
+
1163
+ def head(
1164
+ self,
1165
+ url: StrOrURL,
1166
+ **kwargs: Unpack[_RequestOptions],
1167
+ ) -> "_RequestContextManager": ...
1168
+
1169
+ def post(
1170
+ self,
1171
+ url: StrOrURL,
1172
+ **kwargs: Unpack[_RequestOptions],
1173
+ ) -> "_RequestContextManager": ...
1174
+
1175
+ def put(
1176
+ self,
1177
+ url: StrOrURL,
1178
+ **kwargs: Unpack[_RequestOptions],
1179
+ ) -> "_RequestContextManager": ...
1180
+
1181
+ def patch(
1182
+ self,
1183
+ url: StrOrURL,
1184
+ **kwargs: Unpack[_RequestOptions],
1185
+ ) -> "_RequestContextManager": ...
1186
+
1187
+ def delete(
1188
+ self,
1189
+ url: StrOrURL,
1190
+ **kwargs: Unpack[_RequestOptions],
1191
+ ) -> "_RequestContextManager": ...
1192
+
1193
+ else:
1194
+
1195
+ def get(
1196
+ self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
1197
+ ) -> "_RequestContextManager":
1198
+ """Perform HTTP GET request."""
1199
+ return _RequestContextManager(
1200
+ self._request(
1201
+ hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs
1202
+ )
1203
+ )
1204
+
1205
+ def options(
1206
+ self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
1207
+ ) -> "_RequestContextManager":
1208
+ """Perform HTTP OPTIONS request."""
1209
+ return _RequestContextManager(
1210
+ self._request(
1211
+ hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs
1212
+ )
1213
+ )
1214
+
1215
+ def head(
1216
+ self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any
1217
+ ) -> "_RequestContextManager":
1218
+ """Perform HTTP HEAD request."""
1219
+ return _RequestContextManager(
1220
+ self._request(
1221
+ hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs
1222
+ )
1223
+ )
1224
+
1225
+ def post(
1226
+ self, url: StrOrURL, *, data: Any = None, **kwargs: Any
1227
+ ) -> "_RequestContextManager":
1228
+ """Perform HTTP POST request."""
1229
+ return _RequestContextManager(
1230
+ self._request(hdrs.METH_POST, url, data=data, **kwargs)
1231
+ )
1232
+
1233
+ def put(
1234
+ self, url: StrOrURL, *, data: Any = None, **kwargs: Any
1235
+ ) -> "_RequestContextManager":
1236
+ """Perform HTTP PUT request."""
1237
+ return _RequestContextManager(
1238
+ self._request(hdrs.METH_PUT, url, data=data, **kwargs)
1239
+ )
1240
+
1241
+ def patch(
1242
+ self, url: StrOrURL, *, data: Any = None, **kwargs: Any
1243
+ ) -> "_RequestContextManager":
1244
+ """Perform HTTP PATCH request."""
1245
+ return _RequestContextManager(
1246
+ self._request(hdrs.METH_PATCH, url, data=data, **kwargs)
1247
+ )
1248
+
1249
+ def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
1250
+ """Perform HTTP DELETE request."""
1251
+ return _RequestContextManager(
1252
+ self._request(hdrs.METH_DELETE, url, **kwargs)
1253
+ )
1254
+
1255
+ async def close(self) -> None:
1256
+ """Close underlying connector.
1257
+
1258
+ Release all acquired resources.
1259
+ """
1260
+ if not self.closed:
1261
+ if self._connector is not None and self._connector_owner:
1262
+ await self._connector.close()
1263
+ self._connector = None
1264
+
1265
+ @property
1266
+ def closed(self) -> bool:
1267
+ """Is client session closed.
1268
+
1269
+ A readonly property.
1270
+ """
1271
+ return self._connector is None or self._connector.closed
1272
+
1273
+ @property
1274
+ def connector(self) -> Optional[BaseConnector]:
1275
+ """Connector instance used for the session."""
1276
+ return self._connector
1277
+
1278
+ @property
1279
+ def cookie_jar(self) -> AbstractCookieJar:
1280
+ """The session cookies."""
1281
+ return self._cookie_jar
1282
+
1283
+ @property
1284
+ def version(self) -> Tuple[int, int]:
1285
+ """The session HTTP protocol version."""
1286
+ return self._version
1287
+
1288
+ @property
1289
+ def requote_redirect_url(self) -> bool:
1290
+ """Do URL requoting on redirection handling."""
1291
+ return self._requote_redirect_url
1292
+
1293
+ @requote_redirect_url.setter
1294
+ def requote_redirect_url(self, val: bool) -> None:
1295
+ """Do URL requoting on redirection handling."""
1296
+ warnings.warn(
1297
+ "session.requote_redirect_url modification is deprecated #2778",
1298
+ DeprecationWarning,
1299
+ stacklevel=2,
1300
+ )
1301
+ self._requote_redirect_url = val
1302
+
1303
+ @property
1304
+ def loop(self) -> asyncio.AbstractEventLoop:
1305
+ """Session's loop."""
1306
+ warnings.warn(
1307
+ "client.loop property is deprecated", DeprecationWarning, stacklevel=2
1308
+ )
1309
+ return self._loop
1310
+
1311
+ @property
1312
+ def timeout(self) -> ClientTimeout:
1313
+ """Timeout for the session."""
1314
+ return self._timeout
1315
+
1316
+ @property
1317
+ def headers(self) -> "CIMultiDict[str]":
1318
+ """The default headers of the client session."""
1319
+ return self._default_headers
1320
+
1321
+ @property
1322
+ def skip_auto_headers(self) -> FrozenSet[istr]:
1323
+ """Headers for which autogeneration should be skipped"""
1324
+ return self._skip_auto_headers
1325
+
1326
+ @property
1327
+ def auth(self) -> Optional[BasicAuth]:
1328
+ """An object that represents HTTP Basic Authorization"""
1329
+ return self._default_auth
1330
+
1331
+ @property
1332
+ def json_serialize(self) -> JSONEncoder:
1333
+ """Json serializer callable"""
1334
+ return self._json_serialize
1335
+
1336
+ @property
1337
+ def connector_owner(self) -> bool:
1338
+ """Should connector be closed on session closing"""
1339
+ return self._connector_owner
1340
+
1341
+ @property
1342
+ def raise_for_status(
1343
+ self,
1344
+ ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
1345
+ """Should `ClientResponse.raise_for_status()` be called for each response."""
1346
+ return self._raise_for_status
1347
+
1348
+ @property
1349
+ def auto_decompress(self) -> bool:
1350
+ """Should the body response be automatically decompressed."""
1351
+ return self._auto_decompress
1352
+
1353
+ @property
1354
+ def trust_env(self) -> bool:
1355
+ """
1356
+ Should proxies information from environment or netrc be trusted.
1357
+
1358
+ Information is from HTTP_PROXY / HTTPS_PROXY environment variables
1359
+ or ~/.netrc file if present.
1360
+ """
1361
+ return self._trust_env
1362
+
1363
+ @property
1364
+ def trace_configs(self) -> List[TraceConfig]:
1365
+ """A list of TraceConfig instances used for client tracing"""
1366
+ return self._trace_configs
1367
+
1368
+ def detach(self) -> None:
1369
+ """Detach connector from session without closing the former.
1370
+
1371
+ Session is switched to closed state anyway.
1372
+ """
1373
+ self._connector = None
1374
+
1375
+ def __enter__(self) -> None:
1376
+ raise TypeError("Use async with instead")
1377
+
1378
+ def __exit__(
1379
+ self,
1380
+ exc_type: Optional[Type[BaseException]],
1381
+ exc_val: Optional[BaseException],
1382
+ exc_tb: Optional[TracebackType],
1383
+ ) -> None:
1384
+ # __exit__ should exist in pair with __enter__ but never executed
1385
+ pass # pragma: no cover
1386
+
1387
+ async def __aenter__(self) -> "ClientSession":
1388
+ return self
1389
+
1390
+ async def __aexit__(
1391
+ self,
1392
+ exc_type: Optional[Type[BaseException]],
1393
+ exc_val: Optional[BaseException],
1394
+ exc_tb: Optional[TracebackType],
1395
+ ) -> None:
1396
+ await self.close()
1397
+
1398
+
1399
+ class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]):
1400
+
1401
+ __slots__ = ("_coro", "_resp")
1402
+
1403
+ def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None:
1404
+ self._coro: Coroutine["asyncio.Future[Any]", None, _RetType] = coro
1405
+
1406
+ def send(self, arg: None) -> "asyncio.Future[Any]":
1407
+ return self._coro.send(arg)
1408
+
1409
+ def throw(self, *args: Any, **kwargs: Any) -> "asyncio.Future[Any]":
1410
+ return self._coro.throw(*args, **kwargs)
1411
+
1412
+ def close(self) -> None:
1413
+ return self._coro.close()
1414
+
1415
+ def __await__(self) -> Generator[Any, None, _RetType]:
1416
+ ret = self._coro.__await__()
1417
+ return ret
1418
+
1419
+ def __iter__(self) -> Generator[Any, None, _RetType]:
1420
+ return self.__await__()
1421
+
1422
+ async def __aenter__(self) -> _RetType:
1423
+ self._resp: _RetType = await self._coro
1424
+ return await self._resp.__aenter__()
1425
+
1426
+ async def __aexit__(
1427
+ self,
1428
+ exc_type: Optional[Type[BaseException]],
1429
+ exc: Optional[BaseException],
1430
+ tb: Optional[TracebackType],
1431
+ ) -> None:
1432
+ await self._resp.__aexit__(exc_type, exc, tb)
1433
+
1434
+
1435
+ _RequestContextManager = _BaseRequestContextManager[ClientResponse]
1436
+ _WSRequestContextManager = _BaseRequestContextManager[ClientWebSocketResponse]
1437
+
1438
+
1439
+ class _SessionRequestContextManager:
1440
+
1441
+ __slots__ = ("_coro", "_resp", "_session")
1442
+
1443
+ def __init__(
1444
+ self,
1445
+ coro: Coroutine["asyncio.Future[Any]", None, ClientResponse],
1446
+ session: ClientSession,
1447
+ ) -> None:
1448
+ self._coro = coro
1449
+ self._resp: Optional[ClientResponse] = None
1450
+ self._session = session
1451
+
1452
+ async def __aenter__(self) -> ClientResponse:
1453
+ try:
1454
+ self._resp = await self._coro
1455
+ except BaseException:
1456
+ await self._session.close()
1457
+ raise
1458
+ else:
1459
+ return self._resp
1460
+
1461
+ async def __aexit__(
1462
+ self,
1463
+ exc_type: Optional[Type[BaseException]],
1464
+ exc: Optional[BaseException],
1465
+ tb: Optional[TracebackType],
1466
+ ) -> None:
1467
+ assert self._resp is not None
1468
+ self._resp.close()
1469
+ await self._session.close()
1470
+
1471
+
1472
+ def request(
1473
+ method: str,
1474
+ url: StrOrURL,
1475
+ *,
1476
+ params: Query = None,
1477
+ data: Any = None,
1478
+ json: Any = None,
1479
+ headers: Optional[LooseHeaders] = None,
1480
+ skip_auto_headers: Optional[Iterable[str]] = None,
1481
+ auth: Optional[BasicAuth] = None,
1482
+ allow_redirects: bool = True,
1483
+ max_redirects: int = 10,
1484
+ compress: Optional[str] = None,
1485
+ chunked: Optional[bool] = None,
1486
+ expect100: bool = False,
1487
+ raise_for_status: Optional[bool] = None,
1488
+ read_until_eof: bool = True,
1489
+ proxy: Optional[StrOrURL] = None,
1490
+ proxy_auth: Optional[BasicAuth] = None,
1491
+ timeout: Union[ClientTimeout, object] = sentinel,
1492
+ cookies: Optional[LooseCookies] = None,
1493
+ version: HttpVersion = http.HttpVersion11,
1494
+ connector: Optional[BaseConnector] = None,
1495
+ read_bufsize: Optional[int] = None,
1496
+ loop: Optional[asyncio.AbstractEventLoop] = None,
1497
+ max_line_size: int = 8190,
1498
+ max_field_size: int = 8190,
1499
+ ) -> _SessionRequestContextManager:
1500
+ """Constructs and sends a request.
1501
+
1502
+ Returns response object.
1503
+ method - HTTP method
1504
+ url - request url
1505
+ params - (optional) Dictionary or bytes to be sent in the query
1506
+ string of the new request
1507
+ data - (optional) Dictionary, bytes, or file-like object to
1508
+ send in the body of the request
1509
+ json - (optional) Any json compatible python object
1510
+ headers - (optional) Dictionary of HTTP Headers to send with
1511
+ the request
1512
+ cookies - (optional) Dict object to send with the request
1513
+ auth - (optional) BasicAuth named tuple represent HTTP Basic Auth
1514
+ auth - aiohttp.helpers.BasicAuth
1515
+ allow_redirects - (optional) If set to False, do not follow
1516
+ redirects
1517
+ version - Request HTTP version.
1518
+ compress - Set to True if request has to be compressed
1519
+ with deflate encoding.
1520
+ chunked - Set to chunk size for chunked transfer encoding.
1521
+ expect100 - Expect 100-continue response from server.
1522
+ connector - BaseConnector sub-class instance to support
1523
+ connection pooling.
1524
+ read_until_eof - Read response until eof if response
1525
+ does not have Content-Length header.
1526
+ loop - Optional event loop.
1527
+ timeout - Optional ClientTimeout settings structure, 5min
1528
+ total timeout by default.
1529
+ Usage::
1530
+ >>> import aiohttp
1531
+ >>> resp = await aiohttp.request('GET', 'http://python.org/')
1532
+ >>> resp
1533
+ <ClientResponse(python.org/) [200]>
1534
+ >>> data = await resp.read()
1535
+ """
1536
+ connector_owner = False
1537
+ if connector is None:
1538
+ connector_owner = True
1539
+ connector = TCPConnector(loop=loop, force_close=True)
1540
+
1541
+ session = ClientSession(
1542
+ loop=loop,
1543
+ cookies=cookies,
1544
+ version=version,
1545
+ timeout=timeout,
1546
+ connector=connector,
1547
+ connector_owner=connector_owner,
1548
+ )
1549
+
1550
+ return _SessionRequestContextManager(
1551
+ session._request(
1552
+ method,
1553
+ url,
1554
+ params=params,
1555
+ data=data,
1556
+ json=json,
1557
+ headers=headers,
1558
+ skip_auto_headers=skip_auto_headers,
1559
+ auth=auth,
1560
+ allow_redirects=allow_redirects,
1561
+ max_redirects=max_redirects,
1562
+ compress=compress,
1563
+ chunked=chunked,
1564
+ expect100=expect100,
1565
+ raise_for_status=raise_for_status,
1566
+ read_until_eof=read_until_eof,
1567
+ proxy=proxy,
1568
+ proxy_auth=proxy_auth,
1569
+ read_bufsize=read_bufsize,
1570
+ max_line_size=max_line_size,
1571
+ max_field_size=max_field_size,
1572
+ ),
1573
+ session,
1574
+ )
deepseek/lib/python3.10/site-packages/aiohttp/client_exceptions.py ADDED
@@ -0,0 +1,417 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """HTTP related errors."""
2
+
3
+ import asyncio
4
+ import warnings
5
+ from typing import TYPE_CHECKING, Optional, Tuple, Union
6
+
7
+ from multidict import MultiMapping
8
+
9
+ from .typedefs import StrOrURL
10
+
11
+ try:
12
+ import ssl
13
+
14
+ SSLContext = ssl.SSLContext
15
+ except ImportError: # pragma: no cover
16
+ ssl = SSLContext = None # type: ignore[assignment]
17
+
18
+
19
+ if TYPE_CHECKING:
20
+ from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
21
+ from .http_parser import RawResponseMessage
22
+ else:
23
+ RequestInfo = ClientResponse = ConnectionKey = RawResponseMessage = None
24
+
25
+ __all__ = (
26
+ "ClientError",
27
+ "ClientConnectionError",
28
+ "ClientConnectionResetError",
29
+ "ClientOSError",
30
+ "ClientConnectorError",
31
+ "ClientProxyConnectionError",
32
+ "ClientSSLError",
33
+ "ClientConnectorDNSError",
34
+ "ClientConnectorSSLError",
35
+ "ClientConnectorCertificateError",
36
+ "ConnectionTimeoutError",
37
+ "SocketTimeoutError",
38
+ "ServerConnectionError",
39
+ "ServerTimeoutError",
40
+ "ServerDisconnectedError",
41
+ "ServerFingerprintMismatch",
42
+ "ClientResponseError",
43
+ "ClientHttpProxyError",
44
+ "WSServerHandshakeError",
45
+ "ContentTypeError",
46
+ "ClientPayloadError",
47
+ "InvalidURL",
48
+ "InvalidUrlClientError",
49
+ "RedirectClientError",
50
+ "NonHttpUrlClientError",
51
+ "InvalidUrlRedirectClientError",
52
+ "NonHttpUrlRedirectClientError",
53
+ "WSMessageTypeError",
54
+ )
55
+
56
+
57
+ class ClientError(Exception):
58
+ """Base class for client connection errors."""
59
+
60
+
61
+ class ClientResponseError(ClientError):
62
+ """Base class for exceptions that occur after getting a response.
63
+
64
+ request_info: An instance of RequestInfo.
65
+ history: A sequence of responses, if redirects occurred.
66
+ status: HTTP status code.
67
+ message: Error message.
68
+ headers: Response headers.
69
+ """
70
+
71
+ def __init__(
72
+ self,
73
+ request_info: RequestInfo,
74
+ history: Tuple[ClientResponse, ...],
75
+ *,
76
+ code: Optional[int] = None,
77
+ status: Optional[int] = None,
78
+ message: str = "",
79
+ headers: Optional[MultiMapping[str]] = None,
80
+ ) -> None:
81
+ self.request_info = request_info
82
+ if code is not None:
83
+ if status is not None:
84
+ raise ValueError(
85
+ "Both code and status arguments are provided; "
86
+ "code is deprecated, use status instead"
87
+ )
88
+ warnings.warn(
89
+ "code argument is deprecated, use status instead",
90
+ DeprecationWarning,
91
+ stacklevel=2,
92
+ )
93
+ if status is not None:
94
+ self.status = status
95
+ elif code is not None:
96
+ self.status = code
97
+ else:
98
+ self.status = 0
99
+ self.message = message
100
+ self.headers = headers
101
+ self.history = history
102
+ self.args = (request_info, history)
103
+
104
+ def __str__(self) -> str:
105
+ return "{}, message={!r}, url={!r}".format(
106
+ self.status,
107
+ self.message,
108
+ str(self.request_info.real_url),
109
+ )
110
+
111
+ def __repr__(self) -> str:
112
+ args = f"{self.request_info!r}, {self.history!r}"
113
+ if self.status != 0:
114
+ args += f", status={self.status!r}"
115
+ if self.message != "":
116
+ args += f", message={self.message!r}"
117
+ if self.headers is not None:
118
+ args += f", headers={self.headers!r}"
119
+ return f"{type(self).__name__}({args})"
120
+
121
+ @property
122
+ def code(self) -> int:
123
+ warnings.warn(
124
+ "code property is deprecated, use status instead",
125
+ DeprecationWarning,
126
+ stacklevel=2,
127
+ )
128
+ return self.status
129
+
130
+ @code.setter
131
+ def code(self, value: int) -> None:
132
+ warnings.warn(
133
+ "code property is deprecated, use status instead",
134
+ DeprecationWarning,
135
+ stacklevel=2,
136
+ )
137
+ self.status = value
138
+
139
+
140
+ class ContentTypeError(ClientResponseError):
141
+ """ContentType found is not valid."""
142
+
143
+
144
+ class WSServerHandshakeError(ClientResponseError):
145
+ """websocket server handshake error."""
146
+
147
+
148
+ class ClientHttpProxyError(ClientResponseError):
149
+ """HTTP proxy error.
150
+
151
+ Raised in :class:`aiohttp.connector.TCPConnector` if
152
+ proxy responds with status other than ``200 OK``
153
+ on ``CONNECT`` request.
154
+ """
155
+
156
+
157
+ class TooManyRedirects(ClientResponseError):
158
+ """Client was redirected too many times."""
159
+
160
+
161
+ class ClientConnectionError(ClientError):
162
+ """Base class for client socket errors."""
163
+
164
+
165
+ class ClientConnectionResetError(ClientConnectionError, ConnectionResetError):
166
+ """ConnectionResetError"""
167
+
168
+
169
+ class ClientOSError(ClientConnectionError, OSError):
170
+ """OSError error."""
171
+
172
+
173
+ class ClientConnectorError(ClientOSError):
174
+ """Client connector error.
175
+
176
+ Raised in :class:`aiohttp.connector.TCPConnector` if
177
+ a connection can not be established.
178
+ """
179
+
180
+ def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
181
+ self._conn_key = connection_key
182
+ self._os_error = os_error
183
+ super().__init__(os_error.errno, os_error.strerror)
184
+ self.args = (connection_key, os_error)
185
+
186
+ @property
187
+ def os_error(self) -> OSError:
188
+ return self._os_error
189
+
190
+ @property
191
+ def host(self) -> str:
192
+ return self._conn_key.host
193
+
194
+ @property
195
+ def port(self) -> Optional[int]:
196
+ return self._conn_key.port
197
+
198
+ @property
199
+ def ssl(self) -> Union[SSLContext, bool, "Fingerprint"]:
200
+ return self._conn_key.ssl
201
+
202
+ def __str__(self) -> str:
203
+ return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
204
+ self, "default" if self.ssl is True else self.ssl, self.strerror
205
+ )
206
+
207
+ # OSError.__reduce__ does too much black magick
208
+ __reduce__ = BaseException.__reduce__
209
+
210
+
211
+ class ClientConnectorDNSError(ClientConnectorError):
212
+ """DNS resolution failed during client connection.
213
+
214
+ Raised in :class:`aiohttp.connector.TCPConnector` if
215
+ DNS resolution fails.
216
+ """
217
+
218
+
219
+ class ClientProxyConnectionError(ClientConnectorError):
220
+ """Proxy connection error.
221
+
222
+ Raised in :class:`aiohttp.connector.TCPConnector` if
223
+ connection to proxy can not be established.
224
+ """
225
+
226
+
227
+ class UnixClientConnectorError(ClientConnectorError):
228
+ """Unix connector error.
229
+
230
+ Raised in :py:class:`aiohttp.connector.UnixConnector`
231
+ if connection to unix socket can not be established.
232
+ """
233
+
234
+ def __init__(
235
+ self, path: str, connection_key: ConnectionKey, os_error: OSError
236
+ ) -> None:
237
+ self._path = path
238
+ super().__init__(connection_key, os_error)
239
+
240
+ @property
241
+ def path(self) -> str:
242
+ return self._path
243
+
244
+ def __str__(self) -> str:
245
+ return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
246
+ self, "default" if self.ssl is True else self.ssl, self.strerror
247
+ )
248
+
249
+
250
+ class ServerConnectionError(ClientConnectionError):
251
+ """Server connection errors."""
252
+
253
+
254
+ class ServerDisconnectedError(ServerConnectionError):
255
+ """Server disconnected."""
256
+
257
+ def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
258
+ if message is None:
259
+ message = "Server disconnected"
260
+
261
+ self.args = (message,)
262
+ self.message = message
263
+
264
+
265
+ class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
266
+ """Server timeout error."""
267
+
268
+
269
+ class ConnectionTimeoutError(ServerTimeoutError):
270
+ """Connection timeout error."""
271
+
272
+
273
+ class SocketTimeoutError(ServerTimeoutError):
274
+ """Socket timeout error."""
275
+
276
+
277
+ class ServerFingerprintMismatch(ServerConnectionError):
278
+ """SSL certificate does not match expected fingerprint."""
279
+
280
+ def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
281
+ self.expected = expected
282
+ self.got = got
283
+ self.host = host
284
+ self.port = port
285
+ self.args = (expected, got, host, port)
286
+
287
+ def __repr__(self) -> str:
288
+ return "<{} expected={!r} got={!r} host={!r} port={!r}>".format(
289
+ self.__class__.__name__, self.expected, self.got, self.host, self.port
290
+ )
291
+
292
+
293
+ class ClientPayloadError(ClientError):
294
+ """Response payload error."""
295
+
296
+
297
+ class InvalidURL(ClientError, ValueError):
298
+ """Invalid URL.
299
+
300
+ URL used for fetching is malformed, e.g. it doesn't contains host
301
+ part.
302
+ """
303
+
304
+ # Derive from ValueError for backward compatibility
305
+
306
+ def __init__(self, url: StrOrURL, description: Union[str, None] = None) -> None:
307
+ # The type of url is not yarl.URL because the exception can be raised
308
+ # on URL(url) call
309
+ self._url = url
310
+ self._description = description
311
+
312
+ if description:
313
+ super().__init__(url, description)
314
+ else:
315
+ super().__init__(url)
316
+
317
+ @property
318
+ def url(self) -> StrOrURL:
319
+ return self._url
320
+
321
+ @property
322
+ def description(self) -> "str | None":
323
+ return self._description
324
+
325
+ def __repr__(self) -> str:
326
+ return f"<{self.__class__.__name__} {self}>"
327
+
328
+ def __str__(self) -> str:
329
+ if self._description:
330
+ return f"{self._url} - {self._description}"
331
+ return str(self._url)
332
+
333
+
334
+ class InvalidUrlClientError(InvalidURL):
335
+ """Invalid URL client error."""
336
+
337
+
338
+ class RedirectClientError(ClientError):
339
+ """Client redirect error."""
340
+
341
+
342
+ class NonHttpUrlClientError(ClientError):
343
+ """Non http URL client error."""
344
+
345
+
346
+ class InvalidUrlRedirectClientError(InvalidUrlClientError, RedirectClientError):
347
+ """Invalid URL redirect client error."""
348
+
349
+
350
+ class NonHttpUrlRedirectClientError(NonHttpUrlClientError, RedirectClientError):
351
+ """Non http URL redirect client error."""
352
+
353
+
354
+ class ClientSSLError(ClientConnectorError):
355
+ """Base error for ssl.*Errors."""
356
+
357
+
358
+ if ssl is not None:
359
+ cert_errors = (ssl.CertificateError,)
360
+ cert_errors_bases = (
361
+ ClientSSLError,
362
+ ssl.CertificateError,
363
+ )
364
+
365
+ ssl_errors = (ssl.SSLError,)
366
+ ssl_error_bases = (ClientSSLError, ssl.SSLError)
367
+ else: # pragma: no cover
368
+ cert_errors = tuple()
369
+ cert_errors_bases = (
370
+ ClientSSLError,
371
+ ValueError,
372
+ )
373
+
374
+ ssl_errors = tuple()
375
+ ssl_error_bases = (ClientSSLError,)
376
+
377
+
378
+ class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc]
379
+ """Response ssl error."""
380
+
381
+
382
+ class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc]
383
+ """Response certificate error."""
384
+
385
+ def __init__(
386
+ self, connection_key: ConnectionKey, certificate_error: Exception
387
+ ) -> None:
388
+ self._conn_key = connection_key
389
+ self._certificate_error = certificate_error
390
+ self.args = (connection_key, certificate_error)
391
+
392
+ @property
393
+ def certificate_error(self) -> Exception:
394
+ return self._certificate_error
395
+
396
+ @property
397
+ def host(self) -> str:
398
+ return self._conn_key.host
399
+
400
+ @property
401
+ def port(self) -> Optional[int]:
402
+ return self._conn_key.port
403
+
404
+ @property
405
+ def ssl(self) -> bool:
406
+ return self._conn_key.is_ssl
407
+
408
+ def __str__(self) -> str:
409
+ return (
410
+ "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
411
+ "[{0.certificate_error.__class__.__name__}: "
412
+ "{0.certificate_error.args}]".format(self)
413
+ )
414
+
415
+
416
+ class WSMessageTypeError(TypeError):
417
+ """WebSocket message type is not valid."""
deepseek/lib/python3.10/site-packages/aiohttp/client_proto.py ADDED
@@ -0,0 +1,307 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ from contextlib import suppress
3
+ from typing import Any, Optional, Tuple
4
+
5
+ from .base_protocol import BaseProtocol
6
+ from .client_exceptions import (
7
+ ClientOSError,
8
+ ClientPayloadError,
9
+ ServerDisconnectedError,
10
+ SocketTimeoutError,
11
+ )
12
+ from .helpers import (
13
+ _EXC_SENTINEL,
14
+ EMPTY_BODY_STATUS_CODES,
15
+ BaseTimerContext,
16
+ set_exception,
17
+ )
18
+ from .http import HttpResponseParser, RawResponseMessage
19
+ from .http_exceptions import HttpProcessingError
20
+ from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
21
+
22
+
23
class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
    """Helper class to adapt between Protocol and StreamReader."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        BaseProtocol.__init__(self, loop=loop)
        DataQueue.__init__(self, loop)

        # True once this connection must not be reused for another request.
        self._should_close = False

        # Body stream of the response currently being read, if any.
        self._payload: Optional[StreamReader] = None
        self._skip_payload = False
        # Custom payload parser (e.g. websocket reader) set via set_parser().
        self._payload_parser = None

        self._timer = None

        # Bytes received while no parser was installed; replayed once a
        # parser becomes available (see data_received()).
        self._tail = b""
        self._upgraded = False
        self._parser: Optional[HttpResponseParser] = None

        self._read_timeout: Optional[float] = None
        self._read_timeout_handle: Optional[asyncio.TimerHandle] = None

        self._timeout_ceil_threshold: Optional[float] = 5

    @property
    def upgraded(self) -> bool:
        return self._upgraded

    @property
    def should_close(self) -> bool:
        # Any leftover state (unread payload, pending error, an upgrade,
        # a custom parser, or buffered/tail bytes) makes the connection
        # unsafe to reuse.
        return bool(
            self._should_close
            or (self._payload is not None and not self._payload.is_eof())
            or self._upgraded
            or self._exception is not None
            or self._payload_parser is not None
            or self._buffer
            or self._tail
        )

    def force_close(self) -> None:
        """Mark the connection as non-reusable."""
        self._should_close = True

    def close(self) -> None:
        transport = self.transport
        if transport is not None:
            transport.close()
            self.transport = None
            self._payload = None
        self._drop_timeout()

    def is_connected(self) -> bool:
        return self.transport is not None and not self.transport.is_closing()

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        self._drop_timeout()

        original_connection_error = exc
        reraised_exc = original_connection_error

        connection_closed_cleanly = original_connection_error is None

        # Let a custom payload parser see EOF before tearing down.
        if self._payload_parser is not None:
            with suppress(Exception):  # FIXME: log this somehow?
                self._payload_parser.feed_eof()

        uncompleted = None
        if self._parser is not None:
            try:
                uncompleted = self._parser.feed_eof()
            except Exception as underlying_exc:
                # The response body was cut short; surface that on the
                # payload stream so readers fail with a clear message.
                if self._payload is not None:
                    client_payload_exc_msg = (
                        f"Response payload is not completed: {underlying_exc !r}"
                    )
                    if not connection_closed_cleanly:
                        client_payload_exc_msg = (
                            f"{client_payload_exc_msg !s}. "
                            f"{original_connection_error !r}"
                        )
                    set_exception(
                        self._payload,
                        ClientPayloadError(client_payload_exc_msg),
                        underlying_exc,
                    )

        if not self.is_eof():
            if isinstance(original_connection_error, OSError):
                reraised_exc = ClientOSError(*original_connection_error.args)
            if connection_closed_cleanly:
                reraised_exc = ServerDisconnectedError(uncompleted)
            # assigns self._should_close to True as side effect,
            # we do it anyway below
            underlying_non_eof_exc = (
                _EXC_SENTINEL
                if connection_closed_cleanly
                else original_connection_error
            )
            assert underlying_non_eof_exc is not None
            assert reraised_exc is not None
            self.set_exception(reraised_exc, underlying_non_eof_exc)

        self._should_close = True
        self._parser = None
        self._payload = None
        self._payload_parser = None
        self._reading_paused = False

        super().connection_lost(reraised_exc)

    def eof_received(self) -> None:
        # should call parser.feed_eof() most likely
        self._drop_timeout()

    def pause_reading(self) -> None:
        super().pause_reading()
        # No data will arrive while paused, so the read timeout must not run.
        self._drop_timeout()

    def resume_reading(self) -> None:
        super().resume_reading()
        self._reschedule_timeout()

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        # A failed connection can never be reused.
        self._should_close = True
        self._drop_timeout()
        super().set_exception(exc, exc_cause)

    def set_parser(self, parser: Any, payload: Any) -> None:
        # TODO: actual types are:
        # parser: WebSocketReader
        # payload: WebSocketDataQueue
        # but they are not generic enough
        # Need an ABC for both types
        self._payload = payload
        self._payload_parser = parser

        self._drop_timeout()

        # Replay any bytes that arrived before the parser was installed.
        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def set_response_params(
        self,
        *,
        timer: Optional[BaseTimerContext] = None,
        skip_payload: bool = False,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
        read_timeout: Optional[float] = None,
        read_bufsize: int = 2**16,
        timeout_ceil_threshold: float = 5,
        max_line_size: int = 8190,
        max_field_size: int = 8190,
    ) -> None:
        """Configure the HTTP response parser for the next response."""
        self._skip_payload = skip_payload

        self._read_timeout = read_timeout

        self._timeout_ceil_threshold = timeout_ceil_threshold

        self._parser = HttpResponseParser(
            self,
            self._loop,
            read_bufsize,
            timer=timer,
            payload_exception=ClientPayloadError,
            response_with_body=not skip_payload,
            read_until_eof=read_until_eof,
            auto_decompress=auto_decompress,
            max_line_size=max_line_size,
            max_field_size=max_field_size,
        )

        # Replay any bytes that arrived before the parser was configured.
        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def _drop_timeout(self) -> None:
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()
            self._read_timeout_handle = None

    def _reschedule_timeout(self) -> None:
        timeout = self._read_timeout
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()

        if timeout:
            self._read_timeout_handle = self._loop.call_later(
                timeout, self._on_read_timeout
            )
        else:
            self._read_timeout_handle = None

    def start_timeout(self) -> None:
        self._reschedule_timeout()

    @property
    def read_timeout(self) -> Optional[float]:
        return self._read_timeout

    @read_timeout.setter
    def read_timeout(self, read_timeout: Optional[float]) -> None:
        self._read_timeout = read_timeout

    def _on_read_timeout(self) -> None:
        exc = SocketTimeoutError("Timeout on reading data from socket")
        self.set_exception(exc)
        # Also fail the payload stream so body readers wake up.
        if self._payload is not None:
            set_exception(self._payload, exc)

    def data_received(self, data: bytes) -> None:
        # Fresh data arrived: restart the read timeout clock.
        self._reschedule_timeout()

        if not data:
            return

        # custom payload parser - currently always WebSocketReader
        if self._payload_parser is not None:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self._payload = None
                self._payload_parser = None

                if tail:
                    self.data_received(tail)
            return

        if self._upgraded or self._parser is None:
            # i.e. websocket connection, websocket parser is not set yet
            self._tail += data
            return

        # parse http messages
        try:
            messages, upgraded, tail = self._parser.feed_data(data)
        except BaseException as underlying_exc:
            if self.transport is not None:
                # connection.release() could be called BEFORE
                # data_received(), the transport is already
                # closed in this case
                self.transport.close()
            # should_close is True after the call
            if isinstance(underlying_exc, HttpProcessingError):
                exc = HttpProcessingError(
                    code=underlying_exc.code,
                    message=underlying_exc.message,
                    headers=underlying_exc.headers,
                )
            else:
                exc = HttpProcessingError()
            self.set_exception(exc, underlying_exc)
            return

        self._upgraded = upgraded

        payload: Optional[StreamReader] = None
        for message, payload in messages:
            if message.should_close:
                self._should_close = True

            self._payload = payload

            # Responses that cannot have a body feed an empty payload.
            if self._skip_payload or message.code in EMPTY_BODY_STATUS_CODES:
                self.feed_data((message, EMPTY_PAYLOAD), 0)
            else:
                self.feed_data((message, payload), 0)

        if payload is not None:
            # new message(s) was processed
            # register timeout handler unsubscribing
            # either on end-of-stream or immediately for
            # EMPTY_PAYLOAD
            if payload is not EMPTY_PAYLOAD:
                payload.on_eof(self._drop_timeout)
            else:
                self._drop_timeout()

        if upgraded and tail:
            self.data_received(tail)
deepseek/lib/python3.10/site-packages/aiohttp/compression_utils.py ADDED
@@ -0,0 +1,173 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import zlib
3
+ from concurrent.futures import Executor
4
+ from typing import Optional, cast
5
+
6
+ try:
7
+ try:
8
+ import brotlicffi as brotli
9
+ except ImportError:
10
+ import brotli
11
+
12
+ HAS_BROTLI = True
13
+ except ImportError: # pragma: no cover
14
+ HAS_BROTLI = False
15
+
16
+ MAX_SYNC_CHUNK_SIZE = 1024
17
+
18
+
19
def encoding_to_mode(
    encoding: Optional[str] = None,
    suppress_deflate_header: bool = False,
) -> int:
    """Map a content encoding to the ``wbits`` value zlib expects.

    ``"gzip"`` selects gzip framing; any other encoding selects a zlib
    stream, or a raw deflate stream when *suppress_deflate_header* is set.
    """
    if encoding == "gzip":
        # +16 selects the gzip container (see the zlib manual on windowBits).
        return zlib.MAX_WBITS + 16
    if suppress_deflate_header:
        # Negative wbits means a raw deflate stream without header/trailer.
        return -zlib.MAX_WBITS
    return zlib.MAX_WBITS
27
+
28
+
29
class ZlibBaseHandler:
    """Common state shared by the zlib compressor/decompressor wrappers."""

    def __init__(
        self,
        mode: int,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        # Executor used to off-load work on large payloads.
        self._executor = executor
        # Payloads above this size run in the executor; None disables
        # the off-loop path entirely.
        self._max_sync_chunk_size = max_sync_chunk_size
        # wbits value handed straight to zlib.compressobj/decompressobj.
        self._mode = mode
39
+
40
+
41
class ZLibCompressor(ZlibBaseHandler):
    """Incremental zlib/gzip compressor with optional executor off-loading."""

    def __init__(
        self,
        encoding: Optional[str] = None,
        suppress_deflate_header: bool = False,
        level: Optional[int] = None,
        wbits: Optional[int] = None,
        strategy: int = zlib.Z_DEFAULT_STRATEGY,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        # An explicit wbits wins; otherwise derive the mode from the encoding.
        if wbits is not None:
            mode = wbits
        else:
            mode = encoding_to_mode(encoding, suppress_deflate_header)
        super().__init__(
            mode=mode,
            executor=executor,
            max_sync_chunk_size=max_sync_chunk_size,
        )
        # Only forward level when the caller supplied one, so zlib's own
        # default compression level applies otherwise.
        if level is not None:
            self._compressor = zlib.compressobj(
                wbits=self._mode, strategy=strategy, level=level
            )
        else:
            self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy)
        self._compress_lock = asyncio.Lock()

    def compress_sync(self, data: bytes) -> bytes:
        """Compress *data* synchronously on the calling thread."""
        return self._compressor.compress(data)

    async def compress(self, data: bytes) -> bytes:
        """Compress the data and return the compressed bytes.

        Note that flush() must be called after the last call to compress().

        Payloads larger than ``max_sync_chunk_size`` are compressed in the
        executor; smaller ones are handled inline on the event loop.
        """
        # The lock keeps the compressed stream consistent when multiple
        # writers share this compressor: only one may feed it at a time.
        async with self._compress_lock:
            chunk_limit = self._max_sync_chunk_size
            if chunk_limit is not None and len(data) > chunk_limit:
                loop = asyncio.get_running_loop()
                return await loop.run_in_executor(
                    self._executor, self._compressor.compress, data
                )
            return self.compress_sync(data)

    def flush(self, mode: int = zlib.Z_FINISH) -> bytes:
        """Flush the compressor and return any pending output bytes."""
        return self._compressor.flush(mode)
97
+
98
+
99
class ZLibDecompressor(ZlibBaseHandler):
    """Incremental zlib/gzip decompressor with optional executor off-loading."""

    def __init__(
        self,
        encoding: Optional[str] = None,
        suppress_deflate_header: bool = False,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        super().__init__(
            mode=encoding_to_mode(encoding, suppress_deflate_header),
            executor=executor,
            max_sync_chunk_size=max_sync_chunk_size,
        )
        self._decompressor = zlib.decompressobj(wbits=self._mode)

    def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes:
        """Decompress *data* synchronously on the calling thread."""
        return self._decompressor.decompress(data, max_length)

    async def decompress(self, data: bytes, max_length: int = 0) -> bytes:
        """Decompress the data and return the decompressed bytes.

        Inputs larger than ``max_sync_chunk_size`` are decompressed in the
        executor; smaller ones are handled inline on the event loop.
        """
        chunk_limit = self._max_sync_chunk_size
        if chunk_limit is None or len(data) <= chunk_limit:
            return self.decompress_sync(data, max_length)
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(
            self._executor, self._decompressor.decompress, data, max_length
        )

    def flush(self, length: int = 0) -> bytes:
        """Return any remaining buffered decompressed data."""
        if length > 0:
            return self._decompressor.flush(length)
        return self._decompressor.flush()

    @property
    def eof(self) -> bool:
        # True once the end of the compressed stream has been reached.
        return self._decompressor.eof

    @property
    def unconsumed_tail(self) -> bytes:
        # Data that exceeded the max_length limit of the last decompress().
        return self._decompressor.unconsumed_tail

    @property
    def unused_data(self) -> bytes:
        # Bytes found after the end of the compressed stream.
        return self._decompressor.unused_data
151
+
152
+
153
class BrotliDecompressor:
    """Incremental brotli decompressor.

    Supports both the 'brotlipy' and 'Brotli' packages, which share the
    ``brotli`` import name but expose slightly different decompressor APIs.
    """

    def __init__(self) -> None:
        if not HAS_BROTLI:
            raise RuntimeError(
                "The brotli decompression is not available. "
                "Please install `Brotli` module"
            )
        self._obj = brotli.Decompressor()

    def decompress_sync(self, data: bytes) -> bytes:
        # 'brotlipy' decompressors expose decompress(); 'Brotli' ones
        # expose process() instead.
        decompress = getattr(self._obj, "decompress", None)
        if decompress is not None:
            return cast(bytes, decompress(data))
        return cast(bytes, self._obj.process(data))

    def flush(self) -> bytes:
        # Only 'brotlipy' decompressors provide flush(); for 'Brotli'
        # there is nothing buffered to return.
        if hasattr(self._obj, "flush"):
            return cast(bytes, self._obj.flush())
        return b""
deepseek/lib/python3.10/site-packages/aiohttp/connector.py ADDED
@@ -0,0 +1,1646 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import functools
3
+ import random
4
+ import socket
5
+ import sys
6
+ import traceback
7
+ import warnings
8
+ from collections import OrderedDict, defaultdict, deque
9
+ from contextlib import suppress
10
+ from http import HTTPStatus
11
+ from itertools import chain, cycle, islice
12
+ from time import monotonic
13
+ from types import TracebackType
14
+ from typing import (
15
+ TYPE_CHECKING,
16
+ Any,
17
+ Awaitable,
18
+ Callable,
19
+ DefaultDict,
20
+ Deque,
21
+ Dict,
22
+ Iterator,
23
+ List,
24
+ Literal,
25
+ Optional,
26
+ Sequence,
27
+ Set,
28
+ Tuple,
29
+ Type,
30
+ Union,
31
+ cast,
32
+ )
33
+
34
+ import aiohappyeyeballs
35
+
36
+ from . import hdrs, helpers
37
+ from .abc import AbstractResolver, ResolveResult
38
+ from .client_exceptions import (
39
+ ClientConnectionError,
40
+ ClientConnectorCertificateError,
41
+ ClientConnectorDNSError,
42
+ ClientConnectorError,
43
+ ClientConnectorSSLError,
44
+ ClientHttpProxyError,
45
+ ClientProxyConnectionError,
46
+ ServerFingerprintMismatch,
47
+ UnixClientConnectorError,
48
+ cert_errors,
49
+ ssl_errors,
50
+ )
51
+ from .client_proto import ResponseHandler
52
+ from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params
53
+ from .helpers import (
54
+ ceil_timeout,
55
+ is_ip_address,
56
+ noop,
57
+ sentinel,
58
+ set_exception,
59
+ set_result,
60
+ )
61
+ from .resolver import DefaultResolver
62
+
63
+ try:
64
+ import ssl
65
+
66
+ SSLContext = ssl.SSLContext
67
+ except ImportError: # pragma: no cover
68
+ ssl = None # type: ignore[assignment]
69
+ SSLContext = object # type: ignore[misc,assignment]
70
+
71
+
72
+ EMPTY_SCHEMA_SET = frozenset({""})
73
+ HTTP_SCHEMA_SET = frozenset({"http", "https"})
74
+ WS_SCHEMA_SET = frozenset({"ws", "wss"})
75
+
76
+ HTTP_AND_EMPTY_SCHEMA_SET = HTTP_SCHEMA_SET | EMPTY_SCHEMA_SET
77
+ HIGH_LEVEL_SCHEMA_SET = HTTP_AND_EMPTY_SCHEMA_SET | WS_SCHEMA_SET
78
+
79
+ NEEDS_CLEANUP_CLOSED = (3, 13, 0) <= sys.version_info < (
80
+ 3,
81
+ 13,
82
+ 1,
83
+ ) or sys.version_info < (3, 12, 7)
84
+ # Cleanup closed is no longer needed after https://github.com/python/cpython/pull/118960
85
+ # which first appeared in Python 3.12.7 and 3.13.1
86
+
87
+
88
+ __all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector")
89
+
90
+
91
+ if TYPE_CHECKING:
92
+ from .client import ClientTimeout
93
+ from .client_reqrep import ConnectionKey
94
+ from .tracing import Trace
95
+
96
+
97
+ class _DeprecationWaiter:
98
+ __slots__ = ("_awaitable", "_awaited")
99
+
100
+ def __init__(self, awaitable: Awaitable[Any]) -> None:
101
+ self._awaitable = awaitable
102
+ self._awaited = False
103
+
104
+ def __await__(self) -> Any:
105
+ self._awaited = True
106
+ return self._awaitable.__await__()
107
+
108
+ def __del__(self) -> None:
109
+ if not self._awaited:
110
+ warnings.warn(
111
+ "Connector.close() is a coroutine, "
112
+ "please use await connector.close()",
113
+ DeprecationWarning,
114
+ )
115
+
116
+
117
+ class Connection:
118
+
119
+ _source_traceback = None
120
+
121
+ def __init__(
122
+ self,
123
+ connector: "BaseConnector",
124
+ key: "ConnectionKey",
125
+ protocol: ResponseHandler,
126
+ loop: asyncio.AbstractEventLoop,
127
+ ) -> None:
128
+ self._key = key
129
+ self._connector = connector
130
+ self._loop = loop
131
+ self._protocol: Optional[ResponseHandler] = protocol
132
+ self._callbacks: List[Callable[[], None]] = []
133
+
134
+ if loop.get_debug():
135
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
136
+
137
+ def __repr__(self) -> str:
138
+ return f"Connection<{self._key}>"
139
+
140
+ def __del__(self, _warnings: Any = warnings) -> None:
141
+ if self._protocol is not None:
142
+ kwargs = {"source": self}
143
+ _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs)
144
+ if self._loop.is_closed():
145
+ return
146
+
147
+ self._connector._release(self._key, self._protocol, should_close=True)
148
+
149
+ context = {"client_connection": self, "message": "Unclosed connection"}
150
+ if self._source_traceback is not None:
151
+ context["source_traceback"] = self._source_traceback
152
+ self._loop.call_exception_handler(context)
153
+
154
+ def __bool__(self) -> Literal[True]:
155
+ """Force subclasses to not be falsy, to make checks simpler."""
156
+ return True
157
+
158
+ @property
159
+ def loop(self) -> asyncio.AbstractEventLoop:
160
+ warnings.warn(
161
+ "connector.loop property is deprecated", DeprecationWarning, stacklevel=2
162
+ )
163
+ return self._loop
164
+
165
+ @property
166
+ def transport(self) -> Optional[asyncio.Transport]:
167
+ if self._protocol is None:
168
+ return None
169
+ return self._protocol.transport
170
+
171
+ @property
172
+ def protocol(self) -> Optional[ResponseHandler]:
173
+ return self._protocol
174
+
175
+ def add_callback(self, callback: Callable[[], None]) -> None:
176
+ if callback is not None:
177
+ self._callbacks.append(callback)
178
+
179
+ def _notify_release(self) -> None:
180
+ callbacks, self._callbacks = self._callbacks[:], []
181
+
182
+ for cb in callbacks:
183
+ with suppress(Exception):
184
+ cb()
185
+
186
+ def close(self) -> None:
187
+ self._notify_release()
188
+
189
+ if self._protocol is not None:
190
+ self._connector._release(self._key, self._protocol, should_close=True)
191
+ self._protocol = None
192
+
193
+ def release(self) -> None:
194
+ self._notify_release()
195
+
196
+ if self._protocol is not None:
197
+ self._connector._release(self._key, self._protocol)
198
+ self._protocol = None
199
+
200
+ @property
201
+ def closed(self) -> bool:
202
+ return self._protocol is None or not self._protocol.is_connected()
203
+
204
+
205
+ class _TransportPlaceholder:
206
+ """placeholder for BaseConnector.connect function"""
207
+
208
+ __slots__ = ()
209
+
210
+ def close(self) -> None:
211
+ """Close the placeholder transport."""
212
+
213
+
214
+ class BaseConnector:
215
+ """Base connector class.
216
+
217
+ keepalive_timeout - (optional) Keep-alive timeout.
218
+ force_close - Set to True to force close and do reconnect
219
+ after each request (and between redirects).
220
+ limit - The total number of simultaneous connections.
221
+ limit_per_host - Number of simultaneous connections to one host.
222
+ enable_cleanup_closed - Enables clean-up closed ssl transports.
223
+ Disabled by default.
224
+ timeout_ceil_threshold - Trigger ceiling of timeout values when
225
+ it's above timeout_ceil_threshold.
226
+ loop - Optional event loop.
227
+ """
228
+
229
+ _closed = True # prevent AttributeError in __del__ if ctor was failed
230
+ _source_traceback = None
231
+
232
+ # abort transport after 2 seconds (cleanup broken connections)
233
+ _cleanup_closed_period = 2.0
234
+
235
+ allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET
236
+
237
+ def __init__(
238
+ self,
239
+ *,
240
+ keepalive_timeout: Union[object, None, float] = sentinel,
241
+ force_close: bool = False,
242
+ limit: int = 100,
243
+ limit_per_host: int = 0,
244
+ enable_cleanup_closed: bool = False,
245
+ loop: Optional[asyncio.AbstractEventLoop] = None,
246
+ timeout_ceil_threshold: float = 5,
247
+ ) -> None:
248
+
249
+ if force_close:
250
+ if keepalive_timeout is not None and keepalive_timeout is not sentinel:
251
+ raise ValueError(
252
+ "keepalive_timeout cannot be set if force_close is True"
253
+ )
254
+ else:
255
+ if keepalive_timeout is sentinel:
256
+ keepalive_timeout = 15.0
257
+
258
+ loop = loop or asyncio.get_running_loop()
259
+ self._timeout_ceil_threshold = timeout_ceil_threshold
260
+
261
+ self._closed = False
262
+ if loop.get_debug():
263
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
264
+
265
+ # Connection pool of reusable connections.
266
+ # We use a deque to store connections because it has O(1) popleft()
267
+ # and O(1) append() operations to implement a FIFO queue.
268
+ self._conns: DefaultDict[
269
+ ConnectionKey, Deque[Tuple[ResponseHandler, float]]
270
+ ] = defaultdict(deque)
271
+ self._limit = limit
272
+ self._limit_per_host = limit_per_host
273
+ self._acquired: Set[ResponseHandler] = set()
274
+ self._acquired_per_host: DefaultDict[ConnectionKey, Set[ResponseHandler]] = (
275
+ defaultdict(set)
276
+ )
277
+ self._keepalive_timeout = cast(float, keepalive_timeout)
278
+ self._force_close = force_close
279
+
280
+ # {host_key: FIFO list of waiters}
281
+ # The FIFO is implemented with an OrderedDict with None keys because
282
+ # python does not have an ordered set.
283
+ self._waiters: DefaultDict[
284
+ ConnectionKey, OrderedDict[asyncio.Future[None], None]
285
+ ] = defaultdict(OrderedDict)
286
+
287
+ self._loop = loop
288
+ self._factory = functools.partial(ResponseHandler, loop=loop)
289
+
290
+ # start keep-alive connection cleanup task
291
+ self._cleanup_handle: Optional[asyncio.TimerHandle] = None
292
+
293
+ # start cleanup closed transports task
294
+ self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None
295
+
296
+ if enable_cleanup_closed and not NEEDS_CLEANUP_CLOSED:
297
+ warnings.warn(
298
+ "enable_cleanup_closed ignored because "
299
+ "https://github.com/python/cpython/pull/118960 is fixed "
300
+ f"in Python version {sys.version_info}",
301
+ DeprecationWarning,
302
+ stacklevel=2,
303
+ )
304
+ enable_cleanup_closed = False
305
+
306
+ self._cleanup_closed_disabled = not enable_cleanup_closed
307
+ self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = []
308
+ self._cleanup_closed()
309
+
310
+ def __del__(self, _warnings: Any = warnings) -> None:
311
+ if self._closed:
312
+ return
313
+ if not self._conns:
314
+ return
315
+
316
+ conns = [repr(c) for c in self._conns.values()]
317
+
318
+ self._close()
319
+
320
+ kwargs = {"source": self}
321
+ _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs)
322
+ context = {
323
+ "connector": self,
324
+ "connections": conns,
325
+ "message": "Unclosed connector",
326
+ }
327
+ if self._source_traceback is not None:
328
+ context["source_traceback"] = self._source_traceback
329
+ self._loop.call_exception_handler(context)
330
+
331
+ def __enter__(self) -> "BaseConnector":
332
+ warnings.warn(
333
+ '"with Connector():" is deprecated, '
334
+ 'use "async with Connector():" instead',
335
+ DeprecationWarning,
336
+ )
337
+ return self
338
+
339
+ def __exit__(self, *exc: Any) -> None:
340
+ self._close()
341
+
342
+ async def __aenter__(self) -> "BaseConnector":
343
+ return self
344
+
345
+ async def __aexit__(
346
+ self,
347
+ exc_type: Optional[Type[BaseException]] = None,
348
+ exc_value: Optional[BaseException] = None,
349
+ exc_traceback: Optional[TracebackType] = None,
350
+ ) -> None:
351
+ await self.close()
352
+
353
+ @property
354
+ def force_close(self) -> bool:
355
+ """Ultimately close connection on releasing if True."""
356
+ return self._force_close
357
+
358
+ @property
359
+ def limit(self) -> int:
360
+ """The total number for simultaneous connections.
361
+
362
+ If limit is 0 the connector has no limit.
363
+ The default limit size is 100.
364
+ """
365
+ return self._limit
366
+
367
+ @property
368
+ def limit_per_host(self) -> int:
369
+ """The limit for simultaneous connections to the same endpoint.
370
+
371
+ Endpoints are the same if they are have equal
372
+ (host, port, is_ssl) triple.
373
+ """
374
+ return self._limit_per_host
375
+
376
+ def _cleanup(self) -> None:
377
+ """Cleanup unused transports."""
378
+ if self._cleanup_handle:
379
+ self._cleanup_handle.cancel()
380
+ # _cleanup_handle should be unset, otherwise _release() will not
381
+ # recreate it ever!
382
+ self._cleanup_handle = None
383
+
384
+ now = monotonic()
385
+ timeout = self._keepalive_timeout
386
+
387
+ if self._conns:
388
+ connections = defaultdict(deque)
389
+ deadline = now - timeout
390
+ for key, conns in self._conns.items():
391
+ alive: Deque[Tuple[ResponseHandler, float]] = deque()
392
+ for proto, use_time in conns:
393
+ if proto.is_connected() and use_time - deadline >= 0:
394
+ alive.append((proto, use_time))
395
+ continue
396
+ transport = proto.transport
397
+ proto.close()
398
+ if not self._cleanup_closed_disabled and key.is_ssl:
399
+ self._cleanup_closed_transports.append(transport)
400
+
401
+ if alive:
402
+ connections[key] = alive
403
+
404
+ self._conns = connections
405
+
406
+ if self._conns:
407
+ self._cleanup_handle = helpers.weakref_handle(
408
+ self,
409
+ "_cleanup",
410
+ timeout,
411
+ self._loop,
412
+ timeout_ceil_threshold=self._timeout_ceil_threshold,
413
+ )
414
+
415
    def _cleanup_closed(self) -> None:
        """Double confirmation for transport close.

        Some broken ssl servers may leave socket open without proper close.
        Periodically aborts transports that were closed earlier but may not
        have torn down their sockets, then re-arms itself via a weakref
        handle unless the feature is disabled.
        """
        if self._cleanup_closed_handle:
            self._cleanup_closed_handle.cancel()

        for transport in self._cleanup_closed_transports:
            if transport is not None:
                transport.abort()

        self._cleanup_closed_transports = []

        if not self._cleanup_closed_disabled:
            self._cleanup_closed_handle = helpers.weakref_handle(
                self,
                "_cleanup_closed",
                self._cleanup_closed_period,
                self._loop,
                timeout_ceil_threshold=self._timeout_ceil_threshold,
            )
437
+
438
    def close(self) -> Awaitable[None]:
        """Close all opened transports.

        Closes synchronously but returns an awaitable so callers may also
        ``await connector.close()`` (the _DeprecationWaiter wrapper
        presumably tracks non-awaited usage — see its definition).
        """
        self._close()
        return _DeprecationWaiter(noop())
442
+
443
    def _close(self) -> None:
        """Synchronously close every pooled and acquired connection.

        Idempotent: the second and later calls return immediately. The
        ``finally`` block guarantees internal state is cleared and all
        waiters are cancelled even if closing a protocol raises.
        """
        if self._closed:
            return

        self._closed = True

        try:
            # Nothing can be closed through a dead loop; still fall
            # through to the finally block to clear internal state.
            if self._loop.is_closed():
                return

            # cancel cleanup task
            if self._cleanup_handle:
                self._cleanup_handle.cancel()

            # cancel cleanup close task
            if self._cleanup_closed_handle:
                self._cleanup_closed_handle.cancel()

            for data in self._conns.values():
                for proto, t0 in data:
                    proto.close()

            for proto in self._acquired:
                proto.close()

            for transport in self._cleanup_closed_transports:
                if transport is not None:
                    transport.abort()

        finally:
            self._conns.clear()
            self._acquired.clear()
            for keyed_waiters in self._waiters.values():
                for keyed_waiter in keyed_waiters:
                    keyed_waiter.cancel()
            self._waiters.clear()
            self._cleanup_handle = None
            self._cleanup_closed_transports.clear()
            self._cleanup_closed_handle = None
482
+
483
+ @property
484
+ def closed(self) -> bool:
485
+ """Is connector closed.
486
+
487
+ A readonly property.
488
+ """
489
+ return self._closed
490
+
491
+ def _available_connections(self, key: "ConnectionKey") -> int:
492
+ """
493
+ Return number of available connections.
494
+
495
+ The limit, limit_per_host and the connection key are taken into account.
496
+
497
+ If it returns less than 1 means that there are no connections
498
+ available.
499
+ """
500
+ # check total available connections
501
+ # If there are no limits, this will always return 1
502
+ total_remain = 1
503
+
504
+ if self._limit and (total_remain := self._limit - len(self._acquired)) <= 0:
505
+ return total_remain
506
+
507
+ # check limit per host
508
+ if host_remain := self._limit_per_host:
509
+ if acquired := self._acquired_per_host.get(key):
510
+ host_remain -= len(acquired)
511
+ if total_remain > host_remain:
512
+ return host_remain
513
+
514
+ return total_remain
515
+
516
    async def connect(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> Connection:
        """Get from pool or create new connection.

        Fast path: reuse a pooled connection with no timeout machinery.
        Slow path: wait for a free slot (bounded by ``timeout.connect``),
        then create a new connection, holding a placeholder in the
        acquired set so concurrent callers see the slot as taken.
        """
        key = req.connection_key
        if (conn := await self._get(key, traces)) is not None:
            # If we do not have to wait and we can get a connection from the pool
            # we can avoid the timeout ceil logic and directly return the connection
            return conn

        async with ceil_timeout(timeout.connect, timeout.ceil_threshold):
            if self._available_connections(key) <= 0:
                await self._wait_for_available_connection(key, traces)
                # A connection may have been returned to the pool while
                # we waited; prefer reusing it over creating a new one.
                if (conn := await self._get(key, traces)) is not None:
                    return conn

            placeholder = cast(ResponseHandler, _TransportPlaceholder())
            self._acquired.add(placeholder)
            if self._limit_per_host:
                self._acquired_per_host[key].add(placeholder)

            try:
                # Traces are done inside the try block to ensure that
                # the placeholder is still cleaned up if an exception
                # is raised.
                if traces:
                    for trace in traces:
                        await trace.send_connection_create_start()
                proto = await self._create_connection(req, traces, timeout)
                if traces:
                    for trace in traces:
                        await trace.send_connection_create_end()
            except BaseException:
                self._release_acquired(key, placeholder)
                raise
            else:
                if self._closed:
                    proto.close()
                    raise ClientConnectionError("Connector is closed.")

        # The connection was successfully created, drop the placeholder
        # and add the real connection to the acquired set. There should
        # be no awaits after the proto is added to the acquired set
        # to ensure that the connection is not left in the acquired set
        # on cancellation.
        self._acquired.remove(placeholder)
        self._acquired.add(proto)
        if self._limit_per_host:
            acquired_per_host = self._acquired_per_host[key]
            acquired_per_host.remove(placeholder)
            acquired_per_host.add(proto)
        return Connection(self, key, proto, self._loop)
568
+
569
    async def _wait_for_available_connection(
        self, key: "ConnectionKey", traces: List["Trace"]
    ) -> None:
        """Wait for an available connection slot."""
        # We loop here because there is a race between
        # the connection limit check and the connection
        # being acquired. If the connection is acquired
        # between the check and the await statement, we
        # need to loop again to check if the connection
        # slot is still available.
        attempts = 0
        while True:
            fut: asyncio.Future[None] = self._loop.create_future()
            keyed_waiters = self._waiters[key]
            keyed_waiters[fut] = None
            if attempts:
                # If we have waited before, we need to move the waiter
                # to the front of the queue as otherwise we might get
                # starved and hit the timeout.
                keyed_waiters.move_to_end(fut, last=False)

            try:
                # Traces happen in the try block to ensure that
                # the waiter is still cleaned up if an exception is raised.
                if traces:
                    for trace in traces:
                        await trace.send_connection_queued_start()
                await fut
                if traces:
                    for trace in traces:
                        await trace.send_connection_queued_end()
            finally:
                # pop the waiter from the queue if its still
                # there and not already removed by _release_waiter
                keyed_waiters.pop(fut, None)
                # Drop the (now empty) per-key queue entirely; the
                # default of True keeps a non-empty or missing entry.
                if not self._waiters.get(key, True):
                    del self._waiters[key]

            if self._available_connections(key) > 0:
                break
            attempts += 1
610
+
611
    async def _get(
        self, key: "ConnectionKey", traces: Optional[List["Trace"]] = None
    ) -> Optional[Connection]:
        """Get next reusable connection for the key or None.

        The connection will be marked as acquired.
        Stale or disconnected pooled connections encountered on the way
        are closed and dropped.
        """
        if (conns := self._conns.get(key)) is None:
            return None

        t1 = monotonic()
        while conns:
            proto, t0 = conns.popleft()
            # We will reuse the connection if it is connected and
            # the keepalive timeout has not been exceeded
            if proto.is_connected() and t1 - t0 <= self._keepalive_timeout:
                if not conns:
                    # The very last connection was reclaimed: drop the key
                    del self._conns[key]
                self._acquired.add(proto)
                if self._limit_per_host:
                    self._acquired_per_host[key].add(proto)
                if traces:
                    for trace in traces:
                        try:
                            await trace.send_connection_reuseconn()
                        except BaseException:
                            self._release_acquired(key, proto)
                            raise
                return Connection(self, key, proto, self._loop)

            # Connection cannot be reused, close it
            transport = proto.transport
            proto.close()
            # only for SSL transports
            if not self._cleanup_closed_disabled and key.is_ssl:
                self._cleanup_closed_transports.append(transport)

        # No more connections: drop the key
        del self._conns[key]
        return None
652
+
653
    def _release_waiter(self) -> None:
        """
        Iterates over all waiters until one to be released is found.

        The one to be released is not finished and
        belongs to a host that has available connections.
        """
        if not self._waiters:
            return

        # Shuffle the keys so successive calls do not always visit the
        # waiter queues in the same order (fairness across hosts).
        queues = list(self._waiters)
        random.shuffle(queues)

        for key in queues:
            if self._available_connections(key) < 1:
                continue

            waiters = self._waiters[key]
            while waiters:
                # FIFO within a key: wake the oldest pending waiter.
                waiter, _ = waiters.popitem(last=False)
                if not waiter.done():
                    waiter.set_result(None)
                    return
678
+
679
    def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None:
        """Release acquired connection.

        Removes *proto* from the acquired bookkeeping (global and
        per-host) and wakes one waiter that may now acquire a slot.
        """
        if self._closed:
            # acquired connection is already released on connector closing
            return

        self._acquired.discard(proto)
        if self._limit_per_host and (conns := self._acquired_per_host.get(key)):
            conns.discard(proto)
            if not conns:
                # Keep the per-host map free of empty entries.
                del self._acquired_per_host[key]
        self._release_waiter()
691
+
692
    def _release(
        self,
        key: "ConnectionKey",
        protocol: ResponseHandler,
        *,
        should_close: bool = False,
    ) -> None:
        """Return *protocol* to the pool, or close it.

        The connection is closed instead of pooled when the connector is
        force-close, the caller asks for it, or the protocol itself says
        it should not be reused.
        """
        if self._closed:
            # acquired connection is already released on connector closing
            return

        self._release_acquired(key, protocol)

        if self._force_close or should_close or protocol.should_close:
            transport = protocol.transport
            protocol.close()

            # SSL transports get the double-close treatment
            # (see _cleanup_closed).
            if key.is_ssl and not self._cleanup_closed_disabled:
                self._cleanup_closed_transports.append(transport)
            return

        self._conns[key].append((protocol, monotonic()))

        # Arm the keep-alive cleanup timer if it is not already running.
        if self._cleanup_handle is None:
            self._cleanup_handle = helpers.weakref_handle(
                self,
                "_cleanup",
                self._keepalive_timeout,
                self._loop,
                timeout_ceil_threshold=self._timeout_ceil_threshold,
            )
723
+
724
    async def _create_connection(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        """Abstract hook: concrete connectors open the transport here."""
        raise NotImplementedError()
728
+
729
+
730
class _DNSCacheTable:
    """Round-robin DNS cache with optional per-entry TTL expiry."""

    def __init__(self, ttl: Optional[float] = None) -> None:
        # (host, port) -> (endless cycle over addresses, address count).
        self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[ResolveResult], int]] = {}
        # Insertion timestamps, tracked only when a TTL is configured.
        self._timestamps: Dict[Tuple[str, int], float] = {}
        self._ttl = ttl

    def __contains__(self, host: object) -> bool:
        return host in self._addrs_rr

    def add(self, key: Tuple[str, int], addrs: List[ResolveResult]) -> None:
        self._addrs_rr[key] = (cycle(addrs), len(addrs))
        if self._ttl is not None:
            self._timestamps[key] = monotonic()

    def remove(self, key: Tuple[str, int]) -> None:
        self._addrs_rr.pop(key, None)
        if self._ttl is not None:
            self._timestamps.pop(key, None)

    def clear(self) -> None:
        self._addrs_rr.clear()
        self._timestamps.clear()

    def next_addrs(self, key: Tuple[str, int]) -> List[ResolveResult]:
        rr, count = self._addrs_rr[key]
        batch = list(islice(rr, count))
        # Advance the cycle by one so the next caller starts at the
        # following address (round-robin rotation).
        next(rr)
        return batch

    def expired(self, key: Tuple[str, int]) -> bool:
        if self._ttl is None:
            return False
        return self._timestamps[key] + self._ttl < monotonic()
767
+
768
+
769
def _make_ssl_context(verified: bool) -> Optional[SSLContext]:
    """Create SSL context.

    This method is not async-friendly and should be called from a thread
    because it will load certificates from disk and do other blocking I/O.

    Returns None when the ``ssl`` module is unavailable.
    """
    if ssl is None:
        # No ssl support
        return None
    if verified:
        # Full verification against the system CA store.
        return ssl.create_default_context()
    # Unverified context: skip hostname/certificate checks but keep the
    # other hardening options (no SSLv2/SSLv3, no TLS compression).
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.options |= ssl.OP_NO_SSLv2
    sslcontext.options |= ssl.OP_NO_SSLv3
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    sslcontext.options |= ssl.OP_NO_COMPRESSION
    sslcontext.set_default_verify_paths()
    return sslcontext
788
+
789
+
790
# The default SSLContext objects are created at import time
# since they do blocking I/O to load certificates from disk,
# and imports should always be done before the event loop starts
# or in a thread. Both contexts are shared by all connectors.
_SSL_CONTEXT_VERIFIED = _make_ssl_context(True)
_SSL_CONTEXT_UNVERIFIED = _make_ssl_context(False)
796
+
797
+
798
+ class TCPConnector(BaseConnector):
799
+ """TCP connector.
800
+
801
+ verify_ssl - Set to True to check ssl certifications.
802
+ fingerprint - Pass the binary sha256
803
+ digest of the expected certificate in DER format to verify
804
+ that the certificate the server presents matches. See also
805
+ https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning
806
+ resolver - Enable DNS lookups and use this
807
+ resolver
808
+ use_dns_cache - Use memory cache for DNS lookups.
809
+ ttl_dns_cache - Max seconds having cached a DNS entry, None forever.
810
+ family - socket address family
811
+ local_addr - local tuple of (host, port) to bind socket to
812
+
813
+ keepalive_timeout - (optional) Keep-alive timeout.
814
+ force_close - Set to True to force close and do reconnect
815
+ after each request (and between redirects).
816
+ limit - The total number of simultaneous connections.
817
+ limit_per_host - Number of simultaneous connections to one host.
818
+ enable_cleanup_closed - Enables clean-up closed ssl transports.
819
+ Disabled by default.
820
+ happy_eyeballs_delay - This is the “Connection Attempt Delay”
821
+ as defined in RFC 8305. To disable
822
+ the happy eyeballs algorithm, set to None.
823
+ interleave - “First Address Family Count” as defined in RFC 8305
824
+ loop - Optional event loop.
825
+ """
826
+
827
+ allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"})
828
+
829
    def __init__(
        self,
        *,
        verify_ssl: bool = True,
        fingerprint: Optional[bytes] = None,
        use_dns_cache: bool = True,
        ttl_dns_cache: Optional[int] = 10,
        family: socket.AddressFamily = socket.AddressFamily.AF_UNSPEC,
        ssl_context: Optional[SSLContext] = None,
        ssl: Union[bool, Fingerprint, SSLContext] = True,
        local_addr: Optional[Tuple[str, int]] = None,
        resolver: Optional[AbstractResolver] = None,
        keepalive_timeout: Union[None, float, object] = sentinel,
        force_close: bool = False,
        limit: int = 100,
        limit_per_host: int = 0,
        enable_cleanup_closed: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        timeout_ceil_threshold: float = 5,
        happy_eyeballs_delay: Optional[float] = 0.25,
        interleave: Optional[int] = None,
    ):
        """Initialize the TCP connector (parameters documented on the class)."""
        super().__init__(
            keepalive_timeout=keepalive_timeout,
            force_close=force_close,
            limit=limit,
            limit_per_host=limit_per_host,
            enable_cleanup_closed=enable_cleanup_closed,
            loop=loop,
            timeout_ceil_threshold=timeout_ceil_threshold,
        )

        # Collapse the legacy verify_ssl/ssl_context/fingerprint arguments
        # and the newer ``ssl`` argument into one internal setting.
        self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
        if resolver is None:
            resolver = DefaultResolver(loop=self._loop)
        self._resolver = resolver

        self._use_dns_cache = use_dns_cache
        self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache)
        # In-flight DNS lookups keyed by (host, port); concurrent callers
        # wait on these futures instead of issuing duplicate lookups.
        self._throttle_dns_futures: Dict[
            Tuple[str, int], Set["asyncio.Future[None]"]
        ] = {}
        self._family = family
        self._local_addr_infos = aiohappyeyeballs.addr_to_addr_infos(local_addr)
        self._happy_eyeballs_delay = happy_eyeballs_delay
        self._interleave = interleave
        # Strong references to background resolution tasks so they are
        # not garbage-collected before completing.
        self._resolve_host_tasks: Set["asyncio.Task[List[ResolveResult]]"] = set()
876
+
877
    def close(self) -> Awaitable[None]:
        """Close all ongoing DNS calls, then close the connector."""
        # Wake every waiter that is blocked on a throttled DNS lookup ...
        for fut in chain.from_iterable(self._throttle_dns_futures.values()):
            fut.cancel()

        # ... and cancel the background resolution tasks themselves.
        for t in self._resolve_host_tasks:
            t.cancel()

        return super().close()
886
+
887
+ @property
888
+ def family(self) -> int:
889
+ """Socket family like AF_INET."""
890
+ return self._family
891
+
892
+ @property
893
+ def use_dns_cache(self) -> bool:
894
+ """True if local DNS caching is enabled."""
895
+ return self._use_dns_cache
896
+
897
+ def clear_dns_cache(
898
+ self, host: Optional[str] = None, port: Optional[int] = None
899
+ ) -> None:
900
+ """Remove specified host/port or clear all dns local cache."""
901
+ if host is not None and port is not None:
902
+ self._cached_hosts.remove((host, port))
903
+ elif host is not None or port is not None:
904
+ raise ValueError("either both host and port or none of them are allowed")
905
+ else:
906
+ self._cached_hosts.clear()
907
+
908
    async def _resolve_host(
        self, host: str, port: int, traces: Optional[Sequence["Trace"]] = None
    ) -> List[ResolveResult]:
        """Resolve host and return list of addresses.

        Literal IPs short-circuit; otherwise the resolver is consulted,
        with optional caching and a throttle so concurrent lookups of the
        same (host, port) share a single underlying resolution.
        """
        if is_ip_address(host):
            return [
                {
                    "hostname": host,
                    "host": host,
                    "port": port,
                    "family": self._family,
                    "proto": 0,
                    "flags": 0,
                }
            ]

        if not self._use_dns_cache:

            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_start(host)

            res = await self._resolver.resolve(host, port, family=self._family)

            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_end(host)

            return res

        key = (host, port)
        if key in self._cached_hosts and not self._cached_hosts.expired(key):
            # get result early, before any await (#4014)
            result = self._cached_hosts.next_addrs(key)

            if traces:
                for trace in traces:
                    await trace.send_dns_cache_hit(host)
            return result

        futures: Set["asyncio.Future[None]"]
        #
        # If multiple connectors are resolving the same host, we wait
        # for the first one to resolve and then use the result for all of them.
        # We use a throttle to ensure that we only resolve the host once
        # and then use the result for all the waiters.
        #
        if key in self._throttle_dns_futures:
            # get futures early, before any await (#4014)
            futures = self._throttle_dns_futures[key]
            future: asyncio.Future[None] = self._loop.create_future()
            futures.add(future)
            if traces:
                for trace in traces:
                    await trace.send_dns_cache_hit(host)
            try:
                await future
            finally:
                futures.discard(future)
            return self._cached_hosts.next_addrs(key)

        # update dict early, before any await (#4014)
        self._throttle_dns_futures[key] = futures = set()
        # In this case we need to create a task to ensure that we can shield
        # the task from cancellation as cancelling this lookup should not cancel
        # the underlying lookup or else the cancel event will get broadcast to
        # all the waiters across all connections.
        #
        coro = self._resolve_host_with_throttle(key, host, port, futures, traces)
        loop = asyncio.get_running_loop()
        if sys.version_info >= (3, 12):
            # Optimization for Python 3.12, try to send immediately
            resolved_host_task = asyncio.Task(coro, loop=loop, eager_start=True)
        else:
            resolved_host_task = loop.create_task(coro)

        if not resolved_host_task.done():
            self._resolve_host_tasks.add(resolved_host_task)
            resolved_host_task.add_done_callback(self._resolve_host_tasks.discard)

        try:
            return await asyncio.shield(resolved_host_task)
        except asyncio.CancelledError:

            def drop_exception(fut: "asyncio.Future[List[ResolveResult]]") -> None:
                # Consume the task's exception so it is never reported
                # as unretrieved after this caller was cancelled.
                with suppress(Exception, asyncio.CancelledError):
                    fut.result()

            resolved_host_task.add_done_callback(drop_exception)
            raise
998
+
999
    async def _resolve_host_with_throttle(
        self,
        key: Tuple[str, int],
        host: str,
        port: int,
        futures: Set["asyncio.Future[None]"],
        traces: Optional[Sequence["Trace"]],
    ) -> List[ResolveResult]:
        """Resolve host and set result for all waiters.

        This method must be run in a task and shielded from cancellation
        to avoid cancelling the underlying lookup.
        """
        if traces:
            for trace in traces:
                await trace.send_dns_cache_miss(host)
        try:
            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_start(host)

            addrs = await self._resolver.resolve(host, port, family=self._family)
            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_end(host)

            # Cache first, then wake the waiters so they read the cache.
            self._cached_hosts.add(key, addrs)
            for fut in futures:
                set_result(fut, None)
        except BaseException as e:
            # any DNS exception is set for the waiters to raise the same exception.
            # This coro is always run in task that is shielded from cancellation so
            # we should never be propagating cancellation here.
            for fut in futures:
                set_exception(fut, e)
            raise
        finally:
            # The lookup is over (success or failure): stop throttling.
            self._throttle_dns_futures.pop(key)

        return self._cached_hosts.next_addrs(key)
1039
+
1040
    async def _create_connection(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        """Create connection.

        Has same keyword arguments as BaseEventLoop.create_connection.
        Dispatches to the proxy path when the request carries a proxy URL.
        """
        if req.proxy:
            _, proto = await self._create_proxy_connection(req, traces, timeout)
        else:
            _, proto = await self._create_direct_connection(req, traces, timeout)

        return proto
1053
+
1054
    def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]:
        """Pick the SSLContext to use for *req*.

        Resolution order:

        0. if ``req.is_ssl()`` is false, return None
        1. if ``req.ssl`` is an SSLContext, use it
        2. if ``req.ssl`` is anything else but True (i.e. verification is
           disabled or a fingerprint is used), use the shared unverified
           context
        3. otherwise repeat the same two checks with the connector-level
           ``self._ssl`` setting
        4. default to the shared verified context
        """
        if not req.is_ssl():
            return None

        if ssl is None:  # pragma: no cover
            raise RuntimeError("SSL is not supported.")
        sslcontext = req.ssl
        if isinstance(sslcontext, ssl.SSLContext):
            return sslcontext
        if sslcontext is not True:
            # not verified or fingerprinted
            return _SSL_CONTEXT_UNVERIFIED
        sslcontext = self._ssl
        if isinstance(sslcontext, ssl.SSLContext):
            return sslcontext
        if sslcontext is not True:
            # not verified or fingerprinted
            return _SSL_CONTEXT_UNVERIFIED
        return _SSL_CONTEXT_VERIFIED
1086
+
1087
+ def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]:
1088
+ ret = req.ssl
1089
+ if isinstance(ret, Fingerprint):
1090
+ return ret
1091
+ ret = self._ssl
1092
+ if isinstance(ret, Fingerprint):
1093
+ return ret
1094
+ return None
1095
+
1096
    async def _wrap_create_connection(
        self,
        *args: Any,
        addr_infos: List[aiohappyeyeballs.AddrInfoType],
        req: ClientRequest,
        timeout: "ClientTimeout",
        client_error: Type[Exception] = ClientConnectorError,
        **kwargs: Any,
    ) -> Tuple[asyncio.Transport, ResponseHandler]:
        """Open a socket via Happy Eyeballs and wrap it in a transport.

        Certificate/SSL failures and OS errors are translated into the
        corresponding aiohttp client exceptions; extra positional and
        keyword arguments are forwarded to ``loop.create_connection``.
        """
        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                sock = await aiohappyeyeballs.start_connection(
                    addr_infos=addr_infos,
                    local_addr_infos=self._local_addr_infos,
                    happy_eyeballs_delay=self._happy_eyeballs_delay,
                    interleave=self._interleave,
                    loop=self._loop,
                )
                return await self._loop.create_connection(*args, **kwargs, sock=sock)
        except cert_errors as exc:
            raise ClientConnectorCertificateError(req.connection_key, exc) from exc
        except ssl_errors as exc:
            raise ClientConnectorSSLError(req.connection_key, exc) from exc
        except OSError as exc:
            # A bare asyncio.TimeoutError (errno unset) is the ceil_timeout
            # expiring: let it propagate instead of wrapping it.
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise client_error(req.connection_key, exc) from exc
1125
+
1126
    async def _wrap_existing_connection(
        self,
        *args: Any,
        req: ClientRequest,
        timeout: "ClientTimeout",
        client_error: Type[Exception] = ClientConnectorError,
    **kwargs: Any,
    ) -> Tuple[asyncio.Transport, ResponseHandler]:
        """Wrap an already-established socket in a transport.

        Same error translation as ``_wrap_create_connection`` but without
        opening a new socket; arguments are forwarded verbatim to
        ``loop.create_connection``.
        """
        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                return await self._loop.create_connection(*args, **kwargs)
        except cert_errors as exc:
            raise ClientConnectorCertificateError(req.connection_key, exc) from exc
        except ssl_errors as exc:
            raise ClientConnectorSSLError(req.connection_key, exc) from exc
        except OSError as exc:
            # A bare asyncio.TimeoutError (errno unset) is the ceil_timeout
            # expiring: let it propagate instead of wrapping it.
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise client_error(req.connection_key, exc) from exc
1147
+
1148
+ def _fail_on_no_start_tls(self, req: "ClientRequest") -> None:
1149
+ """Raise a :py:exc:`RuntimeError` on missing ``start_tls()``.
1150
+
1151
+ It is necessary for TLS-in-TLS so that it is possible to
1152
+ send HTTPS queries through HTTPS proxies.
1153
+
1154
+ This doesn't affect regular HTTP requests, though.
1155
+ """
1156
+ if not req.is_ssl():
1157
+ return
1158
+
1159
+ proxy_url = req.proxy
1160
+ assert proxy_url is not None
1161
+ if proxy_url.scheme != "https":
1162
+ return
1163
+
1164
+ self._check_loop_for_start_tls()
1165
+
1166
    def _check_loop_for_start_tls(self) -> None:
        """Raise RuntimeError if the event loop lacks ``start_tls()``."""
        try:
            self._loop.start_tls
        except AttributeError as attr_exc:
            raise RuntimeError(
                "An HTTPS request is being sent through an HTTPS proxy. "
                "This needs support for TLS in TLS but it is not implemented "
                "in your runtime for the stdlib asyncio.\n\n"
                "Please upgrade to Python 3.11 or higher. For more details, "
                "please see:\n"
                "* https://bugs.python.org/issue37179\n"
                "* https://github.com/python/cpython/pull/28073\n"
                "* https://docs.aiohttp.org/en/stable/"
                "client_advanced.html#proxy-support\n"
                "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
            ) from attr_exc
1182
+
1183
+ def _loop_supports_start_tls(self) -> bool:
1184
+ try:
1185
+ self._check_loop_for_start_tls()
1186
+ except RuntimeError:
1187
+ return False
1188
+ else:
1189
+ return True
1190
+
1191
    def _warn_about_tls_in_tls(
        self,
        underlying_transport: asyncio.Transport,
        req: ClientRequest,
    ) -> None:
        """Issue a warning if the requested URL has HTTPS scheme.

        Only warns when the underlying (proxy) transport does not
        advertise ``_start_tls_compatible`` support.
        """
        if req.request_info.url.scheme != "https":
            return

        asyncio_supports_tls_in_tls = getattr(
            underlying_transport,
            "_start_tls_compatible",
            False,
        )

        if asyncio_supports_tls_in_tls:
            return

        warnings.warn(
            "An HTTPS request is being sent through an HTTPS proxy. "
            "This support for TLS in TLS is known to be disabled "
            "in the stdlib asyncio (Python <3.11). This is why you'll probably see "
            "an error in the log below.\n\n"
            "It is possible to enable it via monkeypatching. "
            "For more details, see:\n"
            "* https://bugs.python.org/issue37179\n"
            "* https://github.com/python/cpython/pull/28073\n\n"
            "You can temporarily patch this as follows:\n"
            "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n"
            "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
            RuntimeWarning,
            source=self,
            # Why `4`? At least 3 of the calls in the stack originate
            # from the methods in this class.
            stacklevel=3,
        )
1227
+
1228
    async def _start_tls_connection(
        self,
        underlying_transport: asyncio.Transport,
        req: ClientRequest,
        timeout: "ClientTimeout",
        client_error: Type[Exception] = ClientConnectorError,
    ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
        """Wrap the raw TCP transport with TLS.

        Used for TLS-in-TLS (HTTPS request through an HTTPS proxy).
        Translates SSL/OS failures into aiohttp client exceptions and
        enforces the configured certificate fingerprint, if any.
        """
        tls_proto = self._factory()  # Create a brand new proto for TLS
        sslcontext = self._get_ssl_context(req)
        if TYPE_CHECKING:
            # _start_tls_connection is unreachable in the current code path
            # if sslcontext is None.
            assert sslcontext is not None

        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                try:
                    tls_transport = await self._loop.start_tls(
                        underlying_transport,
                        tls_proto,
                        sslcontext,
                        server_hostname=req.server_hostname or req.host,
                        ssl_handshake_timeout=timeout.total,
                    )
                except BaseException:
                    # We need to close the underlying transport since
                    # `start_tls()` probably failed before it had a
                    # chance to do this:
                    underlying_transport.close()
                    raise
                if isinstance(tls_transport, asyncio.Transport):
                    fingerprint = self._get_fingerprint(req)
                    if fingerprint:
                        try:
                            fingerprint.check(tls_transport)
                        except ServerFingerprintMismatch:
                            tls_transport.close()
                            if not self._cleanup_closed_disabled:
                                self._cleanup_closed_transports.append(tls_transport)
                            raise
        except cert_errors as exc:
            raise ClientConnectorCertificateError(req.connection_key, exc) from exc
        except ssl_errors as exc:
            raise ClientConnectorSSLError(req.connection_key, exc) from exc
        except OSError as exc:
            # A bare asyncio.TimeoutError (errno unset) is the ceil_timeout
            # expiring: let it propagate instead of wrapping it.
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise client_error(req.connection_key, exc) from exc
        except TypeError as type_err:
            # Example cause looks like this:
            # TypeError: transport <asyncio.sslproto._SSLProtocolTransport
            # object at 0x7f760615e460> is not supported by start_tls()

            raise ClientConnectionError(
                "Cannot initialize a TLS-in-TLS connection to host "
                f"{req.host!s}:{req.port:d} through an underlying connection "
                f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} "
                f"[{type_err!s}]"
            ) from type_err
        else:
            if tls_transport is None:
                msg = "Failed to start TLS (possibly caused by closing transport)"
                raise client_error(req.connection_key, OSError(msg))
            tls_proto.connection_made(
                tls_transport
            )  # Kick the state machine of the new TLS protocol

        return tls_transport, tls_proto
1299
+
1300
+ def _convert_hosts_to_addr_infos(
1301
+ self, hosts: List[ResolveResult]
1302
+ ) -> List[aiohappyeyeballs.AddrInfoType]:
1303
+ """Converts the list of hosts to a list of addr_infos.
1304
+
1305
+ The list of hosts is the result of a DNS lookup. The list of
1306
+ addr_infos is the result of a call to `socket.getaddrinfo()`.
1307
+ """
1308
+ addr_infos: List[aiohappyeyeballs.AddrInfoType] = []
1309
+ for hinfo in hosts:
1310
+ host = hinfo["host"]
1311
+ is_ipv6 = ":" in host
1312
+ family = socket.AF_INET6 if is_ipv6 else socket.AF_INET
1313
+ if self._family and self._family != family:
1314
+ continue
1315
+ addr = (host, hinfo["port"], 0, 0) if is_ipv6 else (host, hinfo["port"])
1316
+ addr_infos.append(
1317
+ (family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", addr)
1318
+ )
1319
+ return addr_infos
1320
+
1321
+ async def _create_direct_connection(
1322
+ self,
1323
+ req: ClientRequest,
1324
+ traces: List["Trace"],
1325
+ timeout: "ClientTimeout",
1326
+ *,
1327
+ client_error: Type[Exception] = ClientConnectorError,
1328
+ ) -> Tuple[asyncio.Transport, ResponseHandler]:
1329
+ sslcontext = self._get_ssl_context(req)
1330
+ fingerprint = self._get_fingerprint(req)
1331
+
1332
+ host = req.url.raw_host
1333
+ assert host is not None
1334
+ # Replace multiple trailing dots with a single one.
1335
+ # A trailing dot is only present for fully-qualified domain names.
1336
+ # See https://github.com/aio-libs/aiohttp/pull/7364.
1337
+ if host.endswith(".."):
1338
+ host = host.rstrip(".") + "."
1339
+ port = req.port
1340
+ assert port is not None
1341
+ try:
1342
+ # Cancelling this lookup should not cancel the underlying lookup
1343
+ # or else the cancel event will get broadcast to all the waiters
1344
+ # across all connections.
1345
+ hosts = await self._resolve_host(host, port, traces=traces)
1346
+ except OSError as exc:
1347
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
1348
+ raise
1349
+ # in case of proxy it is not ClientProxyConnectionError
1350
+ # it is problem of resolving proxy ip itself
1351
+ raise ClientConnectorDNSError(req.connection_key, exc) from exc
1352
+
1353
+ last_exc: Optional[Exception] = None
1354
+ addr_infos = self._convert_hosts_to_addr_infos(hosts)
1355
+ while addr_infos:
1356
+ # Strip trailing dots, certificates contain FQDN without dots.
1357
+ # See https://github.com/aio-libs/aiohttp/issues/3636
1358
+ server_hostname = (
1359
+ (req.server_hostname or host).rstrip(".") if sslcontext else None
1360
+ )
1361
+
1362
+ try:
1363
+ transp, proto = await self._wrap_create_connection(
1364
+ self._factory,
1365
+ timeout=timeout,
1366
+ ssl=sslcontext,
1367
+ addr_infos=addr_infos,
1368
+ server_hostname=server_hostname,
1369
+ req=req,
1370
+ client_error=client_error,
1371
+ )
1372
+ except (ClientConnectorError, asyncio.TimeoutError) as exc:
1373
+ last_exc = exc
1374
+ aiohappyeyeballs.pop_addr_infos_interleave(addr_infos, self._interleave)
1375
+ continue
1376
+
1377
+ if req.is_ssl() and fingerprint:
1378
+ try:
1379
+ fingerprint.check(transp)
1380
+ except ServerFingerprintMismatch as exc:
1381
+ transp.close()
1382
+ if not self._cleanup_closed_disabled:
1383
+ self._cleanup_closed_transports.append(transp)
1384
+ last_exc = exc
1385
+ # Remove the bad peer from the list of addr_infos
1386
+ sock: socket.socket = transp.get_extra_info("socket")
1387
+ bad_peer = sock.getpeername()
1388
+ aiohappyeyeballs.remove_addr_infos(addr_infos, bad_peer)
1389
+ continue
1390
+
1391
+ return transp, proto
1392
+ else:
1393
+ assert last_exc is not None
1394
+ raise last_exc
1395
+
1396
+ async def _create_proxy_connection(
1397
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
1398
+ ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
1399
+ self._fail_on_no_start_tls(req)
1400
+ runtime_has_start_tls = self._loop_supports_start_tls()
1401
+
1402
+ headers: Dict[str, str] = {}
1403
+ if req.proxy_headers is not None:
1404
+ headers = req.proxy_headers # type: ignore[assignment]
1405
+ headers[hdrs.HOST] = req.headers[hdrs.HOST]
1406
+
1407
+ url = req.proxy
1408
+ assert url is not None
1409
+ proxy_req = ClientRequest(
1410
+ hdrs.METH_GET,
1411
+ url,
1412
+ headers=headers,
1413
+ auth=req.proxy_auth,
1414
+ loop=self._loop,
1415
+ ssl=req.ssl,
1416
+ )
1417
+
1418
+ # create connection to proxy server
1419
+ transport, proto = await self._create_direct_connection(
1420
+ proxy_req, [], timeout, client_error=ClientProxyConnectionError
1421
+ )
1422
+
1423
+ auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None)
1424
+ if auth is not None:
1425
+ if not req.is_ssl():
1426
+ req.headers[hdrs.PROXY_AUTHORIZATION] = auth
1427
+ else:
1428
+ proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth
1429
+
1430
+ if req.is_ssl():
1431
+ if runtime_has_start_tls:
1432
+ self._warn_about_tls_in_tls(transport, req)
1433
+
1434
+ # For HTTPS requests over HTTP proxy
1435
+ # we must notify proxy to tunnel connection
1436
+ # so we send CONNECT command:
1437
+ # CONNECT www.python.org:443 HTTP/1.1
1438
+ # Host: www.python.org
1439
+ #
1440
+ # next we must do TLS handshake and so on
1441
+ # to do this we must wrap raw socket into secure one
1442
+ # asyncio handles this perfectly
1443
+ proxy_req.method = hdrs.METH_CONNECT
1444
+ proxy_req.url = req.url
1445
+ key = req.connection_key._replace(
1446
+ proxy=None, proxy_auth=None, proxy_headers_hash=None
1447
+ )
1448
+ conn = Connection(self, key, proto, self._loop)
1449
+ proxy_resp = await proxy_req.send(conn)
1450
+ try:
1451
+ protocol = conn._protocol
1452
+ assert protocol is not None
1453
+
1454
+ # read_until_eof=True will ensure the connection isn't closed
1455
+ # once the response is received and processed allowing
1456
+ # START_TLS to work on the connection below.
1457
+ protocol.set_response_params(
1458
+ read_until_eof=runtime_has_start_tls,
1459
+ timeout_ceil_threshold=self._timeout_ceil_threshold,
1460
+ )
1461
+ resp = await proxy_resp.start(conn)
1462
+ except BaseException:
1463
+ proxy_resp.close()
1464
+ conn.close()
1465
+ raise
1466
+ else:
1467
+ conn._protocol = None
1468
+ try:
1469
+ if resp.status != 200:
1470
+ message = resp.reason
1471
+ if message is None:
1472
+ message = HTTPStatus(resp.status).phrase
1473
+ raise ClientHttpProxyError(
1474
+ proxy_resp.request_info,
1475
+ resp.history,
1476
+ status=resp.status,
1477
+ message=message,
1478
+ headers=resp.headers,
1479
+ )
1480
+ if not runtime_has_start_tls:
1481
+ rawsock = transport.get_extra_info("socket", default=None)
1482
+ if rawsock is None:
1483
+ raise RuntimeError(
1484
+ "Transport does not expose socket instance"
1485
+ )
1486
+ # Duplicate the socket, so now we can close proxy transport
1487
+ rawsock = rawsock.dup()
1488
+ except BaseException:
1489
+ # It shouldn't be closed in `finally` because it's fed to
1490
+ # `loop.start_tls()` and the docs say not to touch it after
1491
+ # passing there.
1492
+ transport.close()
1493
+ raise
1494
+ finally:
1495
+ if not runtime_has_start_tls:
1496
+ transport.close()
1497
+
1498
+ if not runtime_has_start_tls:
1499
+ # HTTP proxy with support for upgrade to HTTPS
1500
+ sslcontext = self._get_ssl_context(req)
1501
+ return await self._wrap_existing_connection(
1502
+ self._factory,
1503
+ timeout=timeout,
1504
+ ssl=sslcontext,
1505
+ sock=rawsock,
1506
+ server_hostname=req.host,
1507
+ req=req,
1508
+ )
1509
+
1510
+ return await self._start_tls_connection(
1511
+ # Access the old transport for the last time before it's
1512
+ # closed and forgotten forever:
1513
+ transport,
1514
+ req=req,
1515
+ timeout=timeout,
1516
+ )
1517
+ finally:
1518
+ proxy_resp.close()
1519
+
1520
+ return transport, proto
1521
+
1522
+
1523
+ class UnixConnector(BaseConnector):
1524
+ """Unix socket connector.
1525
+
1526
+ path - Unix socket path.
1527
+ keepalive_timeout - (optional) Keep-alive timeout.
1528
+ force_close - Set to True to force close and do reconnect
1529
+ after each request (and between redirects).
1530
+ limit - The total number of simultaneous connections.
1531
+ limit_per_host - Number of simultaneous connections to one host.
1532
+ loop - Optional event loop.
1533
+ """
1534
+
1535
+ allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"unix"})
1536
+
1537
+ def __init__(
1538
+ self,
1539
+ path: str,
1540
+ force_close: bool = False,
1541
+ keepalive_timeout: Union[object, float, None] = sentinel,
1542
+ limit: int = 100,
1543
+ limit_per_host: int = 0,
1544
+ loop: Optional[asyncio.AbstractEventLoop] = None,
1545
+ ) -> None:
1546
+ super().__init__(
1547
+ force_close=force_close,
1548
+ keepalive_timeout=keepalive_timeout,
1549
+ limit=limit,
1550
+ limit_per_host=limit_per_host,
1551
+ loop=loop,
1552
+ )
1553
+ self._path = path
1554
+
1555
+ @property
1556
+ def path(self) -> str:
1557
+ """Path to unix socket."""
1558
+ return self._path
1559
+
1560
+ async def _create_connection(
1561
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
1562
+ ) -> ResponseHandler:
1563
+ try:
1564
+ async with ceil_timeout(
1565
+ timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
1566
+ ):
1567
+ _, proto = await self._loop.create_unix_connection(
1568
+ self._factory, self._path
1569
+ )
1570
+ except OSError as exc:
1571
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
1572
+ raise
1573
+ raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc
1574
+
1575
+ return proto
1576
+
1577
+
1578
+ class NamedPipeConnector(BaseConnector):
1579
+ """Named pipe connector.
1580
+
1581
+ Only supported by the proactor event loop.
1582
+ See also: https://docs.python.org/3/library/asyncio-eventloop.html
1583
+
1584
+ path - Windows named pipe path.
1585
+ keepalive_timeout - (optional) Keep-alive timeout.
1586
+ force_close - Set to True to force close and do reconnect
1587
+ after each request (and between redirects).
1588
+ limit - The total number of simultaneous connections.
1589
+ limit_per_host - Number of simultaneous connections to one host.
1590
+ loop - Optional event loop.
1591
+ """
1592
+
1593
+ allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"npipe"})
1594
+
1595
+ def __init__(
1596
+ self,
1597
+ path: str,
1598
+ force_close: bool = False,
1599
+ keepalive_timeout: Union[object, float, None] = sentinel,
1600
+ limit: int = 100,
1601
+ limit_per_host: int = 0,
1602
+ loop: Optional[asyncio.AbstractEventLoop] = None,
1603
+ ) -> None:
1604
+ super().__init__(
1605
+ force_close=force_close,
1606
+ keepalive_timeout=keepalive_timeout,
1607
+ limit=limit,
1608
+ limit_per_host=limit_per_host,
1609
+ loop=loop,
1610
+ )
1611
+ if not isinstance(
1612
+ self._loop, asyncio.ProactorEventLoop # type: ignore[attr-defined]
1613
+ ):
1614
+ raise RuntimeError(
1615
+ "Named Pipes only available in proactor loop under windows"
1616
+ )
1617
+ self._path = path
1618
+
1619
+ @property
1620
+ def path(self) -> str:
1621
+ """Path to the named pipe."""
1622
+ return self._path
1623
+
1624
+ async def _create_connection(
1625
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
1626
+ ) -> ResponseHandler:
1627
+ try:
1628
+ async with ceil_timeout(
1629
+ timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
1630
+ ):
1631
+ _, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined]
1632
+ self._factory, self._path
1633
+ )
1634
+ # the drain is required so that the connection_made is called
1635
+ # and transport is set otherwise it is not set before the
1636
+ # `assert conn.transport is not None`
1637
+ # in client.py's _request method
1638
+ await asyncio.sleep(0)
1639
+ # other option is to manually set transport like
1640
+ # `proto.transport = trans`
1641
+ except OSError as exc:
1642
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
1643
+ raise
1644
+ raise ClientConnectorError(req.connection_key, exc) from exc
1645
+
1646
+ return cast(ResponseHandler, proto)
deepseek/lib/python3.10/site-packages/aiohttp/cookiejar.py ADDED
@@ -0,0 +1,487 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import calendar
3
+ import contextlib
4
+ import datetime
5
+ import heapq
6
+ import itertools
7
+ import os # noqa
8
+ import pathlib
9
+ import pickle
10
+ import re
11
+ import time
12
+ import warnings
13
+ from collections import defaultdict
14
+ from http.cookies import BaseCookie, Morsel, SimpleCookie
15
+ from typing import (
16
+ DefaultDict,
17
+ Dict,
18
+ Iterable,
19
+ Iterator,
20
+ List,
21
+ Mapping,
22
+ Optional,
23
+ Set,
24
+ Tuple,
25
+ Union,
26
+ cast,
27
+ )
28
+
29
+ from yarl import URL
30
+
31
+ from .abc import AbstractCookieJar, ClearCookiePredicate
32
+ from .helpers import is_ip_address
33
+ from .typedefs import LooseCookies, PathLike, StrOrURL
34
+
35
+ __all__ = ("CookieJar", "DummyCookieJar")
36
+
37
+
38
+ CookieItem = Union[str, "Morsel[str]"]
39
+
40
+ # We cache these string methods here as their use is in performance critical code.
41
+ _FORMAT_PATH = "{}/{}".format
42
+ _FORMAT_DOMAIN_REVERSED = "{1}.{0}".format
43
+
44
+ # The minimum number of scheduled cookie expirations before we start cleaning up
45
+ # the expiration heap. This is a performance optimization to avoid cleaning up the
46
+ # heap too often when there are only a few scheduled expirations.
47
+ _MIN_SCHEDULED_COOKIE_EXPIRATION = 100
48
+
49
+
50
+ class CookieJar(AbstractCookieJar):
51
+ """Implements cookie storage adhering to RFC 6265."""
52
+
53
+ DATE_TOKENS_RE = re.compile(
54
+ r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
55
+ r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
56
+ )
57
+
58
+ DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")
59
+
60
+ DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")
61
+
62
+ DATE_MONTH_RE = re.compile(
63
+ "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|(aug)|(sep)|(oct)|(nov)|(dec)",
64
+ re.I,
65
+ )
66
+
67
+ DATE_YEAR_RE = re.compile(r"(\d{2,4})")
68
+
69
+ # calendar.timegm() fails for timestamps after datetime.datetime.max
70
+ # Minus one as a loss of precision occurs when timestamp() is called.
71
+ MAX_TIME = (
72
+ int(datetime.datetime.max.replace(tzinfo=datetime.timezone.utc).timestamp()) - 1
73
+ )
74
+ try:
75
+ calendar.timegm(time.gmtime(MAX_TIME))
76
+ except (OSError, ValueError):
77
+ # Hit the maximum representable time on Windows
78
+ # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64
79
+ # Throws ValueError on PyPy 3.9, OSError elsewhere
80
+ MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1))
81
+ except OverflowError:
82
+ # #4515: datetime.max may not be representable on 32-bit platforms
83
+ MAX_TIME = 2**31 - 1
84
+ # Avoid minuses in the future, 3x faster
85
+ SUB_MAX_TIME = MAX_TIME - 1
86
+
87
+ def __init__(
88
+ self,
89
+ *,
90
+ unsafe: bool = False,
91
+ quote_cookie: bool = True,
92
+ treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
93
+ loop: Optional[asyncio.AbstractEventLoop] = None,
94
+ ) -> None:
95
+ super().__init__(loop=loop)
96
+ self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict(
97
+ SimpleCookie
98
+ )
99
+ self._morsel_cache: DefaultDict[Tuple[str, str], Dict[str, Morsel[str]]] = (
100
+ defaultdict(dict)
101
+ )
102
+ self._host_only_cookies: Set[Tuple[str, str]] = set()
103
+ self._unsafe = unsafe
104
+ self._quote_cookie = quote_cookie
105
+ if treat_as_secure_origin is None:
106
+ treat_as_secure_origin = []
107
+ elif isinstance(treat_as_secure_origin, URL):
108
+ treat_as_secure_origin = [treat_as_secure_origin.origin()]
109
+ elif isinstance(treat_as_secure_origin, str):
110
+ treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
111
+ else:
112
+ treat_as_secure_origin = [
113
+ URL(url).origin() if isinstance(url, str) else url.origin()
114
+ for url in treat_as_secure_origin
115
+ ]
116
+ self._treat_as_secure_origin = treat_as_secure_origin
117
+ self._expire_heap: List[Tuple[float, Tuple[str, str, str]]] = []
118
+ self._expirations: Dict[Tuple[str, str, str], float] = {}
119
+
120
+ def save(self, file_path: PathLike) -> None:
121
+ file_path = pathlib.Path(file_path)
122
+ with file_path.open(mode="wb") as f:
123
+ pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)
124
+
125
+ def load(self, file_path: PathLike) -> None:
126
+ file_path = pathlib.Path(file_path)
127
+ with file_path.open(mode="rb") as f:
128
+ self._cookies = pickle.load(f)
129
+
130
+ def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
131
+ if predicate is None:
132
+ self._expire_heap.clear()
133
+ self._cookies.clear()
134
+ self._morsel_cache.clear()
135
+ self._host_only_cookies.clear()
136
+ self._expirations.clear()
137
+ return
138
+
139
+ now = time.time()
140
+ to_del = [
141
+ key
142
+ for (domain, path), cookie in self._cookies.items()
143
+ for name, morsel in cookie.items()
144
+ if (
145
+ (key := (domain, path, name)) in self._expirations
146
+ and self._expirations[key] <= now
147
+ )
148
+ or predicate(morsel)
149
+ ]
150
+ if to_del:
151
+ self._delete_cookies(to_del)
152
+
153
+ def clear_domain(self, domain: str) -> None:
154
+ self.clear(lambda x: self._is_domain_match(domain, x["domain"]))
155
+
156
+ def __iter__(self) -> "Iterator[Morsel[str]]":
157
+ self._do_expiration()
158
+ for val in self._cookies.values():
159
+ yield from val.values()
160
+
161
+ def __len__(self) -> int:
162
+ """Return number of cookies.
163
+
164
+ This function does not iterate self to avoid unnecessary expiration
165
+ checks.
166
+ """
167
+ return sum(len(cookie.values()) for cookie in self._cookies.values())
168
+
169
+ def _do_expiration(self) -> None:
170
+ """Remove expired cookies."""
171
+ if not (expire_heap_len := len(self._expire_heap)):
172
+ return
173
+
174
+ # If the expiration heap grows larger than the number expirations
175
+ # times two, we clean it up to avoid keeping expired entries in
176
+ # the heap and consuming memory. We guard this with a minimum
177
+ # threshold to avoid cleaning up the heap too often when there are
178
+ # only a few scheduled expirations.
179
+ if (
180
+ expire_heap_len > _MIN_SCHEDULED_COOKIE_EXPIRATION
181
+ and expire_heap_len > len(self._expirations) * 2
182
+ ):
183
+ # Remove any expired entries from the expiration heap
184
+ # that do not match the expiration time in the expirations
185
+ # as it means the cookie has been re-added to the heap
186
+ # with a different expiration time.
187
+ self._expire_heap = [
188
+ entry
189
+ for entry in self._expire_heap
190
+ if self._expirations.get(entry[1]) == entry[0]
191
+ ]
192
+ heapq.heapify(self._expire_heap)
193
+
194
+ now = time.time()
195
+ to_del: List[Tuple[str, str, str]] = []
196
+ # Find any expired cookies and add them to the to-delete list
197
+ while self._expire_heap:
198
+ when, cookie_key = self._expire_heap[0]
199
+ if when > now:
200
+ break
201
+ heapq.heappop(self._expire_heap)
202
+ # Check if the cookie hasn't been re-added to the heap
203
+ # with a different expiration time as it will be removed
204
+ # later when it reaches the top of the heap and its
205
+ # expiration time is met.
206
+ if self._expirations.get(cookie_key) == when:
207
+ to_del.append(cookie_key)
208
+
209
+ if to_del:
210
+ self._delete_cookies(to_del)
211
+
212
+ def _delete_cookies(self, to_del: List[Tuple[str, str, str]]) -> None:
213
+ for domain, path, name in to_del:
214
+ self._host_only_cookies.discard((domain, name))
215
+ self._cookies[(domain, path)].pop(name, None)
216
+ self._morsel_cache[(domain, path)].pop(name, None)
217
+ self._expirations.pop((domain, path, name), None)
218
+
219
+ def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None:
220
+ cookie_key = (domain, path, name)
221
+ if self._expirations.get(cookie_key) == when:
222
+ # Avoid adding duplicates to the heap
223
+ return
224
+ heapq.heappush(self._expire_heap, (when, cookie_key))
225
+ self._expirations[cookie_key] = when
226
+
227
+ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
228
+ """Update cookies."""
229
+ hostname = response_url.raw_host
230
+
231
+ if not self._unsafe and is_ip_address(hostname):
232
+ # Don't accept cookies from IPs
233
+ return
234
+
235
+ if isinstance(cookies, Mapping):
236
+ cookies = cookies.items()
237
+
238
+ for name, cookie in cookies:
239
+ if not isinstance(cookie, Morsel):
240
+ tmp = SimpleCookie()
241
+ tmp[name] = cookie # type: ignore[assignment]
242
+ cookie = tmp[name]
243
+
244
+ domain = cookie["domain"]
245
+
246
+ # ignore domains with trailing dots
247
+ if domain and domain[-1] == ".":
248
+ domain = ""
249
+ del cookie["domain"]
250
+
251
+ if not domain and hostname is not None:
252
+ # Set the cookie's domain to the response hostname
253
+ # and set its host-only-flag
254
+ self._host_only_cookies.add((hostname, name))
255
+ domain = cookie["domain"] = hostname
256
+
257
+ if domain and domain[0] == ".":
258
+ # Remove leading dot
259
+ domain = domain[1:]
260
+ cookie["domain"] = domain
261
+
262
+ if hostname and not self._is_domain_match(domain, hostname):
263
+ # Setting cookies for different domains is not allowed
264
+ continue
265
+
266
+ path = cookie["path"]
267
+ if not path or path[0] != "/":
268
+ # Set the cookie's path to the response path
269
+ path = response_url.path
270
+ if not path.startswith("/"):
271
+ path = "/"
272
+ else:
273
+ # Cut everything from the last slash to the end
274
+ path = "/" + path[1 : path.rfind("/")]
275
+ cookie["path"] = path
276
+ path = path.rstrip("/")
277
+
278
+ if max_age := cookie["max-age"]:
279
+ try:
280
+ delta_seconds = int(max_age)
281
+ max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME)
282
+ self._expire_cookie(max_age_expiration, domain, path, name)
283
+ except ValueError:
284
+ cookie["max-age"] = ""
285
+
286
+ elif expires := cookie["expires"]:
287
+ if expire_time := self._parse_date(expires):
288
+ self._expire_cookie(expire_time, domain, path, name)
289
+ else:
290
+ cookie["expires"] = ""
291
+
292
+ key = (domain, path)
293
+ if self._cookies[key].get(name) != cookie:
294
+ # Don't blow away the cache if the same
295
+ # cookie gets set again
296
+ self._cookies[key][name] = cookie
297
+ self._morsel_cache[key].pop(name, None)
298
+
299
+ self._do_expiration()
300
+
301
+ def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]":
302
+ """Returns this jar's cookies filtered by their attributes."""
303
+ filtered: Union[SimpleCookie, "BaseCookie[str]"] = (
304
+ SimpleCookie() if self._quote_cookie else BaseCookie()
305
+ )
306
+ if not self._cookies:
307
+ # Skip do_expiration() if there are no cookies.
308
+ return filtered
309
+ self._do_expiration()
310
+ if not self._cookies:
311
+ # Skip rest of function if no non-expired cookies.
312
+ return filtered
313
+ if type(request_url) is not URL:
314
+ warnings.warn(
315
+ "filter_cookies expects yarl.URL instances only,"
316
+ f"and will stop working in 4.x, got {type(request_url)}",
317
+ DeprecationWarning,
318
+ stacklevel=2,
319
+ )
320
+ request_url = URL(request_url)
321
+ hostname = request_url.raw_host or ""
322
+
323
+ is_not_secure = request_url.scheme not in ("https", "wss")
324
+ if is_not_secure and self._treat_as_secure_origin:
325
+ request_origin = URL()
326
+ with contextlib.suppress(ValueError):
327
+ request_origin = request_url.origin()
328
+ is_not_secure = request_origin not in self._treat_as_secure_origin
329
+
330
+ # Send shared cookie
331
+ for c in self._cookies[("", "")].values():
332
+ filtered[c.key] = c.value
333
+
334
+ if is_ip_address(hostname):
335
+ if not self._unsafe:
336
+ return filtered
337
+ domains: Iterable[str] = (hostname,)
338
+ else:
339
+ # Get all the subdomains that might match a cookie (e.g. "foo.bar.com", "bar.com", "com")
340
+ domains = itertools.accumulate(
341
+ reversed(hostname.split(".")), _FORMAT_DOMAIN_REVERSED
342
+ )
343
+
344
+ # Get all the path prefixes that might match a cookie (e.g. "", "/foo", "/foo/bar")
345
+ paths = itertools.accumulate(request_url.path.split("/"), _FORMAT_PATH)
346
+ # Create every combination of (domain, path) pairs.
347
+ pairs = itertools.product(domains, paths)
348
+
349
+ path_len = len(request_url.path)
350
+ # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4
351
+ for p in pairs:
352
+ for name, cookie in self._cookies[p].items():
353
+ domain = cookie["domain"]
354
+
355
+ if (domain, name) in self._host_only_cookies and domain != hostname:
356
+ continue
357
+
358
+ # Skip edge case when the cookie has a trailing slash but request doesn't.
359
+ if len(cookie["path"]) > path_len:
360
+ continue
361
+
362
+ if is_not_secure and cookie["secure"]:
363
+ continue
364
+
365
+ # We already built the Morsel so reuse it here
366
+ if name in self._morsel_cache[p]:
367
+ filtered[name] = self._morsel_cache[p][name]
368
+ continue
369
+
370
+ # It's critical we use the Morsel so the coded_value
371
+ # (based on cookie version) is preserved
372
+ mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
373
+ mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
374
+ self._morsel_cache[p][name] = mrsl_val
375
+ filtered[name] = mrsl_val
376
+
377
+ return filtered
378
+
379
+ @staticmethod
380
+ def _is_domain_match(domain: str, hostname: str) -> bool:
381
+ """Implements domain matching adhering to RFC 6265."""
382
+ if hostname == domain:
383
+ return True
384
+
385
+ if not hostname.endswith(domain):
386
+ return False
387
+
388
+ non_matching = hostname[: -len(domain)]
389
+
390
+ if not non_matching.endswith("."):
391
+ return False
392
+
393
+ return not is_ip_address(hostname)
394
+
395
+ @classmethod
396
+ def _parse_date(cls, date_str: str) -> Optional[int]:
397
+ """Implements date string parsing adhering to RFC 6265."""
398
+ if not date_str:
399
+ return None
400
+
401
+ found_time = False
402
+ found_day = False
403
+ found_month = False
404
+ found_year = False
405
+
406
+ hour = minute = second = 0
407
+ day = 0
408
+ month = 0
409
+ year = 0
410
+
411
+ for token_match in cls.DATE_TOKENS_RE.finditer(date_str):
412
+
413
+ token = token_match.group("token")
414
+
415
+ if not found_time:
416
+ time_match = cls.DATE_HMS_TIME_RE.match(token)
417
+ if time_match:
418
+ found_time = True
419
+ hour, minute, second = (int(s) for s in time_match.groups())
420
+ continue
421
+
422
+ if not found_day:
423
+ day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
424
+ if day_match:
425
+ found_day = True
426
+ day = int(day_match.group())
427
+ continue
428
+
429
+ if not found_month:
430
+ month_match = cls.DATE_MONTH_RE.match(token)
431
+ if month_match:
432
+ found_month = True
433
+ assert month_match.lastindex is not None
434
+ month = month_match.lastindex
435
+ continue
436
+
437
+ if not found_year:
438
+ year_match = cls.DATE_YEAR_RE.match(token)
439
+ if year_match:
440
+ found_year = True
441
+ year = int(year_match.group())
442
+
443
+ if 70 <= year <= 99:
444
+ year += 1900
445
+ elif 0 <= year <= 69:
446
+ year += 2000
447
+
448
+ if False in (found_day, found_month, found_year, found_time):
449
+ return None
450
+
451
+ if not 1 <= day <= 31:
452
+ return None
453
+
454
+ if year < 1601 or hour > 23 or minute > 59 or second > 59:
455
+ return None
456
+
457
+ return calendar.timegm((year, month, day, hour, minute, second, -1, -1, -1))
458
+
459
+
460
+ class DummyCookieJar(AbstractCookieJar):
461
+ """Implements a dummy cookie storage.
462
+
463
+ It can be used with the ClientSession when no cookie processing is needed.
464
+
465
+ """
466
+
467
+ def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
468
+ super().__init__(loop=loop)
469
+
470
+ def __iter__(self) -> "Iterator[Morsel[str]]":
471
+ while False:
472
+ yield None
473
+
474
+ def __len__(self) -> int:
475
+ return 0
476
+
477
+ def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
478
+ pass
479
+
480
+ def clear_domain(self, domain: str) -> None:
481
+ pass
482
+
483
+ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
484
+ pass
485
+
486
+ def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
487
+ return SimpleCookie()
deepseek/lib/python3.10/site-packages/aiohttp/formdata.py ADDED
@@ -0,0 +1,182 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import io
2
+ import warnings
3
+ from typing import Any, Iterable, List, Optional
4
+ from urllib.parse import urlencode
5
+
6
+ from multidict import MultiDict, MultiDictProxy
7
+
8
+ from . import hdrs, multipart, payload
9
+ from .helpers import guess_filename
10
+ from .payload import Payload
11
+
12
+ __all__ = ("FormData",)
13
+
14
+
15
+ class FormData:
16
+ """Helper class for form body generation.
17
+
18
+ Supports multipart/form-data and application/x-www-form-urlencoded.
19
+ """
20
+
21
+ def __init__(
22
+ self,
23
+ fields: Iterable[Any] = (),
24
+ quote_fields: bool = True,
25
+ charset: Optional[str] = None,
26
+ *,
27
+ default_to_multipart: bool = False,
28
+ ) -> None:
29
+ self._writer = multipart.MultipartWriter("form-data")
30
+ self._fields: List[Any] = []
31
+ self._is_multipart = default_to_multipart
32
+ self._is_processed = False
33
+ self._quote_fields = quote_fields
34
+ self._charset = charset
35
+
36
+ if isinstance(fields, dict):
37
+ fields = list(fields.items())
38
+ elif not isinstance(fields, (list, tuple)):
39
+ fields = (fields,)
40
+ self.add_fields(*fields)
41
+
42
+ @property
43
+ def is_multipart(self) -> bool:
44
+ return self._is_multipart
45
+
46
+ def add_field(
47
+ self,
48
+ name: str,
49
+ value: Any,
50
+ *,
51
+ content_type: Optional[str] = None,
52
+ filename: Optional[str] = None,
53
+ content_transfer_encoding: Optional[str] = None,
54
+ ) -> None:
55
+
56
+ if isinstance(value, io.IOBase):
57
+ self._is_multipart = True
58
+ elif isinstance(value, (bytes, bytearray, memoryview)):
59
+ msg = (
60
+ "In v4, passing bytes will no longer create a file field. "
61
+ "Please explicitly use the filename parameter or pass a BytesIO object."
62
+ )
63
+ if filename is None and content_transfer_encoding is None:
64
+ warnings.warn(msg, DeprecationWarning)
65
+ filename = name
66
+
67
+ type_options: MultiDict[str] = MultiDict({"name": name})
68
+ if filename is not None and not isinstance(filename, str):
69
+ raise TypeError("filename must be an instance of str. Got: %s" % filename)
70
+ if filename is None and isinstance(value, io.IOBase):
71
+ filename = guess_filename(value, name)
72
+ if filename is not None:
73
+ type_options["filename"] = filename
74
+ self._is_multipart = True
75
+
76
+ headers = {}
77
+ if content_type is not None:
78
+ if not isinstance(content_type, str):
79
+ raise TypeError(
80
+ "content_type must be an instance of str. Got: %s" % content_type
81
+ )
82
+ headers[hdrs.CONTENT_TYPE] = content_type
83
+ self._is_multipart = True
84
+ if content_transfer_encoding is not None:
85
+ if not isinstance(content_transfer_encoding, str):
86
+ raise TypeError(
87
+ "content_transfer_encoding must be an instance"
88
+ " of str. Got: %s" % content_transfer_encoding
89
+ )
90
+ msg = (
91
+ "content_transfer_encoding is deprecated. "
92
+ "To maintain compatibility with v4 please pass a BytesPayload."
93
+ )
94
+ warnings.warn(msg, DeprecationWarning)
95
+ self._is_multipart = True
96
+
97
+ self._fields.append((type_options, headers, value))
98
+
99
+ def add_fields(self, *fields: Any) -> None:
100
+ to_add = list(fields)
101
+
102
+ while to_add:
103
+ rec = to_add.pop(0)
104
+
105
+ if isinstance(rec, io.IOBase):
106
+ k = guess_filename(rec, "unknown")
107
+ self.add_field(k, rec) # type: ignore[arg-type]
108
+
109
+ elif isinstance(rec, (MultiDictProxy, MultiDict)):
110
+ to_add.extend(rec.items())
111
+
112
+ elif isinstance(rec, (list, tuple)) and len(rec) == 2:
113
+ k, fp = rec
114
+ self.add_field(k, fp) # type: ignore[arg-type]
115
+
116
+ else:
117
+ raise TypeError(
118
+ "Only io.IOBase, multidict and (name, file) "
119
+ "pairs allowed, use .add_field() for passing "
120
+ "more complex parameters, got {!r}".format(rec)
121
+ )
122
+
123
+ def _gen_form_urlencoded(self) -> payload.BytesPayload:
124
+ # form data (x-www-form-urlencoded)
125
+ data = []
126
+ for type_options, _, value in self._fields:
127
+ data.append((type_options["name"], value))
128
+
129
+ charset = self._charset if self._charset is not None else "utf-8"
130
+
131
+ if charset == "utf-8":
132
+ content_type = "application/x-www-form-urlencoded"
133
+ else:
134
+ content_type = "application/x-www-form-urlencoded; charset=%s" % charset
135
+
136
+ return payload.BytesPayload(
137
+ urlencode(data, doseq=True, encoding=charset).encode(),
138
+ content_type=content_type,
139
+ )
140
+
141
+ def _gen_form_data(self) -> multipart.MultipartWriter:
142
+ """Encode a list of fields using the multipart/form-data MIME format"""
143
+ if self._is_processed:
144
+ raise RuntimeError("Form data has been processed already")
145
+ for dispparams, headers, value in self._fields:
146
+ try:
147
+ if hdrs.CONTENT_TYPE in headers:
148
+ part = payload.get_payload(
149
+ value,
150
+ content_type=headers[hdrs.CONTENT_TYPE],
151
+ headers=headers,
152
+ encoding=self._charset,
153
+ )
154
+ else:
155
+ part = payload.get_payload(
156
+ value, headers=headers, encoding=self._charset
157
+ )
158
+ except Exception as exc:
159
+ raise TypeError(
160
+ "Can not serialize value type: %r\n "
161
+ "headers: %r\n value: %r" % (type(value), headers, value)
162
+ ) from exc
163
+
164
+ if dispparams:
165
+ part.set_content_disposition(
166
+ "form-data", quote_fields=self._quote_fields, **dispparams
167
+ )
168
+ # FIXME cgi.FieldStorage doesn't likes body parts with
169
+ # Content-Length which were sent via chunked transfer encoding
170
+ assert part.headers is not None
171
+ part.headers.popall(hdrs.CONTENT_LENGTH, None)
172
+
173
+ self._writer.append_payload(part)
174
+
175
+ self._is_processed = True
176
+ return self._writer
177
+
178
+ def __call__(self) -> Payload:
179
+ if self._is_multipart:
180
+ return self._gen_form_data()
181
+ else:
182
+ return self._gen_form_urlencoded()
deepseek/lib/python3.10/site-packages/aiohttp/helpers.py ADDED
@@ -0,0 +1,944 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Various helper functions"""
2
+
3
+ import asyncio
4
+ import base64
5
+ import binascii
6
+ import contextlib
7
+ import datetime
8
+ import enum
9
+ import functools
10
+ import inspect
11
+ import netrc
12
+ import os
13
+ import platform
14
+ import re
15
+ import sys
16
+ import time
17
+ import weakref
18
+ from collections import namedtuple
19
+ from contextlib import suppress
20
+ from email.parser import HeaderParser
21
+ from email.utils import parsedate
22
+ from math import ceil
23
+ from pathlib import Path
24
+ from types import TracebackType
25
+ from typing import (
26
+ Any,
27
+ Callable,
28
+ ContextManager,
29
+ Dict,
30
+ Generator,
31
+ Generic,
32
+ Iterable,
33
+ Iterator,
34
+ List,
35
+ Mapping,
36
+ Optional,
37
+ Protocol,
38
+ Tuple,
39
+ Type,
40
+ TypeVar,
41
+ Union,
42
+ get_args,
43
+ overload,
44
+ )
45
+ from urllib.parse import quote
46
+ from urllib.request import getproxies, proxy_bypass
47
+
48
+ import attr
49
+ from multidict import MultiDict, MultiDictProxy, MultiMapping
50
+ from propcache.api import under_cached_property as reify
51
+ from yarl import URL
52
+
53
+ from . import hdrs
54
+ from .log import client_logger
55
+
56
+ if sys.version_info >= (3, 11):
57
+ import asyncio as async_timeout
58
+ else:
59
+ import async_timeout
60
+
61
__all__ = ("BasicAuth", "ChainMapProxy", "ETag", "reify")

# Platform flags used for platform-specific behaviour (e.g. .netrc lookup).
IS_MACOS = platform.system() == "Darwin"
IS_WINDOWS = platform.system() == "Windows"

# Python-version feature flags.
PY_310 = sys.version_info >= (3, 10)
PY_311 = sys.version_info >= (3, 11)


_T = TypeVar("_T")
_S = TypeVar("_S")

# Module-wide sentinel distinct from None, since None can be a valid value.
_SENTINEL = enum.Enum("_SENTINEL", "sentinel")
sentinel = _SENTINEL.sentinel

# When set, pure-Python implementations are preferred over C extensions.
NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))

# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
EMPTY_BODY_STATUS_CODES = frozenset((204, 304, *range(100, 200)))
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
EMPTY_BODY_METHODS = hdrs.METH_HEAD_ALL

# Mirrors asyncio's debug-mode detection (dev mode or PYTHONASYNCIODEBUG).
DEBUG = sys.flags.dev_mode or (
    not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
)


# RFC 2616 grammar character classes used for header token validation.
CHAR = {chr(i) for i in range(0, 128)}
CTL = {chr(i) for i in range(0, 32)} | {
    chr(127),
}
SEPARATORS = {
    "(",
    ")",
    "<",
    ">",
    "@",
    ",",
    ";",
    ":",
    "\\",
    '"',
    "/",
    "[",
    "]",
    "?",
    "=",
    "{",
    "}",
    " ",
    chr(9),
}
# Token characters: ASCII minus CTLs minus separators.
# NOTE(review): chr(9) is in both CTL and SEPARATORS, so the chained XOR
# puts TAB back into TOKEN — looks unintended but is preserved as-is.
TOKEN = CHAR ^ CTL ^ SEPARATORS
115
+
116
+
117
class noop:
    """Awaitable that completes immediately without producing a value."""

    def __await__(self) -> Generator[None, None, None]:
        # A single bare yield makes ``await noop()`` a valid await point.
        yield
120
+
121
+
122
class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
    """Http basic authentication helper."""

    def __new__(
        cls, login: str, password: str = "", encoding: str = "latin1"
    ) -> "BasicAuth":
        # Validate eagerly so a malformed credentials tuple can never exist.
        if login is None:
            raise ValueError("None is not allowed as login value")

        if password is None:
            raise ValueError("None is not allowed as password value")

        # The wire format is "login:password"; a colon in the login would
        # make the split ambiguous.
        if ":" in login:
            raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')

        return super().__new__(cls, login, password, encoding)

    @classmethod
    def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
        """Create a BasicAuth object from an Authorization HTTP header."""
        try:
            auth_type, encoded_credentials = auth_header.split(" ", 1)
        except ValueError:
            raise ValueError("Could not parse authorization header.")

        if auth_type.lower() != "basic":
            raise ValueError("Unknown authorization method %s" % auth_type)

        try:
            # validate=True rejects characters outside the base64 alphabet
            # instead of silently ignoring them.
            decoded = base64.b64decode(
                encoded_credentials.encode("ascii"), validate=True
            ).decode(encoding)
        except binascii.Error:
            raise ValueError("Invalid base64 encoding.")

        try:
            # RFC 2617 HTTP Authentication
            # https://www.ietf.org/rfc/rfc2617.txt
            # the colon must be present, but the username and password may be
            # otherwise blank.
            username, password = decoded.split(":", 1)
        except ValueError:
            raise ValueError("Invalid credentials.")

        return cls(username, password, encoding=encoding)

    @classmethod
    def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
        """Create BasicAuth from url."""
        if not isinstance(url, URL):
            raise TypeError("url should be yarl.URL instance")
        # Check raw_user and raw_password first as yarl is likely
        # to already have these values parsed from the netloc in the cache.
        if url.raw_user is None and url.raw_password is None:
            return None
        return cls(url.user or "", url.password or "", encoding=encoding)

    def encode(self) -> str:
        """Encode credentials."""
        creds = (f"{self.login}:{self.password}").encode(self.encoding)
        return "Basic %s" % base64.b64encode(creds).decode(self.encoding)
183
+
184
+
185
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Remove user and password from URL if present and return BasicAuth object."""
    # raw_user/raw_password are consulted first because yarl usually has
    # them cached from parsing the netloc already.
    has_credentials = url.raw_user is not None or url.raw_password is not None
    if not has_credentials:
        return url, None
    auth = BasicAuth(url.user or "", url.password or "")
    return url.with_user(None), auth
192
+
193
+
194
def netrc_from_env() -> Optional[netrc.netrc]:
    """Load netrc from file.

    Attempt to load it from the path specified by the env-var
    NETRC or in the default location in the user's home directory.

    Returns None if it couldn't be found or fails to parse.
    """
    netrc_env = os.environ.get("NETRC")

    if netrc_env is not None:
        # Explicit override always wins over the default location.
        netrc_path = Path(netrc_env)
    else:
        try:
            home_dir = Path.home()
        except RuntimeError as e:  # pragma: no cover
            # if pathlib can't resolve home, it may raise a RuntimeError
            client_logger.debug(
                "Could not resolve home directory when "
                "trying to look for .netrc file: %s",
                e,
            )
            return None

        # Windows conventionally uses "_netrc" instead of ".netrc".
        netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")

    try:
        return netrc.netrc(str(netrc_path))
    except netrc.NetrcParseError as e:
        client_logger.warning("Could not parse .netrc file: %s", e)
    except OSError as e:
        netrc_exists = False
        with contextlib.suppress(OSError):
            netrc_exists = netrc_path.is_file()
        # we couldn't read the file (doesn't exist, permissions, etc.)
        if netrc_env or netrc_exists:
            # only warn if the environment wanted us to load it,
            # or it appears like the default file does actually exist
            client_logger.warning("Could not read .netrc file: %s", e)

    return None
235
+
236
+
237
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
    """Immutable pairing of a proxy URL with its optional credentials."""

    proxy: URL
    proxy_auth: Optional[BasicAuth]
241
+
242
+
243
def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth:
    """
    Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``.

    :raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no
                         entry is found for the ``host``.
    """
    if netrc_obj is None:
        raise LookupError("No .netrc file found")

    entry = netrc_obj.authenticators(host)
    if entry is None:
        raise LookupError(f"No entry for {host!s} found in the `.netrc` file.")

    login, account, password = entry

    # TODO(PY311): username = login or account
    # On Python < 3.11 a missing account is None while a missing login is
    # an empty string; from 3.11 on both come back as empty strings.
    if login or account is None:
        username = login
    else:
        username = account

    # TODO(PY311): drop the fallback once password is always a string.
    return BasicAuth(username, password or "")
270
+
271
+
272
def proxies_from_env() -> Dict[str, ProxyInfo]:
    """Read proxy configuration from the standard environment variables.

    Builds a mapping of URL scheme -> ProxyInfo from ``urllib``'s
    ``getproxies()``, dropping unsupported TLS proxies and filling in
    credentials from ``.netrc`` when none are embedded in the proxy URL.
    """
    proxy_urls = {
        k: URL(v)
        for k, v in getproxies().items()
        if k in ("http", "https", "ws", "wss")
    }
    netrc_obj = netrc_from_env()
    stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
    for proto, val in stripped.items():
        proxy, auth = val
        if proxy.scheme in ("https", "wss"):
            # TLS connections *to the proxy itself* are not supported.
            client_logger.warning(
                "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
            )
            continue
        if netrc_obj and auth is None:
            # Fall back to .netrc credentials when the URL carried none.
            if proxy.host is not None:
                try:
                    auth = basicauth_from_netrc(netrc_obj, proxy.host)
                except LookupError:
                    auth = None
        ret[proto] = ProxyInfo(proxy, auth)
    return ret
296
+
297
+
298
def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Get a permitted proxy for the given URL from the env."""
    host = url.host
    # Respect the platform's proxy-bypass rules (e.g. NO_PROXY).
    if host is not None and proxy_bypass(host):
        raise LookupError(f"Proxying is disallowed for `{host!r}`")

    env_proxies = proxies_from_env()
    if url.scheme not in env_proxies:
        raise LookupError(f"No proxies found for `{url!s}` in the env")
    info = env_proxies[url.scheme]
    return info.proxy, info.proxy_auth
310
+
311
+
312
@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
    """Parsed representation of a MIME type (see ``parse_mimetype``)."""

    type: str
    subtype: str
    suffix: str
    parameters: "MultiDictProxy[str]"
318
+
319
+
320
@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
    """Parses a MIME type into its components.

    mimetype is a MIME type string.

    Returns a MimeType object.

    Example:

    >>> parse_mimetype('text/html; charset=utf-8')
    MimeType(type='text', subtype='html', suffix='',
             parameters={'charset': 'utf-8'})

    """
    if not mimetype:
        empty_params = MultiDictProxy(MultiDict())
        return MimeType(type="", subtype="", suffix="", parameters=empty_params)

    head, *raw_params = mimetype.split(";")

    # Collect ";"-separated key=value parameters, stripping quotes.
    params: MultiDict[str] = MultiDict()
    for chunk in raw_params:
        if not chunk:
            continue
        name, _, val = chunk.partition("=")
        params.add(name.lower().strip(), val.strip(' "'))

    fulltype = head.strip().lower()
    if fulltype == "*":
        # A bare "*" is shorthand for "anything".
        fulltype = "*/*"

    mtype, _, stype = fulltype.partition("/")
    stype, _, suffix = stype.partition("+")

    return MimeType(
        type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
    )
358
+
359
+
360
def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
    """Best-effort file name for *obj* (e.g. an open file's ``.name``).

    Returns *default* when the object has no usable string name or when
    the name is a pseudo-name such as ``<stdin>``.
    """
    name = getattr(obj, "name", None)
    if not name or not isinstance(name, str):
        return default
    # Angle-bracketed names denote synthetic streams, not real paths.
    if name.startswith("<") or name.endswith(">"):
        return default
    return Path(name).name
365
+
366
+
367
not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}


def quoted_string(content: str) -> str:
    """Return 7-bit content as quoted-string.

    Format content into a quoted-string as defined in RFC5322 for
    Internet Message Format. Notice that this is not the 8-bit HTTP
    format, but the 7-bit email format. Content must be in usascii or
    a ValueError is raised.
    """
    # Only printable ASCII plus TAB may appear inside a quoted-string.
    if not (QCONTENT > set(content)):
        raise ValueError(f"bad content for quoted-string {content!r}")

    def _escape(match: "re.Match[str]") -> str:
        # Backslash-escape every character that is not valid qtext.
        return "\\" + match.group(0)

    return not_qtext_re.sub(_escape, content)
382
+
383
+
384
def content_disposition_header(
    disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
) -> str:
    """Sets ``Content-Disposition`` header for MIME.

    This is the MIME payload Content-Disposition header from RFC 2183
    and RFC 7579 section 4.2, not the HTTP Content-Disposition from
    RFC 6266.

    disptype is a disposition type: inline, attachment, form-data.
    Should be valid extension token (see RFC 2183)

    quote_fields performs value quoting to 7-bit MIME headers
    according to RFC 7578. Set to quote_fields to False if recipient
    can take 8-bit file names and field values.

    _charset specifies the charset to use when quote_fields is True.

    params is a dict with disposition params.
    """
    # The disposition type itself must be a valid HTTP token.
    if not disptype or not (TOKEN > set(disptype)):
        raise ValueError(f"bad content disposition type {disptype!r}")

    value = disptype
    if params:
        lparams = []
        for key, val in params.items():
            # Parameter names must also be valid tokens.
            if not key or not (TOKEN > set(key)):
                raise ValueError(f"bad content disposition parameter {key!r}={val!r}")
            if quote_fields:
                if key.lower() == "filename":
                    # RFC 7578: percent-encode the filename value.
                    qval = quote(val, "", encoding=_charset)
                    lparams.append((key, '"%s"' % qval))
                else:
                    try:
                        # Prefer a plain quoted-string when the value is
                        # pure ASCII.
                        qval = quoted_string(val)
                    except ValueError:
                        # Non-ASCII: fall back to RFC 2231 extended
                        # notation, e.g. key*=utf-8''%C3%A9...
                        qval = "".join(
                            (_charset, "''", quote(val, "", encoding=_charset))
                        )
                        lparams.append((key + "*", qval))
                    else:
                        lparams.append((key, '"%s"' % qval))
            else:
                # 8-bit mode: only escape backslashes and double quotes.
                qval = val.replace("\\", "\\\\").replace('"', '\\"')
                lparams.append((key, '"%s"' % qval))
        sparams = "; ".join("=".join(pair) for pair in lparams)
        value = "; ".join((value, sparams))
    return value
433
+
434
+
435
def is_ip_address(host: Optional[str]) -> bool:
    """Heuristically check whether *host* looks like an IP address.

    Meant only to distinguish address literals from domain names, not to
    validate addresses.
    """
    if not host:
        return False
    # IPv6 literals always contain a colon.
    if ":" in host:
        return True
    # IPv4 literals consist solely of digits and dots.
    return host.replace(".", "").isdigit()
446
+
447
+
448
_cached_current_datetime: Optional[int] = None
_cached_formatted_datetime = ""


def rfc822_formatted_time() -> str:
    """Return the current UTC time formatted as an RFC 822 HTTP date.

    The formatted string is cached per whole second, since HTTP date
    headers have one-second resolution.
    """
    global _cached_current_datetime
    global _cached_formatted_datetime

    now = int(time.time())
    if now == _cached_current_datetime:
        return _cached_formatted_datetime

    # English day/month names are mandated by the HTTP date format,
    # independent of locale.  Tuples are constants stored in the code object.
    weekdays = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
    months = (
        "",  # pad so month numbers index 1-based
        "Jan",
        "Feb",
        "Mar",
        "Apr",
        "May",
        "Jun",
        "Jul",
        "Aug",
        "Sep",
        "Oct",
        "Nov",
        "Dec",
    )

    year, month, day, hh, mm, ss, wd = time.gmtime(now)[:7]
    _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
        weekdays[wd],
        day,
        months[month],
        year,
        hh,
        mm,
        ss,
    )
    _cached_current_datetime = now
    return _cached_formatted_datetime
490
+
491
+
492
def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
    """Timer callback: invoke method ``name`` on the referent if still alive."""
    ref, method_name = info
    target = ref()
    if target is None:
        return
    # Best effort: errors raised by the callback are deliberately ignored.
    with suppress(Exception):
        getattr(target, method_name)()


def weakref_handle(
    ob: object,
    name: str,
    timeout: float,
    loop: asyncio.AbstractEventLoop,
    timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
    """Schedule ``ob.name()`` after *timeout* without keeping *ob* alive.

    Returns the timer handle, or ``None`` when *timeout* is unset or
    non-positive.
    """
    if timeout is None or timeout <= 0:
        return None
    when = loop.time() + timeout
    if timeout >= timeout_ceil_threshold:
        # Round coarse deadlines up so many timers can share a slot.
        when = ceil(when)
    return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
514
+
515
+
516
def call_later(
    cb: Callable[[], Any],
    timeout: float,
    loop: asyncio.AbstractEventLoop,
    timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
    """Schedule *cb* after *timeout* seconds.

    Returns ``None`` when *timeout* is unset or non-positive.
    """
    if timeout is None or timeout <= 0:
        return None
    when = calculate_timeout_when(loop.time(), timeout, timeout_ceil_threshold)
    return loop.call_at(when, cb)


def calculate_timeout_when(
    loop_time: float,
    timeout: float,
    timeout_ceiling_threshold: float,
) -> float:
    """Calculate when to execute a timeout.

    Deadlines beyond the ceiling threshold are rounded up to a whole
    second so that many timers can fire together.
    """
    when = loop_time + timeout
    return ceil(when) if timeout > timeout_ceiling_threshold else when
539
+
540
+
541
class TimeoutHandle:
    """Timeout handle"""

    __slots__ = ("_timeout", "_loop", "_ceil_threshold", "_callbacks")

    def __init__(
        self,
        loop: asyncio.AbstractEventLoop,
        timeout: Optional[float],
        ceil_threshold: float = 5,
    ) -> None:
        self._timeout = timeout
        self._loop = loop
        self._ceil_threshold = ceil_threshold
        # Callbacks fired (in registration order) when the timeout expires.
        self._callbacks: List[
            Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]
        ] = []

    def register(
        self, callback: Callable[..., None], *args: Any, **kwargs: Any
    ) -> None:
        """Add *callback* to be invoked if the timeout fires."""
        self._callbacks.append((callback, args, kwargs))

    def close(self) -> None:
        """Drop all registered callbacks without firing them."""
        self._callbacks.clear()

    def start(self) -> Optional[asyncio.TimerHandle]:
        """Schedule expiry on the loop; ``None`` when no positive timeout is set."""
        timeout = self._timeout
        if timeout is not None and timeout > 0:
            when = self._loop.time() + timeout
            # Round coarse deadlines up to a whole second.
            if timeout >= self._ceil_threshold:
                when = ceil(when)
            return self._loop.call_at(when, self.__call__)
        else:
            return None

    def timer(self) -> "BaseTimerContext":
        """Return a timer context tied to this handle (no-op if no timeout)."""
        if self._timeout is not None and self._timeout > 0:
            timer = TimerContext(self._loop)
            self.register(timer.timeout)
            return timer
        else:
            return TimerNoop()

    def __call__(self) -> None:
        # Fire every registered callback, swallowing individual failures so
        # one bad callback cannot suppress the rest.
        for cb, args, kwargs in self._callbacks:
            with suppress(Exception):
                cb(*args, **kwargs)

        self._callbacks.clear()
591
+
592
+
593
class BaseTimerContext(ContextManager["BaseTimerContext"]):
    """Base class for timer contexts; the base timeout check is a no-op."""

    __slots__ = ()

    def assert_timeout(self) -> None:
        """Raise TimeoutError if timeout has been exceeded."""
600
+
601
class TimerNoop(BaseTimerContext):
    """Timer context used when no timeout is configured; does nothing."""

    __slots__ = ()

    def __enter__(self) -> BaseTimerContext:
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Never suppresses exceptions and performs no cleanup.
        return
615
+
616
+
617
class TimerContext(BaseTimerContext):
    """Low resolution timeout context manager"""

    __slots__ = ("_loop", "_tasks", "_cancelled", "_cancelling")

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        # Tasks currently inside this context; all cancelled on timeout.
        self._tasks: List[asyncio.Task[Any]] = []
        self._cancelled = False
        # Task.cancelling() depth recorded on __enter__ (Python 3.11+ only).
        self._cancelling = 0

    def assert_timeout(self) -> None:
        """Raise TimeoutError if timer has already been cancelled."""
        if self._cancelled:
            raise asyncio.TimeoutError from None

    def __enter__(self) -> BaseTimerContext:
        task = asyncio.current_task(loop=self._loop)
        if task is None:
            raise RuntimeError("Timeout context manager should be used inside a task")

        if sys.version_info >= (3, 11):
            # Remember if the task was already cancelling
            # so when we __exit__ we can decide if we should
            # raise asyncio.TimeoutError or let the cancellation propagate
            self._cancelling = task.cancelling()

        # Entering an already-expired timer fails immediately.
        if self._cancelled:
            raise asyncio.TimeoutError from None

        self._tasks.append(task)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Optional[bool]:
        enter_task: Optional[asyncio.Task[Any]] = None
        if self._tasks:
            enter_task = self._tasks.pop()

        if exc_type is asyncio.CancelledError and self._cancelled:
            assert enter_task is not None
            # The timeout was hit, and the task was cancelled
            # so we need to uncancel the last task that entered the context manager
            # since the cancellation should not leak out of the context manager
            if sys.version_info >= (3, 11):
                # If the task was already cancelling don't raise
                # asyncio.TimeoutError and instead return None
                # to allow the cancellation to propagate
                if enter_task.uncancel() > self._cancelling:
                    return None
            raise asyncio.TimeoutError from exc_val
        return None

    def timeout(self) -> None:
        """Cancel every task inside the context; invoked when the timer fires."""
        if not self._cancelled:
            for task in set(self._tasks):
                task.cancel()

        self._cancelled = True
681
+
682
def ceil_timeout(
    delay: Optional[float], ceil_threshold: float = 5
) -> async_timeout.Timeout:
    """Return an async timeout context for *delay* seconds.

    Deadlines beyond *ceil_threshold* are rounded up to a whole second so
    that many timeouts can share the same loop wakeup.  ``None`` or a
    non-positive delay yields an unbounded timeout.
    """
    if delay is None or delay <= 0:
        return async_timeout.timeout(None)

    deadline = asyncio.get_running_loop().time() + delay
    if delay > ceil_threshold:
        deadline = ceil(deadline)
    return async_timeout.timeout_at(deadline)
694
+
695
+
696
class HeadersMixin:
    """Mixin for handling headers."""

    ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])

    # _headers is populated by the subclass.  Content-Type parse results
    # are cached until the raw header value changes.
    _headers: MultiMapping[str]
    _content_type: Optional[str] = None
    _content_dict: Optional[Dict[str, str]] = None
    _stored_content_type: Union[str, None, _SENTINEL] = sentinel

    def _parse_content_type(self, raw: Optional[str]) -> None:
        # Remember which raw header string produced the cached values.
        self._stored_content_type = raw
        if raw is None:
            # default value according to RFC 2616
            self._content_type = "application/octet-stream"
            self._content_dict = {}
        else:
            msg = HeaderParser().parsestr("Content-Type: " + raw)
            self._content_type = msg.get_content_type()
            params = msg.get_params(())
            self._content_dict = dict(params[1:])  # First element is content type again

    @property
    def content_type(self) -> str:
        """The value of content part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        assert self._content_type is not None
        return self._content_type

    @property
    def charset(self) -> Optional[str]:
        """The value of charset part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        assert self._content_dict is not None
        return self._content_dict.get("charset")

    @property
    def content_length(self) -> Optional[int]:
        """The value of Content-Length HTTP header."""
        content_length = self._headers.get(hdrs.CONTENT_LENGTH)
        return None if content_length is None else int(content_length)
741
+
742
+
743
def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
    """Set *result* on *fut*; no-op when the future is already done."""
    if fut.done():
        return
    fut.set_result(result)
746
+
747
+
748
# Sentinel distinguishing "no cause supplied" from an explicit None/value.
_EXC_SENTINEL = BaseException()


class ErrorableProtocol(Protocol):
    """Structural type for anything exposing ``set_exception``."""

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = ...,
    ) -> None: ...  # pragma: no cover


def set_exception(
    fut: "asyncio.Future[_T] | ErrorableProtocol",
    exc: BaseException,
    exc_cause: BaseException = _EXC_SENTINEL,
) -> None:
    """Set future exception.

    If the future is marked as complete, this function is a no-op.

    :param exc_cause: An exception that is a direct cause of ``exc``.
                      Only set if provided.
    """
    if asyncio.isfuture(fut) and fut.done():
        return

    # Attach the cause only when one was explicitly supplied and it is not
    # the exception itself.
    if exc_cause is not _EXC_SENTINEL and exc_cause is not exc:
        exc.__cause__ = exc_cause

    fut.set_exception(exc)
780
+
781
+
782
@functools.total_ordering
class AppKey(Generic[_T]):
    """Keys for static typing support in Application."""

    __slots__ = ("_name", "_t", "__orig_class__")

    # This may be set by Python when instantiating with a generic type. We need to
    # support this, in order to support types that are not concrete classes,
    # like Iterable, which can't be passed as the second parameter to __init__.
    __orig_class__: Type[object]

    def __init__(self, name: str, t: Optional[Type[_T]] = None):
        # Prefix with module name to help deduplicate key names.
        frame = inspect.currentframe()
        while frame:
            if frame.f_code.co_name == "<module>":
                module: str = frame.f_globals["__name__"]
                break
            frame = frame.f_back

        # NOTE(review): if no "<module>" frame exists on the stack, `module`
        # is unbound and the next line raises NameError — appears to assume
        # instantiation at module level; confirm.
        self._name = module + "." + name
        self._t = t

    def __lt__(self, other: object) -> bool:
        if isinstance(other, AppKey):
            return self._name < other._name
        return True  # Order AppKey above other types.

    def __repr__(self) -> str:
        t = self._t
        if t is None:
            with suppress(AttributeError):
                # Set to type arg.
                t = get_args(self.__orig_class__)[0]

        if t is None:
            t_repr = "<<Unknown>>"
        elif isinstance(t, type):
            if t.__module__ == "builtins":
                t_repr = t.__qualname__
            else:
                t_repr = f"{t.__module__}.{t.__qualname__}"
        else:
            t_repr = repr(t)
        return f"<AppKey({self._name}, type={t_repr})>"
827
+
828
+
829
class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]):
    """Read-only view over several mappings; earlier maps shadow later ones."""

    __slots__ = ("_maps",)

    def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None:
        self._maps = tuple(maps)

    def __init_subclass__(cls) -> None:
        # The proxy is deliberately final.
        raise TypeError(
            "Inheritance class {} from ChainMapProxy "
            "is forbidden".format(cls.__name__)
        )

    @overload  # type: ignore[override]
    def __getitem__(self, key: AppKey[_T]) -> _T: ...

    @overload
    def __getitem__(self, key: str) -> Any: ...

    def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any:
        # First mapping containing the key wins.
        for mapping in self._maps:
            try:
                return mapping[key]
            except KeyError:
                pass
        raise KeyError(key)

    @overload  # type: ignore[override]
    def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]: ...

    @overload
    def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ...

    @overload
    def get(self, key: str, default: Any = ...) -> Any: ...

    def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any:
        try:
            return self[key]
        except KeyError:
            return default

    def __len__(self) -> int:
        # reuses stored hash values if possible
        return len(set().union(*self._maps))

    def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]:
        d: Dict[Union[str, AppKey[Any]], Any] = {}
        # Iterate in reverse so earlier maps overwrite later ones, matching
        # the lookup precedence of __getitem__.
        for mapping in reversed(self._maps):
            # reuses stored hash values if possible
            d.update(mapping)
        return iter(d)

    def __contains__(self, key: object) -> bool:
        return any(key in m for m in self._maps)

    def __bool__(self) -> bool:
        return any(self._maps)

    def __repr__(self) -> str:
        content = ", ".join(map(repr, self._maps))
        return f"ChainMapProxy({content})"
890
+
891
+
892
# https://tools.ietf.org/html/rfc7232#section-2.3
_ETAGC = r"[!\x23-\x7E\x80-\xff]+"
_ETAGC_RE = re.compile(_ETAGC)
# A single quoted etag, optionally weak ('W/"..."').
_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"'
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
# Etags in a comma-separated list; the trailing `|(.)` branch matches any
# character that does not fit the grammar.
LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")

ETAG_ANY = "*"


@attr.s(auto_attribs=True, frozen=True, slots=True)
class ETag:
    """Parsed HTTP entity tag (RFC 7232)."""

    value: str
    is_weak: bool = False
906
+
907
+
908
def validate_etag_value(value: str) -> None:
    """Reject *value* unless it is ``*`` or a well-formed opaque etag."""
    if value == ETAG_ANY:
        return
    if not _ETAGC_RE.fullmatch(value):
        raise ValueError(
            f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
        )
913
+
914
+
915
def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
    """Process a date string, return a datetime object"""
    if date_str is None:
        return None
    parts = parsedate(date_str)
    if parts is None:
        return None
    # Out-of-range components make the constructor raise; treat as unparsable.
    with suppress(ValueError):
        return datetime.datetime(*parts[:6], tzinfo=datetime.timezone.utc)
    return None
923
+
924
+
925
@functools.lru_cache
def must_be_empty_body(method: str, code: int) -> bool:
    """Check if a request must return an empty body."""
    if code in EMPTY_BODY_STATUS_CODES:
        return True
    if method in EMPTY_BODY_METHODS:
        return True
    # A successful response to CONNECT switches to tunnelling: no body.
    return 200 <= code < 300 and method in hdrs.METH_CONNECT_ALL
933
+
934
+
935
def should_remove_content_length(method: str, code: int) -> bool:
    """Check if a Content-Length header should be removed.

    This should always be a subset of must_be_empty_body
    """
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4
    if code in EMPTY_BODY_STATUS_CODES:
        return True
    return 200 <= code < 300 and method in hdrs.METH_CONNECT_ALL
deepseek/lib/python3.10/site-packages/aiohttp/http.py ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+ from http import HTTPStatus
3
+ from typing import Mapping, Tuple
4
+
5
+ from . import __version__
6
+ from .http_exceptions import HttpProcessingError as HttpProcessingError
7
+ from .http_parser import (
8
+ HeadersParser as HeadersParser,
9
+ HttpParser as HttpParser,
10
+ HttpRequestParser as HttpRequestParser,
11
+ HttpResponseParser as HttpResponseParser,
12
+ RawRequestMessage as RawRequestMessage,
13
+ RawResponseMessage as RawResponseMessage,
14
+ )
15
+ from .http_websocket import (
16
+ WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
17
+ WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
18
+ WS_KEY as WS_KEY,
19
+ WebSocketError as WebSocketError,
20
+ WebSocketReader as WebSocketReader,
21
+ WebSocketWriter as WebSocketWriter,
22
+ WSCloseCode as WSCloseCode,
23
+ WSMessage as WSMessage,
24
+ WSMsgType as WSMsgType,
25
+ ws_ext_gen as ws_ext_gen,
26
+ ws_ext_parse as ws_ext_parse,
27
+ )
28
+ from .http_writer import (
29
+ HttpVersion as HttpVersion,
30
+ HttpVersion10 as HttpVersion10,
31
+ HttpVersion11 as HttpVersion11,
32
+ StreamWriter as StreamWriter,
33
+ )
34
+
35
+ __all__ = (
36
+ "HttpProcessingError",
37
+ "RESPONSES",
38
+ "SERVER_SOFTWARE",
39
+ # .http_writer
40
+ "StreamWriter",
41
+ "HttpVersion",
42
+ "HttpVersion10",
43
+ "HttpVersion11",
44
+ # .http_parser
45
+ "HeadersParser",
46
+ "HttpParser",
47
+ "HttpRequestParser",
48
+ "HttpResponseParser",
49
+ "RawRequestMessage",
50
+ "RawResponseMessage",
51
+ # .http_websocket
52
+ "WS_CLOSED_MESSAGE",
53
+ "WS_CLOSING_MESSAGE",
54
+ "WS_KEY",
55
+ "WebSocketReader",
56
+ "WebSocketWriter",
57
+ "ws_ext_gen",
58
+ "ws_ext_parse",
59
+ "WSMessage",
60
+ "WebSocketError",
61
+ "WSMsgType",
62
+ "WSCloseCode",
63
+ )
64
+
65
+
66
+ SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
67
+ sys.version_info, __version__
68
+ )
69
+
70
+ RESPONSES: Mapping[int, Tuple[str, str]] = {
71
+ v: (v.phrase, v.description) for v in HTTPStatus.__members__.values()
72
+ }
deepseek/lib/python3.10/site-packages/aiohttp/http_exceptions.py ADDED
@@ -0,0 +1,112 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Low-level http related exceptions."""
2
+
3
+ from textwrap import indent
4
+ from typing import Optional, Union
5
+
6
+ from .typedefs import _CIMultiDict
7
+
8
+ __all__ = ("HttpProcessingError",)
9
+
10
+
11
+ class HttpProcessingError(Exception):
12
+ """HTTP error.
13
+
14
+ Shortcut for raising HTTP errors with custom code, message and headers.
15
+
16
+ code: HTTP Error code.
17
+ message: (optional) Error message.
18
+ headers: (optional) Headers to be sent in response, a list of pairs
19
+ """
20
+
21
+ code = 0
22
+ message = ""
23
+ headers = None
24
+
25
+ def __init__(
26
+ self,
27
+ *,
28
+ code: Optional[int] = None,
29
+ message: str = "",
30
+ headers: Optional[_CIMultiDict] = None,
31
+ ) -> None:
32
+ if code is not None:
33
+ self.code = code
34
+ self.headers = headers
35
+ self.message = message
36
+
37
+ def __str__(self) -> str:
38
+ msg = indent(self.message, " ")
39
+ return f"{self.code}, message:\n{msg}"
40
+
41
+ def __repr__(self) -> str:
42
+ return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>"
43
+
44
+
45
+ class BadHttpMessage(HttpProcessingError):
46
+
47
+ code = 400
48
+ message = "Bad Request"
49
+
50
+ def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
51
+ super().__init__(message=message, headers=headers)
52
+ self.args = (message,)
53
+
54
+
55
+ class HttpBadRequest(BadHttpMessage):
56
+
57
+ code = 400
58
+ message = "Bad Request"
59
+
60
+
61
+ class PayloadEncodingError(BadHttpMessage):
62
+ """Base class for payload errors"""
63
+
64
+
65
+ class ContentEncodingError(PayloadEncodingError):
66
+ """Content encoding error."""
67
+
68
+
69
+ class TransferEncodingError(PayloadEncodingError):
70
+ """transfer encoding error."""
71
+
72
+
73
+ class ContentLengthError(PayloadEncodingError):
74
+ """Not enough data for satisfy content length header."""
75
+
76
+
77
+ class LineTooLong(BadHttpMessage):
78
+ def __init__(
79
+ self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
80
+ ) -> None:
81
+ super().__init__(
82
+ f"Got more than {limit} bytes ({actual_size}) when reading {line}."
83
+ )
84
+ self.args = (line, limit, actual_size)
85
+
86
+
87
+ class InvalidHeader(BadHttpMessage):
88
+ def __init__(self, hdr: Union[bytes, str]) -> None:
89
+ hdr_s = hdr.decode(errors="backslashreplace") if isinstance(hdr, bytes) else hdr
90
+ super().__init__(f"Invalid HTTP header: {hdr!r}")
91
+ self.hdr = hdr_s
92
+ self.args = (hdr,)
93
+
94
+
95
+ class BadStatusLine(BadHttpMessage):
96
+ def __init__(self, line: str = "", error: Optional[str] = None) -> None:
97
+ if not isinstance(line, str):
98
+ line = repr(line)
99
+ super().__init__(error or f"Bad status line {line!r}")
100
+ self.args = (line,)
101
+ self.line = line
102
+
103
+
104
+ class BadHttpMethod(BadStatusLine):
105
+ """Invalid HTTP method in status line."""
106
+
107
+ def __init__(self, line: str = "", error: Optional[str] = None) -> None:
108
+ super().__init__(line, error or f"Bad HTTP method in status line {line!r}")
109
+
110
+
111
+ class InvalidURLError(BadHttpMessage):
112
+ pass
deepseek/lib/python3.10/site-packages/aiohttp/http_parser.py ADDED
@@ -0,0 +1,1046 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import abc
2
+ import asyncio
3
+ import re
4
+ import string
5
+ from contextlib import suppress
6
+ from enum import IntEnum
7
+ from typing import (
8
+ Any,
9
+ ClassVar,
10
+ Final,
11
+ Generic,
12
+ List,
13
+ Literal,
14
+ NamedTuple,
15
+ Optional,
16
+ Pattern,
17
+ Set,
18
+ Tuple,
19
+ Type,
20
+ TypeVar,
21
+ Union,
22
+ )
23
+
24
+ from multidict import CIMultiDict, CIMultiDictProxy, istr
25
+ from yarl import URL
26
+
27
+ from . import hdrs
28
+ from .base_protocol import BaseProtocol
29
+ from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor
30
+ from .helpers import (
31
+ _EXC_SENTINEL,
32
+ DEBUG,
33
+ EMPTY_BODY_METHODS,
34
+ EMPTY_BODY_STATUS_CODES,
35
+ NO_EXTENSIONS,
36
+ BaseTimerContext,
37
+ set_exception,
38
+ )
39
+ from .http_exceptions import (
40
+ BadHttpMessage,
41
+ BadHttpMethod,
42
+ BadStatusLine,
43
+ ContentEncodingError,
44
+ ContentLengthError,
45
+ InvalidHeader,
46
+ InvalidURLError,
47
+ LineTooLong,
48
+ TransferEncodingError,
49
+ )
50
+ from .http_writer import HttpVersion, HttpVersion10
51
+ from .streams import EMPTY_PAYLOAD, StreamReader
52
+ from .typedefs import RawHeaders
53
+
54
+ __all__ = (
55
+ "HeadersParser",
56
+ "HttpParser",
57
+ "HttpRequestParser",
58
+ "HttpResponseParser",
59
+ "RawRequestMessage",
60
+ "RawResponseMessage",
61
+ )
62
+
63
+ _SEP = Literal[b"\r\n", b"\n"]
64
+
65
+ ASCIISET: Final[Set[str]] = set(string.printable)
66
+
67
+ # See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview
68
+ # and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens
69
+ #
70
+ # method = token
71
+ # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
72
+ # "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
73
+ # token = 1*tchar
74
+ _TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~")
75
+ TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+")
76
+ VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII)
77
+ DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII)
78
+ HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+")
79
+
80
+
81
+ class RawRequestMessage(NamedTuple):
82
+ method: str
83
+ path: str
84
+ version: HttpVersion
85
+ headers: "CIMultiDictProxy[str]"
86
+ raw_headers: RawHeaders
87
+ should_close: bool
88
+ compression: Optional[str]
89
+ upgrade: bool
90
+ chunked: bool
91
+ url: URL
92
+
93
+
94
+ class RawResponseMessage(NamedTuple):
95
+ version: HttpVersion
96
+ code: int
97
+ reason: str
98
+ headers: CIMultiDictProxy[str]
99
+ raw_headers: RawHeaders
100
+ should_close: bool
101
+ compression: Optional[str]
102
+ upgrade: bool
103
+ chunked: bool
104
+
105
+
106
+ _MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
107
+
108
+
109
+ class ParseState(IntEnum):
110
+
111
+ PARSE_NONE = 0
112
+ PARSE_LENGTH = 1
113
+ PARSE_CHUNKED = 2
114
+ PARSE_UNTIL_EOF = 3
115
+
116
+
117
+ class ChunkState(IntEnum):
118
+ PARSE_CHUNKED_SIZE = 0
119
+ PARSE_CHUNKED_CHUNK = 1
120
+ PARSE_CHUNKED_CHUNK_EOF = 2
121
+ PARSE_MAYBE_TRAILERS = 3
122
+ PARSE_TRAILERS = 4
123
+
124
+
125
+ class HeadersParser:
126
+ def __init__(
127
+ self,
128
+ max_line_size: int = 8190,
129
+ max_headers: int = 32768,
130
+ max_field_size: int = 8190,
131
+ lax: bool = False,
132
+ ) -> None:
133
+ self.max_line_size = max_line_size
134
+ self.max_headers = max_headers
135
+ self.max_field_size = max_field_size
136
+ self._lax = lax
137
+
138
+ def parse_headers(
139
+ self, lines: List[bytes]
140
+ ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
141
+ headers: CIMultiDict[str] = CIMultiDict()
142
+ # note: "raw" does not mean inclusion of OWS before/after the field value
143
+ raw_headers = []
144
+
145
+ lines_idx = 1
146
+ line = lines[1]
147
+ line_count = len(lines)
148
+
149
+ while line:
150
+ # Parse initial header name : value pair.
151
+ try:
152
+ bname, bvalue = line.split(b":", 1)
153
+ except ValueError:
154
+ raise InvalidHeader(line) from None
155
+
156
+ if len(bname) == 0:
157
+ raise InvalidHeader(bname)
158
+
159
+ # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
160
+ if {bname[0], bname[-1]} & {32, 9}: # {" ", "\t"}
161
+ raise InvalidHeader(line)
162
+
163
+ bvalue = bvalue.lstrip(b" \t")
164
+ if len(bname) > self.max_field_size:
165
+ raise LineTooLong(
166
+ "request header name {}".format(
167
+ bname.decode("utf8", "backslashreplace")
168
+ ),
169
+ str(self.max_field_size),
170
+ str(len(bname)),
171
+ )
172
+ name = bname.decode("utf-8", "surrogateescape")
173
+ if not TOKENRE.fullmatch(name):
174
+ raise InvalidHeader(bname)
175
+
176
+ header_length = len(bvalue)
177
+
178
+ # next line
179
+ lines_idx += 1
180
+ line = lines[lines_idx]
181
+
182
+ # consume continuation lines
183
+ continuation = self._lax and line and line[0] in (32, 9) # (' ', '\t')
184
+
185
+ # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding
186
+ if continuation:
187
+ bvalue_lst = [bvalue]
188
+ while continuation:
189
+ header_length += len(line)
190
+ if header_length > self.max_field_size:
191
+ raise LineTooLong(
192
+ "request header field {}".format(
193
+ bname.decode("utf8", "backslashreplace")
194
+ ),
195
+ str(self.max_field_size),
196
+ str(header_length),
197
+ )
198
+ bvalue_lst.append(line)
199
+
200
+ # next line
201
+ lines_idx += 1
202
+ if lines_idx < line_count:
203
+ line = lines[lines_idx]
204
+ if line:
205
+ continuation = line[0] in (32, 9) # (' ', '\t')
206
+ else:
207
+ line = b""
208
+ break
209
+ bvalue = b"".join(bvalue_lst)
210
+ else:
211
+ if header_length > self.max_field_size:
212
+ raise LineTooLong(
213
+ "request header field {}".format(
214
+ bname.decode("utf8", "backslashreplace")
215
+ ),
216
+ str(self.max_field_size),
217
+ str(header_length),
218
+ )
219
+
220
+ bvalue = bvalue.strip(b" \t")
221
+ value = bvalue.decode("utf-8", "surrogateescape")
222
+
223
+ # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
224
+ if "\n" in value or "\r" in value or "\x00" in value:
225
+ raise InvalidHeader(bvalue)
226
+
227
+ headers.add(name, value)
228
+ raw_headers.append((bname, bvalue))
229
+
230
+ return (CIMultiDictProxy(headers), tuple(raw_headers))
231
+
232
+
233
+ def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool:
234
+ """Check if the upgrade header is supported."""
235
+ return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"}
236
+
237
+
238
+ class HttpParser(abc.ABC, Generic[_MsgT]):
239
+ lax: ClassVar[bool] = False
240
+
241
+ def __init__(
242
+ self,
243
+ protocol: Optional[BaseProtocol] = None,
244
+ loop: Optional[asyncio.AbstractEventLoop] = None,
245
+ limit: int = 2**16,
246
+ max_line_size: int = 8190,
247
+ max_headers: int = 32768,
248
+ max_field_size: int = 8190,
249
+ timer: Optional[BaseTimerContext] = None,
250
+ code: Optional[int] = None,
251
+ method: Optional[str] = None,
252
+ payload_exception: Optional[Type[BaseException]] = None,
253
+ response_with_body: bool = True,
254
+ read_until_eof: bool = False,
255
+ auto_decompress: bool = True,
256
+ ) -> None:
257
+ self.protocol = protocol
258
+ self.loop = loop
259
+ self.max_line_size = max_line_size
260
+ self.max_headers = max_headers
261
+ self.max_field_size = max_field_size
262
+ self.timer = timer
263
+ self.code = code
264
+ self.method = method
265
+ self.payload_exception = payload_exception
266
+ self.response_with_body = response_with_body
267
+ self.read_until_eof = read_until_eof
268
+
269
+ self._lines: List[bytes] = []
270
+ self._tail = b""
271
+ self._upgraded = False
272
+ self._payload = None
273
+ self._payload_parser: Optional[HttpPayloadParser] = None
274
+ self._auto_decompress = auto_decompress
275
+ self._limit = limit
276
+ self._headers_parser = HeadersParser(
277
+ max_line_size, max_headers, max_field_size, self.lax
278
+ )
279
+
280
+ @abc.abstractmethod
281
+ def parse_message(self, lines: List[bytes]) -> _MsgT: ...
282
+
283
+ @abc.abstractmethod
284
+ def _is_chunked_te(self, te: str) -> bool: ...
285
+
286
+ def feed_eof(self) -> Optional[_MsgT]:
287
+ if self._payload_parser is not None:
288
+ self._payload_parser.feed_eof()
289
+ self._payload_parser = None
290
+ else:
291
+ # try to extract partial message
292
+ if self._tail:
293
+ self._lines.append(self._tail)
294
+
295
+ if self._lines:
296
+ if self._lines[-1] != "\r\n":
297
+ self._lines.append(b"")
298
+ with suppress(Exception):
299
+ return self.parse_message(self._lines)
300
+ return None
301
+
302
+ def feed_data(
303
+ self,
304
+ data: bytes,
305
+ SEP: _SEP = b"\r\n",
306
+ EMPTY: bytes = b"",
307
+ CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
308
+ METH_CONNECT: str = hdrs.METH_CONNECT,
309
+ SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
310
+ ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:
311
+
312
+ messages = []
313
+
314
+ if self._tail:
315
+ data, self._tail = self._tail + data, b""
316
+
317
+ data_len = len(data)
318
+ start_pos = 0
319
+ loop = self.loop
320
+
321
+ should_close = False
322
+ while start_pos < data_len:
323
+
324
+ # read HTTP message (request/response line + headers), \r\n\r\n
325
+ # and split by lines
326
+ if self._payload_parser is None and not self._upgraded:
327
+ pos = data.find(SEP, start_pos)
328
+ # consume \r\n
329
+ if pos == start_pos and not self._lines:
330
+ start_pos = pos + len(SEP)
331
+ continue
332
+
333
+ if pos >= start_pos:
334
+ if should_close:
335
+ raise BadHttpMessage("Data after `Connection: close`")
336
+
337
+ # line found
338
+ line = data[start_pos:pos]
339
+ if SEP == b"\n": # For lax response parsing
340
+ line = line.rstrip(b"\r")
341
+ self._lines.append(line)
342
+ start_pos = pos + len(SEP)
343
+
344
+ # \r\n\r\n found
345
+ if self._lines[-1] == EMPTY:
346
+ try:
347
+ msg: _MsgT = self.parse_message(self._lines)
348
+ finally:
349
+ self._lines.clear()
350
+
351
+ def get_content_length() -> Optional[int]:
352
+ # payload length
353
+ length_hdr = msg.headers.get(CONTENT_LENGTH)
354
+ if length_hdr is None:
355
+ return None
356
+
357
+ # Shouldn't allow +/- or other number formats.
358
+ # https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2
359
+ # msg.headers is already stripped of leading/trailing wsp
360
+ if not DIGITS.fullmatch(length_hdr):
361
+ raise InvalidHeader(CONTENT_LENGTH)
362
+
363
+ return int(length_hdr)
364
+
365
+ length = get_content_length()
366
+ # do not support old websocket spec
367
+ if SEC_WEBSOCKET_KEY1 in msg.headers:
368
+ raise InvalidHeader(SEC_WEBSOCKET_KEY1)
369
+
370
+ self._upgraded = msg.upgrade and _is_supported_upgrade(
371
+ msg.headers
372
+ )
373
+
374
+ method = getattr(msg, "method", self.method)
375
+ # code is only present on responses
376
+ code = getattr(msg, "code", 0)
377
+
378
+ assert self.protocol is not None
379
+ # calculate payload
380
+ empty_body = code in EMPTY_BODY_STATUS_CODES or bool(
381
+ method and method in EMPTY_BODY_METHODS
382
+ )
383
+ if not empty_body and (
384
+ ((length is not None and length > 0) or msg.chunked)
385
+ and not self._upgraded
386
+ ):
387
+ payload = StreamReader(
388
+ self.protocol,
389
+ timer=self.timer,
390
+ loop=loop,
391
+ limit=self._limit,
392
+ )
393
+ payload_parser = HttpPayloadParser(
394
+ payload,
395
+ length=length,
396
+ chunked=msg.chunked,
397
+ method=method,
398
+ compression=msg.compression,
399
+ code=self.code,
400
+ response_with_body=self.response_with_body,
401
+ auto_decompress=self._auto_decompress,
402
+ lax=self.lax,
403
+ )
404
+ if not payload_parser.done:
405
+ self._payload_parser = payload_parser
406
+ elif method == METH_CONNECT:
407
+ assert isinstance(msg, RawRequestMessage)
408
+ payload = StreamReader(
409
+ self.protocol,
410
+ timer=self.timer,
411
+ loop=loop,
412
+ limit=self._limit,
413
+ )
414
+ self._upgraded = True
415
+ self._payload_parser = HttpPayloadParser(
416
+ payload,
417
+ method=msg.method,
418
+ compression=msg.compression,
419
+ auto_decompress=self._auto_decompress,
420
+ lax=self.lax,
421
+ )
422
+ elif not empty_body and length is None and self.read_until_eof:
423
+ payload = StreamReader(
424
+ self.protocol,
425
+ timer=self.timer,
426
+ loop=loop,
427
+ limit=self._limit,
428
+ )
429
+ payload_parser = HttpPayloadParser(
430
+ payload,
431
+ length=length,
432
+ chunked=msg.chunked,
433
+ method=method,
434
+ compression=msg.compression,
435
+ code=self.code,
436
+ response_with_body=self.response_with_body,
437
+ auto_decompress=self._auto_decompress,
438
+ lax=self.lax,
439
+ )
440
+ if not payload_parser.done:
441
+ self._payload_parser = payload_parser
442
+ else:
443
+ payload = EMPTY_PAYLOAD
444
+
445
+ messages.append((msg, payload))
446
+ should_close = msg.should_close
447
+ else:
448
+ self._tail = data[start_pos:]
449
+ data = EMPTY
450
+ break
451
+
452
+ # no parser, just store
453
+ elif self._payload_parser is None and self._upgraded:
454
+ assert not self._lines
455
+ break
456
+
457
+ # feed payload
458
+ elif data and start_pos < data_len:
459
+ assert not self._lines
460
+ assert self._payload_parser is not None
461
+ try:
462
+ eof, data = self._payload_parser.feed_data(data[start_pos:], SEP)
463
+ except BaseException as underlying_exc:
464
+ reraised_exc = underlying_exc
465
+ if self.payload_exception is not None:
466
+ reraised_exc = self.payload_exception(str(underlying_exc))
467
+
468
+ set_exception(
469
+ self._payload_parser.payload,
470
+ reraised_exc,
471
+ underlying_exc,
472
+ )
473
+
474
+ eof = True
475
+ data = b""
476
+
477
+ if eof:
478
+ start_pos = 0
479
+ data_len = len(data)
480
+ self._payload_parser = None
481
+ continue
482
+ else:
483
+ break
484
+
485
+ if data and start_pos < data_len:
486
+ data = data[start_pos:]
487
+ else:
488
+ data = EMPTY
489
+
490
+ return messages, self._upgraded, data
491
+
492
+ def parse_headers(
493
+ self, lines: List[bytes]
494
+ ) -> Tuple[
495
+ "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
496
+ ]:
497
+ """Parses RFC 5322 headers from a stream.
498
+
499
+ Line continuations are supported. Returns list of header name
500
+ and value pairs. Header name is in upper case.
501
+ """
502
+ headers, raw_headers = self._headers_parser.parse_headers(lines)
503
+ close_conn = None
504
+ encoding = None
505
+ upgrade = False
506
+ chunked = False
507
+
508
+ # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6
509
+ # https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf
510
+ singletons = (
511
+ hdrs.CONTENT_LENGTH,
512
+ hdrs.CONTENT_LOCATION,
513
+ hdrs.CONTENT_RANGE,
514
+ hdrs.CONTENT_TYPE,
515
+ hdrs.ETAG,
516
+ hdrs.HOST,
517
+ hdrs.MAX_FORWARDS,
518
+ hdrs.SERVER,
519
+ hdrs.TRANSFER_ENCODING,
520
+ hdrs.USER_AGENT,
521
+ )
522
+ bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None)
523
+ if bad_hdr is not None:
524
+ raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.")
525
+
526
+ # keep-alive
527
+ conn = headers.get(hdrs.CONNECTION)
528
+ if conn:
529
+ v = conn.lower()
530
+ if v == "close":
531
+ close_conn = True
532
+ elif v == "keep-alive":
533
+ close_conn = False
534
+ # https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols
535
+ elif v == "upgrade" and headers.get(hdrs.UPGRADE):
536
+ upgrade = True
537
+
538
+ # encoding
539
+ enc = headers.get(hdrs.CONTENT_ENCODING)
540
+ if enc:
541
+ enc = enc.lower()
542
+ if enc in ("gzip", "deflate", "br"):
543
+ encoding = enc
544
+
545
+ # chunking
546
+ te = headers.get(hdrs.TRANSFER_ENCODING)
547
+ if te is not None:
548
+ if self._is_chunked_te(te):
549
+ chunked = True
550
+
551
+ if hdrs.CONTENT_LENGTH in headers:
552
+ raise BadHttpMessage(
553
+ "Transfer-Encoding can't be present with Content-Length",
554
+ )
555
+
556
+ return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
557
+
558
+ def set_upgraded(self, val: bool) -> None:
559
+ """Set connection upgraded (to websocket) mode.
560
+
561
+ :param bool val: new state.
562
+ """
563
+ self._upgraded = val
564
+
565
+
566
+ class HttpRequestParser(HttpParser[RawRequestMessage]):
567
+ """Read request status line.
568
+
569
+ Exception .http_exceptions.BadStatusLine
570
+ could be raised in case of any errors in status line.
571
+ Returns RawRequestMessage.
572
+ """
573
+
574
+ def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
575
+ # request line
576
+ line = lines[0].decode("utf-8", "surrogateescape")
577
+ try:
578
+ method, path, version = line.split(" ", maxsplit=2)
579
+ except ValueError:
580
+ raise BadHttpMethod(line) from None
581
+
582
+ if len(path) > self.max_line_size:
583
+ raise LineTooLong(
584
+ "Status line is too long", str(self.max_line_size), str(len(path))
585
+ )
586
+
587
+ # method
588
+ if not TOKENRE.fullmatch(method):
589
+ raise BadHttpMethod(method)
590
+
591
+ # version
592
+ match = VERSRE.fullmatch(version)
593
+ if match is None:
594
+ raise BadStatusLine(line)
595
+ version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
596
+
597
+ if method == "CONNECT":
598
+ # authority-form,
599
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
600
+ url = URL.build(authority=path, encoded=True)
601
+ elif path.startswith("/"):
602
+ # origin-form,
603
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
604
+ path_part, _hash_separator, url_fragment = path.partition("#")
605
+ path_part, _question_mark_separator, qs_part = path_part.partition("?")
606
+
607
+ # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
608
+ # NOTE: parser does, otherwise it results into the same
609
+ # NOTE: HTTP Request-Line input producing different
610
+ # NOTE: `yarl.URL()` objects
611
+ url = URL.build(
612
+ path=path_part,
613
+ query_string=qs_part,
614
+ fragment=url_fragment,
615
+ encoded=True,
616
+ )
617
+ elif path == "*" and method == "OPTIONS":
618
+ # asterisk-form,
619
+ url = URL(path, encoded=True)
620
+ else:
621
+ # absolute-form for proxy maybe,
622
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
623
+ url = URL(path, encoded=True)
624
+ if url.scheme == "":
625
+ # not absolute-form
626
+ raise InvalidURLError(
627
+ path.encode(errors="surrogateescape").decode("latin1")
628
+ )
629
+
630
+ # read headers
631
+ (
632
+ headers,
633
+ raw_headers,
634
+ close,
635
+ compression,
636
+ upgrade,
637
+ chunked,
638
+ ) = self.parse_headers(lines)
639
+
640
+ if close is None: # then the headers weren't set in the request
641
+ if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close
642
+ close = True
643
+ else: # HTTP 1.1 must ask to close.
644
+ close = False
645
+
646
+ return RawRequestMessage(
647
+ method,
648
+ path,
649
+ version_o,
650
+ headers,
651
+ raw_headers,
652
+ close,
653
+ compression,
654
+ upgrade,
655
+ chunked,
656
+ url,
657
+ )
658
+
659
+ def _is_chunked_te(self, te: str) -> bool:
660
+ if te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked":
661
+ return True
662
+ # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3
663
+ raise BadHttpMessage("Request has invalid `Transfer-Encoding`")
664
+
665
+
666
+ class HttpResponseParser(HttpParser[RawResponseMessage]):
667
+ """Read response status line and headers.
668
+
669
+ BadStatusLine could be raised in case of any errors in status line.
670
+ Returns RawResponseMessage.
671
+ """
672
+
673
+ # Lax mode should only be enabled on response parser.
674
+ lax = not DEBUG
675
+
676
+ def feed_data(
677
+ self,
678
+ data: bytes,
679
+ SEP: Optional[_SEP] = None,
680
+ *args: Any,
681
+ **kwargs: Any,
682
+ ) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]:
683
+ if SEP is None:
684
+ SEP = b"\r\n" if DEBUG else b"\n"
685
+ return super().feed_data(data, SEP, *args, **kwargs)
686
+
687
+ def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
688
+ line = lines[0].decode("utf-8", "surrogateescape")
689
+ try:
690
+ version, status = line.split(maxsplit=1)
691
+ except ValueError:
692
+ raise BadStatusLine(line) from None
693
+
694
+ try:
695
+ status, reason = status.split(maxsplit=1)
696
+ except ValueError:
697
+ status = status.strip()
698
+ reason = ""
699
+
700
+ if len(reason) > self.max_line_size:
701
+ raise LineTooLong(
702
+ "Status line is too long", str(self.max_line_size), str(len(reason))
703
+ )
704
+
705
+ # version
706
+ match = VERSRE.fullmatch(version)
707
+ if match is None:
708
+ raise BadStatusLine(line)
709
+ version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
710
+
711
+ # The status code is a three-digit ASCII number, no padding
712
+ if len(status) != 3 or not DIGITS.fullmatch(status):
713
+ raise BadStatusLine(line)
714
+ status_i = int(status)
715
+
716
+ # read headers
717
+ (
718
+ headers,
719
+ raw_headers,
720
+ close,
721
+ compression,
722
+ upgrade,
723
+ chunked,
724
+ ) = self.parse_headers(lines)
725
+
726
+ if close is None:
727
+ if version_o <= HttpVersion10:
728
+ close = True
729
+ # https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length
730
+ elif 100 <= status_i < 200 or status_i in {204, 304}:
731
+ close = False
732
+ elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers:
733
+ close = False
734
+ else:
735
+ # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8
736
+ close = True
737
+
738
+ return RawResponseMessage(
739
+ version_o,
740
+ status_i,
741
+ reason.strip(),
742
+ headers,
743
+ raw_headers,
744
+ close,
745
+ compression,
746
+ upgrade,
747
+ chunked,
748
+ )
749
+
750
+ def _is_chunked_te(self, te: str) -> bool:
751
+ # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2
752
+ return te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked"
753
+
754
+
755
+ class HttpPayloadParser:
756
+ def __init__(
757
+ self,
758
+ payload: StreamReader,
759
+ length: Optional[int] = None,
760
+ chunked: bool = False,
761
+ compression: Optional[str] = None,
762
+ code: Optional[int] = None,
763
+ method: Optional[str] = None,
764
+ response_with_body: bool = True,
765
+ auto_decompress: bool = True,
766
+ lax: bool = False,
767
+ ) -> None:
768
+ self._length = 0
769
+ self._type = ParseState.PARSE_UNTIL_EOF
770
+ self._chunk = ChunkState.PARSE_CHUNKED_SIZE
771
+ self._chunk_size = 0
772
+ self._chunk_tail = b""
773
+ self._auto_decompress = auto_decompress
774
+ self._lax = lax
775
+ self.done = False
776
+
777
+ # payload decompression wrapper
778
+ if response_with_body and compression and self._auto_decompress:
779
+ real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer(
780
+ payload, compression
781
+ )
782
+ else:
783
+ real_payload = payload
784
+
785
+ # payload parser
786
+ if not response_with_body:
787
+ # don't parse payload if it's not expected to be received
788
+ self._type = ParseState.PARSE_NONE
789
+ real_payload.feed_eof()
790
+ self.done = True
791
+ elif chunked:
792
+ self._type = ParseState.PARSE_CHUNKED
793
+ elif length is not None:
794
+ self._type = ParseState.PARSE_LENGTH
795
+ self._length = length
796
+ if self._length == 0:
797
+ real_payload.feed_eof()
798
+ self.done = True
799
+
800
+ self.payload = real_payload
801
+
802
+ def feed_eof(self) -> None:
803
+ if self._type == ParseState.PARSE_UNTIL_EOF:
804
+ self.payload.feed_eof()
805
+ elif self._type == ParseState.PARSE_LENGTH:
806
+ raise ContentLengthError(
807
+ "Not enough data for satisfy content length header."
808
+ )
809
+ elif self._type == ParseState.PARSE_CHUNKED:
810
+ raise TransferEncodingError(
811
+ "Not enough data for satisfy transfer length header."
812
+ )
813
+
814
    def feed_data(
        self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";"
    ) -> Tuple[bool, bytes]:
        """Feed payload bytes into the parser.

        Returns ``(done, tail)``: ``done`` is True once the payload has been
        fully consumed, and ``tail`` holds any leftover bytes that belong to
        the next message on the connection.
        """
        # Read specified amount of bytes
        if self._type == ParseState.PARSE_LENGTH:
            required = self._length
            chunk_len = len(chunk)

            if required >= chunk_len:
                self._length = required - chunk_len
                self.payload.feed_data(chunk, chunk_len)
                if self._length == 0:
                    self.payload.feed_eof()
                    return True, b""
            else:
                # More bytes than needed arrived: the surplus is the tail.
                self._length = 0
                self.payload.feed_data(chunk[:required], required)
                self.payload.feed_eof()
                return True, chunk[required:]

        # Chunked transfer encoding parser
        elif self._type == ParseState.PARSE_CHUNKED:
            if self._chunk_tail:
                # Prepend bytes carried over from the previous feed_data call.
                chunk = self._chunk_tail + chunk
                self._chunk_tail = b""

            while chunk:

                # read next chunk size
                if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        i = chunk.find(CHUNK_EXT, 0, pos)
                        if i >= 0:
                            size_b = chunk[:i]  # strip chunk-extensions
                            # Verify no LF in the chunk-extension
                            if b"\n" in (ext := chunk[i:pos]):
                                exc = BadHttpMessage(
                                    f"Unexpected LF in chunk-extension: {ext!r}"
                                )
                                set_exception(self.payload, exc)
                                raise exc
                        else:
                            size_b = chunk[:pos]

                        if self._lax:  # Allow whitespace in lax mode.
                            size_b = size_b.strip()

                        if not re.fullmatch(HEXDIGITS, size_b):
                            exc = TransferEncodingError(
                                chunk[:pos].decode("ascii", "surrogateescape")
                            )
                            set_exception(self.payload, exc)
                            raise exc
                        size = int(bytes(size_b), 16)

                        chunk = chunk[pos + len(SEP) :]
                        if size == 0:  # eof marker
                            self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                            if self._lax and chunk.startswith(b"\r"):
                                chunk = chunk[1:]
                        else:
                            self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
                            self._chunk_size = size
                            self.payload.begin_http_chunk_receiving()
                    else:
                        # Size line not complete yet; stash and wait for more.
                        self._chunk_tail = chunk
                        return False, b""

                # read chunk and feed buffer
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
                    required = self._chunk_size
                    chunk_len = len(chunk)

                    if required > chunk_len:
                        self._chunk_size = required - chunk_len
                        self.payload.feed_data(chunk, chunk_len)
                        return False, b""
                    else:
                        self._chunk_size = 0
                        self.payload.feed_data(chunk[:required], required)
                        chunk = chunk[required:]
                        self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
                        self.payload.end_http_chunk_receiving()

                # toss the CRLF at the end of the chunk
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
                    if self._lax and chunk.startswith(b"\r"):
                        chunk = chunk[1:]
                    if chunk[: len(SEP)] == SEP:
                        chunk = chunk[len(SEP) :]
                        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # if stream does not contain trailer, after 0\r\n
                # we should get another \r\n otherwise
                # trailers needs to be skipped until \r\n\r\n
                if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
                    head = chunk[: len(SEP)]
                    if head == SEP:
                        # end of stream
                        self.payload.feed_eof()
                        return True, chunk[len(SEP) :]
                    # Both CR and LF, or only LF may not be received yet. It is
                    # expected that CRLF or LF will be shown at the very first
                    # byte next time, otherwise trailers should come. The last
                    # CRLF which marks the end of response might not be
                    # contained in the same TCP segment which delivered the
                    # size indicator.
                    if not head:
                        return False, b""
                    if head == SEP[:1]:
                        self._chunk_tail = head
                        return False, b""
                    self._chunk = ChunkState.PARSE_TRAILERS

                # read and discard trailer up to the CRLF terminator
                if self._chunk == ChunkState.PARSE_TRAILERS:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        chunk = chunk[pos + len(SEP) :]
                        self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                    else:
                        self._chunk_tail = chunk
                        return False, b""

        # Read all bytes until eof
        elif self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_data(chunk, len(chunk))

        return False, b""
947
+
948
+
949
class DeflateBuffer:
    """DeflateStream decompress stream and feed data into specified stream."""

    decompressor: Any

    def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
        # out: downstream reader receiving decompressed bytes
        # encoding: Content-Encoding value ("br", "deflate", "gzip", ...)
        self.out = out
        self.size = 0
        self.encoding = encoding
        self._started_decoding = False

        self.decompressor: Union[BrotliDecompressor, ZLibDecompressor]
        if encoding == "br":
            if not HAS_BROTLI:  # pragma: no cover
                raise ContentEncodingError(
                    "Can not decode content-encoding: brotli (br). "
                    "Please install `Brotli`"
                )
            self.decompressor = BrotliDecompressor()
        else:
            self.decompressor = ZLibDecompressor(encoding=encoding)

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        """Propagate *exc* to the wrapped output stream."""
        set_exception(self.out, exc, exc_cause)

    def feed_data(self, chunk: bytes, size: int) -> None:
        """Decompress *chunk* and feed the result downstream."""
        if not size:
            return

        self.size += size

        # RFC1950
        # bits 0..3 = CM = 0b1000 = 8 = "deflate"
        # bits 4..7 = CINFO = 1..7 = windows size.
        if (
            not self._started_decoding
            and self.encoding == "deflate"
            and chunk[0] & 0xF != 8
        ):
            # Change the decoder to decompress incorrectly compressed data
            # Actually we should issue a warning about non-RFC-compliant data.
            self.decompressor = ZLibDecompressor(
                encoding=self.encoding, suppress_deflate_header=True
            )

        try:
            chunk = self.decompressor.decompress_sync(chunk)
        except Exception:
            raise ContentEncodingError(
                "Can not decode content-encoding: %s" % self.encoding
            )

        self._started_decoding = True

        if chunk:
            self.out.feed_data(chunk, len(chunk))

    def feed_eof(self) -> None:
        """Flush the decompressor and close the downstream stream."""
        chunk = self.decompressor.flush()

        if chunk or self.size > 0:
            self.out.feed_data(chunk, len(chunk))
            # A truncated deflate stream never reaches the zlib EOF marker.
            if self.encoding == "deflate" and not self.decompressor.eof:
                raise ContentEncodingError("deflate")

        self.out.feed_eof()

    def begin_http_chunk_receiving(self) -> None:
        # Chunk framing is transparent to decompression; just forward.
        self.out.begin_http_chunk_receiving()

    def end_http_chunk_receiving(self) -> None:
        self.out.end_http_chunk_receiving()
1025
+
1026
+
1027
# Keep the pure-Python implementations reachable under explicit *Py names
# even when the C-accelerated versions below shadow the public names.
HttpRequestParserPy = HttpRequestParser
HttpResponseParserPy = HttpResponseParser
RawRequestMessagePy = RawRequestMessage
RawResponseMessagePy = RawResponseMessage

try:
    if not NO_EXTENSIONS:
        # Prefer the C-extension parsers when extensions are enabled.
        from ._http_parser import (  # type: ignore[import-not-found,no-redef]
            HttpRequestParser,
            HttpResponseParser,
            RawRequestMessage,
            RawResponseMessage,
        )

        # Explicit *C aliases mirror the *Py aliases above.
        HttpRequestParserC = HttpRequestParser
        HttpResponseParserC = HttpResponseParser
        RawRequestMessageC = RawRequestMessage
        RawResponseMessageC = RawResponseMessage
except ImportError:  # pragma: no cover
    # Extension not built/installed: silently keep the Python parsers.
    pass
deepseek/lib/python3.10/site-packages/aiohttp/http_websocket.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """WebSocket protocol versions 13 and 8."""
2
+
3
+ from ._websocket.helpers import WS_KEY, ws_ext_gen, ws_ext_parse
4
+ from ._websocket.models import (
5
+ WS_CLOSED_MESSAGE,
6
+ WS_CLOSING_MESSAGE,
7
+ WebSocketError,
8
+ WSCloseCode,
9
+ WSHandshakeError,
10
+ WSMessage,
11
+ WSMsgType,
12
+ )
13
+ from ._websocket.reader import WebSocketReader
14
+ from ._websocket.writer import WebSocketWriter
15
+
16
+ # Messages that the WebSocketResponse.receive needs to handle internally
17
+ _INTERNAL_RECEIVE_TYPES = frozenset(
18
+ (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.PING, WSMsgType.PONG)
19
+ )
20
+
21
+
22
+ __all__ = (
23
+ "WS_CLOSED_MESSAGE",
24
+ "WS_CLOSING_MESSAGE",
25
+ "WS_KEY",
26
+ "WebSocketReader",
27
+ "WebSocketWriter",
28
+ "WSMessage",
29
+ "WebSocketError",
30
+ "WSMsgType",
31
+ "WSCloseCode",
32
+ "ws_ext_gen",
33
+ "ws_ext_parse",
34
+ "WSHandshakeError",
35
+ "WSMessage",
36
+ )
deepseek/lib/python3.10/site-packages/aiohttp/multipart.py ADDED
@@ -0,0 +1,1071 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import base64
2
+ import binascii
3
+ import json
4
+ import re
5
+ import sys
6
+ import uuid
7
+ import warnings
8
+ import zlib
9
+ from collections import deque
10
+ from types import TracebackType
11
+ from typing import (
12
+ TYPE_CHECKING,
13
+ Any,
14
+ Deque,
15
+ Dict,
16
+ Iterator,
17
+ List,
18
+ Mapping,
19
+ Optional,
20
+ Sequence,
21
+ Tuple,
22
+ Type,
23
+ Union,
24
+ cast,
25
+ )
26
+ from urllib.parse import parse_qsl, unquote, urlencode
27
+
28
+ from multidict import CIMultiDict, CIMultiDictProxy
29
+
30
+ from .compression_utils import ZLibCompressor, ZLibDecompressor
31
+ from .hdrs import (
32
+ CONTENT_DISPOSITION,
33
+ CONTENT_ENCODING,
34
+ CONTENT_LENGTH,
35
+ CONTENT_TRANSFER_ENCODING,
36
+ CONTENT_TYPE,
37
+ )
38
+ from .helpers import CHAR, TOKEN, parse_mimetype, reify
39
+ from .http import HeadersParser
40
+ from .payload import (
41
+ JsonPayload,
42
+ LookupError,
43
+ Order,
44
+ Payload,
45
+ StringPayload,
46
+ get_payload,
47
+ payload_type,
48
+ )
49
+ from .streams import StreamReader
50
+
51
# ``typing.Self`` only exists on 3.11+; fall back to a bound TypeVar so the
# ``__aiter__(self: Self) -> Self`` annotations below work on older versions.
if sys.version_info >= (3, 11):
    from typing import Self
else:
    from typing import TypeVar

    Self = TypeVar("Self", bound="BodyPartReader")

__all__ = (
    "MultipartReader",
    "MultipartWriter",
    "BodyPartReader",
    "BadContentDispositionHeader",
    "BadContentDispositionParam",
    "parse_content_disposition",
    "content_disposition_filename",
)


if TYPE_CHECKING:
    # Imported for annotations only to avoid a circular import at runtime.
    from .client_reqrep import ClientResponse


class BadContentDispositionHeader(RuntimeWarning):
    """Warning: a Content-Disposition header could not be parsed."""

    pass


class BadContentDispositionParam(RuntimeWarning):
    """Warning: a single Content-Disposition parameter was malformed."""

    pass
79
+
80
+
81
def parse_content_disposition(
    header: Optional[str],
) -> Tuple[Optional[str], Dict[str, str]]:
    """Parse a Content-Disposition header into ``(disptype, params)``.

    Returns ``(None, {})`` and emits a ``BadContentDispositionHeader``
    warning when the header as a whole is malformed; individual malformed
    parameters are skipped with a ``BadContentDispositionParam`` warning.
    Handles plain tokens, quoted strings, RFC 5987 extended parameters
    (``key*=charset''value``) and RFC 2231 continuations (``key*0``, ...).
    """

    def is_token(string: str) -> bool:
        return bool(string) and TOKEN >= set(string)

    def is_quoted(string: str) -> bool:
        return string[0] == string[-1] == '"'

    def is_rfc5987(string: str) -> bool:
        return is_token(string) and string.count("'") == 2

    def is_extended_param(string: str) -> bool:
        return string.endswith("*")

    def is_continuous_param(string: str) -> bool:
        pos = string.find("*") + 1
        if not pos:
            return False
        substring = string[pos:-1] if string.endswith("*") else string[pos:]
        return substring.isdigit()

    def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
        return re.sub(f"\\\\([{chars}])", "\\1", text)

    if not header:
        return None, {}

    disptype, *parts = header.split(";")
    if not is_token(disptype):
        warnings.warn(BadContentDispositionHeader(header))
        return None, {}

    params: Dict[str, str] = {}
    while parts:
        item = parts.pop(0)

        if "=" not in item:
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        key, value = item.split("=", 1)
        key = key.lower().strip()
        value = value.lstrip()

        # Duplicate parameter names invalidate the whole header.
        if key in params:
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        if not is_token(key):
            warnings.warn(BadContentDispositionParam(item))
            continue

        elif is_continuous_param(key):
            if is_quoted(value):
                value = unescape(value[1:-1])
            elif not is_token(value):
                warnings.warn(BadContentDispositionParam(item))
                continue

        elif is_extended_param(key):
            if is_rfc5987(value):
                encoding, _, value = value.split("'", 2)
                encoding = encoding or "utf-8"
            else:
                warnings.warn(BadContentDispositionParam(item))
                continue

            try:
                value = unquote(value, encoding, "strict")
            except UnicodeDecodeError:  # pragma: nocover
                warnings.warn(BadContentDispositionParam(item))
                continue

        else:
            failed = True
            if is_quoted(value):
                failed = False
                value = unescape(value[1:-1].lstrip("\\/"))
            elif is_token(value):
                failed = False
            elif parts:
                # maybe just ; in filename, in any case this is just
                # one case fix, for proper fix we need to redesign parser
                _value = f"{value};{parts[0]}"
                if is_quoted(_value):
                    parts.pop(0)
                    value = unescape(_value[1:-1].lstrip("\\/"))
                    failed = False

            if failed:
                warnings.warn(BadContentDispositionHeader(header))
                return None, {}

        params[key] = value

    return disptype.lower(), params
178
+
179
+
180
def content_disposition_filename(
    params: Mapping[str, str], name: str = "filename"
) -> Optional[str]:
    """Extract a file name from parsed Content-Disposition parameters.

    Lookup order: the RFC 5987 extended parameter (``<name>*``), the plain
    parameter (``<name>``), then RFC 2231 numbered continuations
    (``<name>*0``, ``<name>*1*``, ...) joined in sequence. Returns ``None``
    when no usable value is present.
    """
    ext_key = f"{name}*"
    if not params:
        return None
    if ext_key in params:
        return params[ext_key]
    if name in params:
        return params[name]

    # Assemble RFC 2231 continuations in lexicographic key order,
    # stopping at the first gap in the numeric sequence.
    pieces: List[str] = []
    continuations = sorted(
        item for item in params.items() if item[0].startswith(ext_key)
    )
    for expected, (key, segment) in enumerate(continuations):
        _, suffix = key.split("*", 1)
        if suffix.endswith("*"):
            suffix = suffix[:-1]
        if suffix != str(expected):
            break
        pieces.append(segment)

    if not pieces:
        return None

    joined = "".join(pieces)
    if "'" not in joined:
        return joined
    # Value carries a charset prefix: charset'lang'percent-encoded-data
    charset, _, encoded = joined.split("'", 2)
    return unquote(encoded, charset or "utf-8", "strict")
211
+
212
+
213
class MultipartResponseWrapper:
    """Wrapper around the MultipartReader.

    It takes care about
    underlying connection and close it when it needs in.
    """

    def __init__(
        self,
        resp: "ClientResponse",
        stream: "MultipartReader",
    ) -> None:
        # Keep both the owning response and the multipart stream so the
        # connection can be released once the stream is exhausted.
        self.resp = resp
        self.stream = stream

    def __aiter__(self) -> "MultipartResponseWrapper":
        return self

    async def __anext__(
        self,
    ) -> Union["MultipartReader", "BodyPartReader"]:
        if (part := await self.next()) is None:
            raise StopAsyncIteration
        return part

    def at_eof(self) -> bool:
        """Returns True when all response data had been read."""
        return self.resp.content.at_eof()

    async def next(
        self,
    ) -> Optional[Union["MultipartReader", "BodyPartReader"]]:
        """Emits next multipart reader object."""
        part = await self.stream.next()
        # Once the multipart stream is done, free the connection eagerly.
        if self.stream.at_eof():
            await self.release()
        return part

    async def release(self) -> None:
        """Release the connection gracefully.

        All remaining content is read to the void.
        """
        await self.resp.release()
258
+
259
+
260
class BodyPartReader:
    """Multipart reader for single body part."""

    # Default read granularity for read()/release().
    chunk_size = 8192

    def __init__(
        self,
        boundary: bytes,
        headers: "CIMultiDictProxy[str]",
        content: StreamReader,
        *,
        subtype: str = "mixed",
        default_charset: Optional[str] = None,
    ) -> None:
        self.headers = headers
        self._boundary = boundary
        self._boundary_len = len(boundary) + 2  # Boundary + \r\n
        self._content = content
        self._default_charset = default_charset
        self._at_eof = False
        self._is_form_data = subtype == "form-data"
        # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
        length = None if self._is_form_data else self.headers.get(CONTENT_LENGTH, None)
        self._length = int(length) if length is not None else None
        self._read_bytes = 0
        self._unread: Deque[bytes] = deque()
        self._prev_chunk: Optional[bytes] = None
        self._content_eof = 0
        self._cache: Dict[str, Any] = {}

    def __aiter__(self: Self) -> Self:
        return self

    async def __anext__(self) -> bytes:
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    async def next(self) -> Optional[bytes]:
        """Return the remaining body data, or None when the part is empty."""
        item = await self.read()
        if not item:
            return None
        return item

    async def read(self, *, decode: bool = False) -> bytes:
        """Reads body part data.

        decode: Decodes data following by encoding
        method from Content-Encoding header. If it missed
        data remains untouched
        """
        if self._at_eof:
            return b""
        data = bytearray()
        while not self._at_eof:
            data.extend(await self.read_chunk(self.chunk_size))
        if decode:
            return self.decode(data)
        return data

    async def read_chunk(self, size: int = chunk_size) -> bytes:
        """Reads body part content chunk of the specified size.

        size: chunk size
        """
        if self._at_eof:
            return b""
        if self._length:
            chunk = await self._read_chunk_from_length(size)
        else:
            chunk = await self._read_chunk_from_stream(size)

        # For the case of base64 data, we must read a fragment of size with a
        # remainder of 0 by dividing by 4 for string without symbols \n or \r
        encoding = self.headers.get(CONTENT_TRANSFER_ENCODING)
        if encoding and encoding.lower() == "base64":
            stripped_chunk = b"".join(chunk.split())
            remainder = len(stripped_chunk) % 4

            while remainder != 0 and not self.at_eof():
                over_chunk_size = 4 - remainder
                over_chunk = b""

                if self._prev_chunk:
                    over_chunk = self._prev_chunk[:over_chunk_size]
                    self._prev_chunk = self._prev_chunk[len(over_chunk) :]

                if len(over_chunk) != over_chunk_size:
                    over_chunk += await self._content.read(4 - len(over_chunk))

                if not over_chunk:
                    self._at_eof = True

                stripped_chunk += b"".join(over_chunk.split())
                chunk += over_chunk
                remainder = len(stripped_chunk) % 4

        self._read_bytes += len(chunk)
        if self._read_bytes == self._length:
            self._at_eof = True
        if self._at_eof:
            # The part must be terminated by CRLF before the next boundary.
            clrf = await self._content.readline()
            assert (
                b"\r\n" == clrf
            ), "reader did not read all the data or it is malformed"
        return chunk

    async def _read_chunk_from_length(self, size: int) -> bytes:
        # Reads body part content chunk of the specified size.
        # The body part must has Content-Length header with proper value.
        assert self._length is not None, "Content-Length required for chunked read"
        chunk_size = min(size, self._length - self._read_bytes)
        chunk = await self._content.read(chunk_size)
        if self._content.at_eof():
            self._at_eof = True
        return chunk

    async def _read_chunk_from_stream(self, size: int) -> bytes:
        # Reads content chunk of body part with unknown length.
        # The Content-Length header for body part is not necessary.
        assert (
            size >= self._boundary_len
        ), "Chunk size must be greater or equal than boundary length + 2"
        first_chunk = self._prev_chunk is None
        if first_chunk:
            self._prev_chunk = await self._content.read(size)

        chunk = b""
        # content.read() may return less than size, so we need to loop to ensure
        # we have enough data to detect the boundary.
        while len(chunk) < self._boundary_len:
            chunk += await self._content.read(size)
            self._content_eof += int(self._content.at_eof())
            assert self._content_eof < 3, "Reading after EOF"
            if self._content_eof:
                break
        if len(chunk) > size:
            self._content.unread_data(chunk[size:])
            chunk = chunk[:size]

        assert self._prev_chunk is not None
        # Search a two-chunk window so a boundary split across reads is found.
        window = self._prev_chunk + chunk
        sub = b"\r\n" + self._boundary
        if first_chunk:
            idx = window.find(sub)
        else:
            idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub)))
        if idx >= 0:
            # pushing boundary back to content
            with warnings.catch_warnings():
                warnings.filterwarnings("ignore", category=DeprecationWarning)
                self._content.unread_data(window[idx:])
            if size > idx:
                self._prev_chunk = self._prev_chunk[:idx]
            chunk = window[len(self._prev_chunk) : idx]
            if not chunk:
                self._at_eof = True
        result = self._prev_chunk
        self._prev_chunk = chunk
        return result

    async def readline(self) -> bytes:
        """Reads body part by line by line."""
        if self._at_eof:
            return b""

        if self._unread:
            line = self._unread.popleft()
        else:
            line = await self._content.readline()

        if line.startswith(self._boundary):
            # the very last boundary may not come with \r\n,
            # so set single rules for everyone
            sline = line.rstrip(b"\r\n")
            boundary = self._boundary
            last_boundary = self._boundary + b"--"
            # ensure that we read exactly the boundary, not something alike
            if sline == boundary or sline == last_boundary:
                self._at_eof = True
                self._unread.append(line)
                return b""
        else:
            next_line = await self._content.readline()
            if next_line.startswith(self._boundary):
                line = line[:-2]  # strip CRLF but only once
            self._unread.append(next_line)

        return line

    async def release(self) -> None:
        """Like read(), but reads all the data to the void."""
        if self._at_eof:
            return
        while not self._at_eof:
            await self.read_chunk(self.chunk_size)

    async def text(self, *, encoding: Optional[str] = None) -> str:
        """Like read(), but assumes that body part contains text data."""
        data = await self.read(decode=True)
        # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm
        # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send
        encoding = encoding or self.get_charset(default="utf-8")
        return data.decode(encoding)

    async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]:
        """Like read(), but assumes that body parts contains JSON data."""
        data = await self.read(decode=True)
        if not data:
            return None
        encoding = encoding or self.get_charset(default="utf-8")
        return cast(Dict[str, Any], json.loads(data.decode(encoding)))

    async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
        """Like read(), but assumes that body parts contain form urlencoded data."""
        data = await self.read(decode=True)
        if not data:
            return []
        if encoding is not None:
            real_encoding = encoding
        else:
            real_encoding = self.get_charset(default="utf-8")
        try:
            decoded_data = data.rstrip().decode(real_encoding)
        except UnicodeDecodeError:
            raise ValueError("data cannot be decoded with %s encoding" % real_encoding)

        return parse_qsl(
            decoded_data,
            keep_blank_values=True,
            encoding=real_encoding,
        )

    def at_eof(self) -> bool:
        """Returns True if the boundary was reached or False otherwise."""
        return self._at_eof

    def decode(self, data: bytes) -> bytes:
        """Decodes data.

        Decoding is done according the specified Content-Encoding
        or Content-Transfer-Encoding headers value.
        """
        if CONTENT_TRANSFER_ENCODING in self.headers:
            data = self._decode_content_transfer(data)
        # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
        if not self._is_form_data and CONTENT_ENCODING in self.headers:
            return self._decode_content(data)
        return data

    def _decode_content(self, data: bytes) -> bytes:
        # Apply the Content-Encoding compression scheme, if supported.
        encoding = self.headers.get(CONTENT_ENCODING, "").lower()
        if encoding == "identity":
            return data
        if encoding in {"deflate", "gzip"}:
            return ZLibDecompressor(
                encoding=encoding,
                suppress_deflate_header=True,
            ).decompress_sync(data)

        raise RuntimeError(f"unknown content encoding: {encoding}")

    def _decode_content_transfer(self, data: bytes) -> bytes:
        # Apply the MIME Content-Transfer-Encoding (base64/quoted-printable).
        encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()

        if encoding == "base64":
            return base64.b64decode(data)
        elif encoding == "quoted-printable":
            return binascii.a2b_qp(data)
        elif encoding in ("binary", "8bit", "7bit"):
            return data
        else:
            raise RuntimeError(f"unknown content transfer encoding: {encoding}")

    def get_charset(self, default: str) -> str:
        """Returns charset parameter from Content-Type header or default."""
        ctype = self.headers.get(CONTENT_TYPE, "")
        mimetype = parse_mimetype(ctype)
        return mimetype.parameters.get("charset", self._default_charset or default)

    @reify
    def name(self) -> Optional[str]:
        """Returns name specified in Content-Disposition header.

        If the header is missing or malformed, returns None.
        """
        _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
        return content_disposition_filename(params, "name")

    @reify
    def filename(self) -> Optional[str]:
        """Returns filename specified in Content-Disposition header.

        Returns None if the header is missing or malformed.
        """
        _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
        return content_disposition_filename(params, "filename")
558
+
559
+
560
@payload_type(BodyPartReader, order=Order.try_first)
class BodyPartReaderPayload(Payload):
    """Payload adapter that streams a :class:`BodyPartReader` into a writer."""

    _value: BodyPartReader

    def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
        super().__init__(value, *args, **kwargs)

        # Mirror the part's name/filename in our own Content-Disposition.
        disposition: Dict[str, str] = {}
        if value.name is not None:
            disposition["name"] = value.name
        if value.filename is not None:
            disposition["filename"] = value.filename
        if disposition:
            self.set_content_disposition("attachment", True, **disposition)

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        # There is no buffered body to decode; the data is streamed.
        raise TypeError("Unable to decode.")

    async def write(self, writer: Any) -> None:
        """Copy the body part into *writer*, 64 KiB at a time."""
        field = self._value
        while chunk := await field.read_chunk(size=2**16):
            await writer.write(field.decode(chunk))
585
+
586
+
587
+ class MultipartReader:
588
+ """Multipart body reader."""
589
+
590
+ #: Response wrapper, used when multipart readers constructs from response.
591
+ response_wrapper_cls = MultipartResponseWrapper
592
+ #: Multipart reader class, used to handle multipart/* body parts.
593
+ #: None points to type(self)
594
+ multipart_reader_cls: Optional[Type["MultipartReader"]] = None
595
+ #: Body part reader class for non multipart/* content types.
596
+ part_reader_cls = BodyPartReader
597
+
598
+ def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None:
599
+ self._mimetype = parse_mimetype(headers[CONTENT_TYPE])
600
+ assert self._mimetype.type == "multipart", "multipart/* content type expected"
601
+ if "boundary" not in self._mimetype.parameters:
602
+ raise ValueError(
603
+ "boundary missed for Content-Type: %s" % headers[CONTENT_TYPE]
604
+ )
605
+
606
+ self.headers = headers
607
+ self._boundary = ("--" + self._get_boundary()).encode()
608
+ self._content = content
609
+ self._default_charset: Optional[str] = None
610
+ self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None
611
+ self._at_eof = False
612
+ self._at_bof = True
613
+ self._unread: List[bytes] = []
614
+
615
+ def __aiter__(self: Self) -> Self:
616
+ return self
617
+
618
+ async def __anext__(
619
+ self,
620
+ ) -> Optional[Union["MultipartReader", BodyPartReader]]:
621
+ part = await self.next()
622
+ if part is None:
623
+ raise StopAsyncIteration
624
+ return part
625
+
626
+ @classmethod
627
+ def from_response(
628
+ cls,
629
+ response: "ClientResponse",
630
+ ) -> MultipartResponseWrapper:
631
+ """Constructs reader instance from HTTP response.
632
+
633
+ :param response: :class:`~aiohttp.client.ClientResponse` instance
634
+ """
635
+ obj = cls.response_wrapper_cls(
636
+ response, cls(response.headers, response.content)
637
+ )
638
+ return obj
639
+
640
+ def at_eof(self) -> bool:
641
+ """Returns True if the final boundary was reached, false otherwise."""
642
+ return self._at_eof
643
+
644
+ async def next(
645
+ self,
646
+ ) -> Optional[Union["MultipartReader", BodyPartReader]]:
647
+ """Emits the next multipart body part."""
648
+ # So, if we're at BOF, we need to skip till the boundary.
649
+ if self._at_eof:
650
+ return None
651
+ await self._maybe_release_last_part()
652
+ if self._at_bof:
653
+ await self._read_until_first_boundary()
654
+ self._at_bof = False
655
+ else:
656
+ await self._read_boundary()
657
+ if self._at_eof: # we just read the last boundary, nothing to do there
658
+ return None
659
+
660
+ part = await self.fetch_next_part()
661
+ # https://datatracker.ietf.org/doc/html/rfc7578#section-4.6
662
+ if (
663
+ self._last_part is None
664
+ and self._mimetype.subtype == "form-data"
665
+ and isinstance(part, BodyPartReader)
666
+ ):
667
+ _, params = parse_content_disposition(part.headers.get(CONTENT_DISPOSITION))
668
+ if params.get("name") == "_charset_":
669
+ # Longest encoding in https://encoding.spec.whatwg.org/encodings.json
670
+ # is 19 characters, so 32 should be more than enough for any valid encoding.
671
+ charset = await part.read_chunk(32)
672
+ if len(charset) > 31:
673
+ raise RuntimeError("Invalid default charset")
674
+ self._default_charset = charset.strip().decode()
675
+ part = await self.fetch_next_part()
676
+ self._last_part = part
677
+ return self._last_part
678
+
679
+ async def release(self) -> None:
680
+ """Reads all the body parts to the void till the final boundary."""
681
+ while not self._at_eof:
682
+ item = await self.next()
683
+ if item is None:
684
+ break
685
+ await item.release()
686
+
687
+ async def fetch_next_part(
688
+ self,
689
+ ) -> Union["MultipartReader", BodyPartReader]:
690
+ """Returns the next body part reader."""
691
+ headers = await self._read_headers()
692
+ return self._get_part_reader(headers)
693
+
694
    def _get_part_reader(
        self,
        headers: "CIMultiDictProxy[str]",
    ) -> Union["MultipartReader", BodyPartReader]:
        """Dispatches the response by the `Content-Type` header.

        Returns a suitable reader instance.

        :param dict headers: Response headers
        """
        ctype = headers.get(CONTENT_TYPE, "")
        mimetype = parse_mimetype(ctype)

        if mimetype.type == "multipart":
            # Nested multipart part: recurse using the same reader class
            # (or the configured ``multipart_reader_cls`` override).
            if self.multipart_reader_cls is None:
                return type(self)(headers, self._content)
            return self.multipart_reader_cls(headers, self._content)
        else:
            return self.part_reader_cls(
                self._boundary,
                headers,
                self._content,
                subtype=self._mimetype.subtype,
                default_charset=self._default_charset,
            )
719
+
720
    def _get_boundary(self) -> str:
        """Return the boundary parameter, enforcing the 70-char limit."""
        boundary = self._mimetype.parameters["boundary"]
        if len(boundary) > 70:
            raise ValueError("boundary %r is too long (70 chars max)" % boundary)

        return boundary
726
+
727
+ async def _readline(self) -> bytes:
728
+ if self._unread:
729
+ return self._unread.pop()
730
+ return await self._content.readline()
731
+
732
    async def _read_until_first_boundary(self) -> None:
        """Skip any preamble until the opening (or closing) boundary line."""
        while True:
            chunk = await self._readline()
            if chunk == b"":
                # Stream exhausted without ever seeing the boundary.
                raise ValueError(
                    "Could not find starting boundary %r" % (self._boundary)
                )
            chunk = chunk.rstrip()
            if chunk == self._boundary:
                return
            elif chunk == self._boundary + b"--":
                # Closing boundary found first: multipart with zero parts.
                self._at_eof = True
                return
745
+
746
    async def _read_boundary(self) -> None:
        """Consume the next boundary line and detect the closing boundary."""
        chunk = (await self._readline()).rstrip()
        if chunk == self._boundary:
            pass
        elif chunk == self._boundary + b"--":
            self._at_eof = True
            epilogue = await self._readline()
            next_line = await self._readline()

            # the epilogue is expected and then either the end of input or the
            # parent multipart boundary, if the parent boundary is found then
            # it should be marked as unread and handed to the parent for
            # processing
            if next_line[:2] == b"--":
                self._unread.append(next_line)
            # otherwise the request is likely missing an epilogue and both
            # lines should be passed to the parent for processing
            # (this handles the old behavior gracefully)
            else:
                self._unread.extend([next_line, epilogue])
        else:
            raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")
768
+
769
+ async def _read_headers(self) -> "CIMultiDictProxy[str]":
770
+ lines = [b""]
771
+ while True:
772
+ chunk = await self._content.readline()
773
+ chunk = chunk.strip()
774
+ lines.append(chunk)
775
+ if not chunk:
776
+ break
777
+ parser = HeadersParser()
778
+ headers, raw_headers = parser.parse_headers(lines)
779
+ return headers
780
+
781
+ async def _maybe_release_last_part(self) -> None:
782
+ """Ensures that the last read body part is read completely."""
783
+ if self._last_part is not None:
784
+ if not self._last_part.at_eof():
785
+ await self._last_part.release()
786
+ self._unread.extend(self._last_part._unread)
787
+ self._last_part = None
788
+
789
+
790
# (payload, content-encoding, content-transfer-encoding) triple per part.
_Part = Tuple[Payload, str, str]


class MultipartWriter(Payload):
    """Multipart body writer."""

    # A MultipartWriter has no scalar value of its own.
    _value: None

    def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None:
        boundary = boundary if boundary is not None else uuid.uuid4().hex
        # The underlying Payload API demands a str (utf-8), not bytes,
        # so we need to ensure we don't lose anything during conversion.
        # As a result, require the boundary to be ASCII only.
        # In both situations.

        try:
            self._boundary = boundary.encode("ascii")
        except UnicodeEncodeError:
            raise ValueError("boundary should contain ASCII only chars") from None
        ctype = f"multipart/{subtype}; boundary={self._boundary_value}"

        super().__init__(None, content_type=ctype)

        self._parts: List[_Part] = []
        self._is_form_data = subtype == "form-data"

    def __enter__(self) -> "MultipartWriter":
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        pass

    def __iter__(self) -> Iterator[_Part]:
        return iter(self._parts)

    def __len__(self) -> int:
        return len(self._parts)

    def __bool__(self) -> bool:
        # Always truthy, even with zero parts appended.
        return True

    _valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z")
    _invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]")

    @property
    def _boundary_value(self) -> str:
        """Wrap boundary parameter value in quotes, if necessary.

        Reads self.boundary and returns a unicode string.
        """
        # Refer to RFCs 7231, 7230, 5234.
        #
        # parameter      = token "=" ( token / quoted-string )
        # token          = 1*tchar
        # quoted-string  = DQUOTE *( qdtext / quoted-pair ) DQUOTE
        # qdtext         = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
        # obs-text       = %x80-FF
        # quoted-pair    = "\" ( HTAB / SP / VCHAR / obs-text )
        # tchar          = "!" / "#" / "$" / "%" / "&" / "'" / "*"
        #                  / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
        #                  / DIGIT / ALPHA
        #                  ; any VCHAR, except delimiters
        # VCHAR          = %x21-7E
        value = self._boundary
        if re.match(self._valid_tchar_regex, value):
            return value.decode("ascii")  # cannot fail

        if re.search(self._invalid_qdtext_char_regex, value):
            raise ValueError("boundary value contains invalid characters")

        # escape %x5C and %x22
        quoted_value_content = value.replace(b"\\", b"\\\\")
        quoted_value_content = quoted_value_content.replace(b'"', b'\\"')

        return '"' + quoted_value_content.decode("ascii") + '"'

    @property
    def boundary(self) -> str:
        """The boundary as a str (always ASCII, see __init__)."""
        return self._boundary.decode("ascii")

    def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Payload:
        """Append *obj* as a part, adapting it to a Payload if needed."""
        if headers is None:
            headers = CIMultiDict()

        if isinstance(obj, Payload):
            obj.headers.update(headers)
            return self.append_payload(obj)
        else:
            try:
                payload = get_payload(obj, headers=headers)
            except LookupError:
                raise TypeError("Cannot create payload from %r" % obj)
            else:
                return self.append_payload(payload)

    def append_payload(self, payload: Payload) -> Payload:
        """Adds a new body part to multipart writer."""
        encoding: Optional[str] = None
        te_encoding: Optional[str] = None
        if self._is_form_data:
            # https://datatracker.ietf.org/doc/html/rfc7578#section-4.7
            # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
            assert (
                not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING}
                & payload.headers.keys()
            )
            # Set default Content-Disposition in case user doesn't create one
            if CONTENT_DISPOSITION not in payload.headers:
                name = f"section-{len(self._parts)}"
                payload.set_content_disposition("form-data", name=name)
        else:
            # compression
            encoding = payload.headers.get(CONTENT_ENCODING, "").lower()
            if encoding and encoding not in ("deflate", "gzip", "identity"):
                raise RuntimeError(f"unknown content encoding: {encoding}")
            if encoding == "identity":
                encoding = None

            # te encoding
            te_encoding = payload.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()
            if te_encoding not in ("", "base64", "quoted-printable", "binary"):
                raise RuntimeError(f"unknown content transfer encoding: {te_encoding}")
            if te_encoding == "binary":
                te_encoding = None

            # size
            # Content-Length is only meaningful when the bytes go out verbatim.
            size = payload.size
            if size is not None and not (encoding or te_encoding):
                payload.headers[CONTENT_LENGTH] = str(size)

        self._parts.append((payload, encoding, te_encoding))  # type: ignore[arg-type]
        return payload

    def append_json(
        self, obj: Any, headers: Optional[Mapping[str, str]] = None
    ) -> Payload:
        """Helper to append JSON part."""
        if headers is None:
            headers = CIMultiDict()

        return self.append_payload(JsonPayload(obj, headers=headers))

    def append_form(
        self,
        obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],
        headers: Optional[Mapping[str, str]] = None,
    ) -> Payload:
        """Helper to append form urlencoded part."""
        assert isinstance(obj, (Sequence, Mapping))

        if headers is None:
            headers = CIMultiDict()

        if isinstance(obj, Mapping):
            obj = list(obj.items())
        data = urlencode(obj, doseq=True)

        return self.append_payload(
            StringPayload(
                data, headers=headers, content_type="application/x-www-form-urlencoded"
            )
        )

    @property
    def size(self) -> Optional[int]:
        """Size of the payload.

        ``None`` when any part is encoded or has unknown size.
        """
        total = 0
        for part, encoding, te_encoding in self._parts:
            if encoding or te_encoding or part.size is None:
                return None

            total += int(
                2
                + len(self._boundary)
                + 2
                + part.size  # b'--'+self._boundary+b'\r\n'
                + len(part._binary_headers)
                + 2  # b'\r\n'
            )

        total += 2 + len(self._boundary) + 4  # b'--'+self._boundary+b'--\r\n'
        return total

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        return "".join(
            "--"
            + self.boundary
            + "\n"
            + part._binary_headers.decode(encoding, errors)
            + part.decode()
            for part, _e, _te in self._parts
        )

    async def write(self, writer: Any, close_boundary: bool = True) -> None:
        """Write body."""
        for part, encoding, te_encoding in self._parts:
            if self._is_form_data:
                # https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
                assert CONTENT_DISPOSITION in part.headers
                assert "name=" in part.headers[CONTENT_DISPOSITION]

            await writer.write(b"--" + self._boundary + b"\r\n")
            await writer.write(part._binary_headers)

            if encoding or te_encoding:
                # Route the part through an encoding wrapper stream.
                w = MultipartPayloadWriter(writer)
                if encoding:
                    w.enable_compression(encoding)
                if te_encoding:
                    w.enable_encoding(te_encoding)
                await part.write(w)  # type: ignore[arg-type]
                await w.write_eof()
            else:
                await part.write(writer)

            await writer.write(b"\r\n")

        if close_boundary:
            await writer.write(b"--" + self._boundary + b"--\r\n")
1014
+
1015
+
1016
class MultipartPayloadWriter:
    """Writer wrapper applying compression and/or a transfer encoding
    (base64 / quoted-printable) to a single multipart part."""

    def __init__(self, writer: Any) -> None:
        self._writer = writer
        self._encoding: Optional[str] = None
        self._compress: Optional[ZLibCompressor] = None
        self._encoding_buffer: Optional[bytearray] = None

    def enable_encoding(self, encoding: str) -> None:
        # "binary" was mapped to None by the caller; only base64 needs a buffer.
        if encoding == "base64":
            self._encoding = encoding
            self._encoding_buffer = bytearray()
        elif encoding == "quoted-printable":
            self._encoding = "quoted-printable"

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        self._compress = ZLibCompressor(
            encoding=encoding,
            suppress_deflate_header=True,
            strategy=strategy,
        )

    async def write_eof(self) -> None:
        if self._compress is not None:
            chunk = self._compress.flush()
            if chunk:
                # Clear the compressor first so the recursive write() below
                # passes the flushed bytes straight to the encoder.
                self._compress = None
                await self.write(chunk)

        if self._encoding == "base64":
            # Encode whatever remainder (< 3 bytes) is still buffered.
            if self._encoding_buffer:
                await self._writer.write(base64.b64encode(self._encoding_buffer))

    async def write(self, chunk: bytes) -> None:
        if self._compress is not None:
            if chunk:
                chunk = await self._compress.compress(chunk)
                if not chunk:
                    return

        if self._encoding == "base64":
            buf = self._encoding_buffer
            assert buf is not None
            buf.extend(chunk)

            if buf:
                # Emit only whole 3-byte groups so no '=' padding appears
                # mid-stream; the remainder stays buffered for write_eof().
                div, mod = divmod(len(buf), 3)
                enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :])
                if enc_chunk:
                    b64chunk = base64.b64encode(enc_chunk)
                    await self._writer.write(b64chunk)
        elif self._encoding == "quoted-printable":
            await self._writer.write(binascii.b2a_qp(chunk))
        else:
            await self._writer.write(chunk)
deepseek/lib/python3.10/site-packages/aiohttp/payload.py ADDED
@@ -0,0 +1,519 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import enum
3
+ import io
4
+ import json
5
+ import mimetypes
6
+ import os
7
+ import sys
8
+ import warnings
9
+ from abc import ABC, abstractmethod
10
+ from itertools import chain
11
+ from typing import (
12
+ IO,
13
+ TYPE_CHECKING,
14
+ Any,
15
+ Dict,
16
+ Final,
17
+ Iterable,
18
+ Optional,
19
+ TextIO,
20
+ Tuple,
21
+ Type,
22
+ Union,
23
+ )
24
+
25
+ from multidict import CIMultiDict
26
+
27
+ from . import hdrs
28
+ from .abc import AbstractStreamWriter
29
+ from .helpers import (
30
+ _SENTINEL,
31
+ content_disposition_header,
32
+ guess_filename,
33
+ parse_mimetype,
34
+ sentinel,
35
+ )
36
+ from .streams import StreamReader
37
+ from .typedefs import JSONEncoder, _CIMultiDict
38
+
39
__all__ = (
    "PAYLOAD_REGISTRY",
    "get_payload",
    "payload_type",
    "Payload",
    "BytesPayload",
    "StringPayload",
    "IOBasePayload",
    "BytesIOPayload",
    "BufferedReaderPayload",
    "TextIOPayload",
    "StringIOPayload",
    "JsonPayload",
    "AsyncIterablePayload",
)

# Threshold above which BytesPayload warns that a file-like object
# should be used instead of raw bytes.
TOO_LARGE_BYTES_BODY: Final[int] = 2**20  # 1 MB

if TYPE_CHECKING:
    from typing import List
59
+
60
+
61
class LookupError(Exception):
    """Raised by PayloadRegistry.get when no factory matches the value."""

    # NOTE: intentionally shadows the builtin ``LookupError`` inside this
    # module; the public API exposes it as ``aiohttp.payload.LookupError``.
    pass
63
+
64
+
65
class Order(str, enum.Enum):
    """Lookup priority for factories registered in PayloadRegistry."""

    normal = "normal"
    try_first = "try_first"
    try_last = "try_last"
69
+
70
+
71
def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
    """Adapt *data* to a Payload via the module-level registry."""
    return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
73
+
74
+
75
def register_payload(
    factory: Type["Payload"], type: Any, *, order: Order = Order.normal
) -> None:
    """Register *factory* for values of *type* in the global registry."""
    PAYLOAD_REGISTRY.register(factory, type, order=order)
79
+
80
+
81
class payload_type:
    """Class decorator registering the decorated Payload for *type*."""

    def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
        self.type = type
        self.order = order

    def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
        register_payload(factory, self.type, order=self.order)
        return factory
89
+
90
+
91
PayloadType = Type["Payload"]
_PayloadRegistryItem = Tuple[PayloadType, Any]


class PayloadRegistry:
    """Payload registry.

    note: we need zope.interface for more efficient adapter search
    """

    __slots__ = ("_first", "_normal", "_last", "_normal_lookup")

    def __init__(self) -> None:
        self._first: List[_PayloadRegistryItem] = []
        self._normal: List[_PayloadRegistryItem] = []
        self._last: List[_PayloadRegistryItem] = []
        # Exact-type fast path for Order.normal registrations.
        self._normal_lookup: Dict[Any, PayloadType] = {}

    def get(
        self,
        data: Any,
        *args: Any,
        _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
        **kwargs: Any,
    ) -> "Payload":
        """Resolve *data* to a Payload.

        Resolution order: try_first factories, exact-type lookup,
        Payload passthrough, then an isinstance scan over normal +
        try_last factories.
        """
        if self._first:
            for factory, type_ in self._first:
                if isinstance(data, type_):
                    return factory(data, *args, **kwargs)
        # Try the fast lookup first
        if lookup_factory := self._normal_lookup.get(type(data)):
            return lookup_factory(data, *args, **kwargs)
        # Bail early if its already a Payload
        if isinstance(data, Payload):
            return data
        # Fallback to the slower linear search
        for factory, type_ in _CHAIN(self._normal, self._last):
            if isinstance(data, type_):
                return factory(data, *args, **kwargs)
        raise LookupError()

    def register(
        self, factory: PayloadType, type: Any, *, order: Order = Order.normal
    ) -> None:
        if order is Order.try_first:
            self._first.append((factory, type))
        elif order is Order.normal:
            self._normal.append((factory, type))
            # A tuple of types produces one exact-type entry per member.
            if isinstance(type, Iterable):
                for t in type:
                    self._normal_lookup[t] = factory
            else:
                self._normal_lookup[type] = factory
        elif order is Order.try_last:
            self._last.append((factory, type))
        else:
            raise ValueError(f"Unsupported order {order!r}")
148
+
149
+
150
class Payload(ABC):
    """Abstract base class for HTTP body payloads."""

    _default_content_type: str = "application/octet-stream"
    _size: Optional[int] = None

    def __init__(
        self,
        value: Any,
        headers: Optional[
            Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
        ] = None,
        content_type: Union[str, None, _SENTINEL] = sentinel,
        filename: Optional[str] = None,
        encoding: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        self._encoding = encoding
        self._filename = filename
        self._headers: _CIMultiDict = CIMultiDict()
        self._value = value
        # Content-Type resolution: explicit argument wins, then a guess
        # from the filename, then the class default.
        if content_type is not sentinel and content_type is not None:
            self._headers[hdrs.CONTENT_TYPE] = content_type
        elif self._filename is not None:
            if sys.version_info >= (3, 13):
                # guess_type on a path-like is deprecated from 3.13.
                guesser = mimetypes.guess_file_type
            else:
                guesser = mimetypes.guess_type
            content_type = guesser(self._filename)[0]
            if content_type is None:
                content_type = self._default_content_type
            self._headers[hdrs.CONTENT_TYPE] = content_type
        else:
            self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
        if headers:
            self._headers.update(headers)

    @property
    def size(self) -> Optional[int]:
        """Size of the payload."""
        return self._size

    @property
    def filename(self) -> Optional[str]:
        """Filename of the payload."""
        return self._filename

    @property
    def headers(self) -> _CIMultiDict:
        """Custom item headers"""
        return self._headers

    @property
    def _binary_headers(self) -> bytes:
        # Headers serialized for the wire, terminated by a blank line.
        return (
            "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
                "utf-8"
            )
            + b"\r\n"
        )

    @property
    def encoding(self) -> Optional[str]:
        """Payload encoding"""
        return self._encoding

    @property
    def content_type(self) -> str:
        """Content type"""
        return self._headers[hdrs.CONTENT_TYPE]

    def set_content_disposition(
        self,
        disptype: str,
        quote_fields: bool = True,
        _charset: str = "utf-8",
        **params: Any,
    ) -> None:
        """Sets ``Content-Disposition`` header."""
        self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
            disptype, quote_fields=quote_fields, _charset=_charset, **params
        )

    @abstractmethod
    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Return string representation of the value.

        This is named decode() to allow compatibility with bytes objects.
        """

    @abstractmethod
    async def write(self, writer: AbstractStreamWriter) -> None:
        """Write payload.

        writer is an AbstractStreamWriter instance:
        """
245
+
246
+
247
class BytesPayload(Payload):
    """Payload for in-memory ``bytes``/``bytearray``/``memoryview`` values.

    Warns with ``ResourceWarning`` when the body exceeds
    ``TOO_LARGE_BYTES_BODY``, since a large bytes object is written in
    one event-loop step.
    """

    _value: bytes

    def __init__(
        self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any
    ) -> None:
        if "content_type" not in kwargs:
            kwargs["content_type"] = "application/octet-stream"

        super().__init__(value, *args, **kwargs)

        if isinstance(value, memoryview):
            self._size = value.nbytes
        elif isinstance(value, (bytes, bytearray)):
            self._size = len(value)
        else:
            raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")

        if self._size > TOO_LARGE_BYTES_BODY:
            # Pass ``source`` directly instead of rebinding the constructor's
            # **kwargs dict to carry it (clearer and avoids the reuse).
            warnings.warn(
                "Sending a large body directly with raw bytes might"
                " lock the event loop. You should probably pass an "
                "io.BytesIO object instead",
                ResourceWarning,
                source=self,
            )

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Decode the stored bytes with *encoding*."""
        return self._value.decode(encoding, errors)

    async def write(self, writer: AbstractStreamWriter) -> None:
        """Write the whole value to *writer* in a single call."""
        await writer.write(self._value)
280
+
281
+
282
class StringPayload(BytesPayload):
    """Payload for ``str`` values, encoded per the resolved charset."""

    def __init__(
        self,
        value: str,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:

        # Either argument may imply the other; both default to utf-8
        # text/plain when neither is given.
        if encoding is None:
            if content_type is None:
                real_encoding = "utf-8"
                content_type = "text/plain; charset=utf-8"
            else:
                mimetype = parse_mimetype(content_type)
                real_encoding = mimetype.parameters.get("charset", "utf-8")
        else:
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding
            real_encoding = encoding

        super().__init__(
            value.encode(real_encoding),
            encoding=real_encoding,
            content_type=content_type,
            *args,
            **kwargs,
        )
311
+
312
+
313
class StringIOPayload(StringPayload):
    """StringPayload sourced from a text stream (read eagerly)."""

    def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
        # The entire stream is consumed at construction time.
        super().__init__(value.read(), *args, **kwargs)
316
+
317
+
318
class IOBasePayload(Payload):
    """Payload streaming from a file-like object in 64 KiB chunks."""

    _value: io.IOBase

    def __init__(
        self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
    ) -> None:
        if "filename" not in kwargs:
            kwargs["filename"] = guess_filename(value)

        super().__init__(value, *args, **kwargs)

        if self._filename is not None and disposition is not None:
            if hdrs.CONTENT_DISPOSITION not in self.headers:
                self.set_content_disposition(disposition, filename=self._filename)

    async def write(self, writer: AbstractStreamWriter) -> None:
        # Blocking reads run in the default executor; the file object is
        # always closed once writing finishes or fails.
        loop = asyncio.get_event_loop()
        try:
            chunk = await loop.run_in_executor(None, self._value.read, 2**16)
            while chunk:
                await writer.write(chunk)
                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
        finally:
            await loop.run_in_executor(None, self._value.close)

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        return "".join(r.decode(encoding, errors) for r in self._value.readlines())
345
+
346
+
347
class TextIOPayload(IOBasePayload):
    """Payload streaming from a text-mode file, encoding chunks on write."""

    _value: io.TextIOBase

    def __init__(
        self,
        value: TextIO,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:

        # Same charset/content-type resolution as StringPayload.
        if encoding is None:
            if content_type is None:
                encoding = "utf-8"
                content_type = "text/plain; charset=utf-8"
            else:
                mimetype = parse_mimetype(content_type)
                encoding = mimetype.parameters.get("charset", "utf-8")
        else:
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding

        super().__init__(
            value,
            content_type=content_type,
            encoding=encoding,
            *args,
            **kwargs,
        )

    @property
    def size(self) -> Optional[int]:
        # Remaining bytes per fstat; None when the stream has no fileno.
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except OSError:
            return None

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        # Text streams decode themselves; arguments are ignored.
        return self._value.read()

    async def write(self, writer: AbstractStreamWriter) -> None:
        loop = asyncio.get_event_loop()
        try:
            chunk = await loop.run_in_executor(None, self._value.read, 2**16)
            while chunk:
                # Encode each text chunk with the payload encoding if set.
                data = (
                    chunk.encode(encoding=self._encoding)
                    if self._encoding
                    else chunk.encode()
                )
                await writer.write(data)
                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
        finally:
            await loop.run_in_executor(None, self._value.close)
402
+
403
+
404
class BytesIOPayload(IOBasePayload):
    """IOBasePayload specialized for in-memory ``io.BytesIO`` buffers."""

    _value: io.BytesIO

    @property
    def size(self) -> int:
        # Bytes remaining from the current position; the position is
        # restored after measuring.
        position = self._value.tell()
        end = self._value.seek(0, os.SEEK_END)
        self._value.seek(position)
        return end - position

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        # Reads from the current position, consuming the buffer.
        return self._value.read().decode(encoding, errors)
416
+
417
+
418
class BufferedReaderPayload(IOBasePayload):
    """IOBasePayload for buffered binary readers."""

    _value: io.BufferedIOBase

    @property
    def size(self) -> Optional[int]:
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except (OSError, AttributeError):
            # data.fileno() is not supported, e.g.
            # io.BufferedReader(io.BytesIO(b'data'))
            # For some file-like objects (e.g. tarfile), the fileno() attribute may
            # not exist at all, and will instead raise an AttributeError.
            return None

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        return self._value.read().decode(encoding, errors)
434
+
435
+
436
class JsonPayload(BytesPayload):
    """Payload serializing *value* to JSON bytes via *dumps*."""

    def __init__(
        self,
        value: Any,
        encoding: str = "utf-8",
        content_type: str = "application/json",
        dumps: JSONEncoder = json.dumps,
        *args: Any,
        **kwargs: Any,
    ) -> None:

        super().__init__(
            dumps(value).encode(encoding),
            content_type=content_type,
            encoding=encoding,
            *args,
            **kwargs,
        )
454
+
455
+
456
if TYPE_CHECKING:
    from typing import AsyncIterable, AsyncIterator

    _AsyncIterator = AsyncIterator[bytes]
    _AsyncIterable = AsyncIterable[bytes]
else:
    from collections.abc import AsyncIterable, AsyncIterator

    # At runtime the unparameterized ABCs suffice for isinstance checks.
    _AsyncIterator = AsyncIterator
    _AsyncIterable = AsyncIterable
466
+
467
+
468
class AsyncIterablePayload(Payload):
    """Payload streaming chunks from an async iterable of bytes."""

    _iter: Optional[_AsyncIterator] = None
    _value: _AsyncIterable

    def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
        if not isinstance(value, AsyncIterable):
            raise TypeError(
                "value argument must support "
                "collections.abc.AsyncIterable interface, "
                "got {!r}".format(type(value))
            )

        if "content_type" not in kwargs:
            kwargs["content_type"] = "application/octet-stream"

        super().__init__(value, *args, **kwargs)

        # The iterator is created once; it is cleared after exhaustion.
        self._iter = value.__aiter__()

    async def write(self, writer: AbstractStreamWriter) -> None:
        if self._iter:
            try:
                # iter is not None check prevents rare cases
                # when the case iterable is used twice
                while True:
                    chunk = await self._iter.__anext__()
                    await writer.write(chunk)
            except StopAsyncIteration:
                self._iter = None

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        raise TypeError("Unable to decode.")
501
+
502
+
503
class StreamReaderPayload(AsyncIterablePayload):
    """AsyncIterablePayload over a StreamReader's ``iter_any()`` chunks."""

    def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
        super().__init__(value.iter_any(), *args, **kwargs)
506
+
507
+
508
# Global registry, consulted by get_payload()/register_payload().
PAYLOAD_REGISTRY = PayloadRegistry()
PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
PAYLOAD_REGISTRY.register(StringPayload, str)
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
# try_last gives more specialized async iterables (presumably
# multipart.BodyPartReaderPayload — confirm) a chance to override the default
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
deepseek/lib/python3.10/site-packages/aiohttp/payload_streamer.py ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Payload implementation for coroutines as data provider.
3
+
4
+ As a simple case, you can upload data from file::
5
+
6
+ @aiohttp.streamer
7
+ async def file_sender(writer, file_name=None):
8
+ with open(file_name, 'rb') as f:
9
+ chunk = f.read(2**16)
10
+ while chunk:
11
+ await writer.write(chunk)
12
+
13
+ chunk = f.read(2**16)
14
+
15
+ Then you can use `file_sender` like this:
16
+
17
+ async with session.post('http://httpbin.org/post',
18
+ data=file_sender(file_name='huge_file')) as resp:
19
+ print(await resp.text())
20
+
21
+ ..note:: Coroutine must accept `writer` as first argument
22
+
23
+ """
24
+
25
+ import types
26
+ import warnings
27
+ from typing import Any, Awaitable, Callable, Dict, Tuple
28
+
29
+ from .abc import AbstractStreamWriter
30
+ from .payload import Payload, payload_type
31
+
32
+ __all__ = ("streamer",)
33
+
34
+
35
class _stream_wrapper:
    """Bind a streaming coroutine to its call arguments.

    Instances are awaited with the destination writer (see __call__).
    """

    def __init__(
        self,
        coro: Callable[..., Awaitable[None]],
        args: Tuple[Any, ...],
        kwargs: Dict[str, Any],
    ) -> None:
        self.coro = types.coroutine(coro)
        self.args = args
        self.kwargs = kwargs

    async def __call__(self, writer: AbstractStreamWriter) -> None:
        # Forward the writer plus the arguments captured at wrap time.
        await self.coro(writer, *self.args, **self.kwargs)
+ await self.coro(writer, *self.args, **self.kwargs)
48
+
49
+
50
class streamer:
    """Deprecated decorator turning a writer-coroutine into a payload factory."""

    def __init__(self, coro: Callable[..., Awaitable[None]]) -> None:
        warnings.warn(
            "@streamer is deprecated, use async generators instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.coro = coro

    def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper:
        # Calling the decorated object captures the arguments for later
        # streaming into a writer.
        return _stream_wrapper(self.coro, args, kwargs)
+ return _stream_wrapper(self.coro, args, kwargs)
61
+
62
+
63
@payload_type(_stream_wrapper)
class StreamWrapperPayload(Payload):
    """Payload whose value is an awaitable ``_stream_wrapper``."""

    async def write(self, writer: AbstractStreamWriter) -> None:
        await self._value(writer)

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        raise TypeError("Unable to decode.")
+ raise TypeError("Unable to decode.")
70
+
71
+
72
@payload_type(streamer)
class StreamPayload(StreamWrapperPayload):
    # NOTE(review): write() repeats the inherited implementation verbatim —
    # presumably kept for historical reasons; confirm before removing.

    def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None:
        # ``value`` is a ``streamer`` instance; calling it with no arguments
        # yields the ``_stream_wrapper`` stored as the payload value.
        super().__init__(value(), *args, **kwargs)

    async def write(self, writer: AbstractStreamWriter) -> None:
        await self._value(writer)
+ await self._value(writer)
deepseek/lib/python3.10/site-packages/aiohttp/py.typed ADDED
@@ -0,0 +1 @@
 
 
1
+ Marker
deepseek/lib/python3.10/site-packages/aiohttp/pytest_plugin.py ADDED
@@ -0,0 +1,436 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import contextlib
3
+ import inspect
4
+ import warnings
5
+ from typing import (
6
+ Any,
7
+ Awaitable,
8
+ Callable,
9
+ Dict,
10
+ Iterator,
11
+ Optional,
12
+ Protocol,
13
+ Type,
14
+ Union,
15
+ overload,
16
+ )
17
+
18
+ import pytest
19
+
20
+ from .test_utils import (
21
+ BaseTestServer,
22
+ RawTestServer,
23
+ TestClient,
24
+ TestServer,
25
+ loop_context,
26
+ setup_test_loop,
27
+ teardown_test_loop,
28
+ unused_port as _unused_port,
29
+ )
30
+ from .web import Application, BaseRequest, Request
31
+ from .web_protocol import _RequestHandler
32
+
33
+ try:
34
+ import uvloop
35
+ except ImportError: # pragma: no cover
36
+ uvloop = None # type: ignore[assignment]
37
+
38
+
39
class AiohttpClient(Protocol):
    """Call signature of the ``aiohttp_client`` fixture's factory."""

    @overload
    async def __call__(
        self,
        __param: Application,
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[Request, Application]: ...
    @overload
    async def __call__(
        self,
        __param: BaseTestServer,
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[BaseRequest, None]: ...
56
+
57
+
58
class AiohttpServer(Protocol):
    """Call signature of the ``aiohttp_server`` fixture's factory."""

    def __call__(
        self, app: Application, *, port: Optional[int] = None, **kwargs: Any
    ) -> Awaitable[TestServer]: ...
62
+
63
+
64
class AiohttpRawServer(Protocol):
    """Call signature of the ``aiohttp_raw_server`` fixture's factory."""

    def __call__(
        self, handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any
    ) -> Awaitable[RawTestServer]: ...
68
+
69
+
70
def pytest_addoption(parser):  # type: ignore[no-untyped-def]
    """Register the aiohttp-specific command line options with pytest."""
    options = (
        (
            "--aiohttp-fast",
            {
                "action": "store_true",
                "default": False,
                "help": "run tests faster by disabling extra checks",
            },
        ),
        (
            "--aiohttp-loop",
            {
                "action": "store",
                "default": "pyloop",
                "help": "run tests with specific loop: pyloop, uvloop or all",
            },
        ),
        (
            "--aiohttp-enable-loop-debug",
            {
                "action": "store_true",
                "default": False,
                "help": "enable event loop debug mode",
            },
        ),
    )
    # Register in a fixed order so --help output stays stable.
    for name, kwargs in options:
        parser.addoption(name, **kwargs)
89
+
90
+
91
def pytest_fixture_setup(fixturedef):  # type: ignore[no-untyped-def]
    """Set up pytest fixture.

    Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
    """
    func = fixturedef.func

    if inspect.isasyncgenfunction(func):
        # async generator fixture
        is_async_gen = True
    elif asyncio.iscoroutinefunction(func):
        # regular async fixture
        is_async_gen = False
    else:
        # not an async fixture, nothing to do
        return

    # The wrapper below needs the `request` fixture; inject it into the
    # fixture's argument list if the user did not declare it, and remember
    # to strip it back out before calling the user's function.
    strip_request = False
    if "request" not in fixturedef.argnames:
        fixturedef.argnames += ("request",)
        strip_request = True

    def wrapper(*args, **kwargs):  # type: ignore[no-untyped-def]
        request = kwargs["request"]
        if strip_request:
            del kwargs["request"]

        # if neither the fixture nor the test use the 'loop' fixture,
        # 'getfixturevalue' will fail because the test is not parameterized
        # (this can be removed someday if 'loop' is no longer parameterized)
        if "loop" not in request.fixturenames:
            raise Exception(
                "Asynchronous fixtures must depend on the 'loop' fixture or "
                "be used in tests depending from it."
            )

        _loop = request.getfixturevalue("loop")

        if is_async_gen:
            # for async generators, we need to advance the generator once,
            # then advance it again in a finalizer
            gen = func(*args, **kwargs)

            def finalizer():  # type: ignore[no-untyped-def]
                try:
                    return _loop.run_until_complete(gen.__anext__())
                except StopAsyncIteration:
                    pass

            request.addfinalizer(finalizer)
            return _loop.run_until_complete(gen.__anext__())
        else:
            return _loop.run_until_complete(func(*args, **kwargs))

    # Replace the fixture function in place so pytest calls our wrapper.
    fixturedef.func = wrapper
146
+
147
+
148
@pytest.fixture
def fast(request):  # type: ignore[no-untyped-def]
    """--fast config option"""
    # True when the test session was started with --aiohttp-fast.
    return request.config.getoption("--aiohttp-fast")
152
+
153
+
154
@pytest.fixture
def loop_debug(request):  # type: ignore[no-untyped-def]
    """--enable-loop-debug config option"""
    # True when the test session was started with --aiohttp-enable-loop-debug.
    return request.config.getoption("--aiohttp-enable-loop-debug")
158
+
159
+
160
+ @contextlib.contextmanager
161
+ def _runtime_warning_context(): # type: ignore[no-untyped-def]
162
+ """Context manager which checks for RuntimeWarnings.
163
+
164
+ This exists specifically to
165
+ avoid "coroutine 'X' was never awaited" warnings being missed.
166
+
167
+ If RuntimeWarnings occur in the context a RuntimeError is raised.
168
+ """
169
+ with warnings.catch_warnings(record=True) as _warnings:
170
+ yield
171
+ rw = [
172
+ "{w.filename}:{w.lineno}:{w.message}".format(w=w)
173
+ for w in _warnings
174
+ if w.category == RuntimeWarning
175
+ ]
176
+ if rw:
177
+ raise RuntimeError(
178
+ "{} Runtime Warning{},\n{}".format(
179
+ len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
180
+ )
181
+ )
182
+
183
+
184
+ @contextlib.contextmanager
185
+ def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def]
186
+ """Passthrough loop context.
187
+
188
+ Sets up and tears down a loop unless one is passed in via the loop
189
+ argument when it's passed straight through.
190
+ """
191
+ if loop:
192
+ # loop already exists, pass it straight through
193
+ yield loop
194
+ else:
195
+ # this shadows loop_context's standard behavior
196
+ loop = setup_test_loop()
197
+ yield loop
198
+ teardown_test_loop(loop, fast=fast)
199
+
200
+
201
def pytest_pycollect_makeitem(collector, name, obj):  # type: ignore[no-untyped-def]
    """Fix pytest collecting for coroutines."""
    # Only take over collection for coroutine functions whose name matches
    # the collector's test-name filter; otherwise defer to pytest (None).
    if not collector.funcnamefilter(name):
        return None
    if not asyncio.iscoroutinefunction(obj):
        return None
    return list(collector._genfunctions(name, obj))
205
+
206
+
207
def pytest_pyfunc_call(pyfuncitem):  # type: ignore[no-untyped-def]
    """Run coroutines in an event loop instead of a normal function call."""
    fast = pyfuncitem.config.getoption("--aiohttp-fast")
    if asyncio.iscoroutinefunction(pyfuncitem.function):
        # Reuse a loop fixture the test already requested (proactor first),
        # otherwise _passthrough_loop_context creates a temporary one.
        existing_loop = pyfuncitem.funcargs.get(
            "proactor_loop"
        ) or pyfuncitem.funcargs.get("loop", None)
        with _runtime_warning_context():
            with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
                # Pass only the fixtures the test function actually declares.
                testargs = {
                    arg: pyfuncitem.funcargs[arg]
                    for arg in pyfuncitem._fixtureinfo.argnames
                }
                _loop.run_until_complete(pyfuncitem.obj(**testargs))

        # Returning True tells pytest the call was fully handled here.
        return True
223
+
224
+
225
def pytest_generate_tests(metafunc):  # type: ignore[no-untyped-def]
    """Parametrize the ``loop_factory`` fixture from the --aiohttp-loop option.

    The option value is a comma separated list of loop names; a trailing
    "?" marks a name as optional, i.e. silently skipped when that loop
    implementation is not installed.
    """
    if "loop_factory" not in metafunc.fixturenames:
        return

    loops = metafunc.config.option.aiohttp_loop
    avail_factories: Dict[str, Type[asyncio.AbstractEventLoopPolicy]]
    avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}

    if uvloop is not None:  # pragma: no cover
        avail_factories["uvloop"] = uvloop.EventLoopPolicy

    if loops == "all":
        loops = "pyloop,uvloop?"

    factories: Dict[str, Type[asyncio.AbstractEventLoopPolicy]] = {}
    for name in loops.split(","):
        required = not name.endswith("?")
        name = name.strip(" ?")
        if name not in avail_factories:  # pragma: no cover
            if required:
                # Bug fix: report the loops that are actually available,
                # not the partially-built `factories` dict (which could be
                # empty and therefore useless in the error message).
                raise ValueError(
                    "Unknown loop '%s', available loops: %s"
                    % (name, list(avail_factories.keys()))
                )
            else:
                continue
        factories[name] = avail_factories[name]
    metafunc.parametrize(
        "loop_factory", list(factories.values()), ids=list(factories.keys())
    )
255
+
256
+
257
@pytest.fixture
def loop(loop_factory, fast, loop_debug):  # type: ignore[no-untyped-def]
    """Return an instance of the event loop."""
    # loop_factory is an event loop *policy* class selected by
    # pytest_generate_tests' pyloop/uvloop parametrization.
    policy = loop_factory()
    asyncio.set_event_loop_policy(policy)
    with loop_context(fast=fast) as _loop:
        if loop_debug:
            _loop.set_debug(True)  # pragma: no cover
        asyncio.set_event_loop(_loop)
        yield _loop
267
+
268
+
269
@pytest.fixture
def proactor_loop():  # type: ignore[no-untyped-def]
    """Return an event loop built with the Windows proactor policy.

    Only usable on Windows, where WindowsProactorEventLoopPolicy exists.
    """
    policy = asyncio.WindowsProactorEventLoopPolicy()  # type: ignore[attr-defined]
    asyncio.set_event_loop_policy(policy)

    with loop_context(policy.new_event_loop) as _loop:
        asyncio.set_event_loop(_loop)
        yield _loop
277
+
278
+
279
@pytest.fixture
def unused_port(aiohttp_unused_port: Callable[[], int]) -> Callable[[], int]:
    """Deprecated alias of the ``aiohttp_unused_port`` fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_unused_port fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_unused_port
287
+
288
+
289
@pytest.fixture
def aiohttp_unused_port() -> Callable[[], int]:
    """Return a port that is unused on the current host."""
    # Expose test_utils.unused_port as a fixture-provided callable.
    return _unused_port
293
+
294
+
295
@pytest.fixture
def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]:
    """Factory to create a TestServer instance, given an app.

    aiohttp_server(app, **kwargs)
    """
    # Track every server created through the factory for teardown.
    servers = []

    async def go(
        app: Application, *, port: Optional[int] = None, **kwargs: Any
    ) -> TestServer:
        server = TestServer(app, port=port)
        await server.start_server(loop=loop, **kwargs)
        servers.append(server)
        return server

    yield go

    # Teardown: close servers in LIFO order once the test finishes.
    async def finalize() -> None:
        while servers:
            await servers.pop().close()

    loop.run_until_complete(finalize())
318
+
319
+
320
@pytest.fixture
def test_server(aiohttp_server):  # type: ignore[no-untyped-def]  # pragma: no cover
    """Deprecated alias of the ``aiohttp_server`` fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_server
328
+
329
+
330
@pytest.fixture
def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]:
    """Factory to create a RawTestServer instance, given a web handler.

    aiohttp_raw_server(handler, **kwargs)
    """
    # Track every server created through the factory for teardown.
    servers = []

    async def go(
        handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any
    ) -> RawTestServer:
        server = RawTestServer(handler, port=port)
        await server.start_server(loop=loop, **kwargs)
        servers.append(server)
        return server

    yield go

    # Teardown: close servers in LIFO order once the test finishes.
    async def finalize() -> None:
        while servers:
            await servers.pop().close()

    loop.run_until_complete(finalize())
353
+
354
+
355
@pytest.fixture
def raw_test_server(  # type: ignore[no-untyped-def]  # pragma: no cover
    aiohttp_raw_server,
):
    """Deprecated alias of the ``aiohttp_raw_server`` fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_raw_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_raw_server
365
+
366
+
367
@pytest.fixture
def aiohttp_client(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpClient]:
    """Factory to create a TestClient instance.

    aiohttp_client(app, **kwargs)
    aiohttp_client(server, **kwargs)
    aiohttp_client(raw_server, **kwargs)
    """
    # Track every client created through the factory for teardown.
    clients = []

    @overload
    async def go(
        __param: Application,
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[Request, Application]: ...

    @overload
    async def go(
        __param: BaseTestServer,
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[BaseRequest, None]: ...

    async def go(
        __param: Union[Application, BaseTestServer],
        *args: Any,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[Any, Any]:
        # Legacy path: a callable factory (not an app/server) is invoked
        # with the loop and the extra call arguments to build the target.
        if isinstance(__param, Callable) and not isinstance(  # type: ignore[arg-type]
            __param, (Application, BaseTestServer)
        ):
            __param = __param(loop, *args, **kwargs)
            kwargs = {}
        else:
            assert not args, "args should be empty"

        if isinstance(__param, Application):
            # Wrap the bare application in a TestServer first.
            server_kwargs = server_kwargs or {}
            server = TestServer(__param, loop=loop, **server_kwargs)
            client = TestClient(server, loop=loop, **kwargs)
        elif isinstance(__param, BaseTestServer):
            client = TestClient(__param, loop=loop, **kwargs)
        else:
            raise ValueError("Unknown argument type: %r" % type(__param))

        await client.start_server()
        clients.append(client)
        return client

    yield go

    # Teardown: close clients in LIFO order once the test finishes.
    async def finalize() -> None:
        while clients:
            await clients.pop().close()

    loop.run_until_complete(finalize())
427
+
428
+
429
@pytest.fixture
def test_client(aiohttp_client):  # type: ignore[no-untyped-def]  # pragma: no cover
    """Deprecated alias of the ``aiohttp_client`` fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_client fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_client
deepseek/lib/python3.10/site-packages/aiohttp/streams.py ADDED
@@ -0,0 +1,723 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import collections
3
+ import warnings
4
+ from typing import (
5
+ Awaitable,
6
+ Callable,
7
+ Deque,
8
+ Final,
9
+ Generic,
10
+ List,
11
+ Optional,
12
+ Tuple,
13
+ TypeVar,
14
+ )
15
+
16
+ from .base_protocol import BaseProtocol
17
+ from .helpers import (
18
+ _EXC_SENTINEL,
19
+ BaseTimerContext,
20
+ TimerNoop,
21
+ set_exception,
22
+ set_result,
23
+ )
24
+ from .log import internal_logger
25
+
26
+ __all__ = (
27
+ "EMPTY_PAYLOAD",
28
+ "EofStream",
29
+ "StreamReader",
30
+ "DataQueue",
31
+ )
32
+
33
+ _T = TypeVar("_T")
34
+
35
+
36
class EofStream(Exception):
    """eof stream indication (raised when a read reaches end of stream)."""
38
+
39
+
40
class AsyncStreamIterator(Generic[_T]):
    """Async iterator driven by repeatedly awaiting *read_func*.

    Iteration stops when the callable raises ``EofStream`` or returns an
    empty bytes object.
    """

    __slots__ = ("read_func",)

    def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None:
        self.read_func = read_func

    def __aiter__(self) -> "AsyncStreamIterator[_T]":
        return self

    async def __anext__(self) -> _T:
        try:
            rv = await self.read_func()
        except EofStream:
            raise StopAsyncIteration
        if rv == b"":
            # Empty read is the readers' end-of-data signal.
            raise StopAsyncIteration
        return rv
58
+
59
+
60
class ChunkTupleAsyncStreamIterator:
    """Async iterator yielding ``(data, end_of_chunk)`` tuples from a reader."""

    __slots__ = ("_stream",)

    def __init__(self, stream: "StreamReader") -> None:
        self._stream = stream

    def __aiter__(self) -> "ChunkTupleAsyncStreamIterator":
        return self

    async def __anext__(self) -> Tuple[bytes, bool]:
        rv = await self._stream.readchunk()
        if rv == (b"", False):
            # (b"", False) is readchunk()'s EOF sentinel.
            raise StopAsyncIteration
        return rv
75
+
76
+
77
class AsyncStreamReaderMixin:
    """Mixin adding async-iteration helpers to a stream reader class."""

    __slots__ = ()

    def __aiter__(self) -> AsyncStreamIterator[bytes]:
        # Iterating the reader directly yields line by line.
        return AsyncStreamIterator(self.readline)  # type: ignore[attr-defined]

    def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
        """Returns an asynchronous iterator that yields chunks of size n."""
        return AsyncStreamIterator(lambda: self.read(n))  # type: ignore[attr-defined]

    def iter_any(self) -> AsyncStreamIterator[bytes]:
        """Yield all available data as soon as it is received."""
        return AsyncStreamIterator(self.readany)  # type: ignore[attr-defined]

    def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
        """Yield chunks of data as they are received by the server.

        The yielded objects are tuples
        of (bytes, bool) as returned by the StreamReader.readchunk method.
        """
        return ChunkTupleAsyncStreamIterator(self)  # type: ignore[arg-type]
99
+
100
+
101
class StreamReader(AsyncStreamReaderMixin):
    """An enhancement of asyncio.StreamReader.

    Supports asynchronous iteration by line, chunk or as available::

        async for line in reader:
            ...
        async for chunk in reader.iter_chunked(1024):
            ...
        async for slice in reader.iter_any():
            ...

    """

    __slots__ = (
        "_protocol",
        "_low_water",
        "_high_water",
        "_loop",
        "_size",
        "_cursor",
        "_http_chunk_splits",
        "_buffer",
        "_buffer_offset",
        "_eof",
        "_waiter",
        "_eof_waiter",
        "_exception",
        "_timer",
        "_eof_callbacks",
        "_eof_counter",
        "total_bytes",
    )

    def __init__(
        self,
        protocol: BaseProtocol,
        limit: int,
        *,
        timer: Optional[BaseTimerContext] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        self._protocol = protocol
        # Flow control: pause the transport above 2*limit buffered bytes,
        # resume it once the buffer drops below limit.
        self._low_water = limit
        self._high_water = limit * 2
        if loop is None:
            loop = asyncio.get_event_loop()
        self._loop = loop
        self._size = 0
        self._cursor = 0
        self._http_chunk_splits: Optional[List[int]] = None
        self._buffer: Deque[bytes] = collections.deque()
        self._buffer_offset = 0
        self._eof = False
        self._waiter: Optional[asyncio.Future[None]] = None
        self._eof_waiter: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        self._timer = TimerNoop() if timer is None else timer
        self._eof_callbacks: List[Callable[[], None]] = []
        self._eof_counter = 0
        self.total_bytes = 0

    def __repr__(self) -> str:
        info = [self.__class__.__name__]
        if self._size:
            info.append("%d bytes" % self._size)
        if self._eof:
            info.append("eof")
        if self._low_water != 2**16:  # default limit
            info.append("low=%d high=%d" % (self._low_water, self._high_water))
        if self._waiter:
            info.append("w=%r" % self._waiter)
        if self._exception:
            info.append("e=%r" % self._exception)
        return "<%s>" % " ".join(info)

    def get_read_buffer_limits(self) -> Tuple[int, int]:
        """Return the (low, high) flow-control water marks."""
        return (self._low_water, self._high_water)

    def exception(self) -> Optional[BaseException]:
        """Return the exception set on this stream, if any."""
        return self._exception

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        """Fail all pending and future reads with *exc*."""
        self._exception = exc
        # EOF callbacks will never fire once the stream has failed.
        self._eof_callbacks.clear()

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc, exc_cause)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_exception(waiter, exc, exc_cause)

    def on_eof(self, callback: Callable[[], None]) -> None:
        """Register *callback* for EOF; run it now if already at EOF."""
        if self._eof:
            try:
                callback()
            except Exception:
                internal_logger.exception("Exception in eof callback")
        else:
            self._eof_callbacks.append(callback)

    def feed_eof(self) -> None:
        """Mark the stream as ended and wake up all waiters and callbacks."""
        self._eof = True

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_result(waiter, None)

        for cb in self._eof_callbacks:
            try:
                cb()
            except Exception:
                internal_logger.exception("Exception in eof callback")

        self._eof_callbacks.clear()

    def is_eof(self) -> bool:
        """Return True if 'feed_eof' was called."""
        return self._eof

    def at_eof(self) -> bool:
        """Return True if the buffer is empty and 'feed_eof' was called."""
        return self._eof and not self._buffer

    async def wait_eof(self) -> None:
        """Block until feed_eof() is called."""
        if self._eof:
            return

        assert self._eof_waiter is None
        self._eof_waiter = self._loop.create_future()
        try:
            await self._eof_waiter
        finally:
            self._eof_waiter = None

    def unread_data(self, data: bytes) -> None:
        """rollback reading some data from stream, inserting it to buffer head."""
        warnings.warn(
            "unread_data() is deprecated "
            "and will be removed in future releases (#3260)",
            DeprecationWarning,
            stacklevel=2,
        )
        if not data:
            return

        if self._buffer_offset:
            # Materialize the partially consumed head chunk first.
            self._buffer[0] = self._buffer[0][self._buffer_offset :]
            self._buffer_offset = 0
        self._size += len(data)
        self._cursor -= len(data)
        self._buffer.appendleft(data)
        self._eof_counter = 0

    # TODO: size is ignored, remove the param later
    def feed_data(self, data: bytes, size: int = 0) -> None:
        """Append *data* to the buffer and wake a pending read."""
        assert not self._eof, "feed_data after feed_eof"

        if not data:
            return

        data_len = len(data)
        self._size += data_len
        self._buffer.append(data)
        self.total_bytes += data_len

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        if self._size > self._high_water and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    def begin_http_chunk_receiving(self) -> None:
        """Start tracking HTTP chunk boundaries (chunked transfer encoding)."""
        if self._http_chunk_splits is None:
            if self.total_bytes:
                raise RuntimeError(
                    "Called begin_http_chunk_receiving when some data was already fed"
                )
            self._http_chunk_splits = []

    def end_http_chunk_receiving(self) -> None:
        """Record the end of the current HTTP chunk, waking readchunk()."""
        if self._http_chunk_splits is None:
            # Fixed message: refer to the methods that actually exist on
            # this class instead of the previous nonexistent names.
            raise RuntimeError(
                "Called end_http_chunk_receiving without calling "
                "begin_http_chunk_receiving first"
            )

        # self._http_chunk_splits contains logical byte offsets from start of
        # the body transfer. Each offset is the offset of the end of a chunk.
        # "Logical" means bytes, accessible for a user.
        # If no chunks containing logical data were received, current position
        # is definitely zero.
        pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0

        if self.total_bytes == pos:
            # We should not add empty chunks here. So we check for that.
            # Note, when chunked + gzip is used, we can receive a chunk
            # of compressed data, but that data may not be enough for gzip FSM
            # to yield any uncompressed data. That's why current position may
            # not change after receiving a chunk.
            return

        self._http_chunk_splits.append(self.total_bytes)

        # wake up readchunk when end of http chunk received
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    async def _wait(self, func_name: str) -> None:
        """Suspend until feed_data()/feed_eof() wakes us (single waiter only)."""
        if not self._protocol.connected:
            raise RuntimeError("Connection closed.")

        # StreamReader uses a future to link the protocol feed_data() method
        # to a read coroutine. Running two read coroutines at the same time
        # would have an unexpected behaviour. It would not possible to know
        # which coroutine would get the next data.
        if self._waiter is not None:
            raise RuntimeError(
                "%s() called while another coroutine is "
                "already waiting for incoming data" % func_name
            )

        waiter = self._waiter = self._loop.create_future()
        try:
            with self._timer:
                await waiter
        finally:
            self._waiter = None

    async def readline(self) -> bytes:
        """Read one line, where "line" is a sequence of bytes ending with \\n."""
        return await self.readuntil()

    async def readuntil(self, separator: bytes = b"\n") -> bytes:
        """Read data from the stream until *separator* is found.

        On EOF the buffered data is returned even without a trailing
        separator.  Raises ValueError for an empty separator or when the
        collected data exceeds the high-water limit.
        """
        seplen = len(separator)
        if seplen == 0:
            raise ValueError("Separator should be at least one-byte string")

        if self._exception is not None:
            raise self._exception

        # Collect pieces in a list and join once at the end to avoid
        # quadratic bytes concatenation on long lines.
        parts: List[bytes] = []
        chunk_size = 0
        not_enough = True

        while not_enough:
            while self._buffer and not_enough:
                offset = self._buffer_offset
                ichar = self._buffer[0].find(separator, offset) + 1
                # Read from current offset to found separator or to the end.
                data = self._read_nowait_chunk(
                    ichar - offset + seplen - 1 if ichar else -1
                )
                parts.append(data)
                chunk_size += len(data)
                if ichar:
                    not_enough = False

                if chunk_size > self._high_water:
                    raise ValueError("Chunk too big")

            if self._eof:
                break

            if not_enough:
                await self._wait("readuntil")

        return b"".join(parts)

    async def read(self, n: int = -1) -> bytes:
        """Read up to *n* bytes (all remaining data when n < 0)."""
        if self._exception is not None:
            raise self._exception

        # migration problem; with DataQueue you have to catch
        # EofStream exception, so common way is to run payload.read() inside
        # infinite loop. what can cause real infinite loop with StreamReader
        # lets keep this code one major release.
        if __debug__:
            if self._eof and not self._buffer:
                self._eof_counter = getattr(self, "_eof_counter", 0) + 1
                if self._eof_counter > 5:
                    internal_logger.warning(
                        "Multiple access to StreamReader in eof state, "
                        "might be infinite loop.",
                        stack_info=True,
                    )

        if not n:
            return b""

        if n < 0:
            # This used to just loop creating a new waiter hoping to
            # collect everything in self._buffer, but that would
            # deadlock if the subprocess sends more than self.limit
            # bytes. So just call self.readany() until EOF.
            blocks = []
            while True:
                block = await self.readany()
                if not block:
                    break
                blocks.append(block)
            return b"".join(blocks)

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("read")

        return self._read_nowait(n)

    async def readany(self) -> bytes:
        """Read whatever data is available, waiting for at least one byte."""
        if self._exception is not None:
            raise self._exception

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("readany")

        return self._read_nowait(-1)

    async def readchunk(self) -> Tuple[bytes, bool]:
        """Returns a tuple of (data, end_of_http_chunk).

        When chunked transfer
        encoding is used, end_of_http_chunk is a boolean indicating if the end
        of the data corresponds to the end of a HTTP chunk , otherwise it is
        always False.
        """
        while True:
            if self._exception is not None:
                raise self._exception

            while self._http_chunk_splits:
                pos = self._http_chunk_splits.pop(0)
                if pos == self._cursor:
                    return (b"", True)
                if pos > self._cursor:
                    return (self._read_nowait(pos - self._cursor), True)
                internal_logger.warning(
                    "Skipping HTTP chunk end due to data "
                    "consumption beyond chunk boundary"
                )

            if self._buffer:
                return (self._read_nowait_chunk(-1), False)
            # return (self._read_nowait(-1), False)

            if self._eof:
                # Special case for signifying EOF.
                # (b'', True) is not a final return value actually.
                return (b"", False)

            await self._wait("readchunk")

    async def readexactly(self, n: int) -> bytes:
        """Read exactly *n* bytes or raise IncompleteReadError at EOF."""
        if self._exception is not None:
            raise self._exception

        blocks: List[bytes] = []
        while n > 0:
            block = await self.read(n)
            if not block:
                partial = b"".join(blocks)
                raise asyncio.IncompleteReadError(partial, len(partial) + n)
            blocks.append(block)
            n -= len(block)

        return b"".join(blocks)

    def read_nowait(self, n: int = -1) -> bytes:
        """Synchronously read buffered data without waiting."""
        # default was changed to be consistent with .read(-1)
        #
        # I believe the most users don't know about the method and
        # they are not affected.
        if self._exception is not None:
            raise self._exception

        if self._waiter and not self._waiter.done():
            raise RuntimeError(
                "Called while some coroutine is waiting for incoming data."
            )

        return self._read_nowait(n)

    def _read_nowait_chunk(self, n: int) -> bytes:
        """Consume up to *n* bytes (or the whole head chunk when n == -1)."""
        first_buffer = self._buffer[0]
        offset = self._buffer_offset
        if n != -1 and len(first_buffer) - offset > n:
            data = first_buffer[offset : offset + n]
            self._buffer_offset += n

        elif offset:
            self._buffer.popleft()
            data = first_buffer[offset:]
            self._buffer_offset = 0

        else:
            data = self._buffer.popleft()

        data_len = len(data)
        self._size -= data_len
        self._cursor += data_len

        chunk_splits = self._http_chunk_splits
        # Prevent memory leak: drop useless chunk splits
        while chunk_splits and chunk_splits[0] < self._cursor:
            chunk_splits.pop(0)

        if self._size < self._low_water and self._protocol._reading_paused:
            self._protocol.resume_reading()
        return data

    def _read_nowait(self, n: int) -> bytes:
        """Read not more than n bytes, or whole buffer if n == -1"""
        self._timer.assert_timeout()

        chunks = []
        while self._buffer:
            chunk = self._read_nowait_chunk(n)
            chunks.append(chunk)
            if n != -1:
                n -= len(chunk)
                if n == 0:
                    break

        return b"".join(chunks) if chunks else b""
547
+
548
+
549
class EmptyStreamReader(StreamReader):  # lgtm [py/missing-call-to-init]
    """A StreamReader that is permanently at EOF and returns no data.

    Deliberately skips StreamReader.__init__ (no protocol or loop needed).
    """

    __slots__ = ("_read_eof_chunk",)

    def __init__(self) -> None:
        # Whether readchunk() has already returned its one (b"", False).
        self._read_eof_chunk = False

    def __repr__(self) -> str:
        return "<%s>" % self.__class__.__name__

    def exception(self) -> Optional[BaseException]:
        return None

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        pass

    def on_eof(self, callback: Callable[[], None]) -> None:
        # Already at EOF, so run the callback immediately.
        try:
            callback()
        except Exception:
            internal_logger.exception("Exception in eof callback")

    def feed_eof(self) -> None:
        pass

    def is_eof(self) -> bool:
        return True

    def at_eof(self) -> bool:
        return True

    async def wait_eof(self) -> None:
        return

    def feed_data(self, data: bytes, n: int = 0) -> None:
        pass

    async def readline(self) -> bytes:
        return b""

    async def read(self, n: int = -1) -> bytes:
        return b""

    # TODO add async def readuntil

    async def readany(self) -> bytes:
        return b""

    async def readchunk(self) -> Tuple[bytes, bool]:
        if not self._read_eof_chunk:
            self._read_eof_chunk = True
            return (b"", False)

        return (b"", True)

    async def readexactly(self, n: int) -> bytes:
        raise asyncio.IncompleteReadError(b"", n)

    def read_nowait(self, n: int = -1) -> bytes:
        return b""
613
+
614
+
615
# Shared singleton used wherever an HTTP message has no body.
EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader()
616
+
617
+
618
class DataQueue(Generic[_T]):
    """DataQueue is a general-purpose blocking queue with one reader.

    Producers call :meth:`feed_data`/:meth:`feed_eof`/:meth:`set_exception`;
    the single consumer awaits :meth:`read` (or iterates asynchronously).
    """

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._eof = False
        self._waiter: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        self._buffer: Deque[Tuple[_T, int]] = collections.deque()

    def __len__(self) -> int:
        # Number of queued (data, size) items still unread.
        return len(self._buffer)

    def is_eof(self) -> bool:
        return self._eof

    def at_eof(self) -> bool:
        # EOF observed *and* everything already consumed.
        return self._eof and not self._buffer

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        """Mark the queue broken; wake the pending reader with *exc*."""
        self._eof = True
        self._exception = exc
        pending = self._waiter
        if pending is not None:
            self._waiter = None
            set_exception(pending, exc, exc_cause)

    def feed_data(self, data: _T, size: int = 0) -> None:
        """Append an item and wake the reader if one is waiting."""
        self._buffer.append((data, size))
        pending = self._waiter
        if pending is not None:
            self._waiter = None
            set_result(pending, None)

    def feed_eof(self) -> None:
        """Signal end of stream and wake the reader if one is waiting."""
        self._eof = True
        pending = self._waiter
        if pending is not None:
            self._waiter = None
            set_result(pending, None)

    async def read(self) -> _T:
        """Return the next item, or raise EofStream / the stored exception."""
        if not self._buffer and not self._eof:
            # Single-reader invariant: at most one waiter may be parked.
            assert not self._waiter
            self._waiter = self._loop.create_future()
            try:
                await self._waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                # Clear the parked future so a later read can wait again.
                self._waiter = None
                raise
        if self._buffer:
            item, _ = self._buffer.popleft()
            return item
        if self._exception is not None:
            raise self._exception
        raise EofStream

    def __aiter__(self) -> AsyncStreamIterator[_T]:
        return AsyncStreamIterator(self.read)
681
+
682
+
683
class FlowControlDataQueue(DataQueue[_T]):
    """FlowControlDataQueue resumes and pauses an underlying stream.

    It is a destination for parsed data.

    This class is deprecated and will be removed in version 4.0.
    """

    def __init__(
        self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
    ) -> None:
        super().__init__(loop=loop)
        self._size = 0
        self._protocol = protocol
        # Let the buffer grow to twice the configured limit before the
        # underlying transport is paused.
        self._limit = limit * 2

    def feed_data(self, data: _T, size: int = 0) -> None:
        """Queue an item, tracking its size and applying back-pressure."""
        super().feed_data(data, size)
        self._size += size
        if self._size > self._limit and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    async def read(self) -> _T:
        """Return the next item, resuming the transport when drained enough."""
        if not self._buffer and not self._eof:
            assert not self._waiter
            self._waiter = self._loop.create_future()
            try:
                await self._waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._waiter = None
                raise
        if self._buffer:
            item, item_size = self._buffer.popleft()
            self._size -= item_size
            # Release back-pressure once we are under the limit again.
            if self._size < self._limit and self._protocol._reading_paused:
                self._protocol.resume_reading()
            return item
        if self._exception is not None:
            raise self._exception
        raise EofStream
deepseek/lib/python3.10/site-packages/aiohttp/test_utils.py ADDED
@@ -0,0 +1,770 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Utilities shared by tests."""
2
+
3
+ import asyncio
4
+ import contextlib
5
+ import gc
6
+ import inspect
7
+ import ipaddress
8
+ import os
9
+ import socket
10
+ import sys
11
+ import warnings
12
+ from abc import ABC, abstractmethod
13
+ from types import TracebackType
14
+ from typing import (
15
+ TYPE_CHECKING,
16
+ Any,
17
+ Callable,
18
+ Generic,
19
+ Iterator,
20
+ List,
21
+ Optional,
22
+ Type,
23
+ TypeVar,
24
+ cast,
25
+ overload,
26
+ )
27
+ from unittest import IsolatedAsyncioTestCase, mock
28
+
29
+ from aiosignal import Signal
30
+ from multidict import CIMultiDict, CIMultiDictProxy
31
+ from yarl import URL
32
+
33
+ import aiohttp
34
+ from aiohttp.client import (
35
+ _RequestContextManager,
36
+ _RequestOptions,
37
+ _WSRequestContextManager,
38
+ )
39
+
40
+ from . import ClientSession, hdrs
41
+ from .abc import AbstractCookieJar
42
+ from .client_reqrep import ClientResponse
43
+ from .client_ws import ClientWebSocketResponse
44
+ from .helpers import sentinel
45
+ from .http import HttpVersion, RawRequestMessage
46
+ from .streams import EMPTY_PAYLOAD, StreamReader
47
+ from .typedefs import StrOrURL
48
+ from .web import (
49
+ Application,
50
+ AppRunner,
51
+ BaseRequest,
52
+ BaseRunner,
53
+ Request,
54
+ Server,
55
+ ServerRunner,
56
+ SockSite,
57
+ UrlMappingMatchInfo,
58
+ )
59
+ from .web_protocol import _RequestHandler
60
+
61
+ if TYPE_CHECKING:
62
+ from ssl import SSLContext
63
+ else:
64
+ SSLContext = None
65
+
66
+ if sys.version_info >= (3, 11) and TYPE_CHECKING:
67
+ from typing import Unpack
68
+
69
+ if sys.version_info >= (3, 11):
70
+ from typing import Self
71
+ else:
72
+ Self = Any
73
+
74
+ _ApplicationNone = TypeVar("_ApplicationNone", Application, None)
75
+ _Request = TypeVar("_Request", bound=BaseRequest)
76
+
77
# Windows has different semantics for SO_REUSEADDR, so only enable address
# reuse on POSIX platforms (and not cygwin). Ref:
# https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"


def get_unused_port_socket(
    host: str, family: socket.AddressFamily = socket.AF_INET
) -> socket.socket:
    """Bind a TCP socket on *host* to an OS-assigned free port and return it."""
    return get_port_socket(host, 0, family)


def get_port_socket(
    host: str, port: int, family: socket.AddressFamily
) -> socket.socket:
    """Return a TCP socket bound to ``(host, port)`` in the given family."""
    sock = socket.socket(family, socket.SOCK_STREAM)
    if REUSE_ADDRESS:
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind((host, port))
    return sock
97
+
98
+
99
def unused_port() -> int:
    """Return a port that is unused on the current host."""
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as probe:
        # Binding to port 0 makes the OS pick a free ephemeral port.
        probe.bind(("127.0.0.1", 0))
        port: int = probe.getsockname()[1]
    return port
104
+
105
+
106
class BaseTestServer(ABC):
    """Base class for test servers: owns a runner, a bound socket and the root URL.

    Subclasses provide :meth:`_make_runner`; everything else (socket setup,
    URL construction, cleanup, async-context-manager protocol) lives here.
    """

    # Tell pytest not to collect this class as a test.
    __test__ = False

    def __init__(
        self,
        *,
        scheme: str = "",
        loop: Optional[asyncio.AbstractEventLoop] = None,
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        skip_url_asserts: bool = False,
        socket_factory: Callable[
            [str, int, socket.AddressFamily], socket.socket
        ] = get_port_socket,
        **kwargs: Any,
    ) -> None:
        self._loop = loop
        self.runner: Optional[BaseRunner] = None
        self._root: Optional[URL] = None  # base URL, set by start_server()
        self.host = host
        self.port = port  # None/0 means: let the OS choose a free port
        self._closed = False
        self.scheme = scheme  # empty string means: derive from ssl usage
        self.skip_url_asserts = skip_url_asserts
        self.socket_factory = socket_factory

    async def start_server(
        self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
    ) -> None:
        """Create the runner, bind a socket and start serving.

        Idempotent: a second call while the runner exists is a no-op.
        Extra ``kwargs`` (minus ``ssl``) are forwarded to the runner.
        """
        if self.runner:
            return
        self._loop = loop
        # NOTE: self._ssl is first created here, not in __init__.
        self._ssl = kwargs.pop("ssl", None)
        self.runner = await self._make_runner(handler_cancellation=True, **kwargs)
        await self.runner.setup()
        if not self.port:
            self.port = 0
        absolute_host = self.host
        try:
            version = ipaddress.ip_address(self.host).version
        except ValueError:
            # Not an IP literal (e.g. a hostname); treat like IPv4.
            version = 4
        if version == 6:
            # IPv6 literals must be bracketed inside a URL authority.
            absolute_host = f"[{self.host}]"
        family = socket.AF_INET6 if version == 6 else socket.AF_INET
        _sock = self.socket_factory(self.host, self.port, family)
        self.host, self.port = _sock.getsockname()[:2]
        site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
        await site.start()
        server = site._server
        assert server is not None
        sockets = server.sockets  # type: ignore[attr-defined]
        assert sockets is not None
        # Read back the actual port the OS assigned.
        self.port = sockets[0].getsockname()[1]
        if not self.scheme:
            self.scheme = "https" if self._ssl else "http"
        self._root = URL(f"{self.scheme}://{absolute_host}:{self.port}")

    @abstractmethod  # pragma: no cover
    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        """Create the runner that will serve requests (subclass hook)."""
        pass

    def make_url(self, path: StrOrURL) -> URL:
        """Resolve *path* against the server's root URL."""
        assert self._root is not None
        url = URL(path)
        if not self.skip_url_asserts:
            assert not url.absolute
            return self._root.join(url)
        else:
            # Caller opted out of URL sanity checks: naive concatenation.
            return URL(str(self._root) + str(path))

    @property
    def started(self) -> bool:
        return self.runner is not None

    @property
    def closed(self) -> bool:
        return self._closed

    @property
    def handler(self) -> Server:
        # for backward compatibility
        # web.Server instance
        runner = self.runner
        assert runner is not None
        assert runner.server is not None
        return runner.server

    async def close(self) -> None:
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run when the object is garbage collected, and on
        exit when used as a context manager.

        """
        if self.started and not self.closed:
            assert self.runner is not None
            await self.runner.cleanup()
            self._root = None
            self.port = None
            self._closed = True

    def __enter__(self) -> None:
        # Force callers onto the async context-manager protocol.
        raise TypeError("Use async with instead")

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in pair with __enter__ but never executed
        pass  # pragma: no cover

    async def __aenter__(self) -> "BaseTestServer":
        await self.start_server(loop=self._loop)
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        await self.close()
+ await self.close()
236
+
237
+
238
class TestServer(BaseTestServer):
    """Test server wrapping a full :class:`aiohttp.web.Application`."""

    def __init__(
        self,
        app: Application,
        *,
        scheme: str = "",
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        **kwargs: Any,
    ):
        self.app = app
        super().__init__(scheme=scheme, host=host, port=port, **kwargs)

    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        # An AppRunner drives the wrapped Application for us.
        return AppRunner(self.app, **kwargs)
253
+
254
+
255
class RawTestServer(BaseTestServer):
    """Test server that serves a bare low-level request handler."""

    def __init__(
        self,
        handler: _RequestHandler,
        *,
        scheme: str = "",
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        **kwargs: Any,
    ) -> None:
        self._handler = handler
        super().__init__(scheme=scheme, host=host, port=port, **kwargs)

    async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner:
        # Wrap the raw handler in a low-level Server and hand it to a runner.
        web_server = Server(self._handler, loop=self._loop, debug=debug, **kwargs)
        return ServerRunner(web_server, debug=debug, **kwargs)
271
+
272
+
273
class TestClient(Generic[_Request, _ApplicationNone]):
    """
    A test client implementation.

    To write functional tests for aiohttp based servers.

    """

    # Tell pytest not to collect this class as a test.
    __test__ = False

    @overload
    def __init__(
        self: "TestClient[Request, Application]",
        server: TestServer,
        *,
        cookie_jar: Optional[AbstractCookieJar] = None,
        **kwargs: Any,
    ) -> None: ...
    @overload
    def __init__(
        self: "TestClient[_Request, None]",
        server: BaseTestServer,
        *,
        cookie_jar: Optional[AbstractCookieJar] = None,
        **kwargs: Any,
    ) -> None: ...
    def __init__(
        self,
        server: BaseTestServer,
        *,
        cookie_jar: Optional[AbstractCookieJar] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        **kwargs: Any,
    ) -> None:
        if not isinstance(server, BaseTestServer):
            raise TypeError(
                "server must be TestServer instance, found type: %r" % type(server)
            )
        self._server = server
        self._loop = loop
        if cookie_jar is None:
            # unsafe=True so cookies work against bare-IP test hosts.
            cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
        self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
        # Disable connection retries so tests see failures deterministically.
        self._session._retry_connection = False
        self._closed = False
        # Track everything we open so close() can release it all.
        self._responses: List[ClientResponse] = []
        self._websockets: List[ClientWebSocketResponse] = []

    async def start_server(self) -> None:
        """Start the wrapped test server (idempotent, delegates to it)."""
        await self._server.start_server(loop=self._loop)

    @property
    def host(self) -> str:
        return self._server.host

    @property
    def port(self) -> Optional[int]:
        return self._server.port

    @property
    def server(self) -> BaseTestServer:
        return self._server

    @property
    def app(self) -> _ApplicationNone:
        # None when the wrapped server is not an Application-based TestServer.
        return getattr(self._server, "app", None)  # type: ignore[return-value]

    @property
    def session(self) -> ClientSession:
        """An internal aiohttp.ClientSession.

        Unlike the methods on the TestClient, client session requests
        do not automatically include the host in the url queried, and
        will require an absolute path to the resource.

        """
        return self._session

    def make_url(self, path: StrOrURL) -> URL:
        """Resolve *path* against the wrapped server's root URL."""
        return self._server.make_url(path)

    async def _request(
        self, method: str, path: StrOrURL, **kwargs: Any
    ) -> ClientResponse:
        resp = await self._session.request(method, self.make_url(path), **kwargs)
        # save it to close later
        self._responses.append(resp)
        return resp

    if sys.version_info >= (3, 11) and TYPE_CHECKING:
        # Typed stubs only: give type checkers precise **kwargs via Unpack.

        def request(
            self, method: str, path: StrOrURL, **kwargs: Unpack[_RequestOptions]
        ) -> _RequestContextManager: ...

        def get(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def options(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def head(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def post(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def put(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def patch(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def delete(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

    else:

        def request(
            self, method: str, path: StrOrURL, **kwargs: Any
        ) -> _RequestContextManager:
            """Routes a request to tested http server.

            The interface is identical to aiohttp.ClientSession.request,
            except the loop kwarg is overridden by the instance used by the
            test server.

            """
            return _RequestContextManager(self._request(method, path, **kwargs))

        def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP GET request."""
            return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))

        def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP POST request."""
            return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))

        def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP OPTIONS request."""
            return _RequestContextManager(
                self._request(hdrs.METH_OPTIONS, path, **kwargs)
            )

        def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP HEAD request."""
            return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))

        def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP PUT request."""
            return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))

        def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP PATCH request."""
            return _RequestContextManager(
                self._request(hdrs.METH_PATCH, path, **kwargs)
            )

        def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP DELETE request."""
            return _RequestContextManager(
                self._request(hdrs.METH_DELETE, path, **kwargs)
            )

    def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager:
        """Initiate websocket connection.

        The api corresponds to aiohttp.ClientSession.ws_connect.

        """
        return _WSRequestContextManager(self._ws_connect(path, **kwargs))

    async def _ws_connect(
        self, path: StrOrURL, **kwargs: Any
    ) -> ClientWebSocketResponse:
        ws = await self._session.ws_connect(self.make_url(path), **kwargs)
        # Remember the websocket so close() can shut it down.
        self._websockets.append(ws)
        return ws

    async def close(self) -> None:
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run on exit when used as a(n) (asynchronous)
        context manager.

        """
        if not self._closed:
            for resp in self._responses:
                resp.close()
            for ws in self._websockets:
                await ws.close()
            await self._session.close()
            await self._server.close()
            self._closed = True

    def __enter__(self) -> None:
        # Force callers onto the async context-manager protocol.
        raise TypeError("Use async with instead")

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in pair with __enter__ but never executed
        pass  # pragma: no cover

    async def __aenter__(self) -> Self:
        await self.start_server()
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        await self.close()
+ await self.close()
517
+
518
+
519
class AioHTTPTestCase(IsolatedAsyncioTestCase):
    """A base class to allow for unittest web applications using aiohttp.

    Provides the following:

    * self.client (aiohttp.test_utils.TestClient): an aiohttp test client.
    * self.loop (asyncio.BaseEventLoop): the event loop in which the
        application and server are running.
    * self.app (aiohttp.web.Application): the application returned by
        self.get_application()

    Note that the TestClient's methods are asynchronous: you have to
    execute function on the test client using asynchronous methods.
    """

    async def get_application(self) -> Application:
        """Get application.

        This method should be overridden
        to return the aiohttp.web.Application
        object to test.
        """
        # Default falls back to the legacy synchronous hook.
        return self.get_app()

    def get_app(self) -> Application:
        """Obsolete method used to constructing web application.

        Use .get_application() coroutine instead.
        """
        raise RuntimeError("Did you forget to define get_application()?")

    async def asyncSetUp(self) -> None:
        self.loop = asyncio.get_running_loop()
        return await self.setUpAsync()

    async def setUpAsync(self) -> None:
        # Build app -> server -> client, then start serving.
        self.app = await self.get_application()
        self.server = await self.get_server(self.app)
        self.client = await self.get_client(self.server)

        await self.client.start_server()

    async def asyncTearDown(self) -> None:
        return await self.tearDownAsync()

    async def tearDownAsync(self) -> None:
        # Closing the client also closes the wrapped server.
        await self.client.close()

    async def get_server(self, app: Application) -> TestServer:
        """Return a TestServer instance."""
        return TestServer(app, loop=self.loop)

    async def get_client(self, server: TestServer) -> TestClient[Request, Application]:
        """Return a TestClient instance."""
        return TestClient(server, loop=self.loop)
574
+
575
+
576
def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
    """No-op decorator kept for backwards compatibility.

    Historically required on asynchronous AioHTTPTestCase test methods;
    since aiohttp 3.8 it does nothing beyond emitting a DeprecationWarning
    and returning *func* unchanged.
    """
    message = "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+"
    warnings.warn(message, DeprecationWarning, stacklevel=2)
    return func
588
+
589
+
590
# Signature of a callable producing a fresh event loop.
_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]


@contextlib.contextmanager
def loop_context(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
) -> Iterator[asyncio.AbstractEventLoop]:
    """A contextmanager that creates an event_loop, for test purposes.

    Handles the creation and cleanup of a test loop.
    """
    loop = setup_test_loop(loop_factory)
    try:
        yield loop
    finally:
        # Fix: run teardown even when the with-body raises, so the loop is
        # never leaked (previously an exception skipped cleanup entirely).
        teardown_test_loop(loop, fast=fast)


def setup_test_loop(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
) -> asyncio.BaseEventLoop:
    """Create and return an asyncio.BaseEventLoop instance.

    The caller should also call teardown_test_loop,
    once they are done with the loop.
    """
    loop = loop_factory()
    # Install the loop as the current one so legacy code can find it.
    asyncio.set_event_loop(loop)
    return loop


def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
    """Teardown and cleanup an event_loop created by setup_test_loop."""
    closed = loop.is_closed()
    if not closed:
        # Let the loop drain any already-scheduled callbacks before closing.
        loop.call_soon(loop.stop)
        loop.run_forever()
        loop.close()

    if not fast:
        # Force finalizers (e.g. un-awaited coroutine warnings) to fire now.
        gc.collect()

    asyncio.set_event_loop(None)
631
+
632
+
633
def _create_app_mock() -> mock.MagicMock:
    """Build a MagicMock standing in for a web Application.

    Supports ``app[key]`` item access backed by a private dict and exposes a
    frozen ``on_response_prepare`` signal — the surface a mocked Request uses.
    """

    def _getitem(app: Any, key: str) -> Any:
        return app.__app_dict[key]

    def _setitem(app: Any, key: str, value: Any) -> None:
        app.__app_dict[key] = value

    app = mock.MagicMock(spec=Application)
    app.__app_dict = {}
    app.__getitem__ = _getitem
    app.__setitem__ = _setitem

    app._debug = False
    app.on_response_prepare = Signal(app)
    app.on_response_prepare.freeze()
    return app
649
+
650
+
651
+ def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock:
652
+ transport = mock.Mock()
653
+
654
+ def get_extra_info(key: str) -> Optional[SSLContext]:
655
+ if key == "sslcontext":
656
+ return sslcontext
657
+ else:
658
+ return None
659
+
660
+ transport.get_extra_info.side_effect = get_extra_info
661
+ return transport
662
+
663
+
664
def make_mocked_request(
    method: str,
    path: str,
    headers: Any = None,
    *,
    match_info: Any = sentinel,
    version: HttpVersion = HttpVersion(1, 1),
    closing: bool = False,
    app: Any = None,
    writer: Any = sentinel,
    protocol: Any = sentinel,
    transport: Any = sentinel,
    payload: StreamReader = EMPTY_PAYLOAD,
    sslcontext: Optional[SSLContext] = None,
    client_max_size: int = 1024**2,
    loop: Any = ...,
) -> Request:
    """Creates mocked web.Request testing purposes.

    Useful in unit tests, when spinning full web server is overkill or
    specific conditions and errors are hard to trigger.

    Any of writer/protocol/transport left at the ``sentinel`` default is
    replaced with a suitable mock; ``app=None`` gets a mock application.
    """
    task = mock.Mock()
    if loop is ...:
        # no loop passed, try to get the current one if
        # it is running as we need a real loop to create
        # executor jobs to be able to do testing
        # with a real executor
        try:
            loop = asyncio.get_running_loop()
        except RuntimeError:
            loop = mock.Mock()
            # NOTE(review): create_future returning a tuple looks odd —
            # presumably just a cheap non-None placeholder; confirm.
            loop.create_future.return_value = ()

    if version < HttpVersion(1, 1):
        # HTTP/1.0 and below have no keep-alive by default.
        closing = True

    if headers:
        headers = CIMultiDictProxy(CIMultiDict(headers))
        raw_hdrs = tuple(
            (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
        )
    else:
        headers = CIMultiDictProxy(CIMultiDict())
        raw_hdrs = ()

    chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()

    message = RawRequestMessage(
        method,
        path,
        version,
        headers,
        raw_hdrs,
        closing,
        None,
        False,
        chunked,
        URL(path),
    )
    if app is None:
        app = _create_app_mock()

    if transport is sentinel:
        transport = _create_transport(sslcontext)

    if protocol is sentinel:
        protocol = mock.Mock()
        protocol.transport = transport

    if writer is sentinel:
        writer = mock.Mock()
        writer.write_headers = make_mocked_coro(None)
        writer.write = make_mocked_coro(None)
        writer.write_eof = make_mocked_coro(None)
        writer.drain = make_mocked_coro(None)
        writer.transport = transport

    # Wire the (possibly caller-supplied) protocol to the chosen transport
    # and writer; note this overwrites protocol.transport unconditionally.
    protocol.transport = transport
    protocol.writer = writer

    req = Request(
        message, payload, protocol, writer, task, loop, client_max_size=client_max_size
    )

    match_info = UrlMappingMatchInfo(
        {} if match_info is sentinel else match_info, mock.Mock()
    )
    match_info.add_app(app)
    req._match_info = match_info

    return req
756
+
757
+
758
def make_mocked_coro(
    return_value: Any = sentinel, raise_exception: Any = sentinel
) -> Any:
    """Creates a coroutine mock.

    The result records calls like any Mock; awaiting it raises
    *raise_exception* when given, otherwise produces *return_value*
    (awaiting it first when it is itself awaitable).
    """

    async def _coro(*args: Any, **kwargs: Any) -> Any:
        if raise_exception is not sentinel:
            raise raise_exception
        if not inspect.isawaitable(return_value):
            return return_value
        # NOTE(review): an awaitable return_value is awaited but its result
        # is discarded (the coroutine yields None) — confirm intentional.
        await return_value

    return mock.Mock(wraps=_coro)
deepseek/lib/python3.10/site-packages/aiohttp/typedefs.py ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import os
3
+ from typing import (
4
+ TYPE_CHECKING,
5
+ Any,
6
+ Awaitable,
7
+ Callable,
8
+ Iterable,
9
+ Mapping,
10
+ Protocol,
11
+ Tuple,
12
+ Union,
13
+ )
14
+
15
+ from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
16
+ from yarl import URL, Query as _Query
17
+
18
# Public re-export of yarl's query type.
Query = _Query

# Default JSON (de)serializers used when callers supply none.
DEFAULT_JSON_ENCODER = json.dumps
DEFAULT_JSON_DECODER = json.loads

if TYPE_CHECKING:
    # Under a type checker the multidict classes are generic: pin them to str.
    _CIMultiDict = CIMultiDict[str]
    _CIMultiDictProxy = CIMultiDictProxy[str]
    _MultiDict = MultiDict[str]
    _MultiDictProxy = MultiDictProxy[str]
    from http.cookies import BaseCookie, Morsel

    from .web import Request, StreamResponse
else:
    # At runtime, use the plain (unparameterized) classes.
    _CIMultiDict = CIMultiDict
    _CIMultiDictProxy = CIMultiDictProxy
    _MultiDict = MultiDict
    _MultiDictProxy = MultiDictProxy

# Anything usable as a raw byte buffer.
Byteish = Union[bytes, bytearray, memoryview]
JSONEncoder = Callable[[Any], str]
JSONDecoder = Callable[[str], Any]
# Every form in which callers may supply HTTP headers.
LooseHeaders = Union[
    Mapping[str, str],
    Mapping[istr, str],
    _CIMultiDict,
    _CIMultiDictProxy,
    Iterable[Tuple[Union[str, istr], str]],
]
# Headers as raw (name, value) byte pairs, as produced by the parser.
RawHeaders = Tuple[Tuple[bytes, bytes], ...]
StrOrURL = Union[str, URL]

LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
LooseCookiesIterables = Iterable[
    Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
]
# Every form in which callers may supply cookies.
LooseCookies = Union[
    LooseCookiesMappings,
    LooseCookiesIterables,
    "BaseCookie[str]",
]

# A request handler: takes a Request, asynchronously yields a StreamResponse.
Handler = Callable[["Request"], Awaitable["StreamResponse"]]


class Middleware(Protocol):
    """Structural type of an aiohttp middleware callable."""

    def __call__(
        self, request: "Request", handler: Handler
    ) -> Awaitable["StreamResponse"]: ...


# A filesystem path given either as str or as an os.PathLike.
PathLike = Union[str, "os.PathLike[str]"]
deepseek/lib/python3.10/site-packages/aiohttp/web.py ADDED
@@ -0,0 +1,601 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import logging
3
+ import os
4
+ import socket
5
+ import sys
6
+ import warnings
7
+ from argparse import ArgumentParser
8
+ from collections.abc import Iterable
9
+ from contextlib import suppress
10
+ from importlib import import_module
11
+ from typing import (
12
+ Any,
13
+ Awaitable,
14
+ Callable,
15
+ Iterable as TypingIterable,
16
+ List,
17
+ Optional,
18
+ Set,
19
+ Type,
20
+ Union,
21
+ cast,
22
+ )
23
+
24
+ from .abc import AbstractAccessLogger
25
+ from .helpers import AppKey as AppKey
26
+ from .log import access_logger
27
+ from .typedefs import PathLike
28
+ from .web_app import Application as Application, CleanupError as CleanupError
29
+ from .web_exceptions import (
30
+ HTTPAccepted as HTTPAccepted,
31
+ HTTPBadGateway as HTTPBadGateway,
32
+ HTTPBadRequest as HTTPBadRequest,
33
+ HTTPClientError as HTTPClientError,
34
+ HTTPConflict as HTTPConflict,
35
+ HTTPCreated as HTTPCreated,
36
+ HTTPError as HTTPError,
37
+ HTTPException as HTTPException,
38
+ HTTPExpectationFailed as HTTPExpectationFailed,
39
+ HTTPFailedDependency as HTTPFailedDependency,
40
+ HTTPForbidden as HTTPForbidden,
41
+ HTTPFound as HTTPFound,
42
+ HTTPGatewayTimeout as HTTPGatewayTimeout,
43
+ HTTPGone as HTTPGone,
44
+ HTTPInsufficientStorage as HTTPInsufficientStorage,
45
+ HTTPInternalServerError as HTTPInternalServerError,
46
+ HTTPLengthRequired as HTTPLengthRequired,
47
+ HTTPMethodNotAllowed as HTTPMethodNotAllowed,
48
+ HTTPMisdirectedRequest as HTTPMisdirectedRequest,
49
+ HTTPMove as HTTPMove,
50
+ HTTPMovedPermanently as HTTPMovedPermanently,
51
+ HTTPMultipleChoices as HTTPMultipleChoices,
52
+ HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
53
+ HTTPNoContent as HTTPNoContent,
54
+ HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
55
+ HTTPNotAcceptable as HTTPNotAcceptable,
56
+ HTTPNotExtended as HTTPNotExtended,
57
+ HTTPNotFound as HTTPNotFound,
58
+ HTTPNotImplemented as HTTPNotImplemented,
59
+ HTTPNotModified as HTTPNotModified,
60
+ HTTPOk as HTTPOk,
61
+ HTTPPartialContent as HTTPPartialContent,
62
+ HTTPPaymentRequired as HTTPPaymentRequired,
63
+ HTTPPermanentRedirect as HTTPPermanentRedirect,
64
+ HTTPPreconditionFailed as HTTPPreconditionFailed,
65
+ HTTPPreconditionRequired as HTTPPreconditionRequired,
66
+ HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
67
+ HTTPRedirection as HTTPRedirection,
68
+ HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
69
+ HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
70
+ HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
71
+ HTTPRequestTimeout as HTTPRequestTimeout,
72
+ HTTPRequestURITooLong as HTTPRequestURITooLong,
73
+ HTTPResetContent as HTTPResetContent,
74
+ HTTPSeeOther as HTTPSeeOther,
75
+ HTTPServerError as HTTPServerError,
76
+ HTTPServiceUnavailable as HTTPServiceUnavailable,
77
+ HTTPSuccessful as HTTPSuccessful,
78
+ HTTPTemporaryRedirect as HTTPTemporaryRedirect,
79
+ HTTPTooManyRequests as HTTPTooManyRequests,
80
+ HTTPUnauthorized as HTTPUnauthorized,
81
+ HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
82
+ HTTPUnprocessableEntity as HTTPUnprocessableEntity,
83
+ HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
84
+ HTTPUpgradeRequired as HTTPUpgradeRequired,
85
+ HTTPUseProxy as HTTPUseProxy,
86
+ HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
87
+ HTTPVersionNotSupported as HTTPVersionNotSupported,
88
+ NotAppKeyWarning as NotAppKeyWarning,
89
+ )
90
+ from .web_fileresponse import FileResponse as FileResponse
91
+ from .web_log import AccessLogger
92
+ from .web_middlewares import (
93
+ middleware as middleware,
94
+ normalize_path_middleware as normalize_path_middleware,
95
+ )
96
+ from .web_protocol import (
97
+ PayloadAccessError as PayloadAccessError,
98
+ RequestHandler as RequestHandler,
99
+ RequestPayloadError as RequestPayloadError,
100
+ )
101
+ from .web_request import (
102
+ BaseRequest as BaseRequest,
103
+ FileField as FileField,
104
+ Request as Request,
105
+ )
106
+ from .web_response import (
107
+ ContentCoding as ContentCoding,
108
+ Response as Response,
109
+ StreamResponse as StreamResponse,
110
+ json_response as json_response,
111
+ )
112
+ from .web_routedef import (
113
+ AbstractRouteDef as AbstractRouteDef,
114
+ RouteDef as RouteDef,
115
+ RouteTableDef as RouteTableDef,
116
+ StaticDef as StaticDef,
117
+ delete as delete,
118
+ get as get,
119
+ head as head,
120
+ options as options,
121
+ patch as patch,
122
+ post as post,
123
+ put as put,
124
+ route as route,
125
+ static as static,
126
+ view as view,
127
+ )
128
+ from .web_runner import (
129
+ AppRunner as AppRunner,
130
+ BaseRunner as BaseRunner,
131
+ BaseSite as BaseSite,
132
+ GracefulExit as GracefulExit,
133
+ NamedPipeSite as NamedPipeSite,
134
+ ServerRunner as ServerRunner,
135
+ SockSite as SockSite,
136
+ TCPSite as TCPSite,
137
+ UnixSite as UnixSite,
138
+ )
139
+ from .web_server import Server as Server
140
+ from .web_urldispatcher import (
141
+ AbstractResource as AbstractResource,
142
+ AbstractRoute as AbstractRoute,
143
+ DynamicResource as DynamicResource,
144
+ PlainResource as PlainResource,
145
+ PrefixedSubAppResource as PrefixedSubAppResource,
146
+ Resource as Resource,
147
+ ResourceRoute as ResourceRoute,
148
+ StaticResource as StaticResource,
149
+ UrlDispatcher as UrlDispatcher,
150
+ UrlMappingMatchInfo as UrlMappingMatchInfo,
151
+ View as View,
152
+ )
153
+ from .web_ws import (
154
+ WebSocketReady as WebSocketReady,
155
+ WebSocketResponse as WebSocketResponse,
156
+ WSMsgType as WSMsgType,
157
+ )
158
+
159
+ __all__ = (
160
+ # web_app
161
+ "AppKey",
162
+ "Application",
163
+ "CleanupError",
164
+ # web_exceptions
165
+ "NotAppKeyWarning",
166
+ "HTTPAccepted",
167
+ "HTTPBadGateway",
168
+ "HTTPBadRequest",
169
+ "HTTPClientError",
170
+ "HTTPConflict",
171
+ "HTTPCreated",
172
+ "HTTPError",
173
+ "HTTPException",
174
+ "HTTPExpectationFailed",
175
+ "HTTPFailedDependency",
176
+ "HTTPForbidden",
177
+ "HTTPFound",
178
+ "HTTPGatewayTimeout",
179
+ "HTTPGone",
180
+ "HTTPInsufficientStorage",
181
+ "HTTPInternalServerError",
182
+ "HTTPLengthRequired",
183
+ "HTTPMethodNotAllowed",
184
+ "HTTPMisdirectedRequest",
185
+ "HTTPMove",
186
+ "HTTPMovedPermanently",
187
+ "HTTPMultipleChoices",
188
+ "HTTPNetworkAuthenticationRequired",
189
+ "HTTPNoContent",
190
+ "HTTPNonAuthoritativeInformation",
191
+ "HTTPNotAcceptable",
192
+ "HTTPNotExtended",
193
+ "HTTPNotFound",
194
+ "HTTPNotImplemented",
195
+ "HTTPNotModified",
196
+ "HTTPOk",
197
+ "HTTPPartialContent",
198
+ "HTTPPaymentRequired",
199
+ "HTTPPermanentRedirect",
200
+ "HTTPPreconditionFailed",
201
+ "HTTPPreconditionRequired",
202
+ "HTTPProxyAuthenticationRequired",
203
+ "HTTPRedirection",
204
+ "HTTPRequestEntityTooLarge",
205
+ "HTTPRequestHeaderFieldsTooLarge",
206
+ "HTTPRequestRangeNotSatisfiable",
207
+ "HTTPRequestTimeout",
208
+ "HTTPRequestURITooLong",
209
+ "HTTPResetContent",
210
+ "HTTPSeeOther",
211
+ "HTTPServerError",
212
+ "HTTPServiceUnavailable",
213
+ "HTTPSuccessful",
214
+ "HTTPTemporaryRedirect",
215
+ "HTTPTooManyRequests",
216
+ "HTTPUnauthorized",
217
+ "HTTPUnavailableForLegalReasons",
218
+ "HTTPUnprocessableEntity",
219
+ "HTTPUnsupportedMediaType",
220
+ "HTTPUpgradeRequired",
221
+ "HTTPUseProxy",
222
+ "HTTPVariantAlsoNegotiates",
223
+ "HTTPVersionNotSupported",
224
+ # web_fileresponse
225
+ "FileResponse",
226
+ # web_middlewares
227
+ "middleware",
228
+ "normalize_path_middleware",
229
+ # web_protocol
230
+ "PayloadAccessError",
231
+ "RequestHandler",
232
+ "RequestPayloadError",
233
+ # web_request
234
+ "BaseRequest",
235
+ "FileField",
236
+ "Request",
237
+ # web_response
238
+ "ContentCoding",
239
+ "Response",
240
+ "StreamResponse",
241
+ "json_response",
242
+ # web_routedef
243
+ "AbstractRouteDef",
244
+ "RouteDef",
245
+ "RouteTableDef",
246
+ "StaticDef",
247
+ "delete",
248
+ "get",
249
+ "head",
250
+ "options",
251
+ "patch",
252
+ "post",
253
+ "put",
254
+ "route",
255
+ "static",
256
+ "view",
257
+ # web_runner
258
+ "AppRunner",
259
+ "BaseRunner",
260
+ "BaseSite",
261
+ "GracefulExit",
262
+ "ServerRunner",
263
+ "SockSite",
264
+ "TCPSite",
265
+ "UnixSite",
266
+ "NamedPipeSite",
267
+ # web_server
268
+ "Server",
269
+ # web_urldispatcher
270
+ "AbstractResource",
271
+ "AbstractRoute",
272
+ "DynamicResource",
273
+ "PlainResource",
274
+ "PrefixedSubAppResource",
275
+ "Resource",
276
+ "ResourceRoute",
277
+ "StaticResource",
278
+ "UrlDispatcher",
279
+ "UrlMappingMatchInfo",
280
+ "View",
281
+ # web_ws
282
+ "WebSocketReady",
283
+ "WebSocketResponse",
284
+ "WSMsgType",
285
+ # web
286
+ "run_app",
287
+ )
288
+
289
+
290
+ try:
291
+ from ssl import SSLContext
292
+ except ImportError: # pragma: no cover
293
+ SSLContext = Any # type: ignore[misc,assignment]
294
+
295
+ # Only display warning when using -Wdefault, -We, -X dev or similar.
296
+ warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True)
297
+
298
+ HostSequence = TypingIterable[str]
299
+
300
+
301
+ async def _run_app(
302
+ app: Union[Application, Awaitable[Application]],
303
+ *,
304
+ host: Optional[Union[str, HostSequence]] = None,
305
+ port: Optional[int] = None,
306
+ path: Union[PathLike, TypingIterable[PathLike], None] = None,
307
+ sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
308
+ shutdown_timeout: float = 60.0,
309
+ keepalive_timeout: float = 75.0,
310
+ ssl_context: Optional[SSLContext] = None,
311
+ print: Optional[Callable[..., None]] = print,
312
+ backlog: int = 128,
313
+ access_log_class: Type[AbstractAccessLogger] = AccessLogger,
314
+ access_log_format: str = AccessLogger.LOG_FORMAT,
315
+ access_log: Optional[logging.Logger] = access_logger,
316
+ handle_signals: bool = True,
317
+ reuse_address: Optional[bool] = None,
318
+ reuse_port: Optional[bool] = None,
319
+ handler_cancellation: bool = False,
320
+ ) -> None:
321
+ # An internal function to actually do all dirty job for application running
322
+ if asyncio.iscoroutine(app):
323
+ app = await app
324
+
325
+ app = cast(Application, app)
326
+
327
+ runner = AppRunner(
328
+ app,
329
+ handle_signals=handle_signals,
330
+ access_log_class=access_log_class,
331
+ access_log_format=access_log_format,
332
+ access_log=access_log,
333
+ keepalive_timeout=keepalive_timeout,
334
+ shutdown_timeout=shutdown_timeout,
335
+ handler_cancellation=handler_cancellation,
336
+ )
337
+
338
+ await runner.setup()
339
+
340
+ sites: List[BaseSite] = []
341
+
342
+ try:
343
+ if host is not None:
344
+ if isinstance(host, (str, bytes, bytearray, memoryview)):
345
+ sites.append(
346
+ TCPSite(
347
+ runner,
348
+ host,
349
+ port,
350
+ ssl_context=ssl_context,
351
+ backlog=backlog,
352
+ reuse_address=reuse_address,
353
+ reuse_port=reuse_port,
354
+ )
355
+ )
356
+ else:
357
+ for h in host:
358
+ sites.append(
359
+ TCPSite(
360
+ runner,
361
+ h,
362
+ port,
363
+ ssl_context=ssl_context,
364
+ backlog=backlog,
365
+ reuse_address=reuse_address,
366
+ reuse_port=reuse_port,
367
+ )
368
+ )
369
+ elif path is None and sock is None or port is not None:
370
+ sites.append(
371
+ TCPSite(
372
+ runner,
373
+ port=port,
374
+ ssl_context=ssl_context,
375
+ backlog=backlog,
376
+ reuse_address=reuse_address,
377
+ reuse_port=reuse_port,
378
+ )
379
+ )
380
+
381
+ if path is not None:
382
+ if isinstance(path, (str, os.PathLike)):
383
+ sites.append(
384
+ UnixSite(
385
+ runner,
386
+ path,
387
+ ssl_context=ssl_context,
388
+ backlog=backlog,
389
+ )
390
+ )
391
+ else:
392
+ for p in path:
393
+ sites.append(
394
+ UnixSite(
395
+ runner,
396
+ p,
397
+ ssl_context=ssl_context,
398
+ backlog=backlog,
399
+ )
400
+ )
401
+
402
+ if sock is not None:
403
+ if not isinstance(sock, Iterable):
404
+ sites.append(
405
+ SockSite(
406
+ runner,
407
+ sock,
408
+ ssl_context=ssl_context,
409
+ backlog=backlog,
410
+ )
411
+ )
412
+ else:
413
+ for s in sock:
414
+ sites.append(
415
+ SockSite(
416
+ runner,
417
+ s,
418
+ ssl_context=ssl_context,
419
+ backlog=backlog,
420
+ )
421
+ )
422
+ for site in sites:
423
+ await site.start()
424
+
425
+ if print: # pragma: no branch
426
+ names = sorted(str(s.name) for s in runner.sites)
427
+ print(
428
+ "======== Running on {} ========\n"
429
+ "(Press CTRL+C to quit)".format(", ".join(names))
430
+ )
431
+
432
+ # sleep forever by 1 hour intervals,
433
+ while True:
434
+ await asyncio.sleep(3600)
435
+ finally:
436
+ await runner.cleanup()
437
+
438
+
439
+ def _cancel_tasks(
440
+ to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop
441
+ ) -> None:
442
+ if not to_cancel:
443
+ return
444
+
445
+ for task in to_cancel:
446
+ task.cancel()
447
+
448
+ loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))
449
+
450
+ for task in to_cancel:
451
+ if task.cancelled():
452
+ continue
453
+ if task.exception() is not None:
454
+ loop.call_exception_handler(
455
+ {
456
+ "message": "unhandled exception during asyncio.run() shutdown",
457
+ "exception": task.exception(),
458
+ "task": task,
459
+ }
460
+ )
461
+
462
+
463
+ def run_app(
464
+ app: Union[Application, Awaitable[Application]],
465
+ *,
466
+ host: Optional[Union[str, HostSequence]] = None,
467
+ port: Optional[int] = None,
468
+ path: Union[PathLike, TypingIterable[PathLike], None] = None,
469
+ sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
470
+ shutdown_timeout: float = 60.0,
471
+ keepalive_timeout: float = 75.0,
472
+ ssl_context: Optional[SSLContext] = None,
473
+ print: Optional[Callable[..., None]] = print,
474
+ backlog: int = 128,
475
+ access_log_class: Type[AbstractAccessLogger] = AccessLogger,
476
+ access_log_format: str = AccessLogger.LOG_FORMAT,
477
+ access_log: Optional[logging.Logger] = access_logger,
478
+ handle_signals: bool = True,
479
+ reuse_address: Optional[bool] = None,
480
+ reuse_port: Optional[bool] = None,
481
+ handler_cancellation: bool = False,
482
+ loop: Optional[asyncio.AbstractEventLoop] = None,
483
+ ) -> None:
484
+ """Run an app locally"""
485
+ if loop is None:
486
+ loop = asyncio.new_event_loop()
487
+
488
+ # Configure if and only if in debugging mode and using the default logger
489
+ if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
490
+ if access_log.level == logging.NOTSET:
491
+ access_log.setLevel(logging.DEBUG)
492
+ if not access_log.hasHandlers():
493
+ access_log.addHandler(logging.StreamHandler())
494
+
495
+ main_task = loop.create_task(
496
+ _run_app(
497
+ app,
498
+ host=host,
499
+ port=port,
500
+ path=path,
501
+ sock=sock,
502
+ shutdown_timeout=shutdown_timeout,
503
+ keepalive_timeout=keepalive_timeout,
504
+ ssl_context=ssl_context,
505
+ print=print,
506
+ backlog=backlog,
507
+ access_log_class=access_log_class,
508
+ access_log_format=access_log_format,
509
+ access_log=access_log,
510
+ handle_signals=handle_signals,
511
+ reuse_address=reuse_address,
512
+ reuse_port=reuse_port,
513
+ handler_cancellation=handler_cancellation,
514
+ )
515
+ )
516
+
517
+ try:
518
+ asyncio.set_event_loop(loop)
519
+ loop.run_until_complete(main_task)
520
+ except (GracefulExit, KeyboardInterrupt): # pragma: no cover
521
+ pass
522
+ finally:
523
+ try:
524
+ main_task.cancel()
525
+ with suppress(asyncio.CancelledError):
526
+ loop.run_until_complete(main_task)
527
+ finally:
528
+ _cancel_tasks(asyncio.all_tasks(loop), loop)
529
+ loop.run_until_complete(loop.shutdown_asyncgens())
530
+ loop.close()
531
+
532
+
533
+ def main(argv: List[str]) -> None:
534
+ arg_parser = ArgumentParser(
535
+ description="aiohttp.web Application server", prog="aiohttp.web"
536
+ )
537
+ arg_parser.add_argument(
538
+ "entry_func",
539
+ help=(
540
+ "Callable returning the `aiohttp.web.Application` instance to "
541
+ "run. Should be specified in the 'module:function' syntax."
542
+ ),
543
+ metavar="entry-func",
544
+ )
545
+ arg_parser.add_argument(
546
+ "-H",
547
+ "--hostname",
548
+ help="TCP/IP hostname to serve on (default: localhost)",
549
+ default=None,
550
+ )
551
+ arg_parser.add_argument(
552
+ "-P",
553
+ "--port",
554
+ help="TCP/IP port to serve on (default: %(default)r)",
555
+ type=int,
556
+ default=8080,
557
+ )
558
+ arg_parser.add_argument(
559
+ "-U",
560
+ "--path",
561
+ help="Unix file system path to serve on. Can be combined with hostname "
562
+ "to serve on both Unix and TCP.",
563
+ )
564
+ args, extra_argv = arg_parser.parse_known_args(argv)
565
+
566
+ # Import logic
567
+ mod_str, _, func_str = args.entry_func.partition(":")
568
+ if not func_str or not mod_str:
569
+ arg_parser.error("'entry-func' not in 'module:function' syntax")
570
+ if mod_str.startswith("."):
571
+ arg_parser.error("relative module names not supported")
572
+ try:
573
+ module = import_module(mod_str)
574
+ except ImportError as ex:
575
+ arg_parser.error(f"unable to import {mod_str}: {ex}")
576
+ try:
577
+ func = getattr(module, func_str)
578
+ except AttributeError:
579
+ arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}")
580
+
581
+ # Compatibility logic
582
+ if args.path is not None and not hasattr(socket, "AF_UNIX"):
583
+ arg_parser.error(
584
+ "file system paths not supported by your operating environment"
585
+ )
586
+
587
+ logging.basicConfig(level=logging.DEBUG)
588
+
589
+ if args.path and args.hostname is None:
590
+ host = port = None
591
+ else:
592
+ host = args.hostname or "localhost"
593
+ port = args.port
594
+
595
+ app = func(extra_argv)
596
+ run_app(app, host=host, port=port, path=args.path)
597
+ arg_parser.exit(message="Stopped\n")
598
+
599
+
600
+ if __name__ == "__main__": # pragma: no branch
601
+ main(sys.argv[1:]) # pragma: no cover
deepseek/lib/python3.10/site-packages/aiohttp/web_fileresponse.py ADDED
@@ -0,0 +1,418 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import io
3
+ import os
4
+ import pathlib
5
+ import sys
6
+ from contextlib import suppress
7
+ from enum import Enum, auto
8
+ from mimetypes import MimeTypes
9
+ from stat import S_ISREG
10
+ from types import MappingProxyType
11
+ from typing import ( # noqa
12
+ IO,
13
+ TYPE_CHECKING,
14
+ Any,
15
+ Awaitable,
16
+ Callable,
17
+ Final,
18
+ Iterator,
19
+ List,
20
+ Optional,
21
+ Set,
22
+ Tuple,
23
+ Union,
24
+ cast,
25
+ )
26
+
27
+ from . import hdrs
28
+ from .abc import AbstractStreamWriter
29
+ from .helpers import ETAG_ANY, ETag, must_be_empty_body
30
+ from .typedefs import LooseHeaders, PathLike
31
+ from .web_exceptions import (
32
+ HTTPForbidden,
33
+ HTTPNotFound,
34
+ HTTPNotModified,
35
+ HTTPPartialContent,
36
+ HTTPPreconditionFailed,
37
+ HTTPRequestRangeNotSatisfiable,
38
+ )
39
+ from .web_response import StreamResponse
40
+
41
+ __all__ = ("FileResponse",)
42
+
43
+ if TYPE_CHECKING:
44
+ from .web_request import BaseRequest
45
+
46
+
47
+ _T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
48
+
49
+
50
+ NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE"))
51
+
52
+ CONTENT_TYPES: Final[MimeTypes] = MimeTypes()
53
+
54
+ # File extension to IANA encodings map that will be checked in the order defined.
55
+ ENCODING_EXTENSIONS = MappingProxyType(
56
+ {ext: CONTENT_TYPES.encodings_map[ext] for ext in (".br", ".gz")}
57
+ )
58
+
59
+ FALLBACK_CONTENT_TYPE = "application/octet-stream"
60
+
61
+ # Provide additional MIME type/extension pairs to be recognized.
62
+ # https://en.wikipedia.org/wiki/List_of_archive_formats#Compression_only
63
+ ADDITIONAL_CONTENT_TYPES = MappingProxyType(
64
+ {
65
+ "application/gzip": ".gz",
66
+ "application/x-brotli": ".br",
67
+ "application/x-bzip2": ".bz2",
68
+ "application/x-compress": ".Z",
69
+ "application/x-xz": ".xz",
70
+ }
71
+ )
72
+
73
+
74
+ class _FileResponseResult(Enum):
75
+ """The result of the file response."""
76
+
77
+ SEND_FILE = auto() # Ie a regular file to send
78
+ NOT_ACCEPTABLE = auto() # Ie a socket, or non-regular file
79
+ PRE_CONDITION_FAILED = auto() # Ie If-Match or If-None-Match failed
80
+ NOT_MODIFIED = auto() # 304 Not Modified
81
+
82
+
83
+ # Add custom pairs and clear the encodings map so guess_type ignores them.
84
+ CONTENT_TYPES.encodings_map.clear()
85
+ for content_type, extension in ADDITIONAL_CONTENT_TYPES.items():
86
+ CONTENT_TYPES.add_type(content_type, extension) # type: ignore[attr-defined]
87
+
88
+
89
+ _CLOSE_FUTURES: Set[asyncio.Future[None]] = set()
90
+
91
+
92
+ class FileResponse(StreamResponse):
93
+ """A response object can be used to send files."""
94
+
95
+ def __init__(
96
+ self,
97
+ path: PathLike,
98
+ chunk_size: int = 256 * 1024,
99
+ status: int = 200,
100
+ reason: Optional[str] = None,
101
+ headers: Optional[LooseHeaders] = None,
102
+ ) -> None:
103
+ super().__init__(status=status, reason=reason, headers=headers)
104
+
105
+ self._path = pathlib.Path(path)
106
+ self._chunk_size = chunk_size
107
+
108
+ def _seek_and_read(self, fobj: IO[Any], offset: int, chunk_size: int) -> bytes:
109
+ fobj.seek(offset)
110
+ return fobj.read(chunk_size) # type: ignore[no-any-return]
111
+
112
+ async def _sendfile_fallback(
113
+ self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int
114
+ ) -> AbstractStreamWriter:
115
+ # To keep memory usage low,fobj is transferred in chunks
116
+ # controlled by the constructor's chunk_size argument.
117
+
118
+ chunk_size = self._chunk_size
119
+ loop = asyncio.get_event_loop()
120
+ chunk = await loop.run_in_executor(
121
+ None, self._seek_and_read, fobj, offset, chunk_size
122
+ )
123
+ while chunk:
124
+ await writer.write(chunk)
125
+ count = count - chunk_size
126
+ if count <= 0:
127
+ break
128
+ chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count))
129
+
130
+ await writer.drain()
131
+ return writer
132
+
133
+ async def _sendfile(
134
+ self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int
135
+ ) -> AbstractStreamWriter:
136
+ writer = await super().prepare(request)
137
+ assert writer is not None
138
+
139
+ if NOSENDFILE or self.compression:
140
+ return await self._sendfile_fallback(writer, fobj, offset, count)
141
+
142
+ loop = request._loop
143
+ transport = request.transport
144
+ assert transport is not None
145
+
146
+ try:
147
+ await loop.sendfile(transport, fobj, offset, count)
148
+ except NotImplementedError:
149
+ return await self._sendfile_fallback(writer, fobj, offset, count)
150
+
151
+ await super().write_eof()
152
+ return writer
153
+
154
+ @staticmethod
155
+ def _etag_match(etag_value: str, etags: Tuple[ETag, ...], *, weak: bool) -> bool:
156
+ if len(etags) == 1 and etags[0].value == ETAG_ANY:
157
+ return True
158
+ return any(
159
+ etag.value == etag_value for etag in etags if weak or not etag.is_weak
160
+ )
161
+
162
+ async def _not_modified(
163
+ self, request: "BaseRequest", etag_value: str, last_modified: float
164
+ ) -> Optional[AbstractStreamWriter]:
165
+ self.set_status(HTTPNotModified.status_code)
166
+ self._length_check = False
167
+ self.etag = etag_value # type: ignore[assignment]
168
+ self.last_modified = last_modified # type: ignore[assignment]
169
+ # Delete any Content-Length headers provided by user. HTTP 304
170
+ # should always have empty response body
171
+ return await super().prepare(request)
172
+
173
+ async def _precondition_failed(
174
+ self, request: "BaseRequest"
175
+ ) -> Optional[AbstractStreamWriter]:
176
+ self.set_status(HTTPPreconditionFailed.status_code)
177
+ self.content_length = 0
178
+ return await super().prepare(request)
179
+
180
+ def _make_response(
181
+ self, request: "BaseRequest", accept_encoding: str
182
+ ) -> Tuple[
183
+ _FileResponseResult, Optional[io.BufferedReader], os.stat_result, Optional[str]
184
+ ]:
185
+ """Return the response result, io object, stat result, and encoding.
186
+
187
+ If an uncompressed file is returned, the encoding is set to
188
+ :py:data:`None`.
189
+
190
+ This method should be called from a thread executor
191
+ since it calls os.stat which may block.
192
+ """
193
+ file_path, st, file_encoding = self._get_file_path_stat_encoding(
194
+ accept_encoding
195
+ )
196
+ if not file_path:
197
+ return _FileResponseResult.NOT_ACCEPTABLE, None, st, None
198
+
199
+ etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
200
+
201
+ # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.1-2
202
+ if (ifmatch := request.if_match) is not None and not self._etag_match(
203
+ etag_value, ifmatch, weak=False
204
+ ):
205
+ return _FileResponseResult.PRE_CONDITION_FAILED, None, st, file_encoding
206
+
207
+ if (
208
+ (unmodsince := request.if_unmodified_since) is not None
209
+ and ifmatch is None
210
+ and st.st_mtime > unmodsince.timestamp()
211
+ ):
212
+ return _FileResponseResult.PRE_CONDITION_FAILED, None, st, file_encoding
213
+
214
+ # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.2-2
215
+ if (ifnonematch := request.if_none_match) is not None and self._etag_match(
216
+ etag_value, ifnonematch, weak=True
217
+ ):
218
+ return _FileResponseResult.NOT_MODIFIED, None, st, file_encoding
219
+
220
+ if (
221
+ (modsince := request.if_modified_since) is not None
222
+ and ifnonematch is None
223
+ and st.st_mtime <= modsince.timestamp()
224
+ ):
225
+ return _FileResponseResult.NOT_MODIFIED, None, st, file_encoding
226
+
227
+ fobj = file_path.open("rb")
228
+ with suppress(OSError):
229
+ # fstat() may not be available on all platforms
230
+ # Once we open the file, we want the fstat() to ensure
231
+ # the file has not changed between the first stat()
232
+ # and the open().
233
+ st = os.stat(fobj.fileno())
234
+ return _FileResponseResult.SEND_FILE, fobj, st, file_encoding
235
+
236
+ def _get_file_path_stat_encoding(
237
+ self, accept_encoding: str
238
+ ) -> Tuple[Optional[pathlib.Path], os.stat_result, Optional[str]]:
239
+ file_path = self._path
240
+ for file_extension, file_encoding in ENCODING_EXTENSIONS.items():
241
+ if file_encoding not in accept_encoding:
242
+ continue
243
+
244
+ compressed_path = file_path.with_suffix(file_path.suffix + file_extension)
245
+ with suppress(OSError):
246
+ # Do not follow symlinks and ignore any non-regular files.
247
+ st = compressed_path.lstat()
248
+ if S_ISREG(st.st_mode):
249
+ return compressed_path, st, file_encoding
250
+
251
+ # Fallback to the uncompressed file
252
+ st = file_path.stat()
253
+ return file_path if S_ISREG(st.st_mode) else None, st, None
254
+
255
+ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
256
+ loop = asyncio.get_running_loop()
257
+ # Encoding comparisons should be case-insensitive
258
+ # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
259
+ accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
260
+ try:
261
+ response_result, fobj, st, file_encoding = await loop.run_in_executor(
262
+ None, self._make_response, request, accept_encoding
263
+ )
264
+ except PermissionError:
265
+ self.set_status(HTTPForbidden.status_code)
266
+ return await super().prepare(request)
267
+ except OSError:
268
+ # Most likely to be FileNotFoundError or OSError for circular
269
+ # symlinks in python >= 3.13, so respond with 404.
270
+ self.set_status(HTTPNotFound.status_code)
271
+ return await super().prepare(request)
272
+
273
+ # Forbid special files like sockets, pipes, devices, etc.
274
+ if response_result is _FileResponseResult.NOT_ACCEPTABLE:
275
+ self.set_status(HTTPForbidden.status_code)
276
+ return await super().prepare(request)
277
+
278
+ if response_result is _FileResponseResult.PRE_CONDITION_FAILED:
279
+ return await self._precondition_failed(request)
280
+
281
+ if response_result is _FileResponseResult.NOT_MODIFIED:
282
+ etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
283
+ last_modified = st.st_mtime
284
+ return await self._not_modified(request, etag_value, last_modified)
285
+
286
+ assert fobj is not None
287
+ try:
288
+ return await self._prepare_open_file(request, fobj, st, file_encoding)
289
+ finally:
290
+ # We do not await here because we do not want to wait
291
+ # for the executor to finish before returning the response
292
+ # so the connection can begin servicing another request
293
+ # as soon as possible.
294
+ close_future = loop.run_in_executor(None, fobj.close)
295
+ # Hold a strong reference to the future to prevent it from being
296
+ # garbage collected before it completes.
297
+ _CLOSE_FUTURES.add(close_future)
298
+ close_future.add_done_callback(_CLOSE_FUTURES.remove)
299
+
300
+ async def _prepare_open_file(
301
+ self,
302
+ request: "BaseRequest",
303
+ fobj: io.BufferedReader,
304
+ st: os.stat_result,
305
+ file_encoding: Optional[str],
306
+ ) -> Optional[AbstractStreamWriter]:
307
+ status = self._status
308
+ file_size: int = st.st_size
309
+ file_mtime: float = st.st_mtime
310
+ count: int = file_size
311
+ start: Optional[int] = None
312
+
313
+ if (ifrange := request.if_range) is None or file_mtime <= ifrange.timestamp():
314
+ # If-Range header check:
315
+ # condition = cached date >= last modification date
316
+ # return 206 if True else 200.
317
+ # if False:
318
+ # Range header would not be processed, return 200
319
+ # if True but Range header missing
320
+ # return 200
321
+ try:
322
+ rng = request.http_range
323
+ start = rng.start
324
+ end: Optional[int] = rng.stop
325
+ except ValueError:
326
+ # https://tools.ietf.org/html/rfc7233:
327
+ # A server generating a 416 (Range Not Satisfiable) response to
328
+ # a byte-range request SHOULD send a Content-Range header field
329
+ # with an unsatisfied-range value.
330
+ # The complete-length in a 416 response indicates the current
331
+ # length of the selected representation.
332
+ #
333
+ # Will do the same below. Many servers ignore this and do not
334
+ # send a Content-Range header with HTTP 416
335
+ self._headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
336
+ self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
337
+ return await super().prepare(request)
338
+
339
+ # If a range request has been made, convert start, end slice
340
+ # notation into file pointer offset and count
341
+ if start is not None:
342
+ if start < 0 and end is None: # return tail of file
343
+ start += file_size
344
+ if start < 0:
345
+ # if Range:bytes=-1000 in request header but file size
346
+ # is only 200, there would be trouble without this
347
+ start = 0
348
+ count = file_size - start
349
+ else:
350
+ # rfc7233:If the last-byte-pos value is
351
+ # absent, or if the value is greater than or equal to
352
+ # the current length of the representation data,
353
+ # the byte range is interpreted as the remainder
354
+ # of the representation (i.e., the server replaces the
355
+ # value of last-byte-pos with a value that is one less than
356
+ # the current length of the selected representation).
357
+ count = (
358
+ min(end if end is not None else file_size, file_size) - start
359
+ )
360
+
361
+ if start >= file_size:
362
+ # HTTP 416 should be returned in this case.
363
+ #
364
+ # According to https://tools.ietf.org/html/rfc7233:
365
+ # If a valid byte-range-set includes at least one
366
+ # byte-range-spec with a first-byte-pos that is less than
367
+ # the current length of the representation, or at least one
368
+ # suffix-byte-range-spec with a non-zero suffix-length,
369
+ # then the byte-range-set is satisfiable. Otherwise, the
370
+ # byte-range-set is unsatisfiable.
371
+ self._headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
372
+ self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
373
+ return await super().prepare(request)
374
+
375
+ status = HTTPPartialContent.status_code
376
+ # Even though you are sending the whole file, you should still
377
+ # return a HTTP 206 for a Range request.
378
+ self.set_status(status)
379
+
380
+ # If the Content-Type header is not already set, guess it based on the
381
+ # extension of the request path. The encoding returned by guess_type
382
+ # can be ignored since the map was cleared above.
383
+ if hdrs.CONTENT_TYPE not in self._headers:
384
+ if sys.version_info >= (3, 13):
385
+ guesser = CONTENT_TYPES.guess_file_type
386
+ else:
387
+ guesser = CONTENT_TYPES.guess_type
388
+ self.content_type = guesser(self._path)[0] or FALLBACK_CONTENT_TYPE
389
+
390
+ if file_encoding:
391
+ self._headers[hdrs.CONTENT_ENCODING] = file_encoding
392
+ self._headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING
393
+ # Disable compression if we are already sending
394
+ # a compressed file since we don't want to double
395
+ # compress.
396
+ self._compression = False
397
+
398
+ self.etag = f"{st.st_mtime_ns:x}-{st.st_size:x}" # type: ignore[assignment]
399
+ self.last_modified = file_mtime # type: ignore[assignment]
400
+ self.content_length = count
401
+
402
+ self._headers[hdrs.ACCEPT_RANGES] = "bytes"
403
+
404
+ if status == HTTPPartialContent.status_code:
405
+ real_start = start
406
+ assert real_start is not None
407
+ self._headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format(
408
+ real_start, real_start + count - 1, file_size
409
+ )
410
+
411
+ # If we are sending 0 bytes calling sendfile() will throw a ValueError
412
+ if count == 0 or must_be_empty_body(request.method, status):
413
+ return await super().prepare(request)
414
+
415
+ # be aware that start could be None or int=0 here.
416
+ offset = start or 0
417
+
418
+ return await self._sendfile(request, fobj, offset, count)
deepseek/lib/python3.10/site-packages/aiohttp/web_middlewares.py ADDED
@@ -0,0 +1,121 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+ from typing import TYPE_CHECKING, Tuple, Type, TypeVar
3
+
4
+ from .typedefs import Handler, Middleware
5
+ from .web_exceptions import HTTPMove, HTTPPermanentRedirect
6
+ from .web_request import Request
7
+ from .web_response import StreamResponse
8
+ from .web_urldispatcher import SystemRoute
9
+
10
+ __all__ = (
11
+ "middleware",
12
+ "normalize_path_middleware",
13
+ )
14
+
15
+ if TYPE_CHECKING:
16
+ from .web_app import Application
17
+
18
+ _Func = TypeVar("_Func")
19
+
20
+
21
async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]:
    """Try resolving *request* against an alternative *path*.

    Returns ``(True, cloned_request)`` when the rewritten path resolves to a
    real route (no HTTP exception), otherwise ``(False, original_request)``.
    """
    candidate = request.clone(rel_url=path)
    resolved = await request.app.router.resolve(candidate)
    candidate._match_info = resolved
    if resolved.http_exception is not None:
        return False, request
    return True, candidate
31
+
32
+
33
+ def middleware(f: _Func) -> _Func:
34
+ f.__middleware_version__ = 1 # type: ignore[attr-defined]
35
+ return f
36
+
37
+
38
def normalize_path_middleware(
    *,
    append_slash: bool = True,
    remove_slash: bool = False,
    merge_slashes: bool = True,
    redirect_class: Type[HTTPMove] = HTTPPermanentRedirect,
) -> Middleware:
    """Factory for producing a middleware that normalizes the path of a request.

    Normalizing means:
      - Add or remove a trailing slash to the path.
      - Double slashes are replaced by one.

    The middleware returns as soon as it finds a path that resolves
    correctly. The order if both merge and append/remove are enabled is
      1) merge slashes
      2) append/remove slash
      3) both merge slashes and append/remove slash.
    If the path resolves with at least one of those conditions, it will
    redirect to the new path.

    Only one of `append_slash` and `remove_slash` can be enabled. If both
    are `True` the factory will raise an assertion error

    If `append_slash` is `True` the middleware will append a slash when
    needed. If a resource is defined with trailing slash and the request
    comes without it, it will append it automatically.

    If `remove_slash` is `True`, `append_slash` must be `False`. When enabled
    the middleware will remove trailing slashes and redirect if the resource
    is defined

    If merge_slashes is True, merge multiple consecutive slashes in the
    path into one.
    """
    correct_configuration = not (append_slash and remove_slash)
    assert correct_configuration, "Cannot both remove and append slash"

    @middleware
    async def impl(request: Request, handler: Handler) -> StreamResponse:
        # Only attempt normalization when normal routing failed
        # (a SystemRoute match means a 404/405-style system response).
        if isinstance(request.match_info.route, SystemRoute):
            paths_to_check = []
            # Split off the query string so it can be re-attached to the
            # redirect target untouched.
            if "?" in request.raw_path:
                path, query = request.raw_path.split("?", 1)
                query = "?" + query
            else:
                query = ""
                path = request.raw_path

            # Candidate rewrites, tried in documented priority order.
            if merge_slashes:
                paths_to_check.append(re.sub("//+", "/", path))
            if append_slash and not request.path.endswith("/"):
                paths_to_check.append(path + "/")
            if remove_slash and request.path.endswith("/"):
                paths_to_check.append(path[:-1])
            if merge_slashes and append_slash:
                paths_to_check.append(re.sub("//+", "/", path + "/"))
            if merge_slashes and remove_slash:
                merged_slashes = re.sub("//+", "/", path)
                paths_to_check.append(merged_slashes[:-1])

            for path in paths_to_check:
                # Collapse leading double slashes so the redirect target cannot
                # be interpreted as a scheme-relative URL to another host.
                path = re.sub("^//+", "/", path)  # SECURITY: GHSA-v6wp-4m6f-gcjg
                resolves, request = await _check_request_resolves(request, path)
                if resolves:
                    raise redirect_class(request.raw_path + query)

        # No rewrite resolved (or routing succeeded): run the handler as-is.
        return await handler(request)

    return impl
108
+
109
+
110
def _fix_request_current_app(app: "Application") -> Middleware:
    """Return a middleware that pins ``match_info.current_app`` to *app*.

    The previous application is restored once the handler finishes (even on
    error) so nested sub-application dispatch stays consistent.
    """

    @middleware
    async def impl(request: Request, handler: Handler) -> StreamResponse:
        info = request.match_info
        saved_app = info.current_app
        info.current_app = app
        try:
            return await handler(request)
        finally:
            info.current_app = saved_app

    return impl
deepseek/lib/python3.10/site-packages/aiohttp/web_protocol.py ADDED
@@ -0,0 +1,746 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import asyncio.streams
3
+ import sys
4
+ import traceback
5
+ import warnings
6
+ from collections import deque
7
+ from contextlib import suppress
8
+ from html import escape as html_escape
9
+ from http import HTTPStatus
10
+ from logging import Logger
11
+ from typing import (
12
+ TYPE_CHECKING,
13
+ Any,
14
+ Awaitable,
15
+ Callable,
16
+ Deque,
17
+ Optional,
18
+ Sequence,
19
+ Tuple,
20
+ Type,
21
+ Union,
22
+ cast,
23
+ )
24
+
25
+ import attr
26
+ import yarl
27
+
28
+ from .abc import AbstractAccessLogger, AbstractStreamWriter
29
+ from .base_protocol import BaseProtocol
30
+ from .helpers import ceil_timeout
31
+ from .http import (
32
+ HttpProcessingError,
33
+ HttpRequestParser,
34
+ HttpVersion10,
35
+ RawRequestMessage,
36
+ StreamWriter,
37
+ )
38
+ from .http_exceptions import BadHttpMethod
39
+ from .log import access_logger, server_logger
40
+ from .streams import EMPTY_PAYLOAD, StreamReader
41
+ from .tcp_helpers import tcp_keepalive
42
+ from .web_exceptions import HTTPException, HTTPInternalServerError
43
+ from .web_log import AccessLogger
44
+ from .web_request import BaseRequest
45
+ from .web_response import Response, StreamResponse
46
+
47
+ __all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")
48
+
49
+ if TYPE_CHECKING:
50
+ from .web_server import Server
51
+
52
+
53
# Factory signature used by the Server to build a BaseRequest for every
# parsed message: (message, payload, protocol, writer, handler task).
_RequestFactory = Callable[
    [
        RawRequestMessage,
        StreamReader,
        "RequestHandler",
        AbstractStreamWriter,
        "asyncio.Task[None]",
    ],
    BaseRequest,
]

# Signature of the user-supplied request handler coroutine.
_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]

# Placeholder message substituted when request parsing failed; it lets the
# normal request pipeline run so an error response can still be generated.
ERROR = RawRequestMessage(
    "UNKNOWN",
    "/",
    HttpVersion10,
    {},  # type: ignore[arg-type]
    {},  # type: ignore[arg-type]
    True,
    None,
    False,
    False,
    yarl.URL("/"),
)


class RequestPayloadError(Exception):
    """Payload parsing error."""


class PayloadAccessError(Exception):
    """Payload was accessed after response was sent."""


# Shared singleton set as the payload's exception once the response finished.
_PAYLOAD_ACCESS_ERROR = PayloadAccessError()


@attr.s(auto_attribs=True, frozen=True, slots=True)
class _ErrInfo:
    # HTTP status code to respond with.
    status: int
    # The parsing exception that triggered the error.
    exc: BaseException
    # Human-readable error message.
    message: str


# One parsed message (or parse-error info) paired with its body stream.
_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader]
99
+
100
+
101
class RequestHandler(BaseProtocol):
    """HTTP protocol implementation.

    RequestHandler handles incoming HTTP request. It reads request line,
    request headers and request payload and calls handle_request() method.
    By default it always returns with 404 response.

    RequestHandler handles errors in incoming request, like bad
    status line, bad headers or incomplete payload. If any error occurs,
    connection gets closed.

    keepalive_timeout -- number of seconds before closing
                         keep-alive connection

    tcp_keepalive -- TCP keep-alive is on, default is on

    debug -- enable debug mode

    logger -- custom logger object

    access_log_class -- custom class for access_logger

    access_log -- custom logging object

    access_log_format -- access log format string

    loop -- Optional event loop

    max_line_size -- Optional maximum header line size

    max_field_size -- Optional maximum header field size

    max_headers -- Optional maximum header size

    timeout_ceil_threshold -- Optional value to specify
                              threshold to ceil() timeout
                              values

    """

    # One RequestHandler instance exists per client connection; __slots__
    # keeps the per-connection memory footprint small.
    __slots__ = (
        "_request_count",
        "_keepalive",
        "_manager",
        "_request_handler",
        "_request_factory",
        "_tcp_keepalive",
        "_next_keepalive_close_time",
        "_keepalive_handle",
        "_keepalive_timeout",
        "_lingering_time",
        "_messages",
        "_message_tail",
        "_handler_waiter",
        "_waiter",
        "_task_handler",
        "_upgrade",
        "_payload_parser",
        "_request_parser",
        "_reading_paused",
        "logger",
        "debug",
        "access_log",
        "access_logger",
        "_close",
        "_force_close",
        "_current_request",
        "_timeout_ceil_threshold",
        "_request_in_progress",
    )
171
+
172
    def __init__(
        self,
        manager: "Server",
        *,
        loop: asyncio.AbstractEventLoop,
        # Default should be high enough that it's likely longer than a reverse proxy.
        keepalive_timeout: float = 3630,
        tcp_keepalive: bool = True,
        logger: Logger = server_logger,
        access_log_class: Type[AbstractAccessLogger] = AccessLogger,
        access_log: Logger = access_logger,
        access_log_format: str = AccessLogger.LOG_FORMAT,
        debug: bool = False,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        lingering_time: float = 10.0,
        read_bufsize: int = 2**16,
        auto_decompress: bool = True,
        timeout_ceil_threshold: float = 5,
    ):
        super().__init__(loop)

        # _request_count is the number of requests processed with the same connection.
        self._request_count = 0
        self._keepalive = False
        self._current_request: Optional[BaseRequest] = None
        self._manager: Optional[Server] = manager
        self._request_handler: Optional[_RequestHandler] = manager.request_handler
        self._request_factory: Optional[_RequestFactory] = manager.request_factory

        self._tcp_keepalive = tcp_keepalive
        # placeholder to be replaced on keepalive timeout setup
        self._next_keepalive_close_time = 0.0
        self._keepalive_handle: Optional[asyncio.Handle] = None
        self._keepalive_timeout = keepalive_timeout
        self._lingering_time = float(lingering_time)

        # Parsed-but-not-yet-handled (pipelined) messages, consumed by start().
        self._messages: Deque[_MsgType] = deque()
        self._message_tail = b""

        self._waiter: Optional[asyncio.Future[None]] = None
        self._handler_waiter: Optional[asyncio.Future[None]] = None
        self._task_handler: Optional[asyncio.Task[None]] = None

        self._upgrade = False
        self._payload_parser: Any = None
        self._request_parser: Optional[HttpRequestParser] = HttpRequestParser(
            self,
            loop,
            read_bufsize,
            max_line_size=max_line_size,
            max_field_size=max_field_size,
            max_headers=max_headers,
            payload_exception=RequestPayloadError,
            auto_decompress=auto_decompress,
        )

        # Fall back to 5 when the supplied threshold is not convertible.
        self._timeout_ceil_threshold: float = 5
        try:
            self._timeout_ceil_threshold = float(timeout_ceil_threshold)
        except (TypeError, ValueError):
            pass

        self.logger = logger
        self.debug = debug
        self.access_log = access_log
        if access_log:
            self.access_logger: Optional[AbstractAccessLogger] = access_log_class(
                access_log, access_log_format
            )
        else:
            self.access_logger = None

        self._close = False
        self._force_close = False
        self._request_in_progress = False
249
+
250
+ def __repr__(self) -> str:
251
+ return "<{} {}>".format(
252
+ self.__class__.__name__,
253
+ "connected" if self.transport is not None else "disconnected",
254
+ )
255
+
256
    @property
    def keepalive_timeout(self) -> float:
        # Seconds an idle keep-alive connection stays open between requests.
        return self._keepalive_timeout
259
+
260
    async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
        """Do worker process exit preparations.

        We need to clean up everything and stop accepting requests.
        It is especially important for keep-alive connections.

        :param timeout: grace period (seconds) applied twice — once waiting
            for the in-flight handler to finish, once waiting after
            cancelling it.
        """
        self._force_close = True

        if self._keepalive_handle is not None:
            self._keepalive_handle.cancel()

        # Wait for graceful handler completion
        if self._request_in_progress:
            # The future is only created when we are shutting
            # down while the handler is still processing a request
            # to avoid creating a future for every request.
            self._handler_waiter = self._loop.create_future()
            try:
                async with ceil_timeout(timeout):
                    await self._handler_waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._handler_waiter = None
                # 3.11+: task.cancelling() > 0 means shutdown() itself was
                # cancelled from outside — propagate that cancellation.
                if (
                    sys.version_info >= (3, 11)
                    and (task := asyncio.current_task())
                    and task.cancelling()
                ):
                    raise
        # Then cancel handler and wait
        try:
            async with ceil_timeout(timeout):
                if self._current_request is not None:
                    self._current_request._cancel(asyncio.CancelledError())

                if self._task_handler is not None and not self._task_handler.done():
                    # shield: the per-request task should finish on its own
                    # terms even if this wait times out or is cancelled.
                    await asyncio.shield(self._task_handler)
        except (asyncio.CancelledError, asyncio.TimeoutError):
            if (
                sys.version_info >= (3, 11)
                and (task := asyncio.current_task())
                and task.cancelling()
            ):
                raise

        # force-close non-idle handler
        if self._task_handler is not None:
            self._task_handler.cancel()

        self.force_close()
309
+
310
    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        """Register the new connection and start the request-processing task."""
        super().connection_made(transport)

        real_transport = cast(asyncio.Transport, transport)
        if self._tcp_keepalive:
            tcp_keepalive(real_transport)

        assert self._manager is not None
        self._manager.connection_made(self, real_transport)

        loop = self._loop
        if sys.version_info >= (3, 12):
            # eager_start lets start() begin running synchronously here.
            task = asyncio.Task(self.start(), loop=loop, eager_start=True)
        else:
            task = loop.create_task(self.start())
        self._task_handler = task
326
+
327
    def connection_lost(self, exc: Optional[BaseException]) -> None:
        """Tear down all per-connection state once the transport is gone."""
        if self._manager is None:
            # Already torn down — nothing left to do.
            return
        self._manager.connection_lost(self, exc)

        # Grab value before setting _manager to None.
        handler_cancellation = self._manager.handler_cancellation

        self.force_close()
        super().connection_lost(exc)
        self._manager = None
        self._request_factory = None
        self._request_handler = None
        self._request_parser = None

        if self._keepalive_handle is not None:
            self._keepalive_handle.cancel()

        if self._current_request is not None:
            # Wake up a handler blocked on reading from this connection.
            if exc is None:
                exc = ConnectionResetError("Connection lost")
            self._current_request._cancel(exc)

        if handler_cancellation and self._task_handler is not None:
            self._task_handler.cancel()

        self._task_handler = None

        if self._payload_parser is not None:
            self._payload_parser.feed_eof()
            self._payload_parser = None
358
+
359
+ def set_parser(self, parser: Any) -> None:
360
+ # Actual type is WebReader
361
+ assert self._payload_parser is None
362
+
363
+ self._payload_parser = parser
364
+
365
+ if self._message_tail:
366
+ self._payload_parser.feed_data(self._message_tail)
367
+ self._message_tail = b""
368
+
369
    def eof_received(self) -> None:
        # Intentionally a no-op: teardown is driven elsewhere, EOF alone is ignored.
        pass
371
+
372
    def data_received(self, data: bytes) -> None:
        """Feed raw transport bytes into the request or payload parser."""
        if self._force_close or self._close:
            return
        # parse http messages
        messages: Sequence[_MsgType]
        if self._payload_parser is None and not self._upgrade:
            assert self._request_parser is not None
            try:
                messages, upgraded, tail = self._request_parser.feed_data(data)
            except HttpProcessingError as exc:
                # Turn the parse failure into a synthetic message so the normal
                # request pipeline can produce the 400 response.
                messages = [
                    (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD)
                ]
                upgraded = False
                tail = b""

            for msg, payload in messages or ():
                self._request_count += 1
                self._messages.append((msg, payload))

            waiter = self._waiter
            if messages and waiter is not None and not waiter.done():
                # don't set result twice
                waiter.set_result(None)

            self._upgrade = upgraded
            if upgraded and tail:
                self._message_tail = tail

        # no parser, just store
        elif self._payload_parser is None and self._upgrade and data:
            self._message_tail += data

        # feed payload
        elif data:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self.close()
+ self.close()
410
+
411
+ def keep_alive(self, val: bool) -> None:
412
+ """Set keep-alive connection mode.
413
+
414
+ :param bool val: new state.
415
+ """
416
+ self._keepalive = val
417
+ if self._keepalive_handle:
418
+ self._keepalive_handle.cancel()
419
+ self._keepalive_handle = None
420
+
421
+ def close(self) -> None:
422
+ """Close connection.
423
+
424
+ Stop accepting new pipelining messages and close
425
+ connection when handlers done processing messages.
426
+ """
427
+ self._close = True
428
+ if self._waiter:
429
+ self._waiter.cancel()
430
+
431
+ def force_close(self) -> None:
432
+ """Forcefully close connection."""
433
+ self._force_close = True
434
+ if self._waiter:
435
+ self._waiter.cancel()
436
+ if self.transport is not None:
437
+ self.transport.close()
438
+ self.transport = None
439
+
440
+ def log_access(
441
+ self, request: BaseRequest, response: StreamResponse, time: float
442
+ ) -> None:
443
+ if self.access_logger is not None and self.access_logger.enabled:
444
+ self.access_logger.log(request, response, self._loop.time() - time)
445
+
446
+ def log_debug(self, *args: Any, **kw: Any) -> None:
447
+ if self.debug:
448
+ self.logger.debug(*args, **kw)
449
+
450
    def log_exception(self, *args: Any, **kw: Any) -> None:
        # Always logged (unlike log_debug, which requires debug mode).
        self.logger.exception(*args, **kw)
452
+
453
    def _process_keepalive(self) -> None:
        """Timer callback: close the connection once the keep-alive idle period expires."""
        self._keepalive_handle = None
        if self._force_close or not self._keepalive:
            return

        loop = self._loop
        now = loop.time()
        close_time = self._next_keepalive_close_time
        if now <= close_time:
            # Keep alive close check fired too early, reschedule
            self._keepalive_handle = loop.call_at(close_time, self._process_keepalive)
            return

        # handler in idle state
        if self._waiter and not self._waiter.done():
            self.force_close()
469
+
470
    async def _handle_request(
        self,
        request: BaseRequest,
        start_time: float,
        request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]],
    ) -> Tuple[StreamResponse, bool]:
        """Run one request through the handler and finish the response.

        Returns ``(response, reset)``; *reset* is True when the client
        disconnected before the response could be fully written.
        """
        self._request_in_progress = True
        try:
            try:
                self._current_request = request
                resp = await request_handler(request)
            finally:
                self._current_request = None
        except HTTPException as exc:
            # An HTTP exception raised by the handler IS the response.
            resp = exc
            resp, reset = await self.finish_response(request, resp, start_time)
        except asyncio.CancelledError:
            raise
        except asyncio.TimeoutError as exc:
            self.log_debug("Request handler timed out.", exc_info=exc)
            resp = self.handle_error(request, 504)
            resp, reset = await self.finish_response(request, resp, start_time)
        except Exception as exc:
            resp = self.handle_error(request, 500, exc)
            resp, reset = await self.finish_response(request, resp, start_time)
        else:
            # Deprecation warning (See #2415)
            if getattr(resp, "__http_exception__", False):
                warnings.warn(
                    "returning HTTPException object is deprecated "
                    "(#2415) and will be removed, "
                    "please raise the exception instead",
                    DeprecationWarning,
                )

            resp, reset = await self.finish_response(request, resp, start_time)
        finally:
            self._request_in_progress = False
            if self._handler_waiter is not None:
                # Unblock shutdown(), which is waiting for this request.
                self._handler_waiter.set_result(None)

        return resp, reset
512
+
513
    async def start(self) -> None:
        """Process incoming request.

        It reads request line, request headers and request payload, then
        calls handle_request() method. Subclass has to override
        handle_request(). start() handles various exceptions in request
        or response handling. Connection is being closed always unless
        keep_alive(True) specified.
        """
        loop = self._loop
        handler = asyncio.current_task(loop)
        assert handler is not None
        manager = self._manager
        assert manager is not None
        keepalive_timeout = self._keepalive_timeout
        resp = None
        assert self._request_factory is not None
        assert self._request_handler is not None

        while not self._force_close:
            if not self._messages:
                try:
                    # wait for next request
                    self._waiter = loop.create_future()
                    await self._waiter
                finally:
                    self._waiter = None

            message, payload = self._messages.popleft()

            start = loop.time()

            manager.requests_count += 1
            writer = StreamWriter(self, loop)
            if isinstance(message, _ErrInfo):
                # make request_factory work
                request_handler = self._make_error_handler(message)
                message = ERROR
            else:
                request_handler = self._request_handler

            request = self._request_factory(message, payload, self, writer, handler)
            try:
                # a new task is used for copy context vars (#3406)
                coro = self._handle_request(request, start, request_handler)
                if sys.version_info >= (3, 12):
                    task = asyncio.Task(coro, loop=loop, eager_start=True)
                else:
                    task = loop.create_task(coro)
                try:
                    resp, reset = await task
                except ConnectionError:
                    self.log_debug("Ignored premature client disconnection")
                    break

                # Drop the processed task from asyncio.Task.all_tasks() early
                del task
                if reset:
                    self.log_debug("Ignored premature client disconnection 2")
                    break

                # notify server about keep-alive
                self._keepalive = bool(resp.keep_alive)

                # check payload
                if not payload.is_eof():
                    lingering_time = self._lingering_time
                    if not self._force_close and lingering_time:
                        self.log_debug(
                            "Start lingering close timer for %s sec.", lingering_time
                        )

                        now = loop.time()
                        end_t = now + lingering_time

                        try:
                            # Drain (and discard) leftover body so the client
                            # can finish sending before we close.
                            while not payload.is_eof() and now < end_t:
                                async with ceil_timeout(end_t - now):
                                    # read and ignore
                                    await payload.readany()
                                now = loop.time()
                        except (asyncio.CancelledError, asyncio.TimeoutError):
                            # 3.11+: propagate only an external cancellation.
                            if (
                                sys.version_info >= (3, 11)
                                and (t := asyncio.current_task())
                                and t.cancelling()
                            ):
                                raise

                        # if payload still uncompleted
                        if not payload.is_eof() and not self._force_close:
                            self.log_debug("Uncompleted request.")
                            self.close()

                    payload.set_exception(_PAYLOAD_ACCESS_ERROR)

            except asyncio.CancelledError:
                self.log_debug("Ignored premature client disconnection")
                raise
            except Exception as exc:
                self.log_exception("Unhandled exception", exc_info=exc)
                self.force_close()
            finally:
                if self.transport is None and resp is not None:
                    self.log_debug("Ignored premature client disconnection.")
                elif not self._force_close:
                    if self._keepalive and not self._close:
                        # start keep-alive timer
                        if keepalive_timeout is not None:
                            now = loop.time()
                            close_time = now + keepalive_timeout
                            self._next_keepalive_close_time = close_time
                            if self._keepalive_handle is None:
                                self._keepalive_handle = loop.call_at(
                                    close_time, self._process_keepalive
                                )
                    else:
                        break

        # remove handler, close transport if no handlers left
        if not self._force_close:
            self._task_handler = None
            if self.transport is not None:
                self.transport.close()
637
+
638
    async def finish_response(
        self, request: BaseRequest, resp: StreamResponse, start_time: float
    ) -> Tuple[StreamResponse, bool]:
        """Prepare the response and write_eof, then log access.

        This has to
        be called within the context of any exception so the access logger
        can get exception information. Returns True if the client disconnects
        prematurely.
        """
        request._finish()
        if self._request_parser is not None:
            self._request_parser.set_upgraded(False)
            self._upgrade = False
            if self._message_tail:
                # Re-feed bytes buffered during a (now abandoned) upgrade.
                self._request_parser.feed_data(self._message_tail)
                self._message_tail = b""
        try:
            prepare_meth = resp.prepare
        except AttributeError:
            # Handler returned something that is not a response object.
            if resp is None:
                self.log_exception("Missing return statement on request handler")
            else:
                self.log_exception(
                    "Web-handler should return a response instance, "
                    "got {!r}".format(resp)
                )
            exc = HTTPInternalServerError()
            resp = Response(
                status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers
            )
            prepare_meth = resp.prepare
        try:
            await prepare_meth(request)
            await resp.write_eof()
        except ConnectionError:
            self.log_access(request, resp, start_time)
            return resp, True

        self.log_access(request, resp, start_time)
        return resp, False
679
+
680
    def handle_error(
        self,
        request: BaseRequest,
        status: int = 500,
        exc: Optional[BaseException] = None,
        message: Optional[str] = None,
    ) -> StreamResponse:
        """Handle errors.

        Returns HTTP response with specific status code. Logs additional
        information. It always closes current connection.
        """
        if self._request_count == 1 and isinstance(exc, BadHttpMethod):
            # BadHttpMethod is common when a client sends non-HTTP
            # or encrypted traffic to an HTTP port. This is expected
            # to happen when connected to the public internet so we log
            # it at the debug level as to not fill logs with noise.
            self.logger.debug("Error handling request", exc_info=exc)
        else:
            self.log_exception("Error handling request", exc_info=exc)

        # some data already got sent, connection is broken
        if request.writer.output_size > 0:
            raise ConnectionError(
                "Response is sent already, cannot send another response "
                "with the error message"
            )

        ct = "text/plain"
        if status == HTTPStatus.INTERNAL_SERVER_ERROR:
            title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)
            msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
            tb = None
            if self.debug:
                with suppress(Exception):
                    tb = traceback.format_exc()

            if "text/html" in request.headers.get("Accept", ""):
                if tb:
                    # Escape the traceback before embedding it in HTML.
                    tb = html_escape(tb)
                    msg = f"<h2>Traceback:</h2>\n<pre>{tb}</pre>"
                message = (
                    "<html><head>"
                    "<title>{title}</title>"
                    "</head><body>\n<h1>{title}</h1>"
                    "\n{msg}\n</body></html>\n"
                ).format(title=title, msg=msg)
                ct = "text/html"
            else:
                if tb:
                    msg = tb
                message = title + "\n\n" + msg

        resp = Response(status=status, text=message, content_type=ct)
        resp.force_close()

        return resp
737
+
738
+ def _make_error_handler(
739
+ self, err_info: _ErrInfo
740
+ ) -> Callable[[BaseRequest], Awaitable[StreamResponse]]:
741
+ async def handler(request: BaseRequest) -> StreamResponse:
742
+ return self.handle_error(
743
+ request, err_info.status, err_info.exc, err_info.message
744
+ )
745
+
746
+ return handler
deepseek/lib/python3.10/site-packages/aiohttp/web_request.py ADDED
@@ -0,0 +1,916 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import datetime
3
+ import io
4
+ import re
5
+ import socket
6
+ import string
7
+ import tempfile
8
+ import types
9
+ import warnings
10
+ from http.cookies import SimpleCookie
11
+ from types import MappingProxyType
12
+ from typing import (
13
+ TYPE_CHECKING,
14
+ Any,
15
+ Dict,
16
+ Final,
17
+ Iterator,
18
+ Mapping,
19
+ MutableMapping,
20
+ Optional,
21
+ Pattern,
22
+ Tuple,
23
+ Union,
24
+ cast,
25
+ )
26
+ from urllib.parse import parse_qsl
27
+
28
+ import attr
29
+ from multidict import (
30
+ CIMultiDict,
31
+ CIMultiDictProxy,
32
+ MultiDict,
33
+ MultiDictProxy,
34
+ MultiMapping,
35
+ )
36
+ from yarl import URL
37
+
38
+ from . import hdrs
39
+ from .abc import AbstractStreamWriter
40
+ from .helpers import (
41
+ _SENTINEL,
42
+ DEBUG,
43
+ ETAG_ANY,
44
+ LIST_QUOTED_ETAG_RE,
45
+ ChainMapProxy,
46
+ ETag,
47
+ HeadersMixin,
48
+ parse_http_date,
49
+ reify,
50
+ sentinel,
51
+ set_exception,
52
+ )
53
+ from .http_parser import RawRequestMessage
54
+ from .http_writer import HttpVersion
55
+ from .multipart import BodyPartReader, MultipartReader
56
+ from .streams import EmptyStreamReader, StreamReader
57
+ from .typedefs import (
58
+ DEFAULT_JSON_DECODER,
59
+ JSONDecoder,
60
+ LooseHeaders,
61
+ RawHeaders,
62
+ StrOrURL,
63
+ )
64
+ from .web_exceptions import HTTPRequestEntityTooLarge
65
+ from .web_response import StreamResponse
66
+
67
+ __all__ = ("BaseRequest", "FileField", "Request")
68
+
69
+
70
+ if TYPE_CHECKING:
71
+ from .web_app import Application
72
+ from .web_protocol import RequestHandler
73
+ from .web_urldispatcher import UrlMappingMatchInfo
74
+
75
+
76
@attr.s(auto_attribs=True, frozen=True, slots=True)
class FileField:
    # Immutable value object describing one uploaded file received in a
    # multipart/form-data POST body (produced by BaseRequest.post()).
    name: str  # form field name of the part
    filename: str  # client-supplied file name
    file: io.BufferedReader  # temporary file holding the decoded payload, seeked to 0
    content_type: str  # part Content-Type; "application/octet-stream" when absent
    headers: CIMultiDictProxy[str]  # full headers of the multipart part
83
+
84
+
85
# Grammar fragments used to parse the RFC 7239 "Forwarded" header
# (token / quoted-string / forwarded-pair).  The exact regex text is
# load-bearing; do not reformat the patterns themselves.
_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-"
# '-' at the end to prevent interpretation as range in a char class

_TOKEN: Final[str] = rf"[{_TCHAR}]+"

_QDTEXT: Final[str] = r"[{}]".format(
    r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F)))
)
# qdtext includes 0x5C to escape 0x5D ('\]')
# qdtext excludes obs-text (because obsoleted, and encoding not specified)

_QUOTED_PAIR: Final[str] = r"\\[\t !-~]"

_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format(
    qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR
)

_FORWARDED_PAIR: Final[str] = (
    r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format(
        token=_TOKEN, quoted_string=_QUOTED_STRING
    )
)

_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])")
# same pattern as _QUOTED_PAIR but contains a capture group

_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR)
112
+
113
+ ############################################################
114
+ # HTTP Request
115
+ ############################################################
116
+
117
+
118
class BaseRequest(MutableMapping[str, Any], HeadersMixin):
    """A single incoming HTTP request on the server side.

    Also behaves as a mutable mapping (``request["key"] = ...``) backed
    by ``_state``, used to pass data between middlewares and handlers.
    Most header-derived properties are ``@reify``-cached: they are
    computed once on first access and then stored on the instance.
    """

    # Methods for which a request body is expected and parsed by .post().
    POST_METHODS = {
        hdrs.METH_PATCH,
        hdrs.METH_POST,
        hdrs.METH_PUT,
        hdrs.METH_TRACE,
        hdrs.METH_DELETE,
    }

    # Attribute names that may be set on instances; used by the DEBUG-only
    # __setattr__ guard in Request to warn about custom attributes.
    ATTRS = HeadersMixin.ATTRS | frozenset(
        [
            "_message",
            "_protocol",
            "_payload_writer",
            "_payload",
            "_headers",
            "_method",
            "_version",
            "_rel_url",
            "_post",
            "_read_bytes",
            "_state",
            "_cache",
            "_task",
            "_client_max_size",
            "_loop",
            "_transport_sslcontext",
            "_transport_peername",
        ]
    )
    # Lazily populated caches: parsed POST form data and raw body bytes.
    _post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None
    _read_bytes: Optional[bytes] = None

    def __init__(
        self,
        message: RawRequestMessage,
        payload: StreamReader,
        protocol: "RequestHandler",
        payload_writer: AbstractStreamWriter,
        task: "asyncio.Task[None]",
        loop: asyncio.AbstractEventLoop,
        *,
        client_max_size: int = 1024**2,
        state: Optional[Dict[str, Any]] = None,
        scheme: Optional[str] = None,
        host: Optional[str] = None,
        remote: Optional[str] = None,
    ) -> None:
        self._message = message
        self._protocol = protocol
        self._payload_writer = payload_writer

        self._payload = payload
        self._headers: CIMultiDictProxy[str] = message.headers
        self._method = message.method
        self._version = message.version
        self._cache: Dict[str, Any] = {}
        url = message.url
        if url.absolute:
            if scheme is not None:
                url = url.with_scheme(scheme)
            if host is not None:
                url = url.with_host(host)
            # absolute URL is given,
            # override auto-calculating url, host, and scheme
            # all other properties should be good
            self._cache["url"] = url
            self._cache["host"] = url.host
            self._cache["scheme"] = url.scheme
            self._rel_url = url.relative()
        else:
            self._rel_url = url
            # Pre-seed the reify caches so clone(scheme=...)/clone(host=...)
            # overrides win over the header/transport-derived values.
            if scheme is not None:
                self._cache["scheme"] = scheme
            if host is not None:
                self._cache["host"] = host

        self._state = {} if state is None else state
        self._task = task
        self._client_max_size = client_max_size
        self._loop = loop

        transport = protocol.transport
        assert transport is not None
        self._transport_sslcontext = transport.get_extra_info("sslcontext")
        self._transport_peername = transport.get_extra_info("peername")

        if remote is not None:
            self._cache["remote"] = remote

    def clone(
        self,
        *,
        method: Union[str, _SENTINEL] = sentinel,
        rel_url: Union[StrOrURL, _SENTINEL] = sentinel,
        headers: Union[LooseHeaders, _SENTINEL] = sentinel,
        scheme: Union[str, _SENTINEL] = sentinel,
        host: Union[str, _SENTINEL] = sentinel,
        remote: Union[str, _SENTINEL] = sentinel,
        client_max_size: Union[int, _SENTINEL] = sentinel,
    ) -> "BaseRequest":
        """Clone itself with replacement some attributes.

        Creates and returns a new instance of Request object. If no parameters
        are given, an exact copy is returned. If a parameter is not passed, it
        will reuse the one from the current request object.
        """
        if self._read_bytes:
            # The payload stream is shared with the clone; once the body has
            # been consumed a clone could not read it again.
            raise RuntimeError("Cannot clone request after reading its content")

        dct: Dict[str, Any] = {}
        if method is not sentinel:
            dct["method"] = method
        if rel_url is not sentinel:
            new_url: URL = URL(rel_url)
            dct["url"] = new_url
            dct["path"] = str(new_url)
        if headers is not sentinel:
            # a copy semantic
            dct["headers"] = CIMultiDictProxy(CIMultiDict(headers))
            dct["raw_headers"] = tuple(
                (k.encode("utf-8"), v.encode("utf-8"))
                for k, v in dct["headers"].items()
            )

        message = self._message._replace(**dct)

        kwargs = {}
        if scheme is not sentinel:
            kwargs["scheme"] = scheme
        if host is not sentinel:
            kwargs["host"] = host
        if remote is not sentinel:
            kwargs["remote"] = remote
        if client_max_size is sentinel:
            client_max_size = self._client_max_size

        return self.__class__(
            message,
            self._payload,
            self._protocol,
            self._payload_writer,
            self._task,
            self._loop,
            client_max_size=client_max_size,
            state=self._state.copy(),
            **kwargs,
        )

    @property
    def task(self) -> "asyncio.Task[None]":
        return self._task

    @property
    def protocol(self) -> "RequestHandler":
        return self._protocol

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        if self._protocol is None:
            return None
        return self._protocol.transport

    @property
    def writer(self) -> AbstractStreamWriter:
        return self._payload_writer

    @property
    def client_max_size(self) -> int:
        return self._client_max_size

    @reify
    def message(self) -> RawRequestMessage:
        warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3)
        return self._message

    @reify
    def rel_url(self) -> URL:
        return self._rel_url

    @reify
    def loop(self) -> asyncio.AbstractEventLoop:
        warnings.warn(
            "request.loop property is deprecated", DeprecationWarning, stacklevel=2
        )
        return self._loop

    # MutableMapping API

    def __getitem__(self, key: str) -> Any:
        return self._state[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self._state[key] = value

    def __delitem__(self, key: str) -> None:
        del self._state[key]

    def __len__(self) -> int:
        return len(self._state)

    def __iter__(self) -> Iterator[str]:
        return iter(self._state)

    ########

    @reify
    def secure(self) -> bool:
        """A bool indicating if the request is handled with SSL."""
        return self.scheme == "https"

    @reify
    def forwarded(self) -> Tuple[Mapping[str, str], ...]:
        """A tuple containing all parsed Forwarded header(s).

        Makes an effort to parse Forwarded headers as specified by RFC 7239:

        - It adds one (immutable) dictionary per Forwarded 'field-value', ie
          per proxy. The element corresponds to the data in the Forwarded
          field-value added by the first proxy encountered by the client. Each
          subsequent item corresponds to those added by later proxies.
        - It checks that every value has valid syntax in general as specified
          in section 4: either a 'token' or a 'quoted-string'.
        - It un-escapes found escape sequences.
        - It does NOT validate 'by' and 'for' contents as specified in section
          6.
        - It does NOT validate 'host' contents (Host ABNF).
        - It does NOT validate 'proto' contents for valid URI scheme names.

        Returns a tuple containing one or more immutable dicts
        """
        elems = []
        for field_value in self._message.headers.getall(hdrs.FORWARDED, ()):
            length = len(field_value)
            pos = 0
            need_separator = False
            elem: Dict[str, str] = {}
            elems.append(types.MappingProxyType(elem))
            # Hand-written scanner; `pos` becomes -1 (loop exit) when
            # str.find() cannot locate the next comma.
            while 0 <= pos < length:
                match = _FORWARDED_PAIR_RE.match(field_value, pos)
                if match is not None:  # got a valid forwarded-pair
                    if need_separator:
                        # bad syntax here, skip to next comma
                        pos = field_value.find(",", pos)
                    else:
                        name, value, port = match.groups()
                        if value[0] == '"':
                            # quoted string: remove quotes and unescape
                            value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1])
                        if port:
                            value += port
                        elem[name.lower()] = value
                        pos += len(match.group(0))
                        need_separator = True
                elif field_value[pos] == ",":  # next forwarded-element
                    need_separator = False
                    elem = {}
                    elems.append(types.MappingProxyType(elem))
                    pos += 1
                elif field_value[pos] == ";":  # next forwarded-pair
                    need_separator = False
                    pos += 1
                elif field_value[pos] in " \t":
                    # Allow whitespace even between forwarded-pairs, though
                    # RFC 7239 doesn't. This simplifies code and is in line
                    # with Postel's law.
                    pos += 1
                else:
                    # bad syntax here, skip to next comma
                    pos = field_value.find(",", pos)
        return tuple(elems)

    @reify
    def scheme(self) -> str:
        """A string representing the scheme of the request.

        Hostname is resolved in this order:

        - overridden value by .clone(scheme=new_scheme) call.
        - type of connection to peer: HTTPS if socket is SSL, HTTP otherwise.

        'http' or 'https'.
        """
        # Clone overrides land in self._cache and shadow this computation
        # (reify consults the cache first).
        if self._transport_sslcontext:
            return "https"
        else:
            return "http"

    @reify
    def method(self) -> str:
        """Read only property for getting HTTP method.

        The value is upper-cased str like 'GET', 'POST', 'PUT' etc.
        """
        return self._method

    @reify
    def version(self) -> HttpVersion:
        """Read only property for getting HTTP version of request.

        Returns aiohttp.protocol.HttpVersion instance.
        """
        return self._version

    @reify
    def host(self) -> str:
        """Hostname of the request.

        Hostname is resolved in this order:

        - overridden value by .clone(host=new_host) call.
        - HOST HTTP header
        - socket.getfqdn() value

        For example, 'example.com' or 'localhost:8080'.

        For historical reasons, the port number may be included.
        """
        host = self._message.headers.get(hdrs.HOST)
        if host is not None:
            return host
        return socket.getfqdn()

    @reify
    def remote(self) -> Optional[str]:
        """Remote IP of client initiated HTTP request.

        The IP is resolved in this order:

        - overridden value by .clone(remote=new_remote) call.
        - peername of opened socket
        """
        if self._transport_peername is None:
            return None
        if isinstance(self._transport_peername, (list, tuple)):
            # AF_INET/AF_INET6 peernames are (host, port[, ...]) tuples.
            return str(self._transport_peername[0])
        return str(self._transport_peername)

    @reify
    def url(self) -> URL:
        """The full URL of the request."""
        # authority is used here because it may include the port number
        # and we want yarl to parse it correctly
        return URL.build(scheme=self.scheme, authority=self.host).join(self._rel_url)

    @reify
    def path(self) -> str:
        """The URL including *PATH INFO* without the host or scheme.

        E.g., ``/app/blog``
        """
        return self._rel_url.path

    @reify
    def path_qs(self) -> str:
        """The URL including PATH_INFO and the query string.

        E.g, /app/blog?id=10
        """
        return str(self._rel_url)

    @reify
    def raw_path(self) -> str:
        """The URL including raw *PATH INFO* without the host or scheme.

        Warning, the path is unquoted and may contains non valid URL characters

        E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters``
        """
        return self._message.path

    @reify
    def query(self) -> "MultiMapping[str]":
        """A multidict with all the variables in the query string."""
        return self._rel_url.query

    @reify
    def query_string(self) -> str:
        """The query string in the URL.

        E.g., id=10
        """
        return self._rel_url.query_string

    @reify
    def headers(self) -> CIMultiDictProxy[str]:
        """A case-insensitive multidict proxy with all headers."""
        return self._headers

    @reify
    def raw_headers(self) -> RawHeaders:
        """A sequence of pairs for all headers."""
        return self._message.raw_headers

    @reify
    def if_modified_since(self) -> Optional[datetime.datetime]:
        """The value of If-Modified-Since HTTP header, or None.

        This header is represented as a `datetime` object.
        """
        return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))

    @reify
    def if_unmodified_since(self) -> Optional[datetime.datetime]:
        """The value of If-Unmodified-Since HTTP header, or None.

        This header is represented as a `datetime` object.
        """
        return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))

    @staticmethod
    def _etag_values(etag_header: str) -> Iterator[ETag]:
        """Extract `ETag` objects from raw header."""
        if etag_header == ETAG_ANY:
            yield ETag(
                is_weak=False,
                value=ETAG_ANY,
            )
        else:
            for match in LIST_QUOTED_ETAG_RE.finditer(etag_header):
                is_weak, value, garbage = match.group(2, 3, 4)
                # Any symbol captured by 4th group means
                # that the following sequence is invalid.
                if garbage:
                    break

                yield ETag(
                    is_weak=bool(is_weak),
                    value=value,
                )

    @classmethod
    def _if_match_or_none_impl(
        cls, header_value: Optional[str]
    ) -> Optional[Tuple[ETag, ...]]:
        # Shared implementation for if_match / if_none_match below.
        if not header_value:
            return None

        return tuple(cls._etag_values(header_value))

    @reify
    def if_match(self) -> Optional[Tuple[ETag, ...]]:
        """The value of If-Match HTTP header, or None.

        This header is represented as a `tuple` of `ETag` objects.
        """
        return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH))

    @reify
    def if_none_match(self) -> Optional[Tuple[ETag, ...]]:
        """The value of If-None-Match HTTP header, or None.

        This header is represented as a `tuple` of `ETag` objects.
        """
        return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH))

    @reify
    def if_range(self) -> Optional[datetime.datetime]:
        """The value of If-Range HTTP header, or None.

        This header is represented as a `datetime` object.
        """
        return parse_http_date(self.headers.get(hdrs.IF_RANGE))

    @reify
    def keep_alive(self) -> bool:
        """Is keepalive enabled by client?"""
        return not self._message.should_close

    @reify
    def cookies(self) -> Mapping[str, str]:
        """Return request cookies.

        A read-only dictionary-like object.
        """
        raw = self.headers.get(hdrs.COOKIE, "")
        parsed = SimpleCookie(raw)
        return MappingProxyType({key: val.value for key, val in parsed.items()})

    @reify
    def http_range(self) -> slice:
        """The content of Range HTTP header.

        Return a slice instance.

        """
        rng = self._headers.get(hdrs.RANGE)
        start, end = None, None
        if rng is not None:
            try:
                pattern = r"^bytes=(\d*)-(\d*)$"
                start, end = re.findall(pattern, rng)[0]
            except IndexError:  # pattern was not found in header
                raise ValueError("range not in acceptable format")

            end = int(end) if end else None
            start = int(start) if start else None

            if start is None and end is not None:
                # end with no start is to return tail of content
                # (negative slice start, e.g. bytes=-500 -> last 500 bytes)
                start = -end
                end = None

            if start is not None and end is not None:
                # end is inclusive in range header, exclusive for slice
                end += 1

                if start >= end:
                    raise ValueError("start cannot be after end")

            if start is end is None:  # No valid range supplied
                raise ValueError("No start or end of range specified")

        return slice(start, end, 1)

    @reify
    def content(self) -> StreamReader:
        """Return raw payload stream."""
        return self._payload

    @property
    def has_body(self) -> bool:
        """Return True if request's HTTP BODY can be read, False otherwise."""
        warnings.warn(
            "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2
        )
        return not self._payload.at_eof()

    @property
    def can_read_body(self) -> bool:
        """Return True if request's HTTP BODY can be read, False otherwise."""
        return not self._payload.at_eof()

    @reify
    def body_exists(self) -> bool:
        """Return True if request has HTTP BODY, False otherwise."""
        return type(self._payload) is not EmptyStreamReader

    async def release(self) -> None:
        """Release request.

        Eat unread part of HTTP BODY if present.
        """
        while not self._payload.at_eof():
            await self._payload.readany()

    async def read(self) -> bytes:
        """Read request body if present.

        Returns bytes object with full request content.

        Raises HTTPRequestEntityTooLarge when the accumulated body exceeds
        client_max_size (when that limit is non-zero).
        """
        if self._read_bytes is None:
            body = bytearray()
            while True:
                chunk = await self._payload.readany()
                body.extend(chunk)
                if self._client_max_size:
                    body_size = len(body)
                    if body_size >= self._client_max_size:
                        raise HTTPRequestEntityTooLarge(
                            max_size=self._client_max_size, actual_size=body_size
                        )
                if not chunk:
                    break
            self._read_bytes = bytes(body)
        return self._read_bytes

    async def text(self) -> str:
        """Return BODY as text using encoding from .charset."""
        bytes_body = await self.read()
        encoding = self.charset or "utf-8"
        return bytes_body.decode(encoding)

    async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any:
        """Return BODY as JSON."""
        body = await self.text()
        return loads(body)

    async def multipart(self) -> MultipartReader:
        """Return async iterator to process BODY as multipart."""
        return MultipartReader(self._headers, self._payload)

    async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
        """Return POST parameters.

        The parsed result is cached; non-POST methods and unsupported
        content types yield an empty proxy.
        """
        if self._post is not None:
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if content_type not in (
            "",
            "application/x-www-form-urlencoded",
            "multipart/form-data",
        ):
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out: MultiDict[Union[str, bytes, FileField]] = MultiDict()

        if content_type == "multipart/form-data":
            multipart = await self.multipart()
            max_size = self._client_max_size

            field = await multipart.next()
            while field is not None:
                size = 0
                field_ct = field.headers.get(hdrs.CONTENT_TYPE)

                if isinstance(field, BodyPartReader):
                    assert field.name is not None

                    # Note that according to RFC 7578, the Content-Type header
                    # is optional, even for files, so we can't assume it's
                    # present.
                    # https://tools.ietf.org/html/rfc7578#section-4.4
                    if field.filename:
                        # store file in temp file; tempfile creation and
                        # writes are pushed to the executor to avoid
                        # blocking the event loop on disk I/O.
                        tmp = await self._loop.run_in_executor(
                            None, tempfile.TemporaryFile
                        )
                        chunk = await field.read_chunk(size=2**16)
                        while chunk:
                            chunk = field.decode(chunk)
                            await self._loop.run_in_executor(None, tmp.write, chunk)
                            size += len(chunk)
                            if 0 < max_size < size:
                                await self._loop.run_in_executor(None, tmp.close)
                                raise HTTPRequestEntityTooLarge(
                                    max_size=max_size, actual_size=size
                                )
                            chunk = await field.read_chunk(size=2**16)
                        await self._loop.run_in_executor(None, tmp.seek, 0)

                        if field_ct is None:
                            field_ct = "application/octet-stream"

                        ff = FileField(
                            field.name,
                            field.filename,
                            cast(io.BufferedReader, tmp),
                            field_ct,
                            field.headers,
                        )
                        out.add(field.name, ff)
                    else:
                        # deal with ordinary data
                        value = await field.read(decode=True)
                        if field_ct is None or field_ct.startswith("text/"):
                            charset = field.get_charset(default="utf-8")
                            out.add(field.name, value.decode(charset))
                        else:
                            out.add(field.name, value)
                        size += len(value)
                        if 0 < max_size < size:
                            raise HTTPRequestEntityTooLarge(
                                max_size=max_size, actual_size=size
                            )
                else:
                    raise ValueError(
                        "To decode nested multipart you need to use custom reader",
                    )

                field = await multipart.next()
        else:
            data = await self.read()
            if data:
                charset = self.charset or "utf-8"
                out.extend(
                    parse_qsl(
                        data.rstrip().decode(charset),
                        keep_blank_values=True,
                        encoding=charset,
                    )
                )

        self._post = MultiDictProxy(out)
        return self._post

    def get_extra_info(self, name: str, default: Any = None) -> Any:
        """Extra info from protocol transport"""
        protocol = self._protocol
        if protocol is None:
            return default

        transport = protocol.transport
        if transport is None:
            return default

        return transport.get_extra_info(name, default)

    def __repr__(self) -> str:
        ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode(
            "ascii"
        )
        return "<{} {} {} >".format(
            self.__class__.__name__, self._method, ascii_encodable_path
        )

    def __eq__(self, other: object) -> bool:
        # Identity-based equality: two requests are equal only if they are
        # the same object.
        return id(self) == id(other)

    def __bool__(self) -> bool:
        return True

    async def _prepare_hook(self, response: StreamResponse) -> None:
        # Hook point for subclasses (Request fires on_response_prepare here).
        return

    def _cancel(self, exc: BaseException) -> None:
        set_exception(self._payload, exc)

    def _finish(self) -> None:
        if self._post is None or self.content_type != "multipart/form-data":
            return

        # NOTE: Release file descriptors for the
        # NOTE: `tempfile.Temporaryfile`-created `_io.BufferedRandom`
        # NOTE: instances of files sent within multipart request body
        # NOTE: via HTTP POST request.
        for file_name, file_field_object in self._post.items():
            if isinstance(file_field_object, FileField):
                file_field_object.file.close()
842
+
843
+
844
class Request(BaseRequest):
    """An HTTP request with URL-dispatcher match information attached."""

    ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"])

    # Result of route resolving; assigned by the dispatcher, None until then.
    _match_info: Optional["UrlMappingMatchInfo"] = None

    if DEBUG:

        def __setattr__(self, name: str, val: Any) -> None:
            # Debug-only guard: warn when user code attaches arbitrary
            # attributes instead of using the mapping interface.
            if name not in self.ATTRS:
                warnings.warn(
                    "Setting custom {}.{} attribute "
                    "is discouraged".format(self.__class__.__name__, name),
                    DeprecationWarning,
                    stacklevel=2,
                )
            super().__setattr__(name, val)

    def clone(
        self,
        *,
        method: Union[str, _SENTINEL] = sentinel,
        rel_url: Union[StrOrURL, _SENTINEL] = sentinel,
        headers: Union[LooseHeaders, _SENTINEL] = sentinel,
        scheme: Union[str, _SENTINEL] = sentinel,
        host: Union[str, _SENTINEL] = sentinel,
        remote: Union[str, _SENTINEL] = sentinel,
        client_max_size: Union[int, _SENTINEL] = sentinel,
    ) -> "Request":
        # Delegates to BaseRequest.clone(), then carries the match info over.
        ret = super().clone(
            method=method,
            rel_url=rel_url,
            headers=headers,
            scheme=scheme,
            host=host,
            remote=remote,
            client_max_size=client_max_size,
        )
        new_ret = cast(Request, ret)
        new_ret._match_info = self._match_info
        return new_ret

    @reify
    def match_info(self) -> "UrlMappingMatchInfo":
        """Result of route resolving."""
        match_info = self._match_info
        assert match_info is not None
        return match_info

    @property
    def app(self) -> "Application":
        """Application instance."""
        match_info = self._match_info
        assert match_info is not None
        return match_info.current_app

    @property
    def config_dict(self) -> ChainMapProxy:
        # Chain of app states from the current (sub)app up to the root app,
        # so lookups fall back to parent applications.
        match_info = self._match_info
        assert match_info is not None
        lst = match_info.apps
        app = self.app
        idx = lst.index(app)
        sublist = list(reversed(lst[: idx + 1]))
        return ChainMapProxy(sublist)

    async def _prepare_hook(self, response: StreamResponse) -> None:
        # Fire each application's on_response_prepare signal before the
        # response headers are sent.
        match_info = self._match_info
        if match_info is None:
            return
        for app in match_info._apps:
            if on_response_prepare := app.on_response_prepare:
                await on_response_prepare.send(self, response)
deepseek/lib/python3.10/site-packages/aiohttp/web_response.py ADDED
@@ -0,0 +1,840 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import collections.abc
3
+ import datetime
4
+ import enum
5
+ import json
6
+ import math
7
+ import time
8
+ import warnings
9
+ import zlib
10
+ from concurrent.futures import Executor
11
+ from http import HTTPStatus
12
+ from http.cookies import SimpleCookie
13
+ from typing import (
14
+ TYPE_CHECKING,
15
+ Any,
16
+ Dict,
17
+ Iterator,
18
+ MutableMapping,
19
+ Optional,
20
+ Union,
21
+ cast,
22
+ )
23
+
24
+ from multidict import CIMultiDict, istr
25
+
26
+ from . import hdrs, payload
27
+ from .abc import AbstractStreamWriter
28
+ from .compression_utils import ZLibCompressor
29
+ from .helpers import (
30
+ ETAG_ANY,
31
+ QUOTED_ETAG_RE,
32
+ ETag,
33
+ HeadersMixin,
34
+ must_be_empty_body,
35
+ parse_http_date,
36
+ rfc822_formatted_time,
37
+ sentinel,
38
+ should_remove_content_length,
39
+ validate_etag_value,
40
+ )
41
+ from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11
42
+ from .payload import Payload
43
+ from .typedefs import JSONEncoder, LooseHeaders
44
+
45
+ REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus}
46
+ LARGE_BODY_SIZE = 1024**2
47
+
48
+ __all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")
49
+
50
+
51
+ if TYPE_CHECKING:
52
+ from .web_request import BaseRequest
53
+
54
+ BaseClass = MutableMapping[str, Any]
55
+ else:
56
+ BaseClass = collections.abc.MutableMapping
57
+
58
+
59
# TODO(py311): Convert to StrEnum for wider use
class ContentCoding(enum.Enum):
    # The content codings that we have support for.
    #
    # Additional registered codings are listed at:
    # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding
    deflate = "deflate"
    gzip = "gzip"
    identity = "identity"  # no transformation applied
68
+
69
+
70
+ CONTENT_CODINGS = {coding.value: coding for coding in ContentCoding}
71
+
72
+ ############################################################
73
+ # HTTP Response classes
74
+ ############################################################
75
+
76
+
77
class StreamResponse(BaseClass, HeadersMixin):
    """Base class for streamed HTTP responses.

    Status, reason, headers and cookies may be mutated freely until
    :meth:`prepare` is called; after that the response is committed and
    body data is pushed through :meth:`write` / :meth:`write_eof`.
    """

    _body: Union[None, bytes, bytearray, Payload]
    # Class-level defaults keep instances lightweight: a per-instance
    # attribute is only created when a value diverges from the default.
    _length_check = True
    _body = None
    _keep_alive: Optional[bool] = None
    _chunked: bool = False
    _compression: bool = False
    _compression_strategy: int = zlib.Z_DEFAULT_STRATEGY
    _compression_force: Optional[ContentCoding] = None
    _req: Optional["BaseRequest"] = None
    _payload_writer: Optional[AbstractStreamWriter] = None
    _eof_sent: bool = False
    _must_be_empty_body: Optional[bool] = None
    _body_length = 0
    _cookies: Optional[SimpleCookie] = None  # created lazily on first use

    def __init__(
        self,
        *,
        status: int = 200,
        reason: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
        _real_headers: Optional[CIMultiDict[str]] = None,
    ) -> None:
        """Initialize a new stream response object.

        _real_headers is an internal parameter used to pass a pre-populated
        headers object. It is used by the `Response` class to avoid copying
        the headers when creating a new response object. It is not intended
        to be used by external code.
        """
        # Per-response mapping exposed via the MutableMapping-style dunders
        # (__getitem__ etc.) at the bottom of this class.
        self._state: Dict[str, Any] = {}

        if _real_headers is not None:
            self._headers = _real_headers
        elif headers is not None:
            self._headers: CIMultiDict[str] = CIMultiDict(headers)
        else:
            self._headers = CIMultiDict()

        self._set_status(status, reason)

    @property
    def prepared(self) -> bool:
        """True once prepare() has started (or the response finished)."""
        return self._eof_sent or self._payload_writer is not None

    @property
    def task(self) -> "Optional[asyncio.Task[None]]":
        # Delegates to the request; None after write_eof() drops self._req.
        if self._req:
            return self._req.task
        else:
            return None

    @property
    def status(self) -> int:
        return self._status

    @property
    def chunked(self) -> bool:
        return self._chunked

    @property
    def compression(self) -> bool:
        return self._compression

    @property
    def reason(self) -> str:
        return self._reason

    def set_status(
        self,
        status: int,
        reason: Optional[str] = None,
    ) -> None:
        """Set response status code and reason phrase (before prepare only)."""
        assert (
            not self.prepared
        ), "Cannot change the response status code after the headers have been sent"
        self._set_status(status, reason)

    def _set_status(self, status: int, reason: Optional[str]) -> None:
        self._status = int(status)
        if reason is None:
            # Fall back to the standard phrase for the code, if known.
            reason = REASON_PHRASES.get(self._status, "")
        elif "\n" in reason:
            # A newline would allow header injection through the status line.
            raise ValueError("Reason cannot contain \\n")
        self._reason = reason

    @property
    def keep_alive(self) -> Optional[bool]:
        return self._keep_alive

    def force_close(self) -> None:
        """Force the underlying connection to close after this response."""
        self._keep_alive = False

    @property
    def body_length(self) -> int:
        return self._body_length

    @property
    def output_length(self) -> int:
        warnings.warn("output_length is deprecated", DeprecationWarning)
        assert self._payload_writer
        return self._payload_writer.buffer_size

    def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None:
        """Enables automatic chunked transfer encoding."""
        if hdrs.CONTENT_LENGTH in self._headers:
            # Chunked framing and Content-Length are mutually exclusive.
            raise RuntimeError(
                "You can't enable chunked encoding when a content length is set"
            )
        if chunk_size is not None:
            warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
        self._chunked = True

    def enable_compression(
        self,
        force: Optional[Union[bool, ContentCoding]] = None,
        strategy: int = zlib.Z_DEFAULT_STRATEGY,
    ) -> None:
        """Enables response compression encoding."""
        # Backwards compatibility for when force was a bool <0.17.
        if isinstance(force, bool):
            force = ContentCoding.deflate if force else ContentCoding.identity
            warnings.warn(
                "Using boolean for force is deprecated #3318", DeprecationWarning
            )
        elif force is not None:
            assert isinstance(
                force, ContentCoding
            ), "force should one of None, bool or ContentEncoding"

        self._compression = True
        self._compression_force = force
        self._compression_strategy = strategy

    @property
    def headers(self) -> "CIMultiDict[str]":
        return self._headers

    @property
    def cookies(self) -> SimpleCookie:
        # Lazily created to keep cookie-less responses cheap.
        if self._cookies is None:
            self._cookies = SimpleCookie()
        return self._cookies

    def set_cookie(
        self,
        name: str,
        value: str,
        *,
        expires: Optional[str] = None,
        domain: Optional[str] = None,
        max_age: Optional[Union[int, str]] = None,
        path: str = "/",
        secure: Optional[bool] = None,
        httponly: Optional[bool] = None,
        version: Optional[str] = None,
        samesite: Optional[str] = None,
    ) -> None:
        """Set or update response cookie.

        Sets new cookie or updates existent with new value.
        Also updates only those params which are not None.
        """
        if self._cookies is None:
            self._cookies = SimpleCookie()

        self._cookies[name] = value
        c = self._cookies[name]

        if expires is not None:
            c["expires"] = expires
        elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":
            # Drop the deletion timestamp left behind by del_cookie().
            del c["expires"]

        if domain is not None:
            c["domain"] = domain

        if max_age is not None:
            c["max-age"] = str(max_age)
        elif "max-age" in c:
            del c["max-age"]

        c["path"] = path

        if secure is not None:
            c["secure"] = secure
        if httponly is not None:
            c["httponly"] = httponly
        if version is not None:
            c["version"] = version
        if samesite is not None:
            c["samesite"] = samesite

    def del_cookie(
        self,
        name: str,
        *,
        domain: Optional[str] = None,
        path: str = "/",
        secure: Optional[bool] = None,
        httponly: Optional[bool] = None,
        samesite: Optional[str] = None,
    ) -> None:
        """Delete cookie.

        Creates new empty expired cookie.
        """
        # TODO: do we need domain/path here?
        if self._cookies is not None:
            self._cookies.pop(name, None)
        # An empty value with max-age=0 / epoch expiry tells the client to
        # discard the cookie.
        self.set_cookie(
            name,
            "",
            max_age=0,
            expires="Thu, 01 Jan 1970 00:00:00 GMT",
            domain=domain,
            path=path,
            secure=secure,
            httponly=httponly,
            samesite=samesite,
        )

    @property
    def content_length(self) -> Optional[int]:
        # Just a placeholder for adding setter
        return super().content_length

    @content_length.setter
    def content_length(self, value: Optional[int]) -> None:
        if value is not None:
            value = int(value)
            if self._chunked:
                raise RuntimeError(
                    "You can't set content length when chunked encoding is enable"
                )
            self._headers[hdrs.CONTENT_LENGTH] = str(value)
        else:
            self._headers.pop(hdrs.CONTENT_LENGTH, None)

    @property
    def content_type(self) -> str:
        # Just a placeholder for adding setter
        return super().content_type

    @content_type.setter
    def content_type(self, value: str) -> None:
        self.content_type  # read header values if needed
        self._content_type = str(value)
        self._generate_content_type_header()

    @property
    def charset(self) -> Optional[str]:
        # Just a placeholder for adding setter
        return super().charset

    @charset.setter
    def charset(self, value: Optional[str]) -> None:
        ctype = self.content_type  # read header values if needed
        if ctype == "application/octet-stream":
            raise RuntimeError(
                "Setting charset for application/octet-stream "
                "doesn't make sense, setup content_type first"
            )
        assert self._content_dict is not None
        if value is None:
            self._content_dict.pop("charset", None)
        else:
            self._content_dict["charset"] = str(value).lower()
        self._generate_content_type_header()

    @property
    def last_modified(self) -> Optional[datetime.datetime]:
        """The value of Last-Modified HTTP header, or None.

        This header is represented as a `datetime` object.
        """
        return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED))

    @last_modified.setter
    def last_modified(
        self, value: Optional[Union[int, float, datetime.datetime, str]]
    ) -> None:
        if value is None:
            self._headers.pop(hdrs.LAST_MODIFIED, None)
        elif isinstance(value, (int, float)):
            # Numeric value is a Unix timestamp; ceil so sub-second
            # precision never produces a time earlier than the real one.
            self._headers[hdrs.LAST_MODIFIED] = time.strftime(
                "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))
            )
        elif isinstance(value, datetime.datetime):
            self._headers[hdrs.LAST_MODIFIED] = time.strftime(
                "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()
            )
        elif isinstance(value, str):
            # Assumed to already be a valid HTTP-date string.
            self._headers[hdrs.LAST_MODIFIED] = value

    @property
    def etag(self) -> Optional[ETag]:
        """Parsed ETag header value, or None if absent/malformed."""
        quoted_value = self._headers.get(hdrs.ETAG)
        if not quoted_value:
            return None
        elif quoted_value == ETAG_ANY:
            return ETag(value=ETAG_ANY)
        match = QUOTED_ETAG_RE.fullmatch(quoted_value)
        if not match:
            return None
        is_weak, value = match.group(1, 2)
        return ETag(
            is_weak=bool(is_weak),
            value=value,
        )

    @etag.setter
    def etag(self, value: Optional[Union[ETag, str]]) -> None:
        if value is None:
            self._headers.pop(hdrs.ETAG, None)
        elif (isinstance(value, str) and value == ETAG_ANY) or (
            isinstance(value, ETag) and value.value == ETAG_ANY
        ):
            self._headers[hdrs.ETAG] = ETAG_ANY
        elif isinstance(value, str):
            validate_etag_value(value)
            self._headers[hdrs.ETAG] = f'"{value}"'
        elif isinstance(value, ETag) and isinstance(value.value, str):
            validate_etag_value(value.value)
            # Weak validators carry the W/ prefix per RFC 9110.
            hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"'
            self._headers[hdrs.ETAG] = hdr_value
        else:
            raise ValueError(
                f"Unsupported etag type: {type(value)}. "
                f"etag must be str, ETag or None"
            )

    def _generate_content_type_header(
        self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE
    ) -> None:
        # Rebuild the Content-Type header from the cached media type and
        # its parameter dict (e.g. charset).
        assert self._content_dict is not None
        assert self._content_type is not None
        params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items())
        if params:
            ctype = self._content_type + "; " + params
        else:
            ctype = self._content_type
        self._headers[CONTENT_TYPE] = ctype

    async def _do_start_compression(self, coding: ContentCoding) -> None:
        if coding is ContentCoding.identity:
            # identity means "no transformation" -- nothing to enable.
            return
        assert self._payload_writer is not None
        self._headers[hdrs.CONTENT_ENCODING] = coding.value
        self._payload_writer.enable_compression(
            coding.value, self._compression_strategy
        )
        # Compressed payload may have different content length,
        # remove the header
        self._headers.popall(hdrs.CONTENT_LENGTH, None)

    async def _start_compression(self, request: "BaseRequest") -> None:
        if self._compression_force:
            await self._do_start_compression(self._compression_force)
            return
        # Encoding comparisons should be case-insensitive
        # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
        accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
        # NOTE(review): substring test, not a full Accept-Encoding parse;
        # q-values and exact token boundaries are not honoured here.
        for value, coding in CONTENT_CODINGS.items():
            if value in accept_encoding:
                await self._do_start_compression(coding)
                return

    async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
        """Send the response headers; idempotent after the first call."""
        if self._eof_sent:
            return None
        if self._payload_writer is not None:
            return self._payload_writer
        self._must_be_empty_body = must_be_empty_body(request.method, self.status)
        return await self._start(request)

    async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
        self._req = request
        writer = self._payload_writer = request._payload_writer

        # Order matters: headers are finalized, the on-prepare signal runs,
        # then the status line + headers go on the wire.
        await self._prepare_headers()
        await request._prepare_hook(self)
        await self._write_headers()

        return writer

    async def _prepare_headers(self) -> None:
        request = self._req
        assert request is not None
        writer = self._payload_writer
        assert writer is not None
        keep_alive = self._keep_alive
        if keep_alive is None:
            # No explicit choice made -- inherit from the request.
            keep_alive = request.keep_alive
        self._keep_alive = keep_alive

        version = request.version

        headers = self._headers
        if self._cookies:
            for cookie in self._cookies.values():
                # output(header="") yields " <cookie>"; strip the leading space.
                value = cookie.output(header="")[1:]
                headers.add(hdrs.SET_COOKIE, value)

        if self._compression:
            await self._start_compression(request)

        if self._chunked:
            if version != HttpVersion11:
                raise RuntimeError(
                    "Using chunked encoding is forbidden "
                    "for HTTP/{0.major}.{0.minor}".format(request.version)
                )
            if not self._must_be_empty_body:
                writer.enable_chunking()
                headers[hdrs.TRANSFER_ENCODING] = "chunked"
        elif self._length_check:  # Disabled for WebSockets
            writer.length = self.content_length
            if writer.length is None:
                if version >= HttpVersion11:
                    if not self._must_be_empty_body:
                        # Unknown length on HTTP/1.1: fall back to chunked.
                        writer.enable_chunking()
                        headers[hdrs.TRANSFER_ENCODING] = "chunked"
                elif not self._must_be_empty_body:
                    # HTTP/1.0 signals end-of-body by closing the connection.
                    keep_alive = False

        # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2
        # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4
        if self._must_be_empty_body:
            if hdrs.CONTENT_LENGTH in headers and should_remove_content_length(
                request.method, self.status
            ):
                del headers[hdrs.CONTENT_LENGTH]
            # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-10
            # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13
            if hdrs.TRANSFER_ENCODING in headers:
                del headers[hdrs.TRANSFER_ENCODING]
        elif (writer.length if self._length_check else self.content_length) != 0:
            # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5
            headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
        headers.setdefault(hdrs.DATE, rfc822_formatted_time())
        headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)

        # connection header
        if hdrs.CONNECTION not in headers:
            if keep_alive:
                if version == HttpVersion10:
                    headers[hdrs.CONNECTION] = "keep-alive"
            elif version == HttpVersion11:
                headers[hdrs.CONNECTION] = "close"

    async def _write_headers(self) -> None:
        request = self._req
        assert request is not None
        writer = self._payload_writer
        assert writer is not None
        # status line
        version = request.version
        status_line = f"HTTP/{version[0]}.{version[1]} {self._status} {self._reason}"
        await writer.write_headers(status_line, self._headers)

    async def write(self, data: bytes) -> None:
        """Write a chunk of body data; prepare() must have been called."""
        assert isinstance(
            data, (bytes, bytearray, memoryview)
        ), "data argument must be byte-ish (%r)" % type(data)

        if self._eof_sent:
            raise RuntimeError("Cannot call write() after write_eof()")
        if self._payload_writer is None:
            raise RuntimeError("Cannot call write() before prepare()")

        await self._payload_writer.write(data)

    async def drain(self) -> None:
        assert not self._eof_sent, "EOF has already been sent"
        assert self._payload_writer is not None, "Response has not been started"
        warnings.warn(
            "drain method is deprecated, use await resp.write()",
            DeprecationWarning,
            stacklevel=2,
        )
        await self._payload_writer.drain()

    async def write_eof(self, data: bytes = b"") -> None:
        """Finish the body; idempotent, releases the request reference."""
        assert isinstance(
            data, (bytes, bytearray, memoryview)
        ), "data argument must be byte-ish (%r)" % type(data)

        if self._eof_sent:
            return

        assert self._payload_writer is not None, "Response has not been started"

        await self._payload_writer.write_eof(data)
        self._eof_sent = True
        # Drop references so the request/writer can be garbage collected.
        self._req = None
        self._body_length = self._payload_writer.output_size
        self._payload_writer = None

    def __repr__(self) -> str:
        if self._eof_sent:
            info = "eof"
        elif self.prepared:
            assert self._req is not None
            info = f"{self._req.method} {self._req.path} "
        else:
            info = "not prepared"
        return f"<{self.__class__.__name__} {self.reason} {info}>"

    # Mapping-style access to the per-response state dict (self._state).

    def __getitem__(self, key: str) -> Any:
        return self._state[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self._state[key] = value

    def __delitem__(self, key: str) -> None:
        del self._state[key]

    def __len__(self) -> int:
        return len(self._state)

    def __iter__(self) -> Iterator[str]:
        return iter(self._state)

    # Identity-based hashing/equality: two responses are never "equal"
    # unless they are the same object.
    def __hash__(self) -> int:
        return hash(id(self))

    def __eq__(self, other: object) -> bool:
        return self is other
608
+
609
+
610
class Response(StreamResponse):
    """HTTP response with an in-memory body (bytes, text or Payload)."""

    # Cache of the whole-body compression result; see _do_start_compression.
    _compressed_body: Optional[bytes] = None

    def __init__(
        self,
        *,
        body: Any = None,
        status: int = 200,
        reason: Optional[str] = None,
        text: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
        content_type: Optional[str] = None,
        charset: Optional[str] = None,
        zlib_executor_size: Optional[int] = None,
        zlib_executor: Optional[Executor] = None,
    ) -> None:
        if body is not None and text is not None:
            raise ValueError("body and text are not allowed together")

        if headers is None:
            real_headers: CIMultiDict[str] = CIMultiDict()
        elif not isinstance(headers, CIMultiDict):
            real_headers = CIMultiDict(headers)
        else:
            real_headers = headers  # = cast('CIMultiDict[str]', headers)

        if content_type is not None and "charset" in content_type:
            # charset goes in the dedicated argument, never inline.
            raise ValueError("charset must not be in content_type argument")

        if text is not None:
            if hdrs.CONTENT_TYPE in real_headers:
                if content_type or charset:
                    raise ValueError(
                        "passing both Content-Type header and "
                        "content_type or charset params "
                        "is forbidden"
                    )
            else:
                # fast path for filling headers
                if not isinstance(text, str):
                    raise TypeError("text argument must be str (%r)" % type(text))
                if content_type is None:
                    content_type = "text/plain"
                if charset is None:
                    charset = "utf-8"
                real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset
                # Encode here so the `self.body` setter path below is used
                # instead of the slower `self.text` setter.
                body = text.encode(charset)
                text = None
        elif hdrs.CONTENT_TYPE in real_headers:
            if content_type is not None or charset is not None:
                raise ValueError(
                    "passing both Content-Type header and "
                    "content_type or charset params "
                    "is forbidden"
                )
        elif content_type is not None:
            if charset is not None:
                content_type += "; charset=" + charset
            real_headers[hdrs.CONTENT_TYPE] = content_type

        # _real_headers avoids a second copy of the headers in the parent.
        super().__init__(status=status, reason=reason, _real_headers=real_headers)

        if text is not None:
            self.text = text
        else:
            self.body = body

        # Optional executor for offloading large-body compression.
        self._zlib_executor_size = zlib_executor_size
        self._zlib_executor = zlib_executor

    @property
    def body(self) -> Optional[Union[bytes, Payload]]:
        return self._body

    @body.setter
    def body(self, body: Any) -> None:
        if body is None:
            self._body = None
        elif isinstance(body, (bytes, bytearray)):
            self._body = body
        else:
            # Anything else must be adaptable to a Payload.
            try:
                self._body = body = payload.PAYLOAD_REGISTRY.get(body)
            except payload.LookupError:
                raise ValueError("Unsupported body type %r" % type(body))

            headers = self._headers

            # set content-type
            if hdrs.CONTENT_TYPE not in headers:
                headers[hdrs.CONTENT_TYPE] = body.content_type

            # copy payload headers
            if body.headers:
                for key, value in body.headers.items():
                    if key not in headers:
                        headers[key] = value

        # Any previously compressed body no longer matches the new body.
        self._compressed_body = None

    @property
    def text(self) -> Optional[str]:
        if self._body is None:
            return None
        # NOTE(review): assumes _body is bytes-like here; a Payload body
        # has no decode() -- confirm callers don't read .text in that case.
        return self._body.decode(self.charset or "utf-8")

    @text.setter
    def text(self, text: str) -> None:
        assert text is None or isinstance(
            text, str
        ), "text argument must be str (%r)" % type(text)

        if self.content_type == "application/octet-stream":
            self.content_type = "text/plain"
        if self.charset is None:
            self.charset = "utf-8"

        self._body = text.encode(self.charset)
        self._compressed_body = None

    @property
    def content_length(self) -> Optional[int]:
        if self._chunked:
            # Chunked responses carry no Content-Length by definition.
            return None

        if hdrs.CONTENT_LENGTH in self._headers:
            return int(self._headers[hdrs.CONTENT_LENGTH])

        if self._compressed_body is not None:
            # Return length of the compressed body
            return len(self._compressed_body)
        elif isinstance(self._body, Payload):
            # A payload without content length, or a compressed payload
            return None
        elif self._body is not None:
            return len(self._body)
        else:
            return 0

    @content_length.setter
    def content_length(self, value: Optional[int]) -> None:
        # Unlike StreamResponse, the length is derived from the body.
        raise RuntimeError("Content length is set automatically")

    async def write_eof(self, data: bytes = b"") -> None:
        if self._eof_sent:
            return
        # Prefer the cached compressed body when compression already ran.
        if self._compressed_body is None:
            body: Optional[Union[bytes, Payload]] = self._body
        else:
            body = self._compressed_body
        assert not data, f"data arg is not supported, got {data!r}"
        assert self._req is not None
        assert self._payload_writer is not None
        if body is None or self._must_be_empty_body:
            await super().write_eof()
        elif isinstance(self._body, Payload):
            # Payload knows how to stream itself to the writer.
            await self._body.write(self._payload_writer)
            await super().write_eof()
        else:
            await super().write_eof(cast(bytes, body))

    async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
        if hdrs.CONTENT_LENGTH in self._headers:
            if should_remove_content_length(request.method, self.status):
                del self._headers[hdrs.CONTENT_LENGTH]
        elif not self._chunked:
            if isinstance(self._body, Payload):
                if self._body.size is not None:
                    self._headers[hdrs.CONTENT_LENGTH] = str(self._body.size)
            else:
                body_len = len(self._body) if self._body else "0"
                # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7
                if body_len != "0" or (
                    self.status != 304 and request.method not in hdrs.METH_HEAD_ALL
                ):
                    self._headers[hdrs.CONTENT_LENGTH] = str(body_len)

        return await super()._start(request)

    async def _do_start_compression(self, coding: ContentCoding) -> None:
        if self._chunked or isinstance(self._body, Payload):
            # Streamed bodies must use the writer-level (streaming)
            # compression from the parent class.
            return await super()._do_start_compression(coding)
        if coding is ContentCoding.identity:
            return
        # Instead of using _payload_writer.enable_compression,
        # compress the whole body
        compressor = ZLibCompressor(
            encoding=coding.value,
            max_sync_chunk_size=self._zlib_executor_size,
            executor=self._zlib_executor,
        )
        assert self._body is not None
        if self._zlib_executor_size is None and len(self._body) > LARGE_BODY_SIZE:
            warnings.warn(
                "Synchronous compression of large response bodies "
                f"({len(self._body)} bytes) might block the async event loop. "
                "Consider providing a custom value to zlib_executor_size/"
                "zlib_executor response properties or disabling compression on it."
            )
        self._compressed_body = (
            await compressor.compress(self._body) + compressor.flush()
        )
        self._headers[hdrs.CONTENT_ENCODING] = coding.value
        # The compressed size is now known exactly.
        self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body))
815
+
816
+
817
def json_response(
    data: Any = sentinel,
    *,
    text: Optional[str] = None,
    body: Optional[bytes] = None,
    status: int = 200,
    reason: Optional[str] = None,
    headers: Optional[LooseHeaders] = None,
    content_type: str = "application/json",
    dumps: JSONEncoder = json.dumps,
) -> Response:
    """Build a :class:`Response` whose body is *data* serialized with *dumps*.

    Exactly one of ``data``, ``text`` or ``body`` may be supplied; giving
    ``data`` together with either of the others raises ``ValueError``.
    """
    if data is not sentinel:
        if text or body:
            raise ValueError("only one of data, text, or body should be specified")
        text = dumps(data)
    return Response(
        text=text,
        body=body,
        status=status,
        reason=reason,
        headers=headers,
        content_type=content_type,
    )
deepseek/lib/python3.10/site-packages/aiohttp/web_urldispatcher.py ADDED
@@ -0,0 +1,1301 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import abc
2
+ import asyncio
3
+ import base64
4
+ import functools
5
+ import hashlib
6
+ import html
7
+ import inspect
8
+ import keyword
9
+ import os
10
+ import re
11
+ import sys
12
+ import warnings
13
+ from functools import wraps
14
+ from pathlib import Path
15
+ from types import MappingProxyType
16
+ from typing import (
17
+ TYPE_CHECKING,
18
+ Any,
19
+ Awaitable,
20
+ Callable,
21
+ Container,
22
+ Dict,
23
+ Final,
24
+ Generator,
25
+ Iterable,
26
+ Iterator,
27
+ List,
28
+ Mapping,
29
+ NoReturn,
30
+ Optional,
31
+ Pattern,
32
+ Set,
33
+ Sized,
34
+ Tuple,
35
+ Type,
36
+ TypedDict,
37
+ Union,
38
+ cast,
39
+ )
40
+
41
+ from yarl import URL, __version__ as yarl_version
42
+
43
+ from . import hdrs
44
+ from .abc import AbstractMatchInfo, AbstractRouter, AbstractView
45
+ from .helpers import DEBUG
46
+ from .http import HttpVersion11
47
+ from .typedefs import Handler, PathLike
48
+ from .web_exceptions import (
49
+ HTTPException,
50
+ HTTPExpectationFailed,
51
+ HTTPForbidden,
52
+ HTTPMethodNotAllowed,
53
+ HTTPNotFound,
54
+ )
55
+ from .web_fileresponse import FileResponse
56
+ from .web_request import Request
57
+ from .web_response import Response, StreamResponse
58
+ from .web_routedef import AbstractRouteDef
59
+
60
+ __all__ = (
61
+ "UrlDispatcher",
62
+ "UrlMappingMatchInfo",
63
+ "AbstractResource",
64
+ "Resource",
65
+ "PlainResource",
66
+ "DynamicResource",
67
+ "AbstractRoute",
68
+ "ResourceRoute",
69
+ "StaticResource",
70
+ "View",
71
+ )
72
+
73
+
74
+ if TYPE_CHECKING:
75
+ from .web_app import Application
76
+
77
+ BaseDict = Dict[str, str]
78
+ else:
79
+ BaseDict = dict
80
+
81
# Exception types that signal a circular symlink while walking static
# directories; which ones apply depends on platform and Python version.
CIRCULAR_SYMLINK_ERROR = (
    (OSError,)
    if sys.version_info < (3, 10) and sys.platform.startswith("win32")
    else (RuntimeError,) if sys.version_info < (3, 13) else ()
)

# (major, minor) of the installed yarl release, parsed for feature gating.
YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2]))

# Valid HTTP method token: one or more "tchar" characters.
HTTP_METHOD_RE: Final[Pattern[str]] = re.compile(
    r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$"
)
# Matches a single {name} placeholder in a route path template, allowing
# one level of nested braces for an inline {name:regex} pattern.
ROUTE_RE: Final[Pattern[str]] = re.compile(
    r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})"
)
# Regex-escaped path separator, for building route patterns.
PATH_SEP: Final[str] = re.escape("/")


# Handler run for the request's Expect header before the main handler.
_ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]]
# Resolution result: (match info or None, set of allowed method names).
_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]]

# html.escape with quote=True pinned, used when rendering text into HTML.
html_escape = functools.partial(html.escape, quote=True)
102
+
103
+
104
class _InfoDict(TypedDict, total=False):
    """Shape of the dict returned by ``get_info()``.

    ``total=False``: each concrete resource/route type populates only the
    keys relevant to it.
    """

    path: str

    formatter: str
    pattern: Pattern[str]

    directory: Path
    prefix: str
    routes: Mapping[str, "AbstractRoute"]

    app: "Application"

    domain: str

    rule: "AbstractRuleMatching"

    http_exception: HTTPException
122
+
123
class AbstractResource(Sized, Iterable["AbstractRoute"]):
    """Base interface for routing resources.

    A resource is a sized, iterable collection of routes bound to one
    canonical path (plain, dynamic, static, sub-app, ...).
    """

    def __init__(self, *, name: Optional[str] = None) -> None:
        self._name = name

    @property
    def name(self) -> Optional[str]:
        # Optional resource name used for URL construction by name.
        return self._name

    @property
    @abc.abstractmethod
    def canonical(self) -> str:
        """Exposes the resource's canonical path.

        For example '/foo/bar/{name}'

        """

    @abc.abstractmethod  # pragma: no branch
    def url_for(self, **kwargs: str) -> URL:
        """Construct url for resource with additional params."""

    @abc.abstractmethod  # pragma: no branch
    async def resolve(self, request: Request) -> _Resolve:
        """Resolve resource.

        Return (UrlMappingMatchInfo, allowed_methods) pair.
        """

    @abc.abstractmethod
    def add_prefix(self, prefix: str) -> None:
        """Add a prefix to processed URLs.

        Required for subapplications support.
        """

    @abc.abstractmethod
    def get_info(self) -> _InfoDict:
        """Return a dict with additional info useful for introspection"""

    def freeze(self) -> None:
        # No-op by default; subclasses may lock mutation after app startup.
        pass

    @abc.abstractmethod
    def raw_match(self, path: str) -> bool:
        """Perform a raw match against path"""
169
+
170
class AbstractRoute(abc.ABC):
    """A single (method, handler) pair, optionally attached to a resource."""

    def __init__(
        self,
        method: str,
        handler: Union[Handler, Type[AbstractView]],
        *,
        expect_handler: Optional[_ExpectHandler] = None,
        resource: Optional[AbstractResource] = None,
    ) -> None:

        if expect_handler is None:
            # Default handles "Expect: 100-continue" negotiation.
            expect_handler = _default_expect_handler

        assert asyncio.iscoroutinefunction(
            expect_handler
        ), f"Coroutine is expected, got {expect_handler!r}"

        method = method.upper()
        if not HTTP_METHOD_RE.match(method):
            raise ValueError(f"{method} is not allowed HTTP method")

        assert callable(handler), handler
        if asyncio.iscoroutinefunction(handler):
            pass
        elif inspect.isgeneratorfunction(handler):
            warnings.warn(
                "Bare generators are deprecated, use @coroutine wrapper",
                DeprecationWarning,
            )
        elif isinstance(handler, type) and issubclass(handler, AbstractView):
            pass
        else:
            # Legacy plain-function handler: wrap it so callers can always
            # await the stored handler and get a StreamResponse back.
            warnings.warn(
                "Bare functions are deprecated, use async ones", DeprecationWarning
            )

            @wraps(handler)
            async def handler_wrapper(request: Request) -> StreamResponse:
                result = old_handler(request)  # type: ignore[call-arg]
                if asyncio.iscoroutine(result):
                    result = await result
                assert isinstance(result, StreamResponse)
                return result

            old_handler = handler
            handler = handler_wrapper

        self._method = method
        self._handler = handler
        self._expect_handler = expect_handler
        self._resource = resource

    @property
    def method(self) -> str:
        return self._method

    @property
    def handler(self) -> Handler:
        return self._handler

    @property
    @abc.abstractmethod
    def name(self) -> Optional[str]:
        """Optional route's name, always equals to resource's name."""

    @property
    def resource(self) -> Optional[AbstractResource]:
        return self._resource

    @abc.abstractmethod
    def get_info(self) -> _InfoDict:
        """Return a dict with additional info useful for introspection"""

    @abc.abstractmethod  # pragma: no branch
    def url_for(self, *args: str, **kwargs: str) -> URL:
        """Construct url for route with additional params."""

    async def handle_expect_header(self, request: Request) -> Optional[StreamResponse]:
        """Delegate Expect-header processing to the configured handler."""
        return await self._expect_handler(request)
249
+
250
+
251
class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo):
    """Successful match result: the match dict plus route and application stack."""

    __slots__ = ("_route", "_apps", "_current_app", "_frozen")

    def __init__(self, match_dict: Dict[str, str], route: AbstractRoute) -> None:
        super().__init__(match_dict)
        self._route = route
        # Application stack, outermost application first (built via add_app()).
        self._apps: List[Application] = []
        self._current_app: Optional[Application] = None
        self._frozen = False

    @property
    def handler(self) -> Handler:
        return self._route.handler

    @property
    def route(self) -> AbstractRoute:
        return self._route

    @property
    def expect_handler(self) -> _ExpectHandler:
        return self._route.handle_expect_header

    @property
    def http_exception(self) -> Optional[HTTPException]:
        # A successful match carries no HTTP error; see MatchInfoError.
        return None

    def get_info(self) -> _InfoDict:  # type: ignore[override]
        return self._route.get_info()

    @property
    def apps(self) -> Tuple["Application", ...]:
        return tuple(self._apps)

    def add_app(self, app: "Application") -> None:
        """Push *app* onto the front of the application stack."""
        if self._frozen:
            raise RuntimeError("Cannot change apps stack after .freeze() call")
        if self._current_app is None:
            # The first app added becomes the current one by default.
            self._current_app = app
        self._apps.insert(0, app)

    @property
    def current_app(self) -> "Application":
        app = self._current_app
        assert app is not None
        return app

    @current_app.setter
    def current_app(self, app: "Application") -> None:
        if DEBUG:  # pragma: no cover
            if app not in self._apps:
                raise RuntimeError(
                    "Expected one of the following apps {!r}, got {!r}".format(
                        self._apps, app
                    )
                )
        self._current_app = app

    def freeze(self) -> None:
        # After freezing, add_app() raises.
        self._frozen = True

    def __repr__(self) -> str:
        return f"<MatchInfo {super().__repr__()}: {self._route}>"
314
+
315
+
316
class MatchInfoError(UrlMappingMatchInfo):
    """Match info representing a resolution failure (e.g. 404 or 405).

    Wraps the HTTP exception in a SystemRoute so the normal handler
    machinery raises it when invoked.
    """

    __slots__ = ("_exception",)

    def __init__(self, http_exception: HTTPException) -> None:
        self._exception = http_exception
        super().__init__({}, SystemRoute(self._exception))

    @property
    def http_exception(self) -> HTTPException:
        return self._exception

    def __repr__(self) -> str:
        exc = self._exception
        return f"<MatchInfoError {exc.status}: {exc.reason}>"
332
+
333
+
334
async def _default_expect_handler(request: Request) -> None:
    """Default handler for Expect header.

    Just send "100 Continue" to client.
    raise HTTPExpectationFailed if value of header is not "100-continue"
    """
    # Expect is only honoured on HTTP/1.1.
    if request.version != HttpVersion11:
        return
    expect_value = request.headers.get(hdrs.EXPECT, "")
    if expect_value.lower() != "100-continue":
        raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect_value)
    await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n")
    # Reset output_size as we haven't started the main body yet.
    request.writer.output_size = 0
348
+
349
+
350
class Resource(AbstractResource):
    """Base resource mapping HTTP methods to concrete ResourceRoute objects."""

    def __init__(self, *, name: Optional[str] = None) -> None:
        super().__init__(name=name)
        # Routes keyed by upper-cased HTTP method name.
        self._routes: Dict[str, ResourceRoute] = {}
        # Route registered for hdrs.METH_ANY (matches every method), if any.
        self._any_route: Optional[ResourceRoute] = None
        self._allowed_methods: Set[str] = set()

    def add_route(
        self,
        method: str,
        handler: Union[Type[AbstractView], Handler],
        *,
        expect_handler: Optional[_ExpectHandler] = None,
    ) -> "ResourceRoute":
        """Create and register a route for *method*.

        Raises RuntimeError when the method is already registered or an
        ANY-route exists, since the new route could never be executed.
        """
        if route := self._routes.get(method, self._any_route):
            raise RuntimeError(
                "Added route will never be executed, "
                f"method {route.method} is already "
                "registered"
            )

        route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler)
        self.register_route(route_obj)
        return route_obj

    def register_route(self, route: "ResourceRoute") -> None:
        assert isinstance(
            route, ResourceRoute
        ), f"Instance of Route class is required, got {route!r}"
        if route.method == hdrs.METH_ANY:
            self._any_route = route
        self._allowed_methods.add(route.method)
        self._routes[route.method] = route

    async def resolve(self, request: Request) -> _Resolve:
        """Match the request path/method; see AbstractResource.resolve."""
        if (match_dict := self._match(request.rel_url.path_safe)) is None:
            # Path did not match at all.
            return None, set()
        if route := self._routes.get(request.method, self._any_route):
            return UrlMappingMatchInfo(match_dict, route), self._allowed_methods
        # Path matched but the method did not: caller may build a 405.
        return None, self._allowed_methods

    @abc.abstractmethod
    def _match(self, path: str) -> Optional[Dict[str, str]]:
        # Return the match dict for *path* or None when it does not match.
        pass  # pragma: no cover

    def __len__(self) -> int:
        return len(self._routes)

    def __iter__(self) -> Iterator["ResourceRoute"]:
        return iter(self._routes.values())

    # TODO: implement all abstract methods
402
+
403
+
404
class PlainResource(Resource):
    """Resource matched by exact string comparison against a fixed path."""

    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
        super().__init__(name=name)
        assert not path or path.startswith("/")
        self._path = path

    @property
    def canonical(self) -> str:
        return self._path

    def freeze(self) -> None:
        # An empty path is normalized to the root once the app is frozen.
        if not self._path:
            self._path = "/"

    def add_prefix(self, prefix: str) -> None:
        assert prefix.startswith("/")
        assert not prefix.endswith("/")
        assert len(prefix) > 1
        self._path = prefix + self._path

    def _match(self, path: str) -> Optional[Dict[str, str]]:
        # string comparison is about 10 times faster than regexp matching
        return {} if path == self._path else None

    def raw_match(self, path: str) -> bool:
        return path == self._path

    def get_info(self) -> _InfoDict:
        return {"path": self._path}

    def url_for(self) -> URL:  # type: ignore[override]
        return URL.build(path=self._path, encoded=True)

    def __repr__(self) -> str:
        label = "'" + self.name + "' " if self.name is not None else ""
        return f"<PlainResource {label} {self._path}>"
442
+
443
+
444
class DynamicResource(Resource):
    """Resource whose path contains {name} or {name:regex} placeholders."""

    # Plain placeholder: {var}
    DYN = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*)\}")
    # Placeholder with an explicit regex: {var:re}
    DYN_WITH_RE = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*):(?P<re>.+)\}")
    # Default pattern for a plain placeholder: one non-empty path segment.
    GOOD = r"[^{}/]+"

    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
        super().__init__(name=name)
        self._orig_path = path
        pattern = ""
        formatter = ""
        # Build, in parallel, a regex for matching requests and a
        # str.format() template for URL construction (url_for).
        for part in ROUTE_RE.split(path):
            match = self.DYN.fullmatch(part)
            if match:
                pattern += "(?P<{}>{})".format(match.group("var"), self.GOOD)
                formatter += "{" + match.group("var") + "}"
                continue

            match = self.DYN_WITH_RE.fullmatch(part)
            if match:
                pattern += "(?P<{var}>{re})".format(**match.groupdict())
                formatter += "{" + match.group("var") + "}"
                continue

            # Unbalanced braces that did not parse as a placeholder.
            if "{" in part or "}" in part:
                raise ValueError(f"Invalid path '{path}'['{part}']")

            part = _requote_path(part)
            formatter += part
            pattern += re.escape(part)

        try:
            compiled = re.compile(pattern)
        except re.error as exc:
            raise ValueError(f"Bad pattern '{pattern}': {exc}") from None
        assert compiled.pattern.startswith(PATH_SEP)
        assert formatter.startswith("/")
        self._pattern = compiled
        self._formatter = formatter

    @property
    def canonical(self) -> str:
        return self._formatter

    def add_prefix(self, prefix: str) -> None:
        assert prefix.startswith("/")
        assert not prefix.endswith("/")
        assert len(prefix) > 1
        # Both the regex and the formatter must stay in sync.
        self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern)
        self._formatter = prefix + self._formatter

    def _match(self, path: str) -> Optional[Dict[str, str]]:
        match = self._pattern.fullmatch(path)
        if match is None:
            return None
        # Unquote each captured variable before exposing it to handlers.
        return {
            key: _unquote_path_safe(value) for key, value in match.groupdict().items()
        }

    def raw_match(self, path: str) -> bool:
        return self._orig_path == path

    def get_info(self) -> _InfoDict:
        return {"formatter": self._formatter, "pattern": self._pattern}

    def url_for(self, **parts: str) -> URL:
        url = self._formatter.format_map({k: _quote_path(v) for k, v in parts.items()})
        return URL.build(path=url, encoded=True)

    def __repr__(self) -> str:
        name = "'" + self.name + "' " if self.name is not None else ""
        return "<DynamicResource {name} {formatter}>".format(
            name=name, formatter=self._formatter
        )
518
+
519
+
520
class PrefixResource(AbstractResource):
    """Base class for resources matching everything under a URL prefix."""

    def __init__(self, prefix: str, *, name: Optional[str] = None) -> None:
        assert not prefix or prefix.startswith("/"), prefix
        assert prefix in ("", "/") or not prefix.endswith("/"), prefix
        super().__init__(name=name)
        self._prefix = _requote_path(prefix)
        # Prefix plus trailing slash, precomputed for fast startswith checks.
        self._prefix2 = self._prefix + "/"

    @property
    def canonical(self) -> str:
        return self._prefix

    def add_prefix(self, prefix: str) -> None:
        assert prefix.startswith("/")
        assert not prefix.endswith("/")
        assert len(prefix) > 1
        self._prefix = prefix + self._prefix
        self._prefix2 = self._prefix + "/"

    def raw_match(self, prefix: str) -> bool:
        # Prefix resources never participate in exact raw-path reuse.
        return False

    # TODO: impl missing abstract methods
543
+
544
+
545
class StaticResource(PrefixResource):
    """Resource serving files from a directory under a URL prefix."""

    # Query-string key used for cache-busting version hashes.
    VERSION_KEY = "v"

    def __init__(
        self,
        prefix: str,
        directory: PathLike,
        *,
        name: Optional[str] = None,
        expect_handler: Optional[_ExpectHandler] = None,
        chunk_size: int = 256 * 1024,
        show_index: bool = False,
        follow_symlinks: bool = False,
        append_version: bool = False,
    ) -> None:
        super().__init__(prefix, name=name)
        try:
            directory = Path(directory).expanduser().resolve(strict=True)
        except FileNotFoundError as error:
            raise ValueError(f"'{directory}' does not exist") from error
        if not directory.is_dir():
            raise ValueError(f"'{directory}' is not a directory")
        self._directory = directory
        self._show_index = show_index
        self._chunk_size = chunk_size
        self._follow_symlinks = follow_symlinks
        self._expect_handler = expect_handler
        self._append_version = append_version

        # Static files are served for GET and HEAD only (plus an optional
        # OPTIONS route via set_options_route()).
        self._routes = {
            "GET": ResourceRoute(
                "GET", self._handle, self, expect_handler=expect_handler
            ),
            "HEAD": ResourceRoute(
                "HEAD", self._handle, self, expect_handler=expect_handler
            ),
        }
        self._allowed_methods = set(self._routes)

    def url_for(  # type: ignore[override]
        self,
        *,
        filename: PathLike,
        append_version: Optional[bool] = None,
    ) -> URL:
        """Build a URL for *filename*, optionally appending a version hash."""
        if append_version is None:
            append_version = self._append_version
        filename = str(filename).lstrip("/")

        url = URL.build(path=self._prefix, encoded=True)
        # filename is not encoded
        if YARL_VERSION < (1, 6):
            url = url / filename.replace("%", "%25")
        else:
            url = url / filename

        if append_version:
            unresolved_path = self._directory.joinpath(filename)
            try:
                if self._follow_symlinks:
                    normalized_path = Path(os.path.normpath(unresolved_path))
                    normalized_path.relative_to(self._directory)
                    filepath = normalized_path.resolve()
                else:
                    filepath = unresolved_path.resolve()
                    filepath.relative_to(self._directory)
            except (ValueError, FileNotFoundError):
                # ValueError for case when path point to symlink
                # with follow_symlinks is False
                return url  # relatively safe
            if filepath.is_file():
                # TODO cache file content
                # with file watcher for cache invalidation
                with filepath.open("rb") as f:
                    file_bytes = f.read()
                h = self._get_file_hash(file_bytes)
                url = url.with_query({self.VERSION_KEY: h})
                return url
        return url

    @staticmethod
    def _get_file_hash(byte_array: bytes) -> str:
        # URL-safe base64 of the SHA-256 digest of the file contents.
        m = hashlib.sha256()  # todo sha256 can be configurable param
        m.update(byte_array)
        b64 = base64.urlsafe_b64encode(m.digest())
        return b64.decode("ascii")

    def get_info(self) -> _InfoDict:
        return {
            "directory": self._directory,
            "prefix": self._prefix,
            "routes": self._routes,
        }

    def set_options_route(self, handler: Handler) -> None:
        """Register an OPTIONS route (at most once)."""
        if "OPTIONS" in self._routes:
            raise RuntimeError("OPTIONS route was set already")
        self._routes["OPTIONS"] = ResourceRoute(
            "OPTIONS", handler, self, expect_handler=self._expect_handler
        )
        self._allowed_methods.add("OPTIONS")

    async def resolve(self, request: Request) -> _Resolve:
        path = request.rel_url.path_safe
        method = request.method
        # Either an exact prefix hit or anything below "<prefix>/".
        if not path.startswith(self._prefix2) and path != self._prefix:
            return None, set()

        allowed_methods = self._allowed_methods
        if method not in allowed_methods:
            return None, allowed_methods

        match_dict = {"filename": _unquote_path_safe(path[len(self._prefix) + 1 :])}
        return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods)

    def __len__(self) -> int:
        return len(self._routes)

    def __iter__(self) -> Iterator[AbstractRoute]:
        return iter(self._routes.values())

    async def _handle(self, request: Request) -> StreamResponse:
        rel_url = request.match_info["filename"]
        filename = Path(rel_url)
        if filename.anchor:
            # rel_url is an absolute name like
            # /static/\\machine_name\c$ or /static/D:\path
            # where the static dir is totally different
            raise HTTPForbidden()

        unresolved_path = self._directory.joinpath(filename)
        loop = asyncio.get_running_loop()
        # File-system work happens in the default executor to avoid
        # blocking the event loop.
        return await loop.run_in_executor(
            None, self._resolve_path_to_response, unresolved_path
        )

    def _resolve_path_to_response(self, unresolved_path: Path) -> StreamResponse:
        """Take the unresolved path and query the file system to form a response."""
        # Check for access outside the root directory. For follow symlinks, URI
        # cannot traverse out, but symlinks can. Otherwise, no access outside
        # root is permitted.
        try:
            if self._follow_symlinks:
                normalized_path = Path(os.path.normpath(unresolved_path))
                normalized_path.relative_to(self._directory)
                file_path = normalized_path.resolve()
            else:
                file_path = unresolved_path.resolve()
                file_path.relative_to(self._directory)
        except (ValueError, *CIRCULAR_SYMLINK_ERROR) as error:
            # ValueError is raised for the relative check. Circular symlinks
            # raise here on resolving for python < 3.13.
            raise HTTPNotFound() from error

        # if path is a directory, return the contents if permitted. Note the
        # directory check will raise if a segment is not readable.
        try:
            if file_path.is_dir():
                if self._show_index:
                    return Response(
                        text=self._directory_as_html(file_path),
                        content_type="text/html",
                    )
                else:
                    raise HTTPForbidden()
        except PermissionError as error:
            raise HTTPForbidden() from error

        # Return the file response, which handles all other checks.
        return FileResponse(file_path, chunk_size=self._chunk_size)

    def _directory_as_html(self, dir_path: Path) -> str:
        """returns directory's index as html."""
        assert dir_path.is_dir()

        relative_path_to_dir = dir_path.relative_to(self._directory).as_posix()
        index_of = f"Index of /{html_escape(relative_path_to_dir)}"
        h1 = f"<h1>{index_of}</h1>"

        index_list = []
        dir_index = dir_path.iterdir()
        for _file in sorted(dir_index):
            # show file url as relative to static path
            rel_path = _file.relative_to(self._directory).as_posix()
            quoted_file_url = _quote_path(f"{self._prefix}/{rel_path}")

            # if file is a directory, add '/' to the end of the name
            if _file.is_dir():
                file_name = f"{_file.name}/"
            else:
                file_name = _file.name

            index_list.append(
                f'<li><a href="{quoted_file_url}">{html_escape(file_name)}</a></li>'
            )
        ul = "<ul>\n{}\n</ul>".format("\n".join(index_list))
        body = f"<body>\n{h1}\n{ul}\n</body>"

        head_str = f"<head>\n<title>{index_of}</title>\n</head>"
        html = f"<html>\n{head_str}\n{body}\n</html>"

        return html

    def __repr__(self) -> str:
        name = "'" + self.name + "'" if self.name is not None else ""
        return "<StaticResource {name} {path} -> {directory!r}>".format(
            name=name, path=self._prefix, directory=self._directory
        )
753
+
754
+
755
class PrefixedSubAppResource(PrefixResource):
    """Resource delegating all URLs under a prefix to a sub-application."""

    def __init__(self, prefix: str, app: "Application") -> None:
        super().__init__(prefix)
        self._app = app
        self._add_prefix_to_resources(prefix)

    def add_prefix(self, prefix: str) -> None:
        super().add_prefix(prefix)
        self._add_prefix_to_resources(prefix)

    def _add_prefix_to_resources(self, prefix: str) -> None:
        # Propagate the prefix into each of the sub-app's resources.
        router = self._app.router
        for resource in router.resources():
            # Since the canonical path of a resource is about
            # to change, we need to unindex it and then reindex
            router.unindex_resource(resource)
            resource.add_prefix(prefix)
            router.index_resource(resource)

    def url_for(self, *args: str, **kwargs: str) -> URL:
        raise RuntimeError(".url_for() is not supported by sub-application root")

    def get_info(self) -> _InfoDict:
        return {"app": self._app, "prefix": self._prefix}

    async def resolve(self, request: Request) -> _Resolve:
        """Delegate resolution to the sub-application's router."""
        match_info = await self._app.router.resolve(request)
        match_info.add_app(self._app)
        if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
            # Surface the allowed methods so the parent router can build a 405.
            methods = match_info.http_exception.allowed_methods
        else:
            methods = set()
        return match_info, methods

    def __len__(self) -> int:
        return len(self._app.router.routes())

    def __iter__(self) -> Iterator[AbstractRoute]:
        return iter(self._app.router.routes())

    def __repr__(self) -> str:
        return "<PrefixedSubAppResource {prefix} -> {app!r}>".format(
            prefix=self._prefix, app=self._app
        )
799
+
800
+
801
class AbstractRuleMatching(abc.ABC):
    """Interface for request-matching rules (used by MatchedSubAppResource)."""

    @abc.abstractmethod  # pragma: no branch
    async def match(self, request: Request) -> bool:
        """Return bool if the request satisfies the criteria"""

    @abc.abstractmethod  # pragma: no branch
    def get_info(self) -> _InfoDict:
        """Return a dict with additional info useful for introspection"""

    @property
    @abc.abstractmethod  # pragma: no branch
    def canonical(self) -> str:
        """Return a str"""
814
+
815
+
816
class Domain(AbstractRuleMatching):
    """Rule matching requests whose Host header equals a fixed domain."""

    # One DNS label: 1-63 chars of [a-z0-9-], no leading/trailing hyphen.
    re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(?<!-)")

    def __init__(self, domain: str) -> None:
        super().__init__()
        self._domain = self.validation(domain)

    @property
    def canonical(self) -> str:
        return self._domain

    def validation(self, domain: str) -> str:
        """Normalize and validate *domain*; return canonical host[:port].

        Raises TypeError for non-str input and ValueError for empty,
        scheme-bearing, or otherwise invalid domains.
        """
        if not isinstance(domain, str):
            raise TypeError("Domain must be str")
        domain = domain.rstrip(".").lower()
        if not domain:
            raise ValueError("Domain cannot be empty")
        elif "://" in domain:
            raise ValueError("Scheme not supported")
        url = URL("http://" + domain)
        assert url.raw_host is not None
        if not all(self.re_part.fullmatch(x) for x in url.raw_host.split(".")):
            raise ValueError("Domain not valid")
        if url.port == 80:
            # The default HTTP port is omitted from the canonical form.
            return url.raw_host
        return f"{url.raw_host}:{url.port}"

    async def match(self, request: Request) -> bool:
        host = request.headers.get(hdrs.HOST)
        if not host:
            return False
        return self.match_domain(host)

    def match_domain(self, host: str) -> bool:
        return host.lower() == self._domain

    def get_info(self) -> _InfoDict:
        return {"domain": self._domain}
854
+
855
+
856
class MaskDomain(Domain):
    """Domain rule supporting '*' wildcards in the host name."""

    # Same label pattern as Domain, additionally allowing '*'.
    re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(?<!-)")

    def __init__(self, domain: str) -> None:
        super().__init__(domain)
        # Translate the validated domain into a regex: literal dots,
        # '*' becomes '.*'.
        escaped = self._domain.replace(".", r"\.")
        self._mask = re.compile(escaped.replace("*", ".*"))

    @property
    def canonical(self) -> str:
        return self._mask.pattern

    def match_domain(self, host: str) -> bool:
        return bool(self._mask.fullmatch(host))
870
+
871
+
872
class MatchedSubAppResource(PrefixedSubAppResource):
    """Sub-application resource selected by a matching rule instead of a prefix."""

    def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None:
        # Deliberately bypasses PrefixResource.__init__: there is no URL
        # prefix for rule-based matching, so _prefix stays empty.
        AbstractResource.__init__(self)
        self._prefix = ""
        self._app = app
        self._rule = rule

    @property
    def canonical(self) -> str:
        return self._rule.canonical

    def get_info(self) -> _InfoDict:
        return {"app": self._app, "rule": self._rule}

    async def resolve(self, request: Request) -> _Resolve:
        """Resolve via the sub-app router, but only when the rule matches."""
        if not await self._rule.match(request):
            return None, set()
        match_info = await self._app.router.resolve(request)
        match_info.add_app(self._app)
        if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
            methods = match_info.http_exception.allowed_methods
        else:
            methods = set()
        return match_info, methods

    def __repr__(self) -> str:
        return f"<MatchedSubAppResource -> {self._app!r}>"
899
+
900
+
901
class ResourceRoute(AbstractRoute):
    """A route with resource"""

    def __init__(
        self,
        method: str,
        handler: Union[Handler, Type[AbstractView]],
        resource: AbstractResource,
        *,
        expect_handler: Optional[_ExpectHandler] = None,
    ) -> None:
        super().__init__(
            method, handler, expect_handler=expect_handler, resource=resource
        )

    def __repr__(self) -> str:
        # Fixed: the format string previously lacked the closing ">",
        # unlike every other __repr__ in this module.
        return "<ResourceRoute [{method}] {resource} -> {handler!r}>".format(
            method=self.method, resource=self._resource, handler=self.handler
        )

    @property
    def name(self) -> Optional[str]:
        """Route name; mirrors the owning resource's name (None if unnamed)."""
        if self._resource is None:
            return None
        return self._resource.name

    def url_for(self, *args: str, **kwargs: str) -> URL:
        """Construct url for route with additional params."""
        assert self._resource is not None
        return self._resource.url_for(*args, **kwargs)

    def get_info(self) -> _InfoDict:
        assert self._resource is not None
        return self._resource.get_info()
935
+
936
+
937
class SystemRoute(AbstractRoute):
    """Internal route whose handler raises a pre-built HTTP exception."""

    def __init__(self, http_exception: HTTPException) -> None:
        # Matches any method; the handler simply raises the stored exception.
        super().__init__(hdrs.METH_ANY, self._handle)
        self._http_exception = http_exception

    def url_for(self, *args: str, **kwargs: str) -> URL:
        raise RuntimeError(".url_for() is not allowed for SystemRoute")

    @property
    def name(self) -> Optional[str]:
        return None

    def get_info(self) -> _InfoDict:
        return {"http_exception": self._http_exception}

    async def _handle(self, request: Request) -> StreamResponse:
        raise self._http_exception

    @property
    def status(self) -> int:
        return self._http_exception.status

    @property
    def reason(self) -> str:
        return self._http_exception.reason

    def __repr__(self) -> str:
        return "<SystemRoute {self.status}: {self.reason}>".format(self=self)
965
+
966
+
967
class View(AbstractView):
    """Class-based handler dispatching to the lower-cased HTTP-method attribute."""

    async def _iter(self) -> StreamResponse:
        req_method = self.request.method
        if req_method not in hdrs.METH_ALL:
            self._raise_allowed_methods()
        handler: Optional[Callable[[], Awaitable[StreamResponse]]] = getattr(
            self, req_method.lower(), None
        )
        if handler is None:
            self._raise_allowed_methods()
        response = await handler()
        assert isinstance(response, StreamResponse)
        return response

    def __await__(self) -> Generator[Any, None, StreamResponse]:
        return self._iter().__await__()

    def _raise_allowed_methods(self) -> NoReturn:
        # Advertise every method for which a handler attribute exists.
        allowed = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())}
        raise HTTPMethodNotAllowed(self.request.method, allowed)
985
+
986
+
987
class ResourcesView(Sized, Iterable[AbstractResource], Container[AbstractResource]):
    """Read-only view over a router's registered resources."""

    def __init__(self, resources: List[AbstractResource]) -> None:
        self._resources = resources

    def __len__(self) -> int:
        return len(self._resources)

    def __iter__(self) -> Iterator[AbstractResource]:
        return iter(self._resources)

    def __contains__(self, resource: object) -> bool:
        return resource in self._resources
999
+
1000
+
1001
class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]):
    """Read-only view over every route of every resource, flattened."""

    def __init__(self, resources: List[AbstractResource]):
        # Flatten all routes, preserving resource registration order.
        self._routes: List[AbstractRoute] = [
            route for resource in resources for route in resource
        ]

    def __len__(self) -> int:
        return len(self._routes)

    def __iter__(self) -> Iterator[AbstractRoute]:
        return iter(self._routes)

    def __contains__(self, route: object) -> bool:
        return route in self._routes
1016
+
1017
+
1018
+ class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):
1019
+
1020
+ NAME_SPLIT_RE = re.compile(r"[.:-]")
1021
+
1022
+ def __init__(self) -> None:
1023
+ super().__init__()
1024
+ self._resources: List[AbstractResource] = []
1025
+ self._named_resources: Dict[str, AbstractResource] = {}
1026
+ self._resource_index: dict[str, list[AbstractResource]] = {}
1027
+ self._matched_sub_app_resources: List[MatchedSubAppResource] = []
1028
+
1029
+ async def resolve(self, request: Request) -> UrlMappingMatchInfo:
1030
+ resource_index = self._resource_index
1031
+ allowed_methods: Set[str] = set()
1032
+
1033
+ # Walk the url parts looking for candidates. We walk the url backwards
1034
+ # to ensure the most explicit match is found first. If there are multiple
1035
+ # candidates for a given url part because there are multiple resources
1036
+ # registered for the same canonical path, we resolve them in a linear
1037
+ # fashion to ensure registration order is respected.
1038
+ url_part = request.rel_url.path_safe
1039
+ while url_part:
1040
+ for candidate in resource_index.get(url_part, ()):
1041
+ match_dict, allowed = await candidate.resolve(request)
1042
+ if match_dict is not None:
1043
+ return match_dict
1044
+ else:
1045
+ allowed_methods |= allowed
1046
+ if url_part == "/":
1047
+ break
1048
+ url_part = url_part.rpartition("/")[0] or "/"
1049
+
1050
+ #
1051
+ # We didn't find any candidates, so we'll try the matched sub-app
1052
+ # resources which we have to walk in a linear fashion because they
1053
+ # have regex/wildcard match rules and we cannot index them.
1054
+ #
1055
+ # For most cases we do not expect there to be many of these since
1056
+ # currently they are only added by `add_domain`
1057
+ #
1058
+ for resource in self._matched_sub_app_resources:
1059
+ match_dict, allowed = await resource.resolve(request)
1060
+ if match_dict is not None:
1061
+ return match_dict
1062
+ else:
1063
+ allowed_methods |= allowed
1064
+
1065
+ if allowed_methods:
1066
+ return MatchInfoError(HTTPMethodNotAllowed(request.method, allowed_methods))
1067
+
1068
+ return MatchInfoError(HTTPNotFound())
1069
+
1070
+ def __iter__(self) -> Iterator[str]:
1071
+ return iter(self._named_resources)
1072
+
1073
+ def __len__(self) -> int:
1074
+ return len(self._named_resources)
1075
+
1076
+ def __contains__(self, resource: object) -> bool:
1077
+ return resource in self._named_resources
1078
+
1079
+ def __getitem__(self, name: str) -> AbstractResource:
1080
+ return self._named_resources[name]
1081
+
1082
+ def resources(self) -> ResourcesView:
1083
+ return ResourcesView(self._resources)
1084
+
1085
+ def routes(self) -> RoutesView:
1086
+ return RoutesView(self._resources)
1087
+
1088
+ def named_resources(self) -> Mapping[str, AbstractResource]:
1089
+ return MappingProxyType(self._named_resources)
1090
+
1091
+ def register_resource(self, resource: AbstractResource) -> None:
1092
+ assert isinstance(
1093
+ resource, AbstractResource
1094
+ ), f"Instance of AbstractResource class is required, got {resource!r}"
1095
+ if self.frozen:
1096
+ raise RuntimeError("Cannot register a resource into frozen router.")
1097
+
1098
+ name = resource.name
1099
+
1100
+ if name is not None:
1101
+ parts = self.NAME_SPLIT_RE.split(name)
1102
+ for part in parts:
1103
+ if keyword.iskeyword(part):
1104
+ raise ValueError(
1105
+ f"Incorrect route name {name!r}, "
1106
+ "python keywords cannot be used "
1107
+ "for route name"
1108
+ )
1109
+ if not part.isidentifier():
1110
+ raise ValueError(
1111
+ "Incorrect route name {!r}, "
1112
+ "the name should be a sequence of "
1113
+ "python identifiers separated "
1114
+ "by dash, dot or column".format(name)
1115
+ )
1116
+ if name in self._named_resources:
1117
+ raise ValueError(
1118
+ "Duplicate {!r}, "
1119
+ "already handled by {!r}".format(name, self._named_resources[name])
1120
+ )
1121
+ self._named_resources[name] = resource
1122
+ self._resources.append(resource)
1123
+
1124
+ if isinstance(resource, MatchedSubAppResource):
1125
+ # We cannot index match sub-app resources because they have match rules
1126
+ self._matched_sub_app_resources.append(resource)
1127
+ else:
1128
+ self.index_resource(resource)
1129
+
1130
+ def _get_resource_index_key(self, resource: AbstractResource) -> str:
1131
+ """Return a key to index the resource in the resource index."""
1132
+ if "{" in (index_key := resource.canonical):
1133
+ # strip at the first { to allow for variables, and than
1134
+ # rpartition at / to allow for variable parts in the path
1135
+ # For example if the canonical path is `/core/locations{tail:.*}`
1136
+ # the index key will be `/core` since index is based on the
1137
+ # url parts split by `/`
1138
+ index_key = index_key.partition("{")[0].rpartition("/")[0]
1139
+ return index_key.rstrip("/") or "/"
1140
+
1141
+ def index_resource(self, resource: AbstractResource) -> None:
1142
+ """Add a resource to the resource index."""
1143
+ resource_key = self._get_resource_index_key(resource)
1144
+ # There may be multiple resources for a canonical path
1145
+ # so we keep them in a list to ensure that registration
1146
+ # order is respected.
1147
+ self._resource_index.setdefault(resource_key, []).append(resource)
1148
+
1149
+ def unindex_resource(self, resource: AbstractResource) -> None:
1150
+ """Remove a resource from the resource index."""
1151
+ resource_key = self._get_resource_index_key(resource)
1152
+ self._resource_index[resource_key].remove(resource)
1153
+
1154
+ def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource:
1155
+ if path and not path.startswith("/"):
1156
+ raise ValueError("path should be started with / or be empty")
1157
+ # Reuse last added resource if path and name are the same
1158
+ if self._resources:
1159
+ resource = self._resources[-1]
1160
+ if resource.name == name and resource.raw_match(path):
1161
+ return cast(Resource, resource)
1162
+ if not ("{" in path or "}" in path or ROUTE_RE.search(path)):
1163
+ resource = PlainResource(path, name=name)
1164
+ self.register_resource(resource)
1165
+ return resource
1166
+ resource = DynamicResource(path, name=name)
1167
+ self.register_resource(resource)
1168
+ return resource
1169
+
1170
+ def add_route(
1171
+ self,
1172
+ method: str,
1173
+ path: str,
1174
+ handler: Union[Handler, Type[AbstractView]],
1175
+ *,
1176
+ name: Optional[str] = None,
1177
+ expect_handler: Optional[_ExpectHandler] = None,
1178
+ ) -> AbstractRoute:
1179
+ resource = self.add_resource(path, name=name)
1180
+ return resource.add_route(method, handler, expect_handler=expect_handler)
1181
+
1182
+ def add_static(
1183
+ self,
1184
+ prefix: str,
1185
+ path: PathLike,
1186
+ *,
1187
+ name: Optional[str] = None,
1188
+ expect_handler: Optional[_ExpectHandler] = None,
1189
+ chunk_size: int = 256 * 1024,
1190
+ show_index: bool = False,
1191
+ follow_symlinks: bool = False,
1192
+ append_version: bool = False,
1193
+ ) -> AbstractResource:
1194
+ """Add static files view.
1195
+
1196
+ prefix - url prefix
1197
+ path - folder with files
1198
+
1199
+ """
1200
+ assert prefix.startswith("/")
1201
+ if prefix.endswith("/"):
1202
+ prefix = prefix[:-1]
1203
+ resource = StaticResource(
1204
+ prefix,
1205
+ path,
1206
+ name=name,
1207
+ expect_handler=expect_handler,
1208
+ chunk_size=chunk_size,
1209
+ show_index=show_index,
1210
+ follow_symlinks=follow_symlinks,
1211
+ append_version=append_version,
1212
+ )
1213
+ self.register_resource(resource)
1214
+ return resource
1215
+
1216
+ def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
1217
+ """Shortcut for add_route with method HEAD."""
1218
+ return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs)
1219
+
1220
+ def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
1221
+ """Shortcut for add_route with method OPTIONS."""
1222
+ return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs)
1223
+
1224
+ def add_get(
1225
+ self,
1226
+ path: str,
1227
+ handler: Handler,
1228
+ *,
1229
+ name: Optional[str] = None,
1230
+ allow_head: bool = True,
1231
+ **kwargs: Any,
1232
+ ) -> AbstractRoute:
1233
+ """Shortcut for add_route with method GET.
1234
+
1235
+ If allow_head is true, another
1236
+ route is added allowing head requests to the same endpoint.
1237
+ """
1238
+ resource = self.add_resource(path, name=name)
1239
+ if allow_head:
1240
+ resource.add_route(hdrs.METH_HEAD, handler, **kwargs)
1241
+ return resource.add_route(hdrs.METH_GET, handler, **kwargs)
1242
+
1243
+ def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
1244
+ """Shortcut for add_route with method POST."""
1245
+ return self.add_route(hdrs.METH_POST, path, handler, **kwargs)
1246
+
1247
+ def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
1248
+ """Shortcut for add_route with method PUT."""
1249
+ return self.add_route(hdrs.METH_PUT, path, handler, **kwargs)
1250
+
1251
+ def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
1252
+ """Shortcut for add_route with method PATCH."""
1253
+ return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs)
1254
+
1255
+ def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
1256
+ """Shortcut for add_route with method DELETE."""
1257
+ return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs)
1258
+
1259
+ def add_view(
1260
+ self, path: str, handler: Type[AbstractView], **kwargs: Any
1261
+ ) -> AbstractRoute:
1262
+ """Shortcut for add_route with ANY methods for a class-based view."""
1263
+ return self.add_route(hdrs.METH_ANY, path, handler, **kwargs)
1264
+
1265
+ def freeze(self) -> None:
1266
+ super().freeze()
1267
+ for resource in self._resources:
1268
+ resource.freeze()
1269
+
1270
+ def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
1271
+ """Append routes to route table.
1272
+
1273
+ Parameter should be a sequence of RouteDef objects.
1274
+
1275
+ Returns a list of registered AbstractRoute instances.
1276
+ """
1277
+ registered_routes = []
1278
+ for route_def in routes:
1279
+ registered_routes.extend(route_def.register(self))
1280
+ return registered_routes
1281
+
1282
+
1283
def _quote_path(value: str) -> str:
    """Percent-encode *value* as a URL path."""
    if YARL_VERSION < (1, 6):
        # yarl < 1.6 mishandled a literal "%" during build; pre-escape it.
        value = value.replace("%", "%25")
    built = URL.build(path=value, encoded=False)
    return built.raw_path
1287
+
1288
+
1289
def _unquote_path_safe(value: str) -> str:
    """Unquote only the %2F and %25 escapes, leaving all others intact."""
    if "%" in value:
        return value.replace("%2F", "/").replace("%25", "%")
    # No escapes present: nothing to do.
    return value
1293
+
1294
+
1295
def _requote_path(value: str) -> str:
    """Quote characters that must be escaped while preserving existing
    %-sequences already present in *value*."""
    quoted = _quote_path(value)
    if "%" in value:
        # _quote_path escaped the pre-existing "%" signs; restore them.
        quoted = quoted.replace("%25", "%")
    return quoted
deepseek/lib/python3.10/site-packages/aiohttp/worker.py ADDED
@@ -0,0 +1,247 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Async gunicorn worker for aiohttp.web"""
2
+
3
+ import asyncio
4
+ import os
5
+ import re
6
+ import signal
7
+ import sys
8
+ from types import FrameType
9
+ from typing import Any, Awaitable, Callable, Optional, Union # noqa
10
+
11
+ from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat
12
+ from gunicorn.workers import base
13
+
14
+ from aiohttp import web
15
+
16
+ from .helpers import set_result
17
+ from .web_app import Application
18
+ from .web_log import AccessLogger
19
+
20
+ try:
21
+ import ssl
22
+
23
+ SSLContext = ssl.SSLContext
24
+ except ImportError: # pragma: no cover
25
+ ssl = None # type: ignore[assignment]
26
+ SSLContext = object # type: ignore[misc,assignment]
27
+
28
+
29
+ __all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker")
30
+
31
+
32
class GunicornWebWorker(base.Worker):  # type: ignore[misc,no-any-unimported]
    """Gunicorn worker that serves an aiohttp web application on asyncio.

    The worker creates a fresh event loop after fork, runs the app through
    an :class:`aiohttp.web.AppRunner`, and watches itself for the usual
    gunicorn shutdown conditions (max requests reached, parent changed,
    signals).
    """

    DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT
    DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default

    def __init__(self, *args: Any, **kw: Any) -> None:  # pragma: no cover
        super().__init__(*args, **kw)

        # Main serving task created in run().
        self._task: Optional[asyncio.Task[None]] = None
        # Propagated to sys.exit() when the worker finishes.
        self.exit_code = 0
        # Future used to wake the notify loop early on shutdown.
        self._notify_waiter: Optional[asyncio.Future[bool]] = None

    def init_process(self) -> None:
        """Create a new event loop after fork, then run gunicorn's setup."""
        # The loop inherited from the parent process must not be reused.
        asyncio.get_event_loop().close()

        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        super().init_process()

    def run(self) -> None:
        """Gunicorn entry point: serve until shutdown, then exit the process."""
        self._task = self.loop.create_task(self._run())

        try:  # ignore all finalization problems
            self.loop.run_until_complete(self._task)
        except Exception:
            self.log.exception("Exception in gunicorn worker")
        self.loop.run_until_complete(self.loop.shutdown_asyncgens())
        self.loop.close()

        sys.exit(self.exit_code)

    async def _run(self) -> None:
        """Build the AppRunner, start a site per socket and serve until stop."""
        runner = None
        if isinstance(self.wsgi, Application):
            app = self.wsgi
        elif asyncio.iscoroutinefunction(self.wsgi):
            # The entry point may be an async factory returning either an
            # Application or a pre-configured AppRunner.
            wsgi = await self.wsgi()
            if isinstance(wsgi, web.AppRunner):
                runner = wsgi
                app = runner.app
            else:
                app = wsgi
        else:
            raise RuntimeError(
                "wsgi app should be either Application or "
                "async function returning Application, got {}".format(self.wsgi)
            )

        if runner is None:
            access_log = self.log.access_log if self.cfg.accesslog else None
            runner = web.AppRunner(
                app,
                logger=self.log,
                keepalive_timeout=self.cfg.keepalive,
                access_log=access_log,
                access_log_format=self._get_valid_log_format(
                    self.cfg.access_log_format
                ),
                # Keep a 5% margin of the graceful timeout for final cleanup.
                shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
            )
        await runner.setup()

        ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None

        # NOTE: removed a dead `runner = runner` no-op present previously.
        assert runner is not None
        server = runner.server
        assert server is not None
        for sock in self.sockets:
            site = web.SockSite(
                runner,
                sock,
                ssl_context=ctx,
            )
            await site.start()

        # If our parent changed then we shut down.
        pid = os.getpid()
        try:
            while self.alive:  # type: ignore[has-type]
                self.notify()

                cnt = server.requests_count
                if self.max_requests and cnt > self.max_requests:
                    self.alive = False
                    self.log.info("Max requests, shutting down: %s", self)

                elif pid == os.getpid() and self.ppid != os.getppid():
                    self.alive = False
                    self.log.info("Parent changed, shutting down: %s", self)
                else:
                    await self._wait_next_notify()
        except BaseException:
            # Best-effort loop: any interruption (including cancellation)
            # falls through to cleanup below.
            pass

        await runner.cleanup()

    def _wait_next_notify(self) -> "asyncio.Future[bool]":
        """Return a future resolved after ~1s or on explicit wakeup."""
        self._notify_waiter_done()

        loop = self.loop
        assert loop is not None
        self._notify_waiter = waiter = loop.create_future()
        self.loop.call_later(1.0, self._notify_waiter_done, waiter)

        return waiter

    def _notify_waiter_done(
        self, waiter: Optional["asyncio.Future[bool]"] = None
    ) -> None:
        """Resolve *waiter* (or the current notify waiter) exactly once."""
        if waiter is None:
            waiter = self._notify_waiter
        if waiter is not None:
            set_result(waiter, True)

        if waiter is self._notify_waiter:
            self._notify_waiter = None

    def init_signals(self) -> None:
        """Install signal handlers through the event loop API."""
        self.loop.add_signal_handler(
            signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None
        )

        self.loop.add_signal_handler(
            signal.SIGTERM, self.handle_exit, signal.SIGTERM, None
        )

        self.loop.add_signal_handler(
            signal.SIGINT, self.handle_quit, signal.SIGINT, None
        )

        self.loop.add_signal_handler(
            signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None
        )

        self.loop.add_signal_handler(
            signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None
        )

        self.loop.add_signal_handler(
            signal.SIGABRT, self.handle_abort, signal.SIGABRT, None
        )

        # Don't let SIGTERM and SIGUSR1 disturb active requests
        # by interrupting system calls
        signal.siginterrupt(signal.SIGTERM, False)
        signal.siginterrupt(signal.SIGUSR1, False)
        # Reset signals so Gunicorn doesn't swallow subprocess return codes
        # See: https://github.com/aio-libs/aiohttp/issues/6130

    def handle_quit(self, sig: int, frame: Optional[FrameType]) -> None:
        """Graceful shutdown: stop the serve loop and wake it up."""
        self.alive = False

        # worker_int callback
        self.cfg.worker_int(self)

        # wakeup closing process
        self._notify_waiter_done()

    def handle_abort(self, sig: int, frame: Optional[FrameType]) -> None:
        """Abort immediately with a non-zero exit code."""
        self.alive = False
        self.exit_code = 1
        self.cfg.worker_abort(self)
        sys.exit(1)

    @staticmethod
    def _create_ssl_context(cfg: Any) -> "SSLContext":
        """Creates SSLContext instance for usage in asyncio.create_server.

        See ssl.SSLSocket.__init__ for more details.
        """
        if ssl is None:  # pragma: no cover
            raise RuntimeError("SSL is not supported.")

        ctx = ssl.SSLContext(cfg.ssl_version)
        ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
        ctx.verify_mode = cfg.cert_reqs
        if cfg.ca_certs:
            ctx.load_verify_locations(cfg.ca_certs)
        if cfg.ciphers:
            ctx.set_ciphers(cfg.ciphers)
        return ctx

    def _get_valid_log_format(self, source_format: str) -> str:
        """Map gunicorn's default access-log format to aiohttp's; reject
        gunicorn-style ``%(name)s`` options, pass anything else through."""
        if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT:
            return self.DEFAULT_AIOHTTP_LOG_FORMAT
        elif re.search(r"%\([^\)]+\)", source_format):
            raise ValueError(
                "Gunicorn's style options in form of `%(name)s` are not "
                "supported for the log formatting. Please use aiohttp's "
                "format specification to configure access log formatting: "
                "http://docs.aiohttp.org/en/stable/logging.html"
                "#format-specification"
            )
        else:
            return source_format
232
+
233
+
234
class GunicornUVLoopWebWorker(GunicornWebWorker):
    """Variant of :class:`GunicornWebWorker` that runs on uvloop."""

    def init_process(self) -> None:
        import uvloop

        # Close the loop inherited from the parent process before swapping
        # policies, then install uvloop so every subsequent
        # asyncio.get_event_loop() call produces a uvloop event loop.
        asyncio.get_event_loop().close()
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())

        super().init_process()
deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_check.cpython-310.pyc ADDED
Binary file (1.47 kB). View file
 
deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_dataclasses.cpython-310.pyc ADDED
Binary file (1.02 kB). View file