nnilayy commited on
Commit
b6cf7d0
·
verified ·
1 Parent(s): f6edc64

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50) hide show
  1. lib/python3.10/site-packages/google/protobuf/internal/__init__.py +7 -0
  2. lib/python3.10/site-packages/google/protobuf/internal/_parameterized.py +420 -0
  3. lib/python3.10/site-packages/google/protobuf/internal/builder.py +118 -0
  4. lib/python3.10/site-packages/google/protobuf/internal/containers.py +687 -0
  5. lib/python3.10/site-packages/google/protobuf/internal/decoder.py +1044 -0
  6. lib/python3.10/site-packages/google/protobuf/internal/enum_type_wrapper.py +101 -0
  7. lib/python3.10/site-packages/google/protobuf/internal/field_mask.py +310 -0
  8. lib/python3.10/site-packages/google/protobuf/internal/testing_refleaks.py +119 -0
  9. lib/python3.10/site-packages/google/protobuf/internal/type_checkers.py +408 -0
  10. lib/python3.10/site-packages/google/protobuf/internal/wire_format.py +245 -0
  11. lib/python3.10/site-packages/grpc/__init__.py +2348 -0
  12. lib/python3.10/site-packages/grpc/_auth.py +80 -0
  13. lib/python3.10/site-packages/grpc/_channel.py +2267 -0
  14. lib/python3.10/site-packages/grpc/_common.py +183 -0
  15. lib/python3.10/site-packages/grpc/_compression.py +71 -0
  16. lib/python3.10/site-packages/grpc/_cython/__init__.py +13 -0
  17. lib/python3.10/site-packages/grpc/_cython/_credentials/roots.pem +0 -0
  18. lib/python3.10/site-packages/grpc/_cython/_cygrpc/__init__.py +13 -0
  19. lib/python3.10/site-packages/grpc/_grpcio_metadata.py +1 -0
  20. lib/python3.10/site-packages/grpc/_interceptor.py +813 -0
  21. lib/python3.10/site-packages/grpc/_observability.py +299 -0
  22. lib/python3.10/site-packages/grpc/_plugin_wrapping.py +136 -0
  23. lib/python3.10/site-packages/grpc/_runtime_protos.py +165 -0
  24. lib/python3.10/site-packages/grpc/_server.py +1528 -0
  25. lib/python3.10/site-packages/grpc/_simple_stubs.py +588 -0
  26. lib/python3.10/site-packages/grpc/_typing.py +95 -0
  27. lib/python3.10/site-packages/grpc/_utilities.py +222 -0
  28. lib/python3.10/site-packages/grpc/aio/__init__.py +95 -0
  29. lib/python3.10/site-packages/grpc/aio/_base_call.py +257 -0
  30. lib/python3.10/site-packages/grpc/aio/_base_channel.py +364 -0
  31. lib/python3.10/site-packages/grpc/aio/_base_server.py +385 -0
  32. lib/python3.10/site-packages/grpc/aio/_channel.py +627 -0
  33. lib/python3.10/site-packages/grpc/aio/_interceptor.py +1178 -0
  34. lib/python3.10/site-packages/grpc/aio/_metadata.py +137 -0
  35. lib/python3.10/site-packages/grpc/aio/_server.py +239 -0
  36. lib/python3.10/site-packages/grpc/aio/_utils.py +22 -0
  37. lib/python3.10/site-packages/grpc/experimental/__init__.py +134 -0
  38. lib/python3.10/site-packages/grpc/experimental/aio/__init__.py +16 -0
  39. lib/python3.10/site-packages/grpc/experimental/gevent.py +27 -0
  40. lib/python3.10/site-packages/grpc/experimental/session_cache.py +45 -0
  41. lib/python3.10/site-packages/grpc/framework/__init__.py +13 -0
  42. lib/python3.10/site-packages/grpc/framework/common/__init__.py +13 -0
  43. lib/python3.10/site-packages/grpc/framework/common/cardinality.py +26 -0
  44. lib/python3.10/site-packages/grpc/framework/common/style.py +24 -0
  45. lib/python3.10/site-packages/grpc/framework/foundation/__init__.py +13 -0
  46. lib/python3.10/site-packages/grpc/framework/foundation/abandonment.py +22 -0
  47. lib/python3.10/site-packages/grpc/framework/foundation/callable_util.py +98 -0
  48. lib/python3.10/site-packages/grpc/framework/foundation/future.py +219 -0
  49. lib/python3.10/site-packages/grpc/framework/foundation/logging_pool.py +72 -0
  50. lib/python3.10/site-packages/grpc/framework/foundation/stream.py +43 -0
lib/python3.10/site-packages/google/protobuf/internal/__init__.py ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
lib/python3.10/site-packages/google/protobuf/internal/_parameterized.py ADDED
@@ -0,0 +1,420 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #! /usr/bin/env python
2
+ #
3
+ # Protocol Buffers - Google's data interchange format
4
+ # Copyright 2008 Google Inc. All rights reserved.
5
+ #
6
+ # Use of this source code is governed by a BSD-style
7
+ # license that can be found in the LICENSE file or at
8
+ # https://developers.google.com/open-source/licenses/bsd
9
+
10
+ """Adds support for parameterized tests to Python's unittest TestCase class.
11
+
12
+ A parameterized test is a method in a test case that is invoked with different
13
+ argument tuples.
14
+
15
+ A simple example:
16
+
17
+ class AdditionExample(_parameterized.TestCase):
18
+ @_parameterized.parameters(
19
+ (1, 2, 3),
20
+ (4, 5, 9),
21
+ (1, 1, 3))
22
+ def testAddition(self, op1, op2, result):
23
+ self.assertEqual(result, op1 + op2)
24
+
25
+
26
+ Each invocation is a separate test case and properly isolated just
27
+ like a normal test method, with its own setUp/tearDown cycle. In the
28
+ example above, there are three separate testcases, one of which will
29
+ fail due to an assertion error (1 + 1 != 3).
30
+
31
+ Parameters for individual test cases can be tuples (with positional parameters)
32
+ or dictionaries (with named parameters):
33
+
34
+ class AdditionExample(_parameterized.TestCase):
35
+ @_parameterized.parameters(
36
+ {'op1': 1, 'op2': 2, 'result': 3},
37
+ {'op1': 4, 'op2': 5, 'result': 9},
38
+ )
39
+ def testAddition(self, op1, op2, result):
40
+ self.assertEqual(result, op1 + op2)
41
+
42
+ If a parameterized test fails, the error message will show the
43
+ original test name (which is modified internally) and the arguments
44
+ for the specific invocation, which are part of the string returned by
45
+ the shortDescription() method on test cases.
46
+
47
+ The id method of the test, used internally by the unittest framework,
48
+ is also modified to show the arguments. To make sure that test names
49
+ stay the same across several invocations, object representations like
50
+
51
+ >>> class Foo(object):
52
+ ... pass
53
+ >>> repr(Foo())
54
+ '<__main__.Foo object at 0x23d8610>'
55
+
56
+ are turned into '<__main__.Foo>'. For even more descriptive names,
57
+ especially in test logs, you can use the named_parameters decorator. In
58
+ this case, only tuples are supported, and the first parameters has to
59
+ be a string (or an object that returns an apt name when converted via
60
+ str()):
61
+
62
+ class NamedExample(_parameterized.TestCase):
63
+ @_parameterized.named_parameters(
64
+ ('Normal', 'aa', 'aaa', True),
65
+ ('EmptyPrefix', '', 'abc', True),
66
+ ('BothEmpty', '', '', True))
67
+ def testStartsWith(self, prefix, string, result):
68
+ self.assertEqual(result, strings.startswith(prefix))
69
+
70
+ Named tests also have the benefit that they can be run individually
71
+ from the command line:
72
+
73
+ $ testmodule.py NamedExample.testStartsWithNormal
74
+ .
75
+ --------------------------------------------------------------------
76
+ Ran 1 test in 0.000s
77
+
78
+ OK
79
+
80
+ Parameterized Classes
81
+ =====================
82
+ If invocation arguments are shared across test methods in a single
83
+ TestCase class, instead of decorating all test methods
84
+ individually, the class itself can be decorated:
85
+
86
+ @_parameterized.parameters(
87
+ (1, 2, 3)
88
+ (4, 5, 9))
89
+ class ArithmeticTest(_parameterized.TestCase):
90
+ def testAdd(self, arg1, arg2, result):
91
+ self.assertEqual(arg1 + arg2, result)
92
+
93
+ def testSubtract(self, arg2, arg2, result):
94
+ self.assertEqual(result - arg1, arg2)
95
+
96
+ Inputs from Iterables
97
+ =====================
98
+ If parameters should be shared across several test cases, or are dynamically
99
+ created from other sources, a single non-tuple iterable can be passed into
100
+ the decorator. This iterable will be used to obtain the test cases:
101
+
102
+ class AdditionExample(_parameterized.TestCase):
103
+ @_parameterized.parameters(
104
+ c.op1, c.op2, c.result for c in testcases
105
+ )
106
+ def testAddition(self, op1, op2, result):
107
+ self.assertEqual(result, op1 + op2)
108
+
109
+
110
+ Single-Argument Test Methods
111
+ ============================
112
+ If a test method takes only one argument, the single argument does not need to
113
+ be wrapped into a tuple:
114
+
115
+ class NegativeNumberExample(_parameterized.TestCase):
116
+ @_parameterized.parameters(
117
+ -1, -3, -4, -5
118
+ )
119
+ def testIsNegative(self, arg):
120
+ self.assertTrue(IsNegative(arg))
121
+ """
122
+
123
+ __author__ = 'tmarek@google.com (Torsten Marek)'
124
+
125
+ import functools
126
+ import re
127
+ import types
128
+ import unittest
129
+ import uuid
130
+
131
+ try:
132
+ # Since python 3
133
+ import collections.abc as collections_abc
134
+ except ImportError:
135
+ # Won't work after python 3.8
136
+ import collections as collections_abc
137
+
138
+ ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>')
139
+ _SEPARATOR = uuid.uuid1().hex
140
+ _FIRST_ARG = object()
141
+ _ARGUMENT_REPR = object()
142
+
143
+
144
def _CleanRepr(obj):
  """Returns repr(obj) with volatile 'object at 0x...' addresses stripped.

  Keeps generated test names stable across interpreter runs.
  """
  raw = repr(obj)
  return ADDR_RE.sub(r'<\1>', raw)
146
+
147
+
148
# Helper function formerly from the unittest module, removed from it in
# Python 2.7.
def _StrClass(cls):
  """Returns the fully qualified class name: '<module>.<ClassName>'."""
  module, qualname = cls.__module__, cls.__name__
  return '%s.%s' % (module, qualname)
152
+
153
+
154
def _NonStringIterable(obj):
  """True iff obj is iterable and not a plain string."""
  if isinstance(obj, str):
    return False
  return isinstance(obj, collections_abc.Iterable)
157
+
158
+
159
def _FormatParameterList(testcase_params):
  """Renders testcase parameters as a human-readable argument list.

  Mappings become 'name=value' pairs, iterables become comma-separated
  cleaned reprs, and a single bare value is wrapped into a tuple.
  """
  if isinstance(testcase_params, collections_abc.Mapping):
    formatted = ('%s=%s' % (argname, _CleanRepr(value))
                 for argname, value in testcase_params.items())
    return ', '.join(formatted)
  if _NonStringIterable(testcase_params):
    return ', '.join(_CleanRepr(value) for value in testcase_params)
  # A single bare value: treat it as a one-element tuple.
  return _FormatParameterList((testcase_params,))
167
+
168
+
169
class _ParameterizedTestIter(object):
  """Callable and iterable class for producing new test cases."""

  def __init__(self, test_method, testcases, naming_type):
    """Returns concrete test functions for a test and a list of parameters.

    The naming_type is used to determine the name of the concrete
    functions as reported by the unittest framework. If naming_type is
    _FIRST_ARG, the testcases must be tuples, and the first element must
    have a string representation that is a valid Python identifier.

    Args:
      test_method: The decorated test method.
      testcases: (list of tuple/dict) A list of parameter
                 tuples/dicts for individual test invocations.
      naming_type: The test naming type, either _NAMED or _ARGUMENT_REPR.
    """
    self._test_method = test_method
    self.testcases = testcases
    self._naming_type = naming_type

  def __call__(self, *args, **kwargs):
    # Being called directly means the metaclass never expanded us.
    raise RuntimeError('You appear to be running a parameterized test case '
                       'without having inherited from parameterized.'
                       'TestCase. This is bad because none of '
                       'your test cases are actually being run.')

  def __iter__(self):
    test_method = self._test_method
    naming_type = self._naming_type

    def _BindParams(testcase_params):
      # Bake one parameter tuple/dict into a fresh bound test function.
      @functools.wraps(test_method)
      def BoundParamTest(self):
        if isinstance(testcase_params, collections_abc.Mapping):
          test_method(self, **testcase_params)
        elif _NonStringIterable(testcase_params):
          test_method(self, *testcase_params)
        else:
          test_method(self, testcase_params)

      if naming_type is _FIRST_ARG:
        # Signal the metaclass that the name of the test function is unique
        # and descriptive; the first parameter becomes part of the name and
        # is dropped from the arguments (via the closure rebinding below).
        BoundParamTest.__x_use_name__ = True
        BoundParamTest.__name__ += str(testcase_params[0])
        testcase_params = testcase_params[1:]
      elif naming_type is _ARGUMENT_REPR:
        # __x_extra_id__ passes naming information to the __new__ method of
        # TestGeneratorMetaclass, which creates a unique but nondescriptive
        # name for this test.
        BoundParamTest.__x_extra_id__ = '(%s)' % (
            _FormatParameterList(testcase_params),)
      else:
        raise RuntimeError('%s is not a valid naming type.' % (naming_type,))

      BoundParamTest.__doc__ = '%s(%s)' % (
          BoundParamTest.__name__, _FormatParameterList(testcase_params))
      if test_method.__doc__:
        BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,)
      return BoundParamTest

    return (_BindParams(params) for params in self.testcases)
232
+
233
+
234
def _IsSingletonList(testcases):
  """True iff testcases contains only a single non-tuple element."""
  if len(testcases) != 1:
    return False
  return not isinstance(testcases[0], tuple)
237
+
238
+
239
def _ModifyClass(class_object, testcases, naming_type):
  """Replaces each test method on class_object with parameterized versions."""
  assert not getattr(class_object, '_id_suffix', None), (
      'Cannot add parameters to %s,'
      ' which already has parameterized methods.' % (class_object,))
  class_object._id_suffix = id_suffix = {}
  # Snapshot the class dict first: we mutate the class while walking it,
  # which Python 3 would otherwise complain about.
  for name, obj in list(class_object.__dict__.items()):
    looks_like_test = name.startswith(unittest.TestLoader.testMethodPrefix)
    if not (looks_like_test and isinstance(obj, types.FunctionType)):
      continue
    delattr(class_object, name)
    methods = {}
    _UpdateClassDictForParamTestCase(
        methods, id_suffix, name,
        _ParameterizedTestIter(obj, testcases, naming_type))
    for new_name, method in methods.items():
      setattr(class_object, new_name, method)
256
+
257
+
258
def _ParameterDecorator(naming_type, testcases):
  """Implementation of the parameterization decorators.

  Args:
    naming_type: The naming type.
    testcases: Testcase parameters.

  Returns:
    A function for modifying the decorated object.
  """
  def _Apply(obj):
    # NOTE: reads the enclosing 'testcases' variable, which may be rebound
    # below before _Apply ever runs.
    if not isinstance(obj, type):
      # Decorating a single test method: hand over an iterator that the
      # metaclass expands into individual tests.
      return _ParameterizedTestIter(obj, testcases, naming_type)
    # Decorating a whole TestCase class.
    if isinstance(testcases, collections_abc.Sequence):
      cases = testcases
    else:
      cases = list(testcases)
    _ModifyClass(obj, cases, naming_type)
    return obj

  # A single non-tuple argument is itself the iterable of testcases.
  if _IsSingletonList(testcases):
    assert _NonStringIterable(testcases[0]), (
        'Single parameter argument must be a non-string iterable')
    testcases = testcases[0]

  return _Apply
285
+
286
+
287
def parameters(*testcases):  # pylint: disable=invalid-name
  """A decorator for creating parameterized tests.

  See the module docstring for a usage example.

  Args:
    *testcases: Parameters for the decorated method, either a single
        iterable, or a list of tuples/dicts/objects (for tests with only
        one argument).

  Returns:
    A test generator to be handled by TestGeneratorMetaclass.
  """
  return _ParameterDecorator(_ARGUMENT_REPR, testcases)
300
+
301
+
302
def named_parameters(*testcases):  # pylint: disable=invalid-name
  """A decorator for creating named, parameterized tests.

  See the module docstring for a usage example. The first element of
  each parameter tuple should be a string and will be appended to the
  name of the test method.

  Args:
    *testcases: Parameters for the decorated method, either a single
        iterable, or a list of tuples.

  Returns:
    A test generator to be handled by TestGeneratorMetaclass.
  """
  return _ParameterDecorator(_FIRST_ARG, testcases)
317
+
318
+
319
class TestGeneratorMetaclass(type):
  """Metaclass for test cases with test generators.

  A test generator is an iterable in a testcase that produces callables.
  These callables must be single-argument methods. They are injected into
  the class namespace in place of the original iterable; when the iterable's
  name matches the unittest test pattern, the injected methods are picked up
  as individual tests by the unittest framework.

  Normally used indirectly, via the parameters decorator.
  """

  def __new__(mcs, class_name, bases, dct):
    dct['_id_suffix'] = id_suffix = {}
    # Iterate over a snapshot: the dict is mutated while expanding.
    for name, obj in list(dct.items()):
      if not name.startswith(unittest.TestLoader.testMethodPrefix):
        continue
      if not _NonStringIterable(obj):
        continue
      iterator = iter(obj)
      dct.pop(name)
      _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator)

    return type.__new__(mcs, class_name, bases, dct)
342
+
343
+
344
def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator):
  """Adds individual test cases to a dictionary.

  Args:
    dct: The target dictionary.
    id_suffix: The dictionary for mapping names to test IDs.
    name: The original name of the test case.
    iterator: The iterator generating the individual test cases.
  """
  for idx, func in enumerate(iterator):
    assert callable(func), 'Test generators must yield callables, got %r' % (
        func,)
    use_own_name = getattr(func, '__x_use_name__', False)
    if use_own_name:
      new_name = func.__name__
    else:
      # Nondescriptive but unique: original name + separator + index.
      new_name = '%s%s%d' % (name, _SEPARATOR, idx)
    assert new_name not in dct, (
        'Name of parameterized test case "%s" not unique' % (new_name,))
    dct[new_name] = func
    id_suffix[new_name] = getattr(func, '__x_extra_id__', '')
364
+
365
+
366
class TestCase(unittest.TestCase, metaclass=TestGeneratorMetaclass):
  """Base class for test cases using the parameters decorator."""

  def _OriginalName(self):
    # Generated method names look like '<original><separator><index>';
    # recover the original by cutting at the unique separator.
    return self._testMethodName.partition(_SEPARATOR)[0]

  def __str__(self):
    return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__))

  def id(self):  # pylint: disable=invalid-name
    """Returns the descriptive ID of the test.

    This is used internally by the unittesting framework to get a name
    for the test to be used in reports.

    Returns:
      The test id.
    """
    suffix = self._id_suffix.get(self._testMethodName, '')
    return '%s.%s%s' % (
        _StrClass(self.__class__), self._OriginalName(), suffix)
387
+
388
+
389
def CoopTestCase(other_base_class):
  """Returns a new base class with a cooperative metaclass base.

  This enables the TestCase to be used in combination
  with other base classes that have custom metaclasses, such as
  mox.MoxTestBase.

  Only works with metaclasses that do not override type.__new__.

  Example:

    import google3
    import mox

    from google.protobuf.internal import _parameterized

    class ExampleTest(parameterized.CoopTestCase(mox.MoxTestBase)):
      ...

  Args:
    other_base_class: (class) A test case base class.

  Returns:
    A new class object.
  """
  # Python 3 classes do not expose a '__metaclass__' attribute (that was a
  # Python 2 convention), so reading it unconditionally raised
  # AttributeError for any modern class. Fall back to type(cls), which is
  # the actual metaclass in Python 3, while still honoring an explicit
  # legacy '__metaclass__' attribute when one is present.
  other_metaclass = getattr(
      other_base_class, '__metaclass__', type(other_base_class))
  metaclass = type(
      'CoopMetaclass',
      (other_metaclass,
       TestGeneratorMetaclass), {})
  return metaclass(
      'CoopTestCase',
      (other_base_class, TestCase), {})
lib/python3.10/site-packages/google/protobuf/internal/builder.py ADDED
@@ -0,0 +1,118 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Builds descriptors, message classes and services for generated _pb2.py.
9
+
10
+ This file is only called in python generated _pb2.py files. It builds
11
+ descriptors, message classes and services that users can directly use
12
+ in generated code.
13
+ """
14
+
15
+ __author__ = 'jieluo@google.com (Jie Luo)'
16
+
17
+ from google.protobuf.internal import enum_type_wrapper
18
+ from google.protobuf.internal import python_message
19
+ from google.protobuf import message as _message
20
+ from google.protobuf import reflection as _reflection
21
+ from google.protobuf import symbol_database as _symbol_database
22
+
23
+ _sym_db = _symbol_database.Default()
24
+
25
+
26
def BuildMessageAndEnumDescriptors(file_des, module):
  """Builds message and enum descriptors.

  Args:
    file_des: FileDescriptor of the .proto file
    module: Generated _pb2 module
  """

  def BuildNestedDescriptors(msg_des, prefix):
    # Install nested message descriptors as '_<PREFIX><NAME>' and recurse,
    # then install the message's enum descriptors under the same prefix.
    for name, nested_msg in msg_des.nested_types_by_name.items():
      nested_name = prefix + name.upper()
      module[nested_name] = nested_msg
      BuildNestedDescriptors(nested_msg, nested_name + '_')
    for enum_des in msg_des.enum_types:
      module[prefix + enum_des.name.upper()] = enum_des

  for name, msg_des in file_des.message_types_by_name.items():
    top_name = '_' + name.upper()
    module[top_name] = msg_des
    BuildNestedDescriptors(msg_des, top_name + '_')
+
47
+
48
def BuildTopDescriptorsAndMessages(file_des, module_name, module):
  """Builds top level descriptors and message classes.

  Args:
    file_des: FileDescriptor of the .proto file
    module_name: str, the name of generated _pb2 module
    module: Generated _pb2 module
  """

  def BuildMessage(msg_des):
    # Build nested message classes first so they can become attributes of
    # the enclosing generated class.
    create_dict = {
        name: BuildMessage(nested_msg)
        for name, nested_msg in msg_des.nested_types_by_name.items()
    }
    create_dict['DESCRIPTOR'] = msg_des
    create_dict['__module__'] = module_name
    message_class = _reflection.GeneratedProtocolMessageType(
        msg_des.name, (_message.Message,), create_dict)
    _sym_db.RegisterMessage(message_class)
    return message_class

  # Top level enums: descriptor, wrapper, and every value as a module constant.
  for name, enum_des in file_des.enum_types_by_name.items():
    module['_' + name.upper()] = enum_des
    module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des)
    for enum_value in enum_des.values:
      module[enum_value.name] = enum_value.number

  # Top level extensions.
  for name, extension_des in file_des.extensions_by_name.items():
    module[name.upper() + '_FIELD_NUMBER'] = extension_des.number
    module[name] = extension_des

  # Services (descriptors only; stubs are built by BuildServices).
  for name, service in file_des.services_by_name.items():
    module['_' + name.upper()] = service

  # Build messages.
  for name, msg_des in file_des.message_types_by_name.items():
    module[name] = BuildMessage(msg_des)
+ module[name] = BuildMessage(msg_des)
87
+
88
+
89
def AddHelpersToExtensions(file_des):
  """No-op kept so old generated code keeps working with the new runtime.

  Args:
    file_des: FileDescriptor of the .proto file
  """
  # TODO: Remove this no-op once old generated code no longer calls it.
  return
+ return
97
+
98
+
99
def BuildServices(file_des, module_name, module):
  """Builds services classes and services stub class.

  Args:
    file_des: FileDescriptor of the .proto file
    module_name: str, the name of generated _pb2 module
    module: Generated _pb2 module
  """
  # pylint: disable=g-import-not-at-top
  from google.protobuf import service as _service
  from google.protobuf import service_reflection
  # pylint: enable=g-import-not-at-top
  for name, service in file_des.services_by_name.items():
    service_class = service_reflection.GeneratedServiceType(
        name, (_service.Service,),
        dict(DESCRIPTOR=service, __module__=module_name))
    module[name] = service_class
    # The stub derives from the freshly built service class.
    stub_name = name + '_Stub'
    module[stub_name] = service_reflection.GeneratedServiceStubType(
        stub_name, (service_class,),
        dict(DESCRIPTOR=service, __module__=module_name))
lib/python3.10/site-packages/google/protobuf/internal/containers.py ADDED
@@ -0,0 +1,687 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains container classes to represent different protocol buffer types.
9
+
10
+ This file defines container classes which represent categories of protocol
11
+ buffer field types which need extra maintenance. Currently these categories
12
+ are:
13
+
14
+ - Repeated scalar fields - These are all repeated fields which aren't
15
+ composite (e.g. they are of simple types like int32, string, etc).
16
+ - Repeated composite fields - Repeated fields which are composite. This
17
+ includes groups and nested messages.
18
+ """
19
+
20
+ import collections.abc
21
+ import copy
22
+ import pickle
23
+ from typing import (
24
+ Any,
25
+ Iterable,
26
+ Iterator,
27
+ List,
28
+ MutableMapping,
29
+ MutableSequence,
30
+ NoReturn,
31
+ Optional,
32
+ Sequence,
33
+ TypeVar,
34
+ Union,
35
+ overload,
36
+ )
37
+
38
+
39
+ _T = TypeVar('_T')
40
+ _K = TypeVar('_K')
41
+ _V = TypeVar('_V')
42
+
43
+
44
class BaseContainer(Sequence[_T]):
  """Base container class."""

  # Minimizes memory usage and disallows assignment to other attributes.
  __slots__ = ['_message_listener', '_values']

  def __init__(self, message_listener: Any) -> None:
    """
    Args:
      message_listener: A MessageListener implementation.
        The RepeatedScalarFieldContainer will call this object's
        Modified() method when it is modified.
    """
    self._message_listener = message_listener
    self._values = []

  @overload
  def __getitem__(self, key: int) -> _T:
    ...

  @overload
  def __getitem__(self, key: slice) -> List[_T]:
    ...

  def __getitem__(self, key):
    """Retrieves item by the specified key."""
    return self._values[key]

  def __len__(self) -> int:
    """Returns the number of elements in the container."""
    return len(self._values)

  def __repr__(self) -> str:
    return repr(self._values)

  def __ne__(self, other: Any) -> bool:
    """Checks if another instance isn't equal to this one."""
    # The concrete classes should define __eq__.
    return not self == other

  # Mutable container, so explicitly unhashable.
  __hash__ = None

  def sort(self, *args, **kwargs) -> None:
    """Sorts the underlying values in place, like list.sort()."""
    # Continue to support the old sort_function keyword argument.
    # This is expected to be a rare occurrence, so use LBYL to avoid
    # the overhead of actually catching KeyError.
    if 'sort_function' in kwargs:
      kwargs['cmp'] = kwargs.pop('sort_function')
    self._values.sort(*args, **kwargs)

  def reverse(self) -> None:
    """Reverses the underlying values in place, like list.reverse()."""
    self._values.reverse()
96
+
97
+
98
# TODO: Remove this. BaseContainer does *not* conform to
# MutableSequence, only its subclasses do.
# Registering here makes isinstance(x, MutableSequence) report True even
# for read-only BaseContainer instances — which is why the TODO exists.
collections.abc.MutableSequence.register(BaseContainer)
101
+
102
+
103
+ class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]):
104
+ """Simple, type-checked, list-like container for holding repeated scalars."""
105
+
106
+ # Disallows assignment to other attributes.
107
+ __slots__ = ['_type_checker']
108
+
109
+ def __init__(
110
+ self,
111
+ message_listener: Any,
112
+ type_checker: Any,
113
+ ) -> None:
114
+ """Args:
115
+
116
+ message_listener: A MessageListener implementation. The
117
+ RepeatedScalarFieldContainer will call this object's Modified() method
118
+ when it is modified.
119
+ type_checker: A type_checkers.ValueChecker instance to run on elements
120
+ inserted into this container.
121
+ """
122
+ super().__init__(message_listener)
123
+ self._type_checker = type_checker
124
+
125
+ def append(self, value: _T) -> None:
126
+ """Appends an item to the list. Similar to list.append()."""
127
+ self._values.append(self._type_checker.CheckValue(value))
128
+ if not self._message_listener.dirty:
129
+ self._message_listener.Modified()
130
+
131
+ def insert(self, key: int, value: _T) -> None:
132
+ """Inserts the item at the specified position. Similar to list.insert()."""
133
+ self._values.insert(key, self._type_checker.CheckValue(value))
134
+ if not self._message_listener.dirty:
135
+ self._message_listener.Modified()
136
+
137
+ def extend(self, elem_seq: Iterable[_T]) -> None:
138
+ """Extends by appending the given iterable. Similar to list.extend()."""
139
+ # TODO: Change OSS to raise error too
140
+ if elem_seq is None:
141
+ return
142
+ try:
143
+ elem_seq_iter = iter(elem_seq)
144
+ except TypeError:
145
+ if not elem_seq:
146
+ warnings.warn('Value is not iterable. Please remove the wrong '
147
+ 'usage. This will be changed to raise TypeError soon.')
148
+ return
149
+ raise
150
+ new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter]
151
+ if new_values:
152
+ self._values.extend(new_values)
153
+ self._message_listener.Modified()
154
+
155
+ def MergeFrom(
156
+ self,
157
+ other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]],
158
+ ) -> None:
159
+ """Appends the contents of another repeated field of the same type to this
160
+ one. We do not check the types of the individual fields.
161
+ """
162
+ self._values.extend(other)
163
+ self._message_listener.Modified()
164
+
165
+ def remove(self, elem: _T):
166
+ """Removes an item from the list. Similar to list.remove()."""
167
+ self._values.remove(elem)
168
+ self._message_listener.Modified()
169
+
170
+ def pop(self, key: Optional[int] = -1) -> _T:
171
+ """Removes and returns an item at a given index. Similar to list.pop()."""
172
+ value = self._values[key]
173
+ self.__delitem__(key)
174
+ return value
175
+
176
+ @overload
177
+ def __setitem__(self, key: int, value: _T) -> None:
178
+ ...
179
+
180
+ @overload
181
+ def __setitem__(self, key: slice, value: Iterable[_T]) -> None:
182
+ ...
183
+
184
+ def __setitem__(self, key, value) -> None:
185
+ """Sets the item on the specified position."""
186
+ if isinstance(key, slice):
187
+ if key.step is not None:
188
+ raise ValueError('Extended slices not supported')
189
+ self._values[key] = map(self._type_checker.CheckValue, value)
190
+ self._message_listener.Modified()
191
+ else:
192
+ self._values[key] = self._type_checker.CheckValue(value)
193
+ self._message_listener.Modified()
194
+
195
+ def __delitem__(self, key: Union[int, slice]) -> None:
196
+ """Deletes the item at the specified position."""
197
+ del self._values[key]
198
+ self._message_listener.Modified()
199
+
200
+ def __eq__(self, other: Any) -> bool:
201
+ """Compares the current instance with another one."""
202
+ if self is other:
203
+ return True
204
+ # Special case for the same type which should be common and fast.
205
+ if isinstance(other, self.__class__):
206
+ return other._values == self._values
207
+ # We are presumably comparing against some other sequence type.
208
+ return other == self._values
209
+
210
+ def __deepcopy__(
211
+ self,
212
+ unused_memo: Any = None,
213
+ ) -> 'RepeatedScalarFieldContainer[_T]':
214
+ clone = RepeatedScalarFieldContainer(
215
+ copy.deepcopy(self._message_listener), self._type_checker)
216
+ clone.MergeFrom(self)
217
+ return clone
218
+
219
  def __reduce__(self, **kwargs) -> NoReturn:
    """Always raises pickle.PickleError; convert to list() before pickling."""
    raise pickle.PickleError(
        "Can't pickle repeated scalar fields, convert to list first")
222
+
223
+
224
# TODO: Constrain T to be a subtype of Message.
class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]):
  """Simple, list-like container for holding repeated composite fields.

  Elements are always owned copies: append/insert/extend copy the caller's
  message into a freshly constructed instance wired to this container's
  message listener.
  """

  # Disallows assignment to other attributes.
  __slots__ = ['_message_descriptor']

  def __init__(self, message_listener: Any, message_descriptor: Any) -> None:
    """
    Note that we pass in a descriptor instead of the generated class directly,
    since at the time we construct a _RepeatedCompositeFieldContainer we
    haven't yet necessarily initialized the type that will be contained in the
    container.

    Args:
      message_listener: A MessageListener implementation.
        The RepeatedCompositeFieldContainer will call this object's
        Modified() method when it is modified.
      message_descriptor: A Descriptor instance describing the protocol type
        that should be present in this container. We'll use the
        _concrete_class field of this descriptor when the client calls add().
    """
    super().__init__(message_listener)
    self._message_descriptor = message_descriptor

  def add(self, **kwargs: Any) -> _T:
    """Adds a new element at the end of the list and returns it. Keyword
    arguments may be used to initialize the element.
    """
    new_element = self._message_descriptor._concrete_class(**kwargs)
    new_element._SetListener(self._message_listener)
    self._values.append(new_element)
    if not self._message_listener.dirty:
      self._message_listener.Modified()
    return new_element

  def append(self, value: _T) -> None:
    """Appends one element by copying the message."""
    new_element = self._message_descriptor._concrete_class()
    new_element._SetListener(self._message_listener)
    new_element.CopyFrom(value)
    self._values.append(new_element)
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def insert(self, key: int, value: _T) -> None:
    """Inserts the item at the specified position by copying."""
    new_element = self._message_descriptor._concrete_class()
    new_element._SetListener(self._message_listener)
    new_element.CopyFrom(value)
    self._values.insert(key, new_element)
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def extend(self, elem_seq: Iterable[_T]) -> None:
    """Extends by appending the given sequence of elements of the same type
    as this one, copying each individual message.
    """
    message_class = self._message_descriptor._concrete_class
    listener = self._message_listener
    values = self._values
    for message in elem_seq:
      new_element = message_class()
      new_element._SetListener(listener)
      new_element.MergeFrom(message)
      values.append(new_element)
    # Unconditional: mirrors historical behavior even for an empty sequence.
    listener.Modified()

  def MergeFrom(
      self,
      other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]],
  ) -> None:
    """Appends the contents of another repeated field of the same type to this
    one, copying each individual message.
    """
    self.extend(other)

  def remove(self, elem: _T) -> None:
    """Removes an item from the list. Similar to list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def pop(self, key: Optional[int] = -1) -> _T:
    """Removes and returns an item at a given index. Similar to list.pop()."""
    value = self._values[key]
    self.__delitem__(key)
    return value

  @overload
  def __setitem__(self, key: int, value: _T) -> None:
    ...

  @overload
  def __setitem__(self, key: slice, value: Iterable[_T]) -> None:
    ...

  def __setitem__(self, key, value):
    # This method is implemented to make RepeatedCompositeFieldContainer
    # structurally compatible with typing.MutableSequence. It is
    # otherwise unsupported and will always raise an error.
    raise TypeError(
        f'{self.__class__.__name__} object does not support item assignment')

  def __delitem__(self, key: Union[int, slice]) -> None:
    """Deletes the item at the specified position."""
    del self._values[key]
    self._message_listener.Modified()

  def __eq__(self, other: Any) -> bool:
    """Compares the current instance with another one.

    Unlike the scalar container, comparing against a non-container raises.
    """
    if self is other:
      return True
    if not isinstance(other, self.__class__):
      raise TypeError('Can only compare repeated composite fields against '
                      'other repeated composite fields.')
    return self._values == other._values
341
+
342
+
343
class ScalarMap(MutableMapping[_K, _V]):
  """Simple, type-checked, dict-like container for holding repeated scalars.

  Reading a missing key inserts (and returns) the value checker's default,
  i.e. lookups behave like collections.defaultdict.
  """

  # Disallows assignment to other attributes.
  __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener',
               '_entry_descriptor']

  def __init__(
      self,
      message_listener: Any,
      key_checker: Any,
      value_checker: Any,
      entry_descriptor: Any,
  ) -> None:
    """
    Args:
      message_listener: A MessageListener implementation.
        The ScalarMap will call this object's Modified() method when it
        is modified.
      key_checker: A type_checkers.ValueChecker instance to run on keys
        inserted into this container.
      value_checker: A type_checkers.ValueChecker instance to run on values
        inserted into this container.
      entry_descriptor: The MessageDescriptor of a map entry: key and value.
    """
    self._message_listener = message_listener
    self._key_checker = key_checker
    self._value_checker = value_checker
    self._entry_descriptor = entry_descriptor
    self._values = {}

  def __getitem__(self, key: _K) -> _V:
    # defaultdict-like: a missing key is type-checked, then created with
    # the value checker's default.
    try:
      return self._values[key]
    except KeyError:
      key = self._key_checker.CheckValue(key)
      val = self._value_checker.DefaultValue()
      self._values[key] = val
      return val

  def __contains__(self, item: _K) -> bool:
    # We check the key's type to match the strong-typing flavor of the API.
    # Also this makes it easier to match the behavior of the C++ implementation.
    self._key_checker.CheckValue(item)
    return item in self._values

  @overload
  def get(self, key: _K) -> Optional[_V]:
    ...

  @overload
  def get(self, key: _K, default: _T) -> Union[_V, _T]:
    ...

  # We need to override this explicitly, because our defaultdict-like behavior
  # will make the default implementation (from our base class) always insert
  # the key.
  def get(self, key, default=None):
    if key in self:
      return self[key]
    else:
      return default

  def __setitem__(self, key: _K, value: _V) -> None:
    checked_key = self._key_checker.CheckValue(key)
    checked_value = self._value_checker.CheckValue(value)
    self._values[checked_key] = checked_value
    self._message_listener.Modified()

  def __delitem__(self, key: _K) -> None:
    del self._values[key]
    self._message_listener.Modified()

  def __len__(self) -> int:
    return len(self._values)

  def __iter__(self) -> Iterator[_K]:
    return iter(self._values)

  def __repr__(self) -> str:
    return repr(self._values)

  def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None:
    # Duplicate keys take the other map's value, matching wire-merge rules.
    self._values.update(other._values)
    self._message_listener.Modified()

  def InvalidateIterators(self) -> None:
    # It appears that the only way to reliably invalidate iterators to
    # self._values is to ensure that its size changes.
    original = self._values
    self._values = original.copy()
    original[None] = None

  # This is defined in the abstract base, but we can do it much more cheaply.
  def clear(self) -> None:
    self._values.clear()
    self._message_listener.Modified()

  def GetEntryClass(self) -> Any:
    return self._entry_descriptor._concrete_class
443
+
444
+
445
class MessageMap(MutableMapping[_K, _V]):
  """Simple, type-checked, dict-like container with submessage values.

  Reading a missing key creates a new submessage for it; values may not be
  assigned directly (mutate the submessage returned by lookup instead).
  """

  # Disallows assignment to other attributes.
  __slots__ = ['_key_checker', '_values', '_message_listener',
               '_message_descriptor', '_entry_descriptor']

  def __init__(
      self,
      message_listener: Any,
      message_descriptor: Any,
      key_checker: Any,
      entry_descriptor: Any,
  ) -> None:
    """
    Args:
      message_listener: A MessageListener implementation.
        The MessageMap will call this object's Modified() method when it
        is modified.
      message_descriptor: A Descriptor instance describing the protocol type
        of the values; its _concrete_class is instantiated on demand.
      key_checker: A type_checkers.ValueChecker instance to run on keys
        inserted into this container.
      entry_descriptor: The MessageDescriptor of a map entry: key and value.
    """
    self._message_listener = message_listener
    self._message_descriptor = message_descriptor
    self._key_checker = key_checker
    self._entry_descriptor = entry_descriptor
    self._values = {}

  def __getitem__(self, key: _K) -> _V:
    key = self._key_checker.CheckValue(key)
    try:
      return self._values[key]
    except KeyError:
      # defaultdict-like: create an empty submessage wired to our listener.
      new_element = self._message_descriptor._concrete_class()
      new_element._SetListener(self._message_listener)
      self._values[key] = new_element
      self._message_listener.Modified()
      return new_element

  def get_or_create(self, key: _K) -> _V:
    """get_or_create() is an alias for getitem (ie. map[key]).

    Args:
      key: The key to get or create in the map.

    This is useful in cases where you want to be explicit that the call is
    mutating the map. This can avoid lint errors for statements like this
    that otherwise would appear to be pointless statements:

      msg.my_map[key]
    """
    return self[key]

  @overload
  def get(self, key: _K) -> Optional[_V]:
    ...

  @overload
  def get(self, key: _K, default: _T) -> Union[_V, _T]:
    ...

  # We need to override this explicitly, because our defaultdict-like behavior
  # will make the default implementation (from our base class) always insert
  # the key.
  def get(self, key, default=None):
    if key in self:
      return self[key]
    else:
      return default

  def __contains__(self, item: _K) -> bool:
    item = self._key_checker.CheckValue(item)
    return item in self._values

  def __setitem__(self, key: _K, value: _V) -> NoReturn:
    raise ValueError('May not set values directly, call my_map[key].foo = 5')

  def __delitem__(self, key: _K) -> None:
    key = self._key_checker.CheckValue(key)
    del self._values[key]
    self._message_listener.Modified()

  def __len__(self) -> int:
    return len(self._values)

  def __iter__(self) -> Iterator[_K]:
    return iter(self._values)

  def __repr__(self) -> str:
    return repr(self._values)

  def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None:
    # pylint: disable=protected-access
    for key in other._values:
      # According to documentation: "When parsing from the wire or when merging,
      # if there are duplicate map keys the last key seen is used".
      if key in self:
        del self[key]
      self[key].CopyFrom(other[key])
    # self._message_listener.Modified() not required here, because
    # mutations to submessages already propagate.

  def InvalidateIterators(self) -> None:
    # It appears that the only way to reliably invalidate iterators to
    # self._values is to ensure that its size changes.
    original = self._values
    self._values = original.copy()
    original[None] = None

  # This is defined in the abstract base, but we can do it much more cheaply.
  def clear(self) -> None:
    self._values.clear()
    self._message_listener.Modified()

  def GetEntryClass(self) -> Any:
    return self._entry_descriptor._concrete_class
564
+
565
+
566
+ class _UnknownField:
567
+ """A parsed unknown field."""
568
+
569
+ # Disallows assignment to other attributes.
570
+ __slots__ = ['_field_number', '_wire_type', '_data']
571
+
572
+ def __init__(self, field_number, wire_type, data):
573
+ self._field_number = field_number
574
+ self._wire_type = wire_type
575
+ self._data = data
576
+ return
577
+
578
+ def __lt__(self, other):
579
+ # pylint: disable=protected-access
580
+ return self._field_number < other._field_number
581
+
582
+ def __eq__(self, other):
583
+ if self is other:
584
+ return True
585
+ # pylint: disable=protected-access
586
+ return (self._field_number == other._field_number and
587
+ self._wire_type == other._wire_type and
588
+ self._data == other._data)
589
+
590
+
591
class UnknownFieldRef:  # pylint: disable=missing-class-docstring

  def __init__(self, parent, index):
    self._parent = parent
    self._index = index

  def _check_valid(self):
    # A falsy parent or an out-of-range index means the backing message
    # was cleared (or shrank) after this ref was handed out.
    if not self._parent or self._index >= len(self._parent):
      raise ValueError('UnknownField does not exist. '
                       'The parent message might be cleared.')

  @property
  def field_number(self):
    """Field number of the referenced unknown field."""
    self._check_valid()
    # pylint: disable=protected-access
    return self._parent._internal_get(self._index)._field_number

  @property
  def wire_type(self):
    """Wire type of the referenced unknown field."""
    self._check_valid()
    # pylint: disable=protected-access
    return self._parent._internal_get(self._index)._wire_type

  @property
  def data(self):
    """Raw payload of the referenced unknown field."""
    self._check_valid()
    # pylint: disable=protected-access
    return self._parent._internal_get(self._index)._data
622
+
623
+
624
+ class UnknownFieldSet:
625
+ """UnknownField container"""
626
+
627
+ # Disallows assignment to other attributes.
628
+ __slots__ = ['_values']
629
+
630
+ def __init__(self):
631
+ self._values = []
632
+
633
+ def __getitem__(self, index):
634
+ if self._values is None:
635
+ raise ValueError('UnknownFields does not exist. '
636
+ 'The parent message might be cleared.')
637
+ size = len(self._values)
638
+ if index < 0:
639
+ index += size
640
+ if index < 0 or index >= size:
641
+ raise IndexError('index %d out of range'.index)
642
+
643
+ return UnknownFieldRef(self, index)
644
+
645
+ def _internal_get(self, index):
646
+ return self._values[index]
647
+
648
+ def __len__(self):
649
+ if self._values is None:
650
+ raise ValueError('UnknownFields does not exist. '
651
+ 'The parent message might be cleared.')
652
+ return len(self._values)
653
+
654
+ def _add(self, field_number, wire_type, data):
655
+ unknown_field = _UnknownField(field_number, wire_type, data)
656
+ self._values.append(unknown_field)
657
+ return unknown_field
658
+
659
+ def __iter__(self):
660
+ for i in range(len(self)):
661
+ yield UnknownFieldRef(self, i)
662
+
663
+ def _extend(self, other):
664
+ if other is None:
665
+ return
666
+ # pylint: disable=protected-access
667
+ self._values.extend(other._values)
668
+
669
+ def __eq__(self, other):
670
+ if self is other:
671
+ return True
672
+ # Sort unknown fields because their order shouldn't
673
+ # affect equality test.
674
+ values = list(self._values)
675
+ if other is None:
676
+ return not values
677
+ values.sort()
678
+ # pylint: disable=protected-access
679
+ other_values = sorted(other._values)
680
+ return values == other_values
681
+
682
+ def _clear(self):
683
+ for value in self._values:
684
+ # pylint: disable=protected-access
685
+ if isinstance(value._data, UnknownFieldSet):
686
+ value._data._clear() # pylint: disable=protected-access
687
+ self._values = None
lib/python3.10/site-packages/google/protobuf/internal/decoder.py ADDED
@@ -0,0 +1,1044 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Code for decoding protocol buffer primitives.
9
+
10
+ This code is very similar to encoder.py -- read the docs for that module first.
11
+
12
+ A "decoder" is a function with the signature:
13
+ Decode(buffer, pos, end, message, field_dict)
14
+ The arguments are:
15
+ buffer: The string containing the encoded message.
16
+ pos: The current position in the string.
17
+ end: The position in the string where the current message ends. May be
18
+ less than len(buffer) if we're reading a sub-message.
19
+ message: The message object into which we're parsing.
20
+ field_dict: message._fields (avoids a hashtable lookup).
21
+ The decoder reads the field and stores it into field_dict, returning the new
22
+ buffer position. A decoder for a repeated field may proactively decode all of
23
+ the elements of that field, if they appear consecutively.
24
+
25
+ Note that decoders may throw any of the following:
26
+ IndexError: Indicates a truncated message.
27
+ struct.error: Unpacking of a fixed-width field failed.
28
+ message.DecodeError: Other errors.
29
+
30
+ Decoders are expected to raise an exception if they are called with pos > end.
31
+ This allows callers to be lax about bounds checking: it's fine to read past
32
+ "end" as long as you are sure that someone else will notice and throw an
33
+ exception later on.
34
+
35
+ Something up the call stack is expected to catch IndexError and struct.error
36
+ and convert them to message.DecodeError.
37
+
38
+ Decoders are constructed using decoder constructors with the signature:
39
+ MakeDecoder(field_number, is_repeated, is_packed, key, new_default)
40
+ The arguments are:
41
+ field_number: The field number of the field we want to decode.
42
+ is_repeated: Is the field a repeated field? (bool)
43
+ is_packed: Is the field a packed field? (bool)
44
+ key: The key to use when looking up the field within field_dict.
45
+ (This is actually the FieldDescriptor but nothing in this
46
+ file should depend on that.)
47
+ new_default: A function which takes a message object as a parameter and
48
+ returns a new instance of the default value for this field.
49
+ (This is called for repeated fields and sub-messages, when an
50
+ instance does not already exist.)
51
+
52
+ As with encoders, we define a decoder constructor for every type of field.
53
+ Then, for every field of every message class we construct an actual decoder.
54
+ That decoder goes into a dict indexed by tag, so when we decode a message
55
+ we repeatedly read a tag, look up the corresponding decoder, and invoke it.
56
+ """
57
+
58
+ __author__ = 'kenton@google.com (Kenton Varda)'
59
+
60
+ import math
61
+ import struct
62
+
63
+ from google.protobuf.internal import containers
64
+ from google.protobuf.internal import encoder
65
+ from google.protobuf.internal import wire_format
66
+ from google.protobuf import message
67
+
68
+
69
+ # This is not for optimization, but rather to avoid conflicts with local
70
+ # variables named "message".
71
+ _DecodeError = message.DecodeError
72
+
73
+
74
+ def _VarintDecoder(mask, result_type):
75
+ """Return an encoder for a basic varint value (does not include tag).
76
+
77
+ Decoded values will be bitwise-anded with the given mask before being
78
+ returned, e.g. to limit them to 32 bits. The returned decoder does not
79
+ take the usual "end" parameter -- the caller is expected to do bounds checking
80
+ after the fact (often the caller can defer such checking until later). The
81
+ decoder returns a (value, new_pos) pair.
82
+ """
83
+
84
+ def DecodeVarint(buffer, pos):
85
+ result = 0
86
+ shift = 0
87
+ while 1:
88
+ b = buffer[pos]
89
+ result |= ((b & 0x7f) << shift)
90
+ pos += 1
91
+ if not (b & 0x80):
92
+ result &= mask
93
+ result = result_type(result)
94
+ return (result, pos)
95
+ shift += 7
96
+ if shift >= 64:
97
+ raise _DecodeError('Too many bytes when decoding varint.')
98
+ return DecodeVarint
99
+
100
+
101
+ def _SignedVarintDecoder(bits, result_type):
102
+ """Like _VarintDecoder() but decodes signed values."""
103
+
104
+ signbit = 1 << (bits - 1)
105
+ mask = (1 << bits) - 1
106
+
107
+ def DecodeVarint(buffer, pos):
108
+ result = 0
109
+ shift = 0
110
+ while 1:
111
+ b = buffer[pos]
112
+ result |= ((b & 0x7f) << shift)
113
+ pos += 1
114
+ if not (b & 0x80):
115
+ result &= mask
116
+ result = (result ^ signbit) - signbit
117
+ result = result_type(result)
118
+ return (result, pos)
119
+ shift += 7
120
+ if shift >= 64:
121
+ raise _DecodeError('Too many bytes when decoding varint.')
122
+ return DecodeVarint
123
+
124
# All 32-bit and 64-bit values are represented as int.
# Module-level decoder singletons shared by the field decoders below.
_DecodeVarint = _VarintDecoder((1 << 64) - 1, int)
_DecodeSignedVarint = _SignedVarintDecoder(64, int)

# Use these versions for values which must be limited to 32 bits.
_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int)
_DecodeSignedVarint32 = _SignedVarintDecoder(32, int)
131
+
132
+
133
def ReadTag(buffer, pos):
  """Read a tag from the memoryview, and return a (tag_bytes, new_pos) tuple.

  We return the raw bytes of the tag rather than decoding them.  The raw
  bytes can then be used to look up the proper decoder, trading pure-python
  varint decoding for a C-level byte-string hash lookup.

  Args:
    buffer: memoryview object of the encoded bytes
    pos: int of the current position to start from

  Returns:
    Tuple[bytes, int] of the tag data and new position.
  """
  start = pos
  # Skip continuation bytes (high bit set); the final varint byte has it clear.
  while buffer[pos] & 0x80:
    pos += 1
  pos += 1
  return (buffer[start:pos].tobytes(), pos)
157
+
158
+
159
+ # --------------------------------------------------------------------
160
+
161
+
162
def _SimpleDecoder(wire_type, decode_value):
  """Return a constructor for a decoder for fields of a particular type.

  Args:
      wire_type: The field's wire type.
      decode_value: A function which decodes an individual value, e.g.
        _DecodeVarint()
  """

  def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default,
                      clear_if_default=False):
    if is_packed:
      local_DecodeVarint = _DecodeVarint
      def DecodePackedField(buffer, pos, end, message, field_dict):
        # Packed: one length-delimited blob holding many values back-to-back.
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        (endpoint, pos) = local_DecodeVarint(buffer, pos)
        endpoint += pos
        if endpoint > end:
          raise _DecodeError('Truncated message.')
        while pos < endpoint:
          (element, pos) = decode_value(buffer, pos)
          value.append(element)
        if pos > endpoint:
          del value[-1]   # Discard corrupt value.
          raise _DecodeError('Packed element was truncated.')
        return pos
      return DecodePackedField
    elif is_repeated:
      tag_bytes = encoder.TagBytes(field_number, wire_type)
      tag_len = len(tag_bytes)
      def DecodeRepeatedField(buffer, pos, end, message, field_dict):
        # Unpacked repeated: greedily consume consecutive copies of this tag.
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        while 1:
          (element, new_pos) = decode_value(buffer, pos)
          value.append(element)
          # Predict that the next tag is another copy of the same repeated
          # field.
          pos = new_pos + tag_len
          if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
            # Prediction failed.  Return.
            if new_pos > end:
              raise _DecodeError('Truncated message.')
            return new_pos
      return DecodeRepeatedField
    else:
      def DecodeField(buffer, pos, end, message, field_dict):
        # Singular field: decode one value and store (or clear on default,
        # for proto3 implicit-presence semantics).
        (new_value, pos) = decode_value(buffer, pos)
        if pos > end:
          raise _DecodeError('Truncated message.')
        if clear_if_default and not new_value:
          field_dict.pop(key, None)
        else:
          field_dict[key] = new_value
        return pos
      return DecodeField

  return SpecificDecoder
223
+
224
+
225
def _ModifiedDecoder(wire_type, decode_value, modify_value):
  """Like _SimpleDecoder but additionally invokes *modify_value* on every
  value before storing it.  Usually modify_value is ZigZagDecode.
  """

  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code,
  # but not enough to make a significant difference.
  def InnerDecode(buffer, pos):
    raw, new_pos = decode_value(buffer, pos)
    return (modify_value(raw), new_pos)

  return _SimpleDecoder(wire_type, InnerDecode)
237
+
238
+
239
def _StructPackDecoder(wire_type, format):
  """Return a constructor for a decoder for a fixed-width field.

  Args:
      wire_type: The field's wire type.
      format: The format string to pass to struct.unpack().
  """

  value_size = struct.calcsize(format)
  local_unpack = struct.unpack

  # Note that we expect someone up-stack to catch struct.error and convert
  # it to _DecodeError -- this way we don't have to set up exception-
  # handling blocks every time we parse one value.
  def InnerDecode(buffer, pos):
    new_pos = pos + value_size
    (result,) = local_unpack(format, buffer[pos:new_pos])
    return (result, new_pos)

  return _SimpleDecoder(wire_type, InnerDecode)
262
+
263
+
264
def _FloatDecoder():
  """Returns a decoder for a float field.

  This code works around a bug in struct.unpack for non-finite 32-bit
  floating-point values.
  """

  local_unpack = struct.unpack

  def InnerDecode(buffer, pos):
    """Decode serialized float to a float and new position.

    Args:
      buffer: memoryview of the serialized bytes
      pos: int, position in the memory view to start at.

    Returns:
      Tuple[float, int] of the deserialized float value and new position
      in the serialized data.
    """
    # We expect a 32-bit value in little-endian byte order.  Bit 1 is the sign
    # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand.
    new_pos = pos + 4
    float_bytes = buffer[pos:new_pos].tobytes()

    # If this value has all its exponent bits set, then it's non-finite.
    # In Python 2.4, struct.unpack will convert it to a finite 64-bit value.
    # To avoid that, we parse it specially.
    # (byte 3 is the high-order byte in little-endian layout.)
    if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'):
      # If at least one significand bit is set...
      if float_bytes[0:3] != b'\x00\x00\x80':
        return (math.nan, new_pos)
      # If sign bit is set...
      if float_bytes[3:4] == b'\xFF':
        return (-math.inf, new_pos)
      return (math.inf, new_pos)

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.
    result = local_unpack('<f', float_bytes)[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_format.WIRETYPE_FIXED32, InnerDecode)
307
+
308
+
309
def _DoubleDecoder():
  """Returns a decoder for a double field.

  This code works around a bug in struct.unpack for not-a-number.
  """

  # Bind the global to a local for faster lookup inside the hot inner loop.
  local_unpack = struct.unpack

  def InnerDecode(buffer, pos):
    """Decode serialized double to a double and new position.

    Args:
      buffer: memoryview of the serialized bytes.
      pos: int, position in the memory view to start at.

    Returns:
      Tuple[float, int] of the decoded double value and new position
      in the serialized data.
    """
    # We expect a 64-bit value in little-endian byte order. Bit 1 is the sign
    # bit, bits 2-12 represent the exponent, and bits 13-64 are the significand.
    new_pos = pos + 8
    double_bytes = buffer[pos:new_pos].tobytes()

    # If this value has all its exponent bits set and at least one significand
    # bit set, it's not a number. In Python 2.4, struct.unpack will treat it
    # as inf or -inf. To avoid that, we treat it specially.
    # NOTE: `in b'\x7F\xFF'` is a single-byte substring test: it matches
    # exactly b'\x7F' or b'\xFF' (all exponent bits set, either sign).
    if ((double_bytes[7:8] in b'\x7F\xFF')
        and (double_bytes[6:7] >= b'\xF0')
        and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')):
      return (math.nan, new_pos)

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.
    result = local_unpack('<d', double_bytes)[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)
347
+
348
+
349
def EnumDecoder(field_number, is_repeated, is_packed, key, new_default,
                clear_if_default=False):
  """Returns a decoder for an enum field.

  Values not defined in the enum are routed into the message's unknown-field
  storage instead of the field itself (proto2 closed-enum behavior).

  Args:
    field_number: int, the field's number on the wire.
    is_repeated: bool, whether the field is repeated.
    is_packed: bool, whether a repeated field uses packed encoding.
    key: FieldDescriptor used as the field_dict key; supplies .enum_type.
    new_default: callable producing the field's default container/value.
    clear_if_default: bool, if True a scalar zero value clears the field
      (proto3 presence semantics).
  """
  enum_type = key.enum_type
  if is_packed:
    local_DecodeVarint = _DecodeVarint
    def DecodePackedField(buffer, pos, end, message, field_dict):
      """Decode serialized packed enum to its value and a new position.

      Args:
        buffer: memoryview of the serialized bytes.
        pos: int, position in the memory view to start at.
        end: int, end position of serialized data
        message: Message object to store unknown fields in
        field_dict: Map[Descriptor, Any] to store decoded values in.

      Returns:
        int, new position in serialized data.
      """
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # The packed payload is length-prefixed; `endpoint` is its end offset.
      (endpoint, pos) = local_DecodeVarint(buffer, pos)
      endpoint += pos
      if endpoint > end:
        raise _DecodeError('Truncated message.')
      while pos < endpoint:
        value_start_pos = pos
        (element, pos) = _DecodeSignedVarint32(buffer, pos)
        # pylint: disable=protected-access
        if element in enum_type.values_by_number:
          value.append(element)
        else:
          # Unknown enum value: record it as an unknown varint field instead.
          if not message._unknown_fields:
            message._unknown_fields = []
          tag_bytes = encoder.TagBytes(field_number,
                                       wire_format.WIRETYPE_VARINT)

          message._unknown_fields.append(
              (tag_bytes, buffer[value_start_pos:pos].tobytes()))
          if message._unknown_field_set is None:
            message._unknown_field_set = containers.UnknownFieldSet()
          message._unknown_field_set._add(
              field_number, wire_format.WIRETYPE_VARINT, element)
        # pylint: enable=protected-access
        if pos > endpoint:
          # The last varint ran past the declared payload end: roll back the
          # element we just recorded, then fail.
          if element in enum_type.values_by_number:
            del value[-1]   # Discard corrupt value.
          else:
            del message._unknown_fields[-1]
            # pylint: disable=protected-access
            del message._unknown_field_set._values[-1]
            # pylint: enable=protected-access
          raise _DecodeError('Packed element was truncated.')
      return pos
    return DecodePackedField
  elif is_repeated:
    tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      """Decode serialized repeated enum to its value and a new position.

      Args:
        buffer: memoryview of the serialized bytes.
        pos: int, position in the memory view to start at.
        end: int, end position of serialized data
        message: Message object to store unknown fields in
        field_dict: Map[Descriptor, Any] to store decoded values in.

      Returns:
        int, new position in serialized data.
      """
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (element, new_pos) = _DecodeSignedVarint32(buffer, pos)
        # pylint: disable=protected-access
        if element in enum_type.values_by_number:
          value.append(element)
        else:
          # Unknown enum value: record it as an unknown varint field instead.
          if not message._unknown_fields:
            message._unknown_fields = []
          message._unknown_fields.append(
              (tag_bytes, buffer[pos:new_pos].tobytes()))
          if message._unknown_field_set is None:
            message._unknown_field_set = containers.UnknownFieldSet()
          message._unknown_field_set._add(
              field_number, wire_format.WIRETYPE_VARINT, element)
        # pylint: enable=protected-access
        # Predict that the next tag is another copy of the same repeated
        # field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
          # Prediction failed. Return.
          if new_pos > end:
            raise _DecodeError('Truncated message.')
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      """Decode a single serialized enum to its value and a new position.

      Args:
        buffer: memoryview of the serialized bytes.
        pos: int, position in the memory view to start at.
        end: int, end position of serialized data
        message: Message object to store unknown fields in
        field_dict: Map[Descriptor, Any] to store decoded values in.

      Returns:
        int, new position in serialized data.
      """
      value_start_pos = pos
      (enum_value, pos) = _DecodeSignedVarint32(buffer, pos)
      if pos > end:
        raise _DecodeError('Truncated message.')
      if clear_if_default and not enum_value:
        # proto3 semantics: a zero value means "unset" -- drop the entry.
        field_dict.pop(key, None)
        return pos
      # pylint: disable=protected-access
      if enum_value in enum_type.values_by_number:
        field_dict[key] = enum_value
      else:
        # Unknown enum value: record it as an unknown varint field instead.
        if not message._unknown_fields:
          message._unknown_fields = []
        tag_bytes = encoder.TagBytes(field_number,
                                     wire_format.WIRETYPE_VARINT)
        message._unknown_fields.append(
            (tag_bytes, buffer[value_start_pos:pos].tobytes()))
        if message._unknown_field_set is None:
          message._unknown_field_set = containers.UnknownFieldSet()
        message._unknown_field_set._add(
            field_number, wire_format.WIRETYPE_VARINT, enum_value)
      # pylint: enable=protected-access
      return pos
    return DecodeField
486
+
487
+
488
+ # --------------------------------------------------------------------
489
+
490
+
491
# --------------------------------------------------------------------
# Concrete decoders for each scalar wire type, built from the generic
# factories above.

Int32Decoder = _SimpleDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32)

Int64Decoder = _SimpleDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint)

UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32)
UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint)

# sint32/sint64 are zigzag-encoded varints; decode then un-zigzag.
SInt32Decoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode)
SInt64Decoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode)

# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatDecoder = _FloatDecoder()
DoubleDecoder = _DoubleDecoder()

# bool is a varint coerced through bool().
BoolDecoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint, bool)
518
+
519
+
520
def StringDecoder(field_number, is_repeated, is_packed, key, new_default,
                  clear_if_default=False):
  """Returns a decoder for a string field.

  Args:
    field_number: int, the field's number on the wire.
    is_repeated: bool, whether the field is repeated.
    is_packed: bool, must be False (strings cannot be packed).
    key: FieldDescriptor used as the field_dict key.
    new_default: callable producing the field's default container/value.
    clear_if_default: bool, if True an empty string clears the field
      (proto3 presence semantics).
  """

  local_DecodeVarint = _DecodeVarint

  def _ConvertToUnicode(memview):
    """Convert byte to unicode, annotating decode errors with the field name."""
    byte_str = memview.tobytes()
    try:
      value = str(byte_str, 'utf-8')
    except UnicodeDecodeError as e:
      # add more information to the error message and re-raise it.
      e.reason = '%s in field: %s' % (e, key.full_name)
      raise

    return value

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      """Decode one or more consecutive strings; returns the new position."""
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated string.')
        value.append(_ConvertToUnicode(buffer[pos:new_pos]))
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed. Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      """Decode a single string into field_dict; returns the new position."""
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated string.')
      if clear_if_default and not size:
        # proto3 semantics: an empty string means "unset" -- drop the entry.
        field_dict.pop(key, None)
      else:
        field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos])
      return new_pos
    return DecodeField
571
+
572
+
573
def BytesDecoder(field_number, is_repeated, is_packed, key, new_default,
                 clear_if_default=False):
  """Returns a decoder for a bytes field.

  Args:
    field_number: int, the field's number on the wire.
    is_repeated: bool, whether the field is repeated.
    is_packed: bool, must be False (bytes cannot be packed).
    key: FieldDescriptor used as the field_dict key.
    new_default: callable producing the field's default container/value.
    clear_if_default: bool, if True an empty value clears the field
      (proto3 presence semantics).
  """

  local_DecodeVarint = _DecodeVarint

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      """Decode one or more consecutive bytes values; returns new position."""
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated string.')
        value.append(buffer[pos:new_pos].tobytes())
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed. Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      """Decode a single bytes value into field_dict; returns new position."""
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated string.')
      if clear_if_default and not size:
        # proto3 semantics: an empty value means "unset" -- drop the entry.
        field_dict.pop(key, None)
      else:
        field_dict[key] = buffer[pos:new_pos].tobytes()
      return new_pos
    return DecodeField
612
+
613
+
614
def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a group field.

  Groups are delimited by START_GROUP/END_GROUP tags rather than a length
  prefix; parsing stops at the matching END_GROUP tag.
  """

  end_tag_bytes = encoder.TagBytes(field_number,
                                   wire_format.WIRETYPE_END_GROUP)
  end_tag_len = len(end_tag_bytes)

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_START_GROUP)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      """Decode one or more consecutive group items; returns new position."""
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        # NOTE(review): value is re-fetched on every iteration, which is
        # redundant with the fetch above -- harmless but unnecessary.
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        # Read sub-message.
        pos = value.add()._InternalParse(buffer, pos, end)
        # Read end tag.
        new_pos = pos+end_tag_len
        if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
          raise _DecodeError('Missing group end tag.')
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed. Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      """Decode a single group item; returns the new position."""
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Read sub-message.
      pos = value._InternalParse(buffer, pos, end)
      # Read end tag.
      new_pos = pos+end_tag_len
      if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
        raise _DecodeError('Missing group end tag.')
      return new_pos
    return DecodeField
659
+
660
+
661
def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a (length-delimited) message field."""

  local_DecodeVarint = _DecodeVarint

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      """Decode one or more consecutive sub-messages; returns new position."""
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        # Read length.
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated message.')
        # Read sub-message.
        if value.add()._InternalParse(buffer, pos, new_pos) != new_pos:
          # The only reason _InternalParse would return early is if it
          # encountered an end-group tag.
          raise _DecodeError('Unexpected end-group tag.')
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed. Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      """Decode a single sub-message into field_dict; returns new position."""
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Read length.
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated message.')
      # Read sub-message.
      if value._InternalParse(buffer, pos, new_pos) != new_pos:
        # The only reason _InternalParse would return early is if it encountered
        # an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')
      return new_pos
    return DecodeField
709
+
710
+
711
+ # --------------------------------------------------------------------
712
+
713
+ MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP)
714
+
715
def MessageSetItemDecoder(descriptor):
  """Returns a decoder for a MessageSet item.

  The parameter is the message Descriptor.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """

  type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
  message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
  item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)

  # Bind globals to locals so the hot DecodeItem loop uses fast local lookups.
  local_ReadTag = ReadTag
  local_DecodeVarint = _DecodeVarint
  local_SkipField = SkipField

  def DecodeItem(buffer, pos, end, message, field_dict):
    """Decode serialized message set to its value and new position.

    Args:
      buffer: memoryview of the serialized bytes.
      pos: int, position in the memory view to start at.
      end: int, end position of serialized data
      message: Message object to store unknown fields in
      field_dict: Map[Descriptor, Any] to store decoded values in.

    Returns:
      int, new position in serialized data.
    """
    message_set_item_start = pos
    type_id = -1
    message_start = -1
    message_end = -1

    # Technically, type_id and message can appear in any order, so we need
    # a little loop here.
    while 1:
      (tag_bytes, pos) = local_ReadTag(buffer, pos)
      if tag_bytes == type_id_tag_bytes:
        (type_id, pos) = local_DecodeVarint(buffer, pos)
      elif tag_bytes == message_tag_bytes:
        (size, message_start) = local_DecodeVarint(buffer, pos)
        pos = message_end = message_start + size
      elif tag_bytes == item_end_tag_bytes:
        break
      else:
        # Fixed: previously called the global SkipField, leaving the
        # local_SkipField binding above unused and defeating the
        # local-lookup optimization.
        pos = local_SkipField(buffer, pos, end, tag_bytes)
        if pos == -1:
          raise _DecodeError('Missing group end tag.')

    if pos > end:
      raise _DecodeError('Truncated message.')

    if type_id == -1:
      raise _DecodeError('MessageSet item missing type_id.')
    if message_start == -1:
      raise _DecodeError('MessageSet item missing message.')

    extension = message.Extensions._FindExtensionByNumber(type_id)
    # pylint: disable=protected-access
    if extension is not None:
      value = field_dict.get(extension)
      if value is None:
        message_type = extension.message_type
        if not hasattr(message_type, '_concrete_class'):
          # Lazily build the Python class for the extension's message type.
          message_factory.GetMessageClass(message_type)
        value = field_dict.setdefault(
            extension, message_type._concrete_class())
      if value._InternalParse(buffer, message_start,message_end) != message_end:
        # The only reason _InternalParse would return early is if it encountered
        # an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')
    else:
      # Unknown extension: preserve the raw item bytes as an unknown field.
      if not message._unknown_fields:
        message._unknown_fields = []
      message._unknown_fields.append(
          (MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos].tobytes()))
      if message._unknown_field_set is None:
        message._unknown_field_set = containers.UnknownFieldSet()
      message._unknown_field_set._add(
          type_id,
          wire_format.WIRETYPE_LENGTH_DELIMITED,
          buffer[message_start:message_end].tobytes())
    # pylint: enable=protected-access

    return pos

  return DecodeItem
809
+
810
+
811
def UnknownMessageSetItemDecoder():
  """Returns a decoder for a Unknown MessageSet item."""

  type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
  message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
  item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)

  def DecodeUnknownItem(buffer):
    """Decode one MessageSet item; returns (type_id, message_bytes).

    Raises:
      _DecodeError: if the item is truncated or missing type_id/message.
    """
    pos = 0
    end = len(buffer)
    # Fixed: type_id must be initialized; otherwise an item with no type_id
    # field raised UnboundLocalError below instead of the intended
    # _DecodeError (the `type_id == -1` check shows the intent).
    type_id = -1
    message_start = -1
    message_end = -1
    while 1:
      (tag_bytes, pos) = ReadTag(buffer, pos)
      if tag_bytes == type_id_tag_bytes:
        (type_id, pos) = _DecodeVarint(buffer, pos)
      elif tag_bytes == message_tag_bytes:
        (size, message_start) = _DecodeVarint(buffer, pos)
        pos = message_end = message_start + size
      elif tag_bytes == item_end_tag_bytes:
        break
      else:
        pos = SkipField(buffer, pos, end, tag_bytes)
        if pos == -1:
          raise _DecodeError('Missing group end tag.')

    if pos > end:
      raise _DecodeError('Truncated message.')

    if type_id == -1:
      raise _DecodeError('MessageSet item missing type_id.')
    if message_start == -1:
      raise _DecodeError('MessageSet item missing message.')

    return (type_id, buffer[message_start:message_end].tobytes())

  return DecodeUnknownItem
848
+
849
+ # --------------------------------------------------------------------
850
+
851
def MapDecoder(field_descriptor, new_default, is_message_map):
  """Returns a decoder for a map field.

  Map entries arrive on the wire as length-delimited messages with `key` and
  `value` fields; each is parsed into a scratch entry message and then copied
  into the map container.
  """

  key = field_descriptor
  tag_bytes = encoder.TagBytes(field_descriptor.number,
                               wire_format.WIRETYPE_LENGTH_DELIMITED)
  tag_len = len(tag_bytes)
  local_DecodeVarint = _DecodeVarint
  # Can't read _concrete_class yet; might not be initialized.
  message_type = field_descriptor.message_type

  def DecodeMap(buffer, pos, end, message, field_dict):
    """Decode one or more consecutive map entries; returns the new position."""
    # One scratch entry message, Clear()ed and re-parsed for every entry.
    submsg = message_type._concrete_class()
    value = field_dict.get(key)
    if value is None:
      value = field_dict.setdefault(key, new_default(message))
    while 1:
      # Read length.
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated message.')
      # Read sub-message.
      submsg.Clear()
      if submsg._InternalParse(buffer, pos, new_pos) != new_pos:
        # The only reason _InternalParse would return early is if it
        # encountered an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')

      if is_message_map:
        # Message values must be copied; assignment would alias the scratch.
        value[submsg.key].CopyFrom(submsg.value)
      else:
        value[submsg.key] = submsg.value

      # Predict that the next tag is another copy of the same repeated field.
      pos = new_pos + tag_len
      if buffer[new_pos:pos] != tag_bytes or new_pos == end:
        # Prediction failed. Return.
        return new_pos

  return DecodeMap
892
+
893
+ # --------------------------------------------------------------------
894
+ # Optimization is not as heavy here because calls to SkipField() are rare,
895
+ # except for handling end-group tags.
896
+
897
+ def _SkipVarint(buffer, pos, end):
898
+ """Skip a varint value. Returns the new position."""
899
+ # Previously ord(buffer[pos]) raised IndexError when pos is out of range.
900
+ # With this code, ord(b'') raises TypeError. Both are handled in
901
+ # python_message.py to generate a 'Truncated message' error.
902
+ while ord(buffer[pos:pos+1].tobytes()) & 0x80:
903
+ pos += 1
904
+ pos += 1
905
+ if pos > end:
906
+ raise _DecodeError('Truncated message.')
907
+ return pos
908
+
909
+ def _SkipFixed64(buffer, pos, end):
910
+ """Skip a fixed64 value. Returns the new position."""
911
+
912
+ pos += 8
913
+ if pos > end:
914
+ raise _DecodeError('Truncated message.')
915
+ return pos
916
+
917
+
918
+ def _DecodeFixed64(buffer, pos):
919
+ """Decode a fixed64."""
920
+ new_pos = pos + 8
921
+ return (struct.unpack('<Q', buffer[pos:new_pos])[0], new_pos)
922
+
923
+
924
def _SkipLengthDelimited(buffer, pos, end):
  """Skip a length-delimited value. Returns the new position."""
  # The payload is prefixed with its size as a varint.
  (size, new_pos) = _DecodeVarint(buffer, pos)
  new_pos += size
  if new_pos > end:
    raise _DecodeError('Truncated message.')
  return new_pos
932
+
933
+
934
def _SkipGroup(buffer, pos, end):
  """Skip sub-group. Returns the new position."""
  while True:
    (tag_bytes, pos) = ReadTag(buffer, pos)
    next_pos = SkipField(buffer, pos, end, tag_bytes)
    if next_pos == -1:
      # SkipField signals the matching END_GROUP tag with -1; pos already
      # points just past that tag.
      return pos
    pos = next_pos
943
+
944
+
945
def _DecodeUnknownFieldSet(buffer, pos, end_pos=None):
  """Decode UnknownFieldSet. Returns the UnknownFieldSet and new position.

  Args:
    buffer: memoryview of the serialized bytes.
    pos: int, position to start at.
    end_pos: optional int; when None, parsing continues until an END_GROUP
      tag is seen (used for nested groups).
  """

  unknown_field_set = containers.UnknownFieldSet()
  while end_pos is None or pos < end_pos:
    (tag_bytes, pos) = ReadTag(buffer, pos)
    (tag, _) = _DecodeVarint(tag_bytes, 0)
    field_number, wire_type = wire_format.UnpackTag(tag)
    if wire_type == wire_format.WIRETYPE_END_GROUP:
      # End of the enclosing group -- stop without recording this tag.
      break
    (data, pos) = _DecodeUnknownField(buffer, pos, wire_type)
    # pylint: disable=protected-access
    unknown_field_set._add(field_number, wire_type, data)

  return (unknown_field_set, pos)
960
+
961
+
962
def _DecodeUnknownField(buffer, pos, wire_type):
  """Decode a unknown field. Returns the UnknownField and new position.

  Dispatches on wire_type; START_GROUP recurses into
  _DecodeUnknownFieldSet, and END_GROUP returns the sentinel (0, -1).
  """

  if wire_type == wire_format.WIRETYPE_VARINT:
    (data, pos) = _DecodeVarint(buffer, pos)
  elif wire_type == wire_format.WIRETYPE_FIXED64:
    (data, pos) = _DecodeFixed64(buffer, pos)
  elif wire_type == wire_format.WIRETYPE_FIXED32:
    (data, pos) = _DecodeFixed32(buffer, pos)
  elif wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED:
    (size, pos) = _DecodeVarint(buffer, pos)
    data = buffer[pos:pos+size].tobytes()
    pos += size
  elif wire_type == wire_format.WIRETYPE_START_GROUP:
    (data, pos) = _DecodeUnknownFieldSet(buffer, pos)
  elif wire_type == wire_format.WIRETYPE_END_GROUP:
    # Sentinel: tells the caller it hit the end of an enclosing group.
    return (0, -1)
  else:
    raise _DecodeError('Wrong wire type in tag.')

  return (data, pos)
983
+
984
+
985
def _EndGroup(buffer, pos, end):
  """Skipping an END_GROUP tag returns -1 to tell the parent loop to break.

  The buffer/pos/end parameters are unused; they exist so this function has
  the same signature as the other skippers in the dispatch table.
  """

  return -1
989
+
990
+
991
+ def _SkipFixed32(buffer, pos, end):
992
+ """Skip a fixed32 value. Returns the new position."""
993
+
994
+ pos += 4
995
+ if pos > end:
996
+ raise _DecodeError('Truncated message.')
997
+ return pos
998
+
999
+
1000
+ def _DecodeFixed32(buffer, pos):
1001
+ """Decode a fixed32."""
1002
+
1003
+ new_pos = pos + 4
1004
+ return (struct.unpack('<I', buffer[pos:new_pos])[0], new_pos)
1005
+
1006
+
1007
def _RaiseInvalidWireType(buffer, pos, end):
  """Skip function for unknown wire types. Raises an exception.

  Installed in the dispatch table for the two wire-type values (6 and 7)
  that the protobuf wire format does not define.
  """

  raise _DecodeError('Tag had invalid wire type.')
1011
+
1012
def _FieldSkipper():
  """Constructs the SkipField function.

  Builds a dispatch table indexed by wire type (0-7) and returns a closure
  that routes each tag to the appropriate skipper.
  """

  WIRETYPE_TO_SKIPPER = [
      _SkipVarint,
      _SkipFixed64,
      _SkipLengthDelimited,
      _SkipGroup,
      _EndGroup,
      _SkipFixed32,
      _RaiseInvalidWireType,
      _RaiseInvalidWireType,
      ]

  wiretype_mask = wire_format.TAG_TYPE_MASK

  def SkipField(buffer, pos, end, tag_bytes):
    """Skips a field with the specified tag.

    |pos| should point to the byte immediately after the tag.

    Returns:
      The new position (after the tag value), or -1 if the tag is an end-group
      tag (in which case the calling loop should break).
    """

    # The wire type is always in the first byte since varints are little-endian.
    wire_type = ord(tag_bytes[0:1]) & wiretype_mask
    return WIRETYPE_TO_SKIPPER[wire_type](buffer, pos, end)

  return SkipField
1043
+
1044
+ SkipField = _FieldSkipper()
lib/python3.10/site-packages/google/protobuf/internal/enum_type_wrapper.py ADDED
@@ -0,0 +1,101 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """A simple wrapper around enum types to expose utility functions.
9
+
10
+ Instances are created as properties with the same name as the enum they wrap
11
+ on proto classes. For usage, see:
12
+ reflection_test.py
13
+ """
14
+
15
+ __author__ = 'rabsatt@google.com (Kevin Rabsatt)'
16
+
17
+
18
class EnumTypeWrapper(object):
  """A utility for finding the names of enum values."""

  DESCRIPTOR = None

  # Type alias that .pyi stubs may narrow (e.g. NewType('ValueType', int))
  # so generated enums can be typed with more precision than plain int.
  ValueType = int

  def __init__(self, enum_type):
    """Inits EnumTypeWrapper with an EnumDescriptor."""
    self._enum_type = enum_type
    self.DESCRIPTOR = enum_type  # pylint: disable=invalid-name

  def Name(self, number):  # pylint: disable=invalid-name
    """Returns a string containing the name of an enum value."""
    try:
      return self._enum_type.values_by_number[number].name
    except KeyError:
      pass  # fall out to break exception chaining

    if not isinstance(number, int):
      raise TypeError(
          'Enum value for {} must be an int, but got {} {!r}.'.format(
              self._enum_type.name, type(number), number))
    # repr here to handle the odd case when you pass in a boolean.
    raise ValueError('Enum {} has no name defined for value {!r}'.format(
        self._enum_type.name, number))

  def Value(self, name):  # pylint: disable=invalid-name
    """Returns the value corresponding to the given enum name."""
    try:
      return self._enum_type.values_by_name[name].number
    except KeyError:
      pass  # fall out to break exception chaining
    raise ValueError('Enum {} has no value defined for name {!r}'.format(
        self._enum_type.name, name))

  def keys(self):
    """Return a list of the string names in the enum.

    Returns:
      A list of strs, in the order they were defined in the .proto file.
    """
    return [descriptor.name for descriptor in self._enum_type.values]

  def values(self):
    """Return a list of the integer values in the enum.

    Returns:
      A list of ints, in the order they were defined in the .proto file.
    """
    return [descriptor.number for descriptor in self._enum_type.values]

  def items(self):
    """Return a list of the (name, value) pairs of the enum.

    Returns:
      A list of (str, int) pairs, in the order they were defined
      in the .proto file.
    """
    return [(descriptor.name, descriptor.number)
            for descriptor in self._enum_type.values]

  def __getattr__(self, name):
    """Returns the value corresponding to the given enum name."""
    # Go through __getattribute__ directly to avoid re-entering __getattr__.
    enum_type = super(EnumTypeWrapper, self).__getattribute__('_enum_type')
    try:
      return enum_type.values_by_name[name].number
    except KeyError:
      pass  # fall out to break exception chaining
    raise AttributeError('Enum {} has no value defined for name {!r}'.format(
        self._enum_type.name, name))
lib/python3.10/site-packages/google/protobuf/internal/field_mask.py ADDED
@@ -0,0 +1,310 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains FieldMask class."""
9
+
10
+ from google.protobuf.descriptor import FieldDescriptor
11
+
12
+
13
class FieldMask(object):
  """Mixin providing the FieldMask well-known-type API.

  Combined with a generated message class that supplies the repeated
  string field ``paths`` and the ``Clear`` method.
  """

  __slots__ = ()

  def ToJsonString(self):
    """Converts FieldMask to string according to proto3 JSON spec."""
    return ','.join(_SnakeCaseToCamelCase(path) for path in self.paths)

  def FromJsonString(self, value):
    """Converts string to FieldMask according to proto3 JSON spec.

    Raises:
      ValueError: if *value* is not a string.
    """
    if not isinstance(value, str):
      raise ValueError('FieldMask JSON value not a string: {!r}'.format(value))
    self.Clear()
    if not value:
      return
    for path in value.split(','):
      self.paths.append(_CamelCaseToSnakeCase(path))

  def IsValidForDescriptor(self, message_descriptor):
    """Checks whether the FieldMask is valid for Message Descriptor."""
    return all(
        _IsValidPath(message_descriptor, path) for path in self.paths)

  def AllFieldsFromDescriptor(self, message_descriptor):
    """Gets all direct fields of Message Descriptor to FieldMask."""
    self.Clear()
    for field in message_descriptor.fields:
      self.paths.append(field.name)

  def CanonicalFormFromMask(self, mask):
    """Converts a FieldMask to the canonical form.

    Removes paths that are covered by another path. For example,
    "foo.bar" is covered by "foo" and will be removed if "foo"
    is also in the FieldMask. Then sorts all paths in alphabetical order.

    Args:
      mask: The original FieldMask to be converted.
    """
    _FieldMaskTree(mask).ToFieldMask(self)

  def Union(self, mask1, mask2):
    """Merges mask1 and mask2 into this FieldMask."""
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    combined = _FieldMaskTree(mask1)
    combined.MergeFromFieldMask(mask2)
    combined.ToFieldMask(self)

  def Intersect(self, mask1, mask2):
    """Intersects mask1 and mask2 into this FieldMask."""
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    tree = _FieldMaskTree(mask1)
    common = _FieldMaskTree()
    for path in mask2.paths:
      tree.IntersectPath(path, common)
    common.ToFieldMask(self)

  def MergeMessage(
      self, source, destination,
      replace_message_field=False, replace_repeated_field=False):
    """Merges fields specified in FieldMask from source to destination.

    Args:
      source: Source message.
      destination: The destination message to be merged into.
      replace_message_field: Replace message field if True. Merge message
        field if False.
      replace_repeated_field: Replace repeated field if True. Append
        elements of repeated field if False.
    """
    _FieldMaskTree(self).MergeMessage(
        source, destination, replace_message_field, replace_repeated_field)
94
+
95
+
96
def _IsValidPath(message_descriptor, path):
  """Returns True if *path* names a field reachable from message_descriptor.

  Every intermediate segment must be a singular message field; the last
  segment only needs to exist on the innermost message.
  """
  *parents, leaf = path.split('.')
  for segment in parents:
    field = message_descriptor.fields_by_name.get(segment)
    if field is None:
      return False
    # Only singular (non-repeated) message fields may have sub-paths.
    if field.label == FieldDescriptor.LABEL_REPEATED:
      return False
    if field.type != FieldDescriptor.TYPE_MESSAGE:
      return False
    message_descriptor = field.message_type
  return leaf in message_descriptor.fields_by_name
108
+
109
+
110
+ def _CheckFieldMaskMessage(message):
111
+ """Raises ValueError if message is not a FieldMask."""
112
+ message_descriptor = message.DESCRIPTOR
113
+ if (message_descriptor.name != 'FieldMask' or
114
+ message_descriptor.file.name != 'google/protobuf/field_mask.proto'):
115
+ raise ValueError('Message {0} is not a FieldMask.'.format(
116
+ message_descriptor.full_name))
117
+
118
+
119
+ def _SnakeCaseToCamelCase(path_name):
120
+ """Converts a path name from snake_case to camelCase."""
121
+ result = []
122
+ after_underscore = False
123
+ for c in path_name:
124
+ if c.isupper():
125
+ raise ValueError(
126
+ 'Fail to print FieldMask to Json string: Path name '
127
+ '{0} must not contain uppercase letters.'.format(path_name))
128
+ if after_underscore:
129
+ if c.islower():
130
+ result.append(c.upper())
131
+ after_underscore = False
132
+ else:
133
+ raise ValueError(
134
+ 'Fail to print FieldMask to Json string: The '
135
+ 'character after a "_" must be a lowercase letter '
136
+ 'in path name {0}.'.format(path_name))
137
+ elif c == '_':
138
+ after_underscore = True
139
+ else:
140
+ result += c
141
+
142
+ if after_underscore:
143
+ raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
144
+ 'in path name {0}.'.format(path_name))
145
+ return ''.join(result)
146
+
147
+
148
+ def _CamelCaseToSnakeCase(path_name):
149
+ """Converts a field name from camelCase to snake_case."""
150
+ result = []
151
+ for c in path_name:
152
+ if c == '_':
153
+ raise ValueError('Fail to parse FieldMask: Path name '
154
+ '{0} must not contain "_"s.'.format(path_name))
155
+ if c.isupper():
156
+ result += '_'
157
+ result += c.lower()
158
+ else:
159
+ result += c
160
+ return ''.join(result)
161
+
162
+
163
+ class _FieldMaskTree(object):
164
+ """Represents a FieldMask in a tree structure.
165
+
166
+ For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
167
+ the FieldMaskTree will be:
168
+ [_root] -+- foo -+- bar
169
+ | |
170
+ | +- baz
171
+ |
172
+ +- bar --- baz
173
+ In the tree, each leaf node represents a field path.
174
+ """
175
+
176
+ __slots__ = ('_root',)
177
+
178
+ def __init__(self, field_mask=None):
179
+ """Initializes the tree by FieldMask."""
180
+ self._root = {}
181
+ if field_mask:
182
+ self.MergeFromFieldMask(field_mask)
183
+
184
+ def MergeFromFieldMask(self, field_mask):
185
+ """Merges a FieldMask to the tree."""
186
+ for path in field_mask.paths:
187
+ self.AddPath(path)
188
+
189
+ def AddPath(self, path):
190
+ """Adds a field path into the tree.
191
+
192
+ If the field path to add is a sub-path of an existing field path
193
+ in the tree (i.e., a leaf node), it means the tree already matches
194
+ the given path so nothing will be added to the tree. If the path
195
+ matches an existing non-leaf node in the tree, that non-leaf node
196
+ will be turned into a leaf node with all its children removed because
197
+ the path matches all the node's children. Otherwise, a new path will
198
+ be added.
199
+
200
+ Args:
201
+ path: The field path to add.
202
+ """
203
+ node = self._root
204
+ for name in path.split('.'):
205
+ if name not in node:
206
+ node[name] = {}
207
+ elif not node[name]:
208
+ # Pre-existing empty node implies we already have this entire tree.
209
+ return
210
+ node = node[name]
211
+ # Remove any sub-trees we might have had.
212
+ node.clear()
213
+
214
+ def ToFieldMask(self, field_mask):
215
+ """Converts the tree to a FieldMask."""
216
+ field_mask.Clear()
217
+ _AddFieldPaths(self._root, '', field_mask)
218
+
219
+ def IntersectPath(self, path, intersection):
220
+ """Calculates the intersection part of a field path with this tree.
221
+
222
+ Args:
223
+ path: The field path to calculates.
224
+ intersection: The out tree to record the intersection part.
225
+ """
226
+ node = self._root
227
+ for name in path.split('.'):
228
+ if name not in node:
229
+ return
230
+ elif not node[name]:
231
+ intersection.AddPath(path)
232
+ return
233
+ node = node[name]
234
+ intersection.AddLeafNodes(path, node)
235
+
236
+ def AddLeafNodes(self, prefix, node):
237
+ """Adds leaf nodes begin with prefix to this tree."""
238
+ if not node:
239
+ self.AddPath(prefix)
240
+ for name in node:
241
+ child_path = prefix + '.' + name
242
+ self.AddLeafNodes(child_path, node[name])
243
+
244
+ def MergeMessage(
245
+ self, source, destination,
246
+ replace_message, replace_repeated):
247
+ """Merge all fields specified by this tree from source to destination."""
248
+ _MergeMessage(
249
+ self._root, source, destination, replace_message, replace_repeated)
250
+
251
+
252
+ def _StrConvert(value):
253
+ """Converts value to str if it is not."""
254
+ # This file is imported by c extension and some methods like ClearField
255
+ # requires string for the field name. py2/py3 has different text
256
+ # type and may use unicode.
257
+ if not isinstance(value, str):
258
+ return value.encode('utf-8')
259
+ return value
260
+
261
+
262
def _MergeMessage(
    node, source, destination, replace_message, replace_repeated):
  """Merge all fields specified by a sub-tree from source to destination.

  Args:
    node: A dict node of a _FieldMaskTree; keys are field names.
    source: Source message for the merge.
    destination: Destination message, modified in place.
    replace_message: If True, singular message fields in destination are
      cleared before merging; if False their contents are merged.
    replace_repeated: If True, repeated fields are cleared before copying;
      if False, source elements are appended.

  Raises:
    ValueError: If a path names an unknown field, or has sub-fields under
      a field that is not a singular message field.
  """
  source_descriptor = source.DESCRIPTOR
  for name in node:
    child = node[name]
    # Use .get() so an unknown field name raises the intended ValueError
    # below. (A plain subscript raises KeyError first, which made the
    # original `field is None` check unreachable.)
    field = source_descriptor.fields_by_name.get(name)
    if field is None:
      raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
          name, source_descriptor.full_name))
    if child:
      # Sub-paths are only allowed for singular message fields.
      if (field.label == FieldDescriptor.LABEL_REPEATED or
          field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
        raise ValueError('Error: Field {0} in message {1} is not a singular '
                         'message field and cannot have sub-fields.'.format(
                             name, source_descriptor.full_name))
      if source.HasField(name):
        # Recurse into the sub-message only when it is actually set.
        _MergeMessage(
            child, getattr(source, name), getattr(destination, name),
            replace_message, replace_repeated)
      continue
    if field.label == FieldDescriptor.LABEL_REPEATED:
      if replace_repeated:
        destination.ClearField(_StrConvert(name))
      repeated_source = getattr(source, name)
      repeated_destination = getattr(destination, name)
      repeated_destination.MergeFrom(repeated_source)
    else:
      if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
        if replace_message:
          destination.ClearField(_StrConvert(name))
        if source.HasField(name):
          getattr(destination, name).MergeFrom(getattr(source, name))
      else:
        # Scalar singular field: straight copy.
        setattr(destination, name, getattr(source, name))
298
+
299
+
300
+ def _AddFieldPaths(node, prefix, field_mask):
301
+ """Adds the field paths descended from node to field_mask."""
302
+ if not node and prefix:
303
+ field_mask.paths.append(prefix)
304
+ return
305
+ for name in sorted(node):
306
+ if prefix:
307
+ child_path = prefix + '.' + name
308
+ else:
309
+ child_path = name
310
+ _AddFieldPaths(node[name], child_path, field_mask)
lib/python3.10/site-packages/google/protobuf/internal/testing_refleaks.py ADDED
@@ -0,0 +1,119 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """A subclass of unittest.TestCase which checks for reference leaks.
9
+
10
+ To use:
11
+ - Use testing_refleak.BaseTestCase instead of unittest.TestCase
12
+ - Configure and compile Python with --with-pydebug
13
+
14
+ If sys.gettotalrefcount() is not available (because Python was built without
15
+ the Py_DEBUG option), then this module is a no-op and tests will run normally.
16
+ """
17
+
18
+ import copyreg
19
+ import gc
20
+ import sys
21
+ import unittest
22
+
23
+
24
class LocalTestResult(unittest.TestResult):
  """A TestResult which forwards events to a parent object, except for Skips.

  Used by the refleak checker to re-run a test several times while
  reporting each failure/error only once to the real result object.
  """

  def __init__(self, parent_result):
    super().__init__()
    self.parent_result = parent_result

  def addError(self, test, error):
    # Forward errors to the real result.
    self.parent_result.addError(test, error)

  def addFailure(self, test, error):
    # Forward failures to the real result.
    self.parent_result.addFailure(test, error)

  def addSkip(self, test, reason):
    # Deliberately swallowed: the parent already saw the skip once.
    pass
+ pass
39
+
40
+
41
class ReferenceLeakCheckerMixin(object):
  """A mixin class for TestCase, which checks reference counts.

  Overrides run() to execute the test several times and compare
  sys.gettotalrefcount() before and after each run; any nonzero delta
  is reported as an error on the real result object.
  """

  # Number of measured runs whose refcount deltas must all be zero.
  NB_RUNS = 3

  def run(self, result=None):
    # Tests marked with unittest's expectedFailure (on the method or the
    # class) are skipped entirely: a raised-and-handled failure perturbs
    # reference counts and would produce false leak reports.
    testMethod = getattr(self, self._testMethodName)
    expecting_failure_method = getattr(testMethod, "__unittest_expecting_failure__", False)
    expecting_failure_class = getattr(self, "__unittest_expecting_failure__", False)
    if expecting_failure_class or expecting_failure_method:
      return

    # python_message.py registers all Message classes to some pickle global
    # registry, which makes the classes immortal.
    # We save a copy of this registry, and reset it before we could references.
    self._saved_pickle_registry = copyreg.dispatch_table.copy()

    # Run the test twice, to warm up the instance attributes.
    super(ReferenceLeakCheckerMixin, self).run(result=result)
    super(ReferenceLeakCheckerMixin, self).run(result=result)

    oldrefcount = 0
    # Measurement runs report through LocalTestResult so failures are not
    # duplicated on the parent result for every extra run.
    local_result = LocalTestResult(result)
    num_flakes = 0

    refcount_deltas = []
    while len(refcount_deltas) < self.NB_RUNS:
      oldrefcount = self._getRefcounts()
      super(ReferenceLeakCheckerMixin, self).run(result=local_result)
      newrefcount = self._getRefcounts()
      # If the GC was able to collect some objects after the call to run() that
      # it could not collect before the call, then the counts won't match.
      if newrefcount < oldrefcount and num_flakes < 2:
        # This result is (probably) a flake -- garbage collectors aren't very
        # predictable, but a lower ending refcount is the opposite of the
        # failure we are testing for. If the result is repeatable, then we will
        # eventually report it, but not after trying to eliminate it.
        num_flakes += 1
        continue
      num_flakes = 0
      refcount_deltas.append(newrefcount - oldrefcount)
    print(refcount_deltas, self)

    try:
      # Every measured run must leave the total refcount unchanged.
      self.assertEqual(refcount_deltas, [0] * self.NB_RUNS)
    except Exception:  # pylint: disable=broad-except
      result.addError(self, sys.exc_info())

  def _getRefcounts(self):
    # Restore the pickle registry saved in run() so classes registered
    # during the test do not show up as leaked references.
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(self._saved_pickle_registry)
    # It is sometimes necessary to gc.collect() multiple times, to ensure
    # that all objects can be collected.
    gc.collect()
    gc.collect()
    gc.collect()
    # Only available on Python builds configured with --with-pydebug.
    return sys.gettotalrefcount()
98
+
99
+
100
if hasattr(sys, 'gettotalrefcount'):
  # Debug (--with-pydebug) build: enable refleak checking.

  def TestCase(test_class):
    """Returns test_class with ReferenceLeakCheckerMixin prepended to its bases."""
    bases = (ReferenceLeakCheckerMixin,) + test_class.__bases__
    return type(test_class)(test_class.__name__, bases, dict(test_class.__dict__))

  SkipReferenceLeakChecker = unittest.skip

else:
  # When PyDEBUG is not enabled, run the tests normally.

  def TestCase(test_class):
    """Identity decorator: no refleak checking on release builds."""
    return test_class

  def SkipReferenceLeakChecker(reason):
    """No-op decorator factory on release builds."""
    del reason  # Don't skip, so don't need a reason.

    def Same(func):
      return func

    return Same
lib/python3.10/site-packages/google/protobuf/internal/type_checkers.py ADDED
@@ -0,0 +1,408 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Provides type checking routines.
9
+
10
+ This module defines type checking utilities in the forms of dictionaries:
11
+
12
+ VALUE_CHECKERS: A dictionary of field types and a value validation object.
13
+ TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing
14
+ function.
15
+ TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization
16
+ function.
17
+ FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their
18
+ corresponding wire types.
19
+ TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization
20
+ function.
21
+ """
22
+
23
+ __author__ = 'robinson@google.com (Will Robinson)'
24
+
25
+ import ctypes
26
+ import numbers
27
+
28
+ from google.protobuf.internal import decoder
29
+ from google.protobuf.internal import encoder
30
+ from google.protobuf.internal import wire_format
31
+ from google.protobuf import descriptor
32
+
33
+ _FieldDescriptor = descriptor.FieldDescriptor
34
+
35
+
36
def TruncateToFourByteFloat(original):
  """Rounds *original* to the nearest single-precision (4-byte) float value."""
  return ctypes.c_float(original).value


def ToShortestFloat(original):
  """Returns the shortest float that has same value in wire.

  All 4-byte floats have between 6 and 9 significant digits, so start at
  6 and widen until the rounded repr survives a round-trip through
  single precision. Iteration is needed because formatting straight with
  '.9g' keeps float noise (e.g. 0.9 would print as 0.899999976).
  """
  precision = 6
  while True:
    shortened = float('{0:.{1}g}'.format(original, precision))
    if TruncateToFourByteFloat(shortened) == original:
      return shortened
    precision += 1
53
+
54
+
55
def GetTypeChecker(field):
  """Returns a type checker for a message field of the specified types.

  Args:
    field: FieldDescriptor object for this field.

  Returns:
    An instance of TypeChecker which can be used to verify the types
    of values assigned to a field of the specified type.
  """
  is_proto_string = (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and
                     field.type == _FieldDescriptor.TYPE_STRING)
  if is_proto_string:
    return UnicodeValueChecker()
  if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
    if field.enum_type.is_closed:
      return EnumValueChecker(field.enum_type)
    # When open enums are supported, any int32 can be assigned.
    return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32]
  return _VALUE_CHECKERS[field.cpp_type]
75
+
76
+
77
# None of the typecheckers below make any attempt to guard against people
# subclassing builtin types and doing weird things. We're not trying to
# protect against malicious clients here, just people accidentally shooting
# themselves in the foot in obvious ways.
class TypeChecker(object):

  """Type checker used to catch type errors as early as possible
  when the client is setting scalar fields in protocol messages.
  """

  def __init__(self, *acceptable_types):
    self._acceptable_types = acceptable_types

  def CheckValue(self, proposed_value):
    """Type check the provided value and return it.

    The returned value might have been normalized to another type.
    """
    if isinstance(proposed_value, self._acceptable_types):
      return proposed_value
    raise TypeError('%.1024r has type %s, but expected one of: %s' %
                    (proposed_value, type(proposed_value),
                     self._acceptable_types))


class TypeCheckerWithDefault(TypeChecker):
  """TypeChecker that also carries the field's default value."""

  def __init__(self, default_value, *acceptable_types):
    super().__init__(*acceptable_types)
    self._default_value = default_value

  def DefaultValue(self):
    return self._default_value
110
+
111
+
112
class BoolValueChecker(object):
  """Type checker used for bool fields."""

  def CheckValue(self, proposed_value):
    """Coerces index-like values to bool; rejects everything else."""
    cls = type(proposed_value)
    is_numpy_array = cls.__module__ == 'numpy' and cls.__name__ == 'ndarray'
    if hasattr(proposed_value, '__index__') and not is_numpy_array:
      return bool(proposed_value)
    raise TypeError('%.1024r has type %s, but expected one of: %s' %
                    (proposed_value, type(proposed_value), (bool, int)))

  def DefaultValue(self):
    return False
126
+
127
+
128
# IntValueChecker and its subclasses perform integer type-checks
# and bounds-checks.
class IntValueChecker(object):

  """Checker used for integer fields. Performs type-check and range check.

  Subclasses supply the _MIN / _MAX class attributes.
  """

  def CheckValue(self, proposed_value):
    cls = type(proposed_value)
    is_numpy_array = cls.__module__ == 'numpy' and cls.__name__ == 'ndarray'
    if not hasattr(proposed_value, '__index__') or is_numpy_array:
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, cls, (int,)))

    # Normalize to a plain int so alternate implementations where the
    # distinction matters (e.g. the C++ implementation) stay simple.
    as_int = int(proposed_value)
    if not self._MIN <= as_int <= self._MAX:
      raise ValueError('Value out of range: %d' % proposed_value)
    return as_int

  def DefaultValue(self):
    return 0
151
+
152
+
153
class EnumValueChecker(object):

  """Checker used for enum fields. Performs type-check and range check."""

  def __init__(self, enum_type):
    self._enum_type = enum_type

  def CheckValue(self, proposed_value):
    if not isinstance(proposed_value, numbers.Integral):
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, type(proposed_value), (int,)))
    if int(proposed_value) in self._enum_type.values_by_number:
      # Note: the value is returned as-is, not normalized to int.
      return proposed_value
    raise ValueError('Unknown enum value: %d' % proposed_value)

  def DefaultValue(self):
    # The first declared enum value is the default.
    return self._enum_type.values[0].number
171
+
172
+
173
class UnicodeValueChecker(object):

  """Checker used for string fields.

  Always returns a unicode value, even if the input is of type str.
  """

  def CheckValue(self, proposed_value):
    if not isinstance(proposed_value, (bytes, str)):
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, type(proposed_value), (bytes, str)))

    if isinstance(proposed_value, bytes):
      # A 'bytes' value must be valid UTF-8 data.
      try:
        return proposed_value.decode('utf-8')
      except UnicodeDecodeError:
        raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 '
                         'encoding. Non-UTF-8 strings must be converted to '
                         'unicode objects before being added.' %
                         (proposed_value))

    # A str value must be encodable (e.g. must not contain lone surrogates).
    try:
      proposed_value.encode('utf8')
    except UnicodeEncodeError:
      raise ValueError('%.1024r isn\'t a valid unicode string and '
                       'can\'t be encoded in UTF-8.'%
                       (proposed_value))
    return proposed_value

  def DefaultValue(self):
    return u""
207
+
208
+
209
class Int32ValueChecker(IntValueChecker):
  # We're sure to use ints instead of longs here since comparison may be more
  # efficient. Signed 32-bit range.
  _MIN = -(1 << 31)
  _MAX = (1 << 31) - 1


class Uint32ValueChecker(IntValueChecker):
  # Unsigned 32-bit range.
  _MIN = 0
  _MAX = (1 << 32) - 1


class Int64ValueChecker(IntValueChecker):
  # Signed 64-bit range.
  _MIN = -(1 << 63)
  _MAX = (1 << 63) - 1


class Uint64ValueChecker(IntValueChecker):
  # Unsigned 64-bit range.
  _MIN = 0
  _MAX = (1 << 64) - 1
229
+
230
+
231
# The largest finite single-precision (4-byte) float,
# about 3.4028234663852886e+38.
_FLOAT_MAX = float.fromhex('0x1.fffffep+127')
_FLOAT_MIN = -_FLOAT_MAX
_INF = float('inf')
_NEG_INF = float('-inf')


class DoubleValueChecker(object):
  """Checker used for double fields.

  Performs type-check and range check.
  """

  def CheckValue(self, proposed_value):
    """Check and convert proposed_value to float."""
    cls = type(proposed_value)
    is_numeric = (hasattr(proposed_value, '__float__') or
                  hasattr(proposed_value, '__index__'))
    is_numpy_array = cls.__module__ == 'numpy' and cls.__name__ == 'ndarray'
    if not is_numeric or is_numpy_array:
      raise TypeError('%.1024r has type %s, but expected one of: int, float' %
                      (proposed_value, cls))
    return float(proposed_value)

  def DefaultValue(self):
    return 0.0


class FloatValueChecker(DoubleValueChecker):
  """Checker used for float fields.

  Performs type-check and range check.

  Values exceeding a 32-bit float will be converted to inf/-inf.
  """

  def CheckValue(self, proposed_value):
    """Check and convert proposed_value to a single-precision float."""
    converted_value = super().CheckValue(proposed_value)
    # Out-of-range values round to +/-inf; this matches the C++ proto
    # SafeDoubleToFloat logic.
    if converted_value > _FLOAT_MAX:
      return _INF
    if converted_value < _FLOAT_MIN:
      return _NEG_INF
    return TruncateToFourByteFloat(converted_value)
277
+
278
# Type-checkers for all scalar CPPTYPEs.
# NOTE: CPPTYPE_STRING here only covers `bytes` fields; proto string fields
# are routed to UnicodeValueChecker by GetTypeChecker() before this table
# is consulted.
_VALUE_CHECKERS = {
    _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(),
    _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(),
    _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(),
    _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(),
    _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(),
    _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(),
    _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(),
    _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes),
    }


# Map from field type to a function F, such that F(field_num, value)
# gives the total byte size for a value of the given type.  This
# byte size includes tag information and any other additional space
# associated with serializing "value".
TYPE_TO_BYTE_SIZE_FN = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize,
    _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize,
    _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize,
    _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize,
    _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize,
    _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize,
    _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize,
    _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize,
    _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize,
    _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize,
    _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize,
    _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize,
    _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize,
    _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize,
    _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize,
    _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize
    }


# Maps from field types to encoder constructors.
TYPE_TO_ENCODER = {
    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder,
    _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder,
    _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder,
    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder,
    _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder,
    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder,
    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder,
    _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder,
    _FieldDescriptor.TYPE_STRING: encoder.StringEncoder,
    _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder,
    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder,
    _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder,
    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder,
    _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder,
    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder,
    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder,
    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder,
    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder,
    }


# Maps from field types to sizer constructors.
TYPE_TO_SIZER = {
    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer,
    _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer,
    _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer,
    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer,
    _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer,
    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer,
    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer,
    _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer,
    _FieldDescriptor.TYPE_STRING: encoder.StringSizer,
    _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer,
    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer,
    _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer,
    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer,
    _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer,
    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer,
    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer,
    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer,
    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer,
    }


# Maps from field type to a decoder constructor.
TYPE_TO_DECODER = {
    _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder,
    _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder,
    _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder,
    _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder,
    _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder,
    _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder,
    _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder,
    _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder,
    _FieldDescriptor.TYPE_STRING: decoder.StringDecoder,
    _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder,
    _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder,
    _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder,
    _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder,
    _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder,
    _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder,
    _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder,
    _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder,
    _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder,
    }

# Maps from field type to expected wiretype.
FIELD_TYPE_TO_WIRE_TYPE = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_STRING:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP,
    _FieldDescriptor.TYPE_MESSAGE:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_BYTES:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT,
    }
lib/python3.10/site-packages/google/protobuf/internal/wire_format.py ADDED
@@ -0,0 +1,245 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Constants and static functions to support protocol buffer wire format."""
9
+
10
+ __author__ = 'robinson@google.com (Will Robinson)'
11
+
12
+ import struct
13
+ from google.protobuf import descriptor
14
+ from google.protobuf import message
15
+
16
+
17
TAG_TYPE_BITS = 3  # Number of low-order tag bits that carry the wire type.
TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1  # 0x7

# Wire-type identifiers for protocol buffer values.  The least-significant
# TAG_TYPE_BITS bits of a varint-encoded tag hold one of these WIRETYPE_*
# constants; the values must match the WireType enum in
# //google/protobuf/wire_format.h.
WIRETYPE_VARINT = 0
WIRETYPE_FIXED64 = 1
WIRETYPE_LENGTH_DELIMITED = 2
WIRETYPE_START_GROUP = 3
WIRETYPE_END_GROUP = 4
WIRETYPE_FIXED32 = 5
_WIRETYPE_MAX = 5


# Bounds for the various integer types.
INT32_MAX = (1 << 31) - 1
INT32_MIN = -(1 << 31)
UINT32_MAX = (1 << 32) - 1

INT64_MAX = (1 << 63) - 1
INT64_MIN = -(1 << 63)
UINT64_MAX = (1 << 64) - 1

# "struct" format strings that encode/decode the specified formats.
FORMAT_UINT32_LITTLE_ENDIAN = '<I'
FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'


# Alternate implementations of AppendLittleEndian*() would be required on
# any architecture where these sanity checks fail.
if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
  raise AssertionError('Format "I" is not a 32-bit number.')
if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
  raise AssertionError('Format "Q" is not a 64-bit number.')


def PackTag(field_number, wire_type):
  """Combines a field number and wire type into one wire-format tag.

  Args:
    field_number: Expected to be an integer in the range [1, 1 << 29).
    wire_type: One of the WIRETYPE_* constants.

  Returns:
    An unsigned integer with the field number in the high bits and the
    wire type in the low TAG_TYPE_BITS bits.

  Raises:
    message.EncodeError: If wire_type is outside [0, _WIRETYPE_MAX].
  """
  if wire_type < 0 or wire_type > _WIRETYPE_MAX:
    raise message.EncodeError('Unknown wire type: %d' % wire_type)
  return (field_number << TAG_TYPE_BITS) | wire_type


def UnpackTag(tag):
  """Splits a wire-format tag into a (field_number, wire_type) tuple.

  The inverse of PackTag().
  """
  return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)
75
+
76
+
77
def ZigZagEncode(value):
  """Maps a signed integer to an unsigned one for efficient varint coding.

  Small magnitudes of either sign become small unsigned values:
  0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ...  See wire_format.h for details.
  """
  if value < 0:
    return (value << 1) ^ ~0
  return value << 1


def ZigZagDecode(value):
  """Recovers the signed integer that ZigZagEncode() produced."""
  if value & 1:
    return (value >> 1) ^ ~0
  return value >> 1
92
+
93
+
94
+
95
# The *ByteSize() functions below return the number of bytes required to
# serialize "field number + type" information and then serialize the value.


def Int32ByteSize(field_number, int32):
  """Returns the tagged wire size of an int32 field."""
  # int32 is encoded exactly like int64 on the wire.
  return Int64ByteSize(field_number, int32)


def Int32ByteSizeNoTag(int32):
  """Returns the wire size of an int32 value alone, without a tag."""
  # Negative values are sign-extended to 64 bits before encoding.
  return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)


def Int64ByteSize(field_number, int64):
  """Returns the tagged wire size of an int64 field."""
  # Reinterpret as unsigned before calling UInt64ByteSize().
  return UInt64ByteSize(field_number, 0xffffffffffffffff & int64)


def UInt32ByteSize(field_number, uint32):
  """Returns the tagged wire size of a uint32 field."""
  return UInt64ByteSize(field_number, uint32)


def UInt64ByteSize(field_number, uint64):
  """Returns the tagged wire size of a uint64 field."""
  return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64)


def SInt32ByteSize(field_number, int32):
  """Returns the tagged wire size of a zigzag-encoded sint32 field."""
  return UInt32ByteSize(field_number, ZigZagEncode(int32))


def SInt64ByteSize(field_number, int64):
  """Returns the tagged wire size of a zigzag-encoded sint64 field."""
  return UInt64ByteSize(field_number, ZigZagEncode(int64))


def Fixed32ByteSize(field_number, fixed32):
  """Returns the tagged wire size of a fixed32 field: tag + 4 bytes."""
  return TagByteSize(field_number) + 4


def Fixed64ByteSize(field_number, fixed64):
  """Returns the tagged wire size of a fixed64 field: tag + 8 bytes."""
  return TagByteSize(field_number) + 8


def SFixed32ByteSize(field_number, sfixed32):
  """Returns the tagged wire size of an sfixed32 field: tag + 4 bytes."""
  return TagByteSize(field_number) + 4


def SFixed64ByteSize(field_number, sfixed64):
  """Returns the tagged wire size of an sfixed64 field: tag + 8 bytes."""
  return TagByteSize(field_number) + 8


def FloatByteSize(field_number, flt):
  """Returns the tagged wire size of a float field: tag + 4 bytes."""
  return TagByteSize(field_number) + 4


def DoubleByteSize(field_number, double):
  """Returns the tagged wire size of a double field: tag + 8 bytes."""
  return TagByteSize(field_number) + 8


def BoolByteSize(field_number, b):
  """Returns the tagged wire size of a bool field: tag + 1 byte."""
  return TagByteSize(field_number) + 1


def EnumByteSize(field_number, enum):
  """Returns the tagged wire size of an enum field (varint-encoded)."""
  return UInt32ByteSize(field_number, enum)


def StringByteSize(field_number, string):
  """Returns the tagged wire size of a string field, UTF-8 encoded."""
  return BytesByteSize(field_number, string.encode('utf-8'))


def BytesByteSize(field_number, b):
  """Returns the tagged wire size of a bytes field.

  Tag, then a varint length prefix, then the payload itself.
  """
  # Compute len(b) once instead of twice.
  payload_size = len(b)
  return (TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(payload_size)
          + payload_size)


def GroupByteSize(field_number, message):
  """Returns the wire size of a group field: START + END tags + contents."""
  return 2 * TagByteSize(field_number) + message.ByteSize()


def MessageByteSize(field_number, message):
  """Returns the tagged wire size of an embedded message field."""
  # Call ByteSize() once instead of twice (it was previously invoked for
  # both the length prefix and the payload).
  message_size = message.ByteSize()
  return (TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(message_size)
          + message_size)


def MessageSetItemByteSize(field_number, msg):
  """Returns the wire size of a MessageSet item wrapping msg.

  A MessageSet item is a group (field 1) containing a type_id varint
  (field 2) and a length-delimited message (field 3).
  """
  # Tags: START and END of the group, plus the type_id and message tags.
  total_size = 2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3)

  # Add the number of bytes for the varint-encoded type_id.
  total_size += _VarUInt64ByteSizeNoTag(field_number)

  # Add the length prefix and the message payload itself.
  message_size = msg.ByteSize()
  total_size += _VarUInt64ByteSizeNoTag(message_size)
  total_size += message_size
  return total_size


def TagByteSize(field_number):
  """Returns the bytes required to serialize a tag with this field number."""
  # Wire type 0 is arbitrary; the type bits never change the tag's size.
  return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))


# Private helper function for the *ByteSize() functions above.

def _VarUInt64ByteSizeNoTag(uint64):
  """Returns the number of bytes needed to varint-encode uint64 (no tag).

  uint64 must be treated as unsigned by the caller.

  Raises:
    message.EncodeError: If uint64 exceeds UINT64_MAX.
  """
  # Each varint byte carries 7 payload bits; entry i of the table is the
  # largest value that still fits in i + 1 bytes.  The thresholds are
  # checked smallest-first, matching the original unrolled comparisons.
  limits = (0x7f, 0x3fff, 0x1fffff, 0xfffffff,
            0x7ffffffff, 0x3ffffffffff, 0x1ffffffffffff,
            0xffffffffffffff, 0x7fffffffffffffff)
  for num_bytes, limit in enumerate(limits, start=1):
    if uint64 <= limit:
      return num_bytes
  if uint64 > UINT64_MAX:
    raise message.EncodeError('Value out of range: %d' % uint64)
  return 10
226
+
227
+
228
# Field types that may never appear in a packed repeated field: each of
# these is length-delimited or group-encoded on the wire.
NON_PACKABLE_TYPES = (
    descriptor.FieldDescriptor.TYPE_STRING,
    descriptor.FieldDescriptor.TYPE_GROUP,
    descriptor.FieldDescriptor.TYPE_MESSAGE,
    descriptor.FieldDescriptor.TYPE_BYTES,
)


def IsTypePackable(field_type):
  """Returns True iff [packed = true] is valid for fields of this type.

  Args:
    field_type: a FieldDescriptor::Type value.

  Returns:
    True iff fields of this type are packable.
  """
  return field_type not in NON_PACKABLE_TYPES
lib/python3.10/site-packages/grpc/__init__.py ADDED
@@ -0,0 +1,2348 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015-2016 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """gRPC's Python API."""
15
+
16
+ import abc
17
+ import contextlib
18
+ import enum
19
+ import logging
20
+ import sys
21
+
22
+ from grpc import _compression
23
+ from grpc._cython import cygrpc as _cygrpc
24
+ from grpc._runtime_protos import protos
25
+ from grpc._runtime_protos import protos_and_services
26
+ from grpc._runtime_protos import services
27
+
28
+ logging.getLogger(__name__).addHandler(logging.NullHandler())
29
+
30
+ try:
31
+ # pylint: disable=ungrouped-imports
32
+ from grpc._grpcio_metadata import __version__
33
+ except ImportError:
34
+ __version__ = "dev0"
35
+
36
+ ############################## Future Interface ###############################
37
+
38
+
39
class FutureTimeoutError(Exception):
    """Raised when a blocking Future method exceeds its timeout."""
41
+
42
+
43
class FutureCancelledError(Exception):
    """Raised when the computation behind a Future was cancelled."""
45
+
46
+
47
class Future(abc.ABC):
    """A computation occurring in another control flow.

    The computation may not have begun yet, may be running, or may have
    already completed (successfully, with an error, or by cancellation).
    """

    @abc.abstractmethod
    def cancel(self):
        """Attempts to cancel the computation without blocking.

        Returns:
            bool: True if the computation was cancelled.  False in every
            other circumstance, for example: it already began and could
            not be stopped, it already finished, or it is scheduled and
            its state cannot be determined without blocking.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def cancelled(self):
        """Reports whether the computation was cancelled, without blocking.

        Returns:
            bool: True if the computation was cancelled before its result
            became available; False otherwise (not cancelled, or a result
            is available).
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def running(self):
        """Reports whether the computation is underway, without blocking.

        Returns:
            True if the computation is scheduled for execution or
            currently executing; False if it already executed or was
            cancelled.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def done(self):
        """Reports whether the computation has taken place, without blocking.

        Returns:
            bool: True if the computation already executed or was
            cancelled; False if it is scheduled or currently executing.
            Exactly the opposite of running().
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def result(self, timeout=None):
        """Returns the computation's result or raises its exception.

        May return immediately or may block.

        Args:
            timeout: Seconds to wait for the computation to finish or be
                cancelled; None blocks until termination.

        Returns:
            The return value of the computation.

        Raises:
            FutureTimeoutError: If a timeout was given and the computation
                did not terminate within the allotted time.
            FutureCancelledError: If the computation was cancelled.
            Exception: Whatever exception the computation itself raised.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def exception(self, timeout=None):
        """Returns the exception raised by the computation, if any.

        May return immediately or may block.

        Args:
            timeout: Seconds to wait for the computation to terminate or
                be cancelled; None blocks until termination.

        Returns:
            The exception raised by the computation, or None if it did
            not raise one.

        Raises:
            FutureTimeoutError: If a timeout was given and the computation
                did not terminate within the allotted time.
            FutureCancelledError: If the computation was cancelled.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def traceback(self, timeout=None):
        """Accesses the traceback of the computation's exception, if any.

        May return immediately or may block.

        Args:
            timeout: Seconds to wait for the computation to terminate or
                be cancelled; None blocks until termination.

        Returns:
            The traceback of the exception raised by the computation, or
            None if no exception was raised.

        Raises:
            FutureTimeoutError: If a timeout was given and the computation
                did not terminate within the allotted time.
            FutureCancelledError: If the computation was cancelled.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def add_done_callback(self, fn):
        """Registers a function to run when the computation terminates.

        The callback receives this Future object describing the outcome
        and runs after termination, successful or not.  If the
        computation has already completed, the callback is invoked
        immediately.  Exceptions raised inside the callback are logged at
        ERROR level but do not terminate any thread of execution.

        Args:
            fn: A callable taking this Future object as its single
                parameter.
        """
        raise NotImplementedError()
205
+
206
+
207
+ ################################ gRPC Enums ##################################
208
+
209
+
210
@enum.unique
class ChannelConnectivity(enum.Enum):
    """Mirrors grpc_connectivity_state in the gRPC Core.

    Attributes:
        IDLE: The channel is idle.
        CONNECTING: The channel is connecting.
        READY: The channel is ready to conduct RPCs.
        TRANSIENT_FAILURE: The channel has seen a failure from which it
            expects to recover.
        SHUTDOWN: The channel has seen a failure from which it cannot
            recover.
    """

    # Each member pairs the Core state value with a human-readable label.
    IDLE = (_cygrpc.ConnectivityState.idle, "idle")
    CONNECTING = (_cygrpc.ConnectivityState.connecting, "connecting")
    READY = (_cygrpc.ConnectivityState.ready, "ready")
    TRANSIENT_FAILURE = (
        _cygrpc.ConnectivityState.transient_failure,
        "transient failure",
    )
    SHUTDOWN = (_cygrpc.ConnectivityState.shutdown, "shutdown")
231
+
232
+
233
@enum.unique
class StatusCode(enum.Enum):
    """Mirrors grpc_status_code in the gRPC Core.

    Attributes:
        OK: Not an error; returned on success.
        CANCELLED: The operation was cancelled, typically by the caller.
        UNKNOWN: Unknown error.
        INVALID_ARGUMENT: Client specified an invalid argument.
        DEADLINE_EXCEEDED: Deadline expired before the operation completed.
        NOT_FOUND: A requested entity (e.g. file or directory) was not
            found.
        ALREADY_EXISTS: An entity we attempted to create (e.g. file or
            directory) already exists.
        PERMISSION_DENIED: The caller does not have permission to execute
            the specified operation.
        UNAUTHENTICATED: The request does not have valid authentication
            credentials for the operation.
        RESOURCE_EXHAUSTED: Some resource has been exhausted, perhaps a
            per-user quota or the entire file system's space.
        FAILED_PRECONDITION: The system is not in a state required for the
            operation's execution.
        ABORTED: The operation was aborted, typically due to a concurrency
            issue such as sequencer check failures or transaction aborts.
        OUT_OF_RANGE: The operation was attempted past the valid range.
        UNIMPLEMENTED: The operation is not implemented or not
            supported/enabled in this service.
        INTERNAL: Internal errors; an invariant expected by the underlying
            system has been broken.
        UNAVAILABLE: The service is currently unavailable.
        DATA_LOSS: Unrecoverable data loss or corruption.
    """

    # Each member pairs the Core status value with a human-readable label.
    OK = (_cygrpc.StatusCode.ok, "ok")
    CANCELLED = (_cygrpc.StatusCode.cancelled, "cancelled")
    UNKNOWN = (_cygrpc.StatusCode.unknown, "unknown")
    INVALID_ARGUMENT = (_cygrpc.StatusCode.invalid_argument, "invalid argument")
    DEADLINE_EXCEEDED = (
        _cygrpc.StatusCode.deadline_exceeded,
        "deadline exceeded",
    )
    NOT_FOUND = (_cygrpc.StatusCode.not_found, "not found")
    ALREADY_EXISTS = (_cygrpc.StatusCode.already_exists, "already exists")
    PERMISSION_DENIED = (
        _cygrpc.StatusCode.permission_denied,
        "permission denied",
    )
    RESOURCE_EXHAUSTED = (
        _cygrpc.StatusCode.resource_exhausted,
        "resource exhausted",
    )
    FAILED_PRECONDITION = (
        _cygrpc.StatusCode.failed_precondition,
        "failed precondition",
    )
    ABORTED = (_cygrpc.StatusCode.aborted, "aborted")
    OUT_OF_RANGE = (_cygrpc.StatusCode.out_of_range, "out of range")
    UNIMPLEMENTED = (_cygrpc.StatusCode.unimplemented, "unimplemented")
    INTERNAL = (_cygrpc.StatusCode.internal, "internal")
    UNAVAILABLE = (_cygrpc.StatusCode.unavailable, "unavailable")
    DATA_LOSS = (_cygrpc.StatusCode.data_loss, "data loss")
    UNAUTHENTICATED = (_cygrpc.StatusCode.unauthenticated, "unauthenticated")
292
+
293
+
294
+ ############################# gRPC Status ################################
295
+
296
+
297
class Status(abc.ABC):
    """Describes the status of an RPC.

    This is an EXPERIMENTAL API.

    Attributes:
        code: A StatusCode object to be sent to the client.
        details: A UTF-8-encodable string to be sent to the client upon
            termination of the RPC.
        trailing_metadata: The trailing :term:`metadata` in the RPC.
    """
308
+
309
+
310
+ ############################# gRPC Exceptions ################################
311
+
312
+
313
class RpcError(Exception):
    """Raised by the gRPC library to indicate non-OK-status RPC termination."""
315
+
316
+
317
+ ############################## Shared Context ################################
318
+
319
+
320
class RpcContext(abc.ABC):
    """Provides RPC-related information and action."""

    @abc.abstractmethod
    def is_active(self):
        """Reports whether the RPC is still in progress.

        Returns:
            bool: True if the RPC is active, False if it has terminated.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def time_remaining(self):
        """Reports how much allowed time the RPC has left.

        Returns:
            A nonnegative float of seconds remaining before the RPC is
            considered timed out, or None if no deadline was specified
            for the RPC.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def cancel(self):
        """Cancels the RPC.

        Idempotent: has no effect if the RPC has already terminated.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def add_callback(self, callback):
        """Registers a callback to be invoked on RPC termination.

        Args:
            callback: A no-parameter callable called when the RPC
                terminates.

        Returns:
            True if the callback was added and will be called later;
            False if it was not added and will not be called (because the
            RPC already terminated, or for some other reason).
        """
        raise NotImplementedError()
365
+
366
+
367
+ ######################### Invocation-Side Context ############################
368
+
369
+
370
class Call(RpcContext, metaclass=abc.ABCMeta):
    """Invocation-side utility object for an RPC."""

    @abc.abstractmethod
    def initial_metadata(self):
        """Accesses the initial metadata sent by the server.

        Blocks until the value is available.

        Returns:
            The initial :term:`metadata`.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def trailing_metadata(self):
        """Accesses the trailing metadata sent by the server.

        Blocks until the value is available.

        Returns:
            The trailing :term:`metadata`.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def code(self):
        """Accesses the status code sent by the server.

        Blocks until the value is available.

        Returns:
            The StatusCode value for the RPC.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def details(self):
        """Accesses the details sent by the server.

        Blocks until the value is available.

        Returns:
            The details string of the RPC.
        """
        raise NotImplementedError()
416
+
417
+
418
+ ############## Invocation-Side Interceptor Interfaces & Classes ##############
419
+
420
+
421
class ClientCallDetails(abc.ABC):
    """Describes an RPC to be invoked.

    Attributes:
        method: The method name of the RPC.
        timeout: An optional duration of time in seconds to allow for the
            RPC.
        metadata: Optional :term:`metadata` to be transmitted to the
            service-side of the RPC.
        credentials: An optional CallCredentials for the RPC.
        wait_for_ready: An optional flag to enable the
            :term:`wait_for_ready` mechanism.
        compression: An element of grpc.compression, e.g.
            grpc.compression.Gzip.
    """
434
+
435
+
436
class UnaryUnaryClientInterceptor(abc.ABC):
    """Affords intercepting unary-unary invocations."""

    @abc.abstractmethod
    def intercept_unary_unary(self, continuation, client_call_details, request):
        """Intercepts a unary-unary invocation asynchronously.

        Args:
            continuation: A function that proceeds with the invocation by
                executing the next interceptor in the chain or performing
                the actual RPC on the underlying channel.  It is the
                interceptor's responsibility to call it if the RPC should
                move forward, e.g.
                `response_future = continuation(client_call_details, request)`.
                The object `continuation` returns is both a Call for the
                RPC and a Future: on completion its result value is the
                response message, and on non-OK termination its exception
                value is an RpcError.
            client_call_details: A ClientCallDetails object describing the
                outgoing RPC.
            request: The request value for the RPC.

        Returns:
            An object that is both a Call for the RPC and a Future.  In
            the event of RPC completion, the Call-Future's result value is
            the response message of the RPC.  Should the event terminate
            with non-OK status, the Call-Future's exception value is an
            RpcError.
        """
        raise NotImplementedError()
468
+
469
+
470
class UnaryStreamClientInterceptor(abc.ABC):
    """Affords intercepting unary-stream invocations."""

    @abc.abstractmethod
    def intercept_unary_stream(
        self, continuation, client_call_details, request
    ):
        """Intercepts a unary-stream invocation.

        Args:
            continuation: A function that proceeds with the invocation by
                executing the next interceptor in the chain or performing
                the actual RPC on the underlying channel.  It is the
                interceptor's responsibility to call it if the RPC should
                move forward, e.g.
                `response_iterator = continuation(client_call_details, request)`.
                The object `continuation` returns is both a Call for the
                RPC and an iterator of response values; drawing values
                from it may raise RpcError on non-OK termination.
            client_call_details: A ClientCallDetails object describing the
                outgoing RPC.
            request: The request value for the RPC.

        Returns:
            An object that is both a Call for the RPC and an iterator of
            response values.  Drawing response values from the returned
            Call-iterator may raise RpcError, indicating termination of
            the RPC with non-OK status.  This object *should* also
            fulfill the Future interface, though it may not.
        """
        raise NotImplementedError()
503
+
504
+
505
class StreamUnaryClientInterceptor(abc.ABC):
    """Affords intercepting stream-unary invocations."""

    @abc.abstractmethod
    def intercept_stream_unary(
        self, continuation, client_call_details, request_iterator
    ):
        """Intercepts a stream-unary invocation asynchronously.

        Args:
            continuation: A function that proceeds with the invocation by
                executing the next interceptor in the chain or performing
                the actual RPC on the underlying channel.  It is the
                interceptor's responsibility to call it if the RPC should
                move forward, e.g.
                `response_future = continuation(client_call_details, request_iterator)`.
                The object `continuation` returns is both a Call for the
                RPC and a Future: on completion its result value is the
                response message, and on non-OK termination its exception
                value is an RpcError.
            client_call_details: A ClientCallDetails object describing the
                outgoing RPC.
            request_iterator: An iterator that yields request values for
                the RPC.

        Returns:
            An object that is both a Call for the RPC and a Future.  In
            the event of RPC completion, the Call-Future's result value is
            the response message of the RPC.  Should the event terminate
            with non-OK status, the Call-Future's exception value is an
            RpcError.
        """
        raise NotImplementedError()
538
+
539
+
540
class StreamStreamClientInterceptor(abc.ABC):
    """Affords intercepting stream-stream invocations."""

    @abc.abstractmethod
    def intercept_stream_stream(
        self, continuation, client_call_details, request_iterator
    ):
        """Intercepts a stream-stream invocation.

        Args:
            continuation: A function that proceeds with the invocation by
                executing the next interceptor in the chain or invoking the
                actual RPC on the underlying Channel. It is the interceptor's
                responsibility to call it if it decides to move the RPC
                forward. The interceptor can use
                `response_iterator = continuation(client_call_details, request_iterator)`
                to continue with the RPC. `continuation` returns an object
                that is both a Call for the RPC and an iterator for response
                values. Drawing response values from the returned
                Call-iterator may raise RpcError indicating termination of
                the RPC with non-OK status.
            client_call_details: A ClientCallDetails object describing the
                outgoing RPC.
            request_iterator: An iterator that yields request values for the
                RPC.

        Returns:
            An object that is both a Call for the RPC and an iterator of
            response values. Drawing response values from the returned
            Call-iterator may raise RpcError indicating termination of the
            RPC with non-OK status. This object *should* also fulfill the
            Future interface, though it may not.
        """
        raise NotImplementedError()
+ ############ Authentication & Authorization Interfaces & Classes #############
576
+
577
+
578
class ChannelCredentials(object):
    """An encapsulation of the data required to create a secure Channel.

    This class has no supported interface - it exists to define the type of
    its instances and its instances exist to be passed to other functions.
    For example, ssl_channel_credentials returns an instance of this class
    and secure_channel requires an instance of this class.
    """

    def __init__(self, credentials):
        # Opaque handle to the underlying (cygrpc) credentials object.
        self._credentials = credentials
591
class CallCredentials(object):
    """An encapsulation of the data required to assert an identity over a call.

    A CallCredentials has to be used with secure Channel, otherwise the
    metadata will not be transmitted to the server.

    A CallCredentials may be composed with ChannelCredentials to always
    assert identity for every call over that Channel.

    This class has no supported interface - it exists to define the type of
    its instances and its instances exist to be passed to other functions.
    """

    def __init__(self, credentials):
        # Opaque handle to the underlying (cygrpc) credentials object.
        self._credentials = credentials
608
class AuthMetadataContext(abc.ABC):
    """Provides information to call credentials metadata plugins.

    Attributes:
        service_url: A string URL of the service being called into.
        method_name: A string of the fully qualified method name being
            called.
    """
617
class AuthMetadataPluginCallback(abc.ABC):
    """Callback object received by a metadata plugin."""

    def __call__(self, metadata, error):
        """Passes to the gRPC runtime authentication metadata for an RPC.

        Args:
            metadata: The :term:`metadata` used to construct the
                CallCredentials.
            error: An Exception to indicate error or None to indicate
                success.
        """
        raise NotImplementedError()
+
630
class AuthMetadataPlugin(abc.ABC):
    """A specification for custom authentication."""

    def __call__(self, context, callback):
        """Implements authentication by passing metadata to a callback.

        This method will be invoked asynchronously in a separate thread.

        Args:
            context: An AuthMetadataContext providing information on the RPC
                that the plugin is being called to authenticate.
            callback: An AuthMetadataPluginCallback to be invoked either
                synchronously or asynchronously.
        """
        raise NotImplementedError()
+
647
class ServerCredentials(object):
    """An encapsulation of the data required to open a secure port on a Server.

    This class has no supported interface - it exists to define the type of
    its instances and its instances exist to be passed to other functions.
    """

    def __init__(self, credentials):
        # Opaque handle to the underlying (cygrpc) credentials object.
        self._credentials = credentials
+
658
class ServerCertificateConfiguration(object):
    """A certificate configuration for use with an SSL-enabled Server.

    Instances of this class can be returned in the certificate configuration
    fetching callback.

    This class has no supported interface -- it exists to define the type of
    its instances and its instances exist to be passed to other functions.
    """

    def __init__(self, certificate_configuration):
        # Opaque handle to the underlying certificate configuration object.
        self._certificate_configuration = certificate_configuration
673
+ ######################## Multi-Callable Interfaces ###########################
674
+
675
+
676
class UnaryUnaryMultiCallable(abc.ABC):
    """Affords invoking a unary-unary RPC from client-side."""

    @abc.abstractmethod
    def __call__(
        self,
        request,
        timeout=None,
        metadata=None,
        credentials=None,
        wait_for_ready=None,
        compression=None,
    ):
        """Synchronously invokes the underlying RPC.

        Args:
            request: The request value for the RPC.
            timeout: An optional duration of time in seconds to allow
                for the RPC.
            metadata: Optional :term:`metadata` to be transmitted to the
                service-side of the RPC.
            credentials: An optional CallCredentials for the RPC. Only valid
                for secure Channel.
            wait_for_ready: An optional flag to enable :term:`wait_for_ready`
                mechanism.
            compression: An element of grpc.compression, e.g.
                grpc.compression.Gzip.

        Returns:
            The response value for the RPC.

        Raises:
            RpcError: Indicating that the RPC terminated with non-OK status.
                The raised RpcError will also be a Call for the RPC affording
                the RPC's metadata, status code, and details.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def with_call(
        self,
        request,
        timeout=None,
        metadata=None,
        credentials=None,
        wait_for_ready=None,
        compression=None,
    ):
        """Synchronously invokes the underlying RPC.

        Args:
            request: The request value for the RPC.
            timeout: An optional duration of time in seconds to allow for
                the RPC.
            metadata: Optional :term:`metadata` to be transmitted to the
                service-side of the RPC.
            credentials: An optional CallCredentials for the RPC. Only valid
                for secure Channel.
            wait_for_ready: An optional flag to enable :term:`wait_for_ready`
                mechanism.
            compression: An element of grpc.compression, e.g.
                grpc.compression.Gzip.

        Returns:
            The response value for the RPC and a Call value for the RPC.

        Raises:
            RpcError: Indicating that the RPC terminated with non-OK status.
                The raised RpcError will also be a Call for the RPC affording
                the RPC's metadata, status code, and details.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def future(
        self,
        request,
        timeout=None,
        metadata=None,
        credentials=None,
        wait_for_ready=None,
        compression=None,
    ):
        """Asynchronously invokes the underlying RPC.

        Args:
            request: The request value for the RPC.
            timeout: An optional duration of time in seconds to allow for
                the RPC.
            metadata: Optional :term:`metadata` to be transmitted to the
                service-side of the RPC.
            credentials: An optional CallCredentials for the RPC. Only valid
                for secure Channel.
            wait_for_ready: An optional flag to enable :term:`wait_for_ready`
                mechanism.
            compression: An element of grpc.compression, e.g.
                grpc.compression.Gzip.

        Returns:
            An object that is both a Call for the RPC and a Future.
            In the event of RPC completion, the return Call-Future's result
            value will be the response message of the RPC.
            Should the event terminate with non-OK status,
            the returned Call-Future's exception value will be an RpcError.
        """
        raise NotImplementedError()
+
781
class UnaryStreamMultiCallable(abc.ABC):
    """Affords invoking a unary-stream RPC from client-side."""

    @abc.abstractmethod
    def __call__(
        self,
        request,
        timeout=None,
        metadata=None,
        credentials=None,
        wait_for_ready=None,
        compression=None,
    ):
        """Invokes the underlying RPC.

        Args:
            request: The request value for the RPC.
            timeout: An optional duration of time in seconds to allow for
                the RPC. If None, the timeout is considered infinite.
            metadata: An optional :term:`metadata` to be transmitted to the
                service-side of the RPC.
            credentials: An optional CallCredentials for the RPC. Only valid
                for secure Channel.
            wait_for_ready: An optional flag to enable :term:`wait_for_ready`
                mechanism.
            compression: An element of grpc.compression, e.g.
                grpc.compression.Gzip.

        Returns:
            An object that is a Call for the RPC, an iterator of response
            values, and a Future for the RPC. Drawing response values from
            the returned Call-iterator may raise RpcError indicating
            termination of the RPC with non-OK status.
        """
        raise NotImplementedError()
+
817
class StreamUnaryMultiCallable(abc.ABC):
    """Affords invoking a stream-unary RPC from client-side."""

    @abc.abstractmethod
    def __call__(
        self,
        request_iterator,
        timeout=None,
        metadata=None,
        credentials=None,
        wait_for_ready=None,
        compression=None,
    ):
        """Synchronously invokes the underlying RPC.

        Args:
            request_iterator: An iterator that yields request values for
                the RPC.
            timeout: An optional duration of time in seconds to allow for
                the RPC. If None, the timeout is considered infinite.
            metadata: Optional :term:`metadata` to be transmitted to the
                service-side of the RPC.
            credentials: An optional CallCredentials for the RPC. Only valid
                for secure Channel.
            wait_for_ready: An optional flag to enable :term:`wait_for_ready`
                mechanism.
            compression: An element of grpc.compression, e.g.
                grpc.compression.Gzip.

        Returns:
            The response value for the RPC.

        Raises:
            RpcError: Indicating that the RPC terminated with non-OK status.
                The raised RpcError will also implement grpc.Call, affording
                methods such as metadata, code, and details.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def with_call(
        self,
        request_iterator,
        timeout=None,
        metadata=None,
        credentials=None,
        wait_for_ready=None,
        compression=None,
    ):
        """Synchronously invokes the underlying RPC on the client.

        Args:
            request_iterator: An iterator that yields request values for
                the RPC.
            timeout: An optional duration of time in seconds to allow for
                the RPC. If None, the timeout is considered infinite.
            metadata: Optional :term:`metadata` to be transmitted to the
                service-side of the RPC.
            credentials: An optional CallCredentials for the RPC. Only valid
                for secure Channel.
            wait_for_ready: An optional flag to enable :term:`wait_for_ready`
                mechanism.
            compression: An element of grpc.compression, e.g.
                grpc.compression.Gzip.

        Returns:
            The response value for the RPC and a Call object for the RPC.

        Raises:
            RpcError: Indicating that the RPC terminated with non-OK status.
                The raised RpcError will also be a Call for the RPC affording
                the RPC's metadata, status code, and details.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def future(
        self,
        request_iterator,
        timeout=None,
        metadata=None,
        credentials=None,
        wait_for_ready=None,
        compression=None,
    ):
        """Asynchronously invokes the underlying RPC on the client.

        Args:
            request_iterator: An iterator that yields request values for the
                RPC.
            timeout: An optional duration of time in seconds to allow for
                the RPC. If None, the timeout is considered infinite.
            metadata: Optional :term:`metadata` to be transmitted to the
                service-side of the RPC.
            credentials: An optional CallCredentials for the RPC. Only valid
                for secure Channel.
            wait_for_ready: An optional flag to enable :term:`wait_for_ready`
                mechanism.
            compression: An element of grpc.compression, e.g.
                grpc.compression.Gzip.

        Returns:
            An object that is both a Call for the RPC and a Future.
            In the event of RPC completion, the return Call-Future's result
            value will be the response message of the RPC. Should the event
            terminate with non-OK status, the returned Call-Future's
            exception value will be an RpcError.
        """
        raise NotImplementedError()
+
924
class StreamStreamMultiCallable(abc.ABC):
    """Affords invoking a stream-stream RPC on client-side."""

    @abc.abstractmethod
    def __call__(
        self,
        request_iterator,
        timeout=None,
        metadata=None,
        credentials=None,
        wait_for_ready=None,
        compression=None,
    ):
        """Invokes the underlying RPC on the client.

        Args:
            request_iterator: An iterator that yields request values for the
                RPC.
            timeout: An optional duration of time in seconds to allow for
                the RPC. If not specified, the timeout is considered
                infinite.
            metadata: Optional :term:`metadata` to be transmitted to the
                service-side of the RPC.
            credentials: An optional CallCredentials for the RPC. Only valid
                for secure Channel.
            wait_for_ready: An optional flag to enable :term:`wait_for_ready`
                mechanism.
            compression: An element of grpc.compression, e.g.
                grpc.compression.Gzip.

        Returns:
            An object that is a Call for the RPC, an iterator of response
            values, and a Future for the RPC. Drawing response values from
            the returned Call-iterator may raise RpcError indicating
            termination of the RPC with non-OK status.
        """
        raise NotImplementedError()
+
960
+ ############################# Channel Interface ##############################
961
+
962
+
963
class Channel(abc.ABC):
    """Affords RPC invocation via generic methods on client-side.

    Channel objects implement the Context Manager type, although they need
    not support being entered and exited multiple times.
    """

    @abc.abstractmethod
    def subscribe(self, callback, try_to_connect=False):
        """Subscribe to this Channel's connectivity state machine.

        A Channel may be in any of the states described by
        ChannelConnectivity. This method allows application to monitor the
        state transitions. The typical use case is to debug or gain better
        visibility into gRPC runtime's state.

        Args:
            callback: A callable to be invoked with ChannelConnectivity
                argument. ChannelConnectivity describes current state of the
                channel. The callable will be invoked immediately upon
                subscription and again for every change to
                ChannelConnectivity until it is unsubscribed or this Channel
                object goes out of scope.
            try_to_connect: A boolean indicating whether or not this Channel
                should attempt to connect immediately. If set to False, gRPC
                runtime decides when to connect.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def unsubscribe(self, callback):
        """Unsubscribes a subscribed callback from this Channel's connectivity.

        Args:
            callback: A callable previously registered with this Channel from
                having been passed to its "subscribe" method.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def unary_unary(
        self,
        method,
        request_serializer=None,
        response_deserializer=None,
        _registered_method=False,
    ):
        """Creates a UnaryUnaryMultiCallable for a unary-unary method.

        Args:
            method: The name of the RPC method.
            request_serializer: Optional :term:`serializer` for serializing
                the request message. Request goes unserialized in case None
                is passed.
            response_deserializer: Optional :term:`deserializer` for
                deserializing the response message. Response goes
                undeserialized in case None is passed.
            _registered_method: Implementation Private. A bool representing
                whether the method is registered.

        Returns:
            A UnaryUnaryMultiCallable value for the named unary-unary method.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def unary_stream(
        self,
        method,
        request_serializer=None,
        response_deserializer=None,
        _registered_method=False,
    ):
        """Creates a UnaryStreamMultiCallable for a unary-stream method.

        Args:
            method: The name of the RPC method.
            request_serializer: Optional :term:`serializer` for serializing
                the request message. Request goes unserialized in case None
                is passed.
            response_deserializer: Optional :term:`deserializer` for
                deserializing the response message. Response goes
                undeserialized in case None is passed.
            _registered_method: Implementation Private. A bool representing
                whether the method is registered.

        Returns:
            A UnaryStreamMultiCallable value for the named unary-stream
            method.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def stream_unary(
        self,
        method,
        request_serializer=None,
        response_deserializer=None,
        _registered_method=False,
    ):
        """Creates a StreamUnaryMultiCallable for a stream-unary method.

        Args:
            method: The name of the RPC method.
            request_serializer: Optional :term:`serializer` for serializing
                the request message. Request goes unserialized in case None
                is passed.
            response_deserializer: Optional :term:`deserializer` for
                deserializing the response message. Response goes
                undeserialized in case None is passed.
            _registered_method: Implementation Private. A bool representing
                whether the method is registered.

        Returns:
            A StreamUnaryMultiCallable value for the named stream-unary
            method.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def stream_stream(
        self,
        method,
        request_serializer=None,
        response_deserializer=None,
        _registered_method=False,
    ):
        """Creates a StreamStreamMultiCallable for a stream-stream method.

        Args:
            method: The name of the RPC method.
            request_serializer: Optional :term:`serializer` for serializing
                the request message. Request goes unserialized in case None
                is passed.
            response_deserializer: Optional :term:`deserializer` for
                deserializing the response message. Response goes
                undeserialized in case None is passed.
            _registered_method: Implementation Private. A bool representing
                whether the method is registered.

        Returns:
            A StreamStreamMultiCallable value for the named stream-stream
            method.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def close(self):
        """Closes this Channel and releases all resources held by it.

        Closing the Channel will immediately terminate all RPCs active with
        the Channel and it is not valid to invoke new RPCs with the Channel.

        This method is idempotent.
        """
        raise NotImplementedError()

    def __enter__(self):
        """Enters the runtime context related to the channel object."""
        raise NotImplementedError()

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Exits the runtime context related to the channel object."""
        raise NotImplementedError()
+
1121
+ ########################## Service-Side Context ##############################
1122
+
1123
+
1124
class ServicerContext(RpcContext, metaclass=abc.ABCMeta):
    """A context object passed to method implementations."""

    @abc.abstractmethod
    def invocation_metadata(self):
        """Accesses the metadata sent by the client.

        Returns:
            The invocation :term:`metadata`.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def peer(self):
        """Identifies the peer that invoked the RPC being serviced.

        Returns:
            A string identifying the peer that invoked the RPC being
            serviced. The string format is determined by gRPC runtime.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def peer_identities(self):
        """Gets one or more peer identity(s).

        Equivalent to
        servicer_context.auth_context().get(servicer_context.peer_identity_key())

        Returns:
            An iterable of the identities, or None if the call is not
            authenticated. Each identity is returned as a raw bytes type.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def peer_identity_key(self):
        """The auth property used to identify the peer.

        For example, "x509_common_name" or "x509_subject_alternative_name"
        are used to identify an SSL peer.

        Returns:
            The auth property (string) that indicates the
            peer identity, or None if the call is not authenticated.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def auth_context(self):
        """Gets the auth context for the call.

        Returns:
            A map of strings to an iterable of bytes for each auth property.
        """
        raise NotImplementedError()

    def set_compression(self, compression):
        """Set the compression algorithm to be used for the entire call.

        Args:
            compression: An element of grpc.compression, e.g.
                grpc.compression.Gzip.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def send_initial_metadata(self, initial_metadata):
        """Sends the initial metadata value to the client.

        This method need not be called by implementations if they have no
        metadata to add to what the gRPC runtime will transmit.

        Args:
            initial_metadata: The initial :term:`metadata`.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def set_trailing_metadata(self, trailing_metadata):
        """Sets the trailing metadata for the RPC.

        Sets the trailing metadata to be sent upon completion of the RPC.

        If this method is invoked multiple times throughout the lifetime of
        an RPC, the value supplied in the final invocation will be the value
        sent over the wire.

        This method need not be called by implementations if they have no
        metadata to add to what the gRPC runtime will transmit.

        Args:
            trailing_metadata: The trailing :term:`metadata`.
        """
        raise NotImplementedError()

    def trailing_metadata(self):
        """Access value to be used as trailing metadata upon RPC completion.

        This is an EXPERIMENTAL API.

        Returns:
            The trailing :term:`metadata` for the RPC.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def abort(self, code, details):
        """Raises an exception to terminate the RPC with a non-OK status.

        The code and details passed as arguments will supersede any existing
        ones.

        Args:
            code: A StatusCode object to be sent to the client.
                It must not be StatusCode.OK.
            details: A UTF-8-encodable string to be sent to the client upon
                termination of the RPC.

        Raises:
            Exception: An exception is always raised to signal the abortion
                of the RPC to the gRPC runtime.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def abort_with_status(self, status):
        """Raises an exception to terminate the RPC with a non-OK status.

        The status passed as argument will supersede any existing status
        code, status message and trailing metadata.

        This is an EXPERIMENTAL API.

        Args:
            status: A grpc.Status object. The status code in it must not be
                StatusCode.OK.

        Raises:
            Exception: An exception is always raised to signal the abortion
                of the RPC to the gRPC runtime.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def set_code(self, code):
        """Sets the value to be used as status code upon RPC completion.

        This method need not be called by method implementations if they
        wish the gRPC runtime to determine the status code of the RPC.

        Args:
            code: A StatusCode object to be sent to the client.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def set_details(self, details):
        """Sets the value to be used as detail string upon RPC completion.

        This method need not be called by method implementations if they
        have no details to transmit.

        Args:
            details: A UTF-8-encodable string to be sent to the client upon
                termination of the RPC.
        """
        raise NotImplementedError()

    def code(self):
        """Accesses the value to be used as status code upon RPC completion.

        This is an EXPERIMENTAL API.

        Returns:
            The StatusCode value for the RPC.
        """
        raise NotImplementedError()

    def details(self):
        """Accesses the value to be used as detail string upon RPC completion.

        This is an EXPERIMENTAL API.

        Returns:
            The details string of the RPC.
        """
        raise NotImplementedError()

    def disable_next_message_compression(self):
        """Disables compression for the next response message.

        This method will override any compression configuration set during
        server creation or set on the call.
        """
        raise NotImplementedError()
+
1322
+ ##################### Service-Side Handler Interfaces ########################
1323
+
1324
+
1325
class RpcMethodHandler(abc.ABC):
    """An implementation of a single RPC method.

    Attributes:
        request_streaming: Whether the RPC supports exactly one request
            message or any arbitrary number of request messages.
        response_streaming: Whether the RPC supports exactly one response
            message or any arbitrary number of response messages.
        request_deserializer: A callable :term:`deserializer` that accepts a
            byte string and returns an object suitable to be passed to this
            object's business logic, or None to indicate that this object's
            business logic should be passed the raw request bytes.
        response_serializer: A callable :term:`serializer` that accepts an
            object produced by this object's business logic and returns a
            byte string, or None to indicate that the byte strings produced
            by this object's business logic should be transmitted on the
            wire as they are.
        unary_unary: This object's application-specific business logic as a
            callable value that takes a request value and a ServicerContext
            object and returns a response value. Only non-None if both
            request_streaming and response_streaming are False.
        unary_stream: This object's application-specific business logic as a
            callable value that takes a request value and a ServicerContext
            object and returns an iterator of response values. Only non-None
            if request_streaming is False and response_streaming is True.
        stream_unary: This object's application-specific business logic as a
            callable value that takes an iterator of request values and a
            ServicerContext object and returns a response value. Only
            non-None if request_streaming is True and response_streaming is
            False.
        stream_stream: This object's application-specific business logic as
            a callable value that takes an iterator of request values and a
            ServicerContext object and returns an iterator of response
            values. Only non-None if request_streaming and
            response_streaming are both True.
    """
+
1361
class HandlerCallDetails(abc.ABC):
    """Describes an RPC that has just arrived for service.

    Attributes:
        method: The method name of the RPC.
        invocation_metadata: The :term:`metadata` sent by the client.
    """
+
1370
class GenericRpcHandler(abc.ABC):
    """An implementation of arbitrarily many RPC methods."""

    @abc.abstractmethod
    def service(self, handler_call_details):
        """Returns the handler for servicing the RPC.

        Args:
            handler_call_details: A HandlerCallDetails describing the RPC.

        Returns:
            An RpcMethodHandler with which the RPC may be serviced if the
            implementation chooses to service this RPC, or None otherwise.
        """
        raise NotImplementedError()
+
1387
class ServiceRpcHandler(GenericRpcHandler, metaclass=abc.ABCMeta):
    """An implementation of RPC methods belonging to a service.

    A service handles RPC methods with structured names of the form
    '/Service.Name/Service.Method', where 'Service.Name' is the value
    returned by service_name(), and 'Service.Method' is the method name. A
    service can have multiple method names, but only a single service name.
    """

    @abc.abstractmethod
    def service_name(self):
        """Returns this service's name.

        Returns:
            The service name.
        """
        raise NotImplementedError()
+
1407
+ #################### Service-Side Interceptor Interfaces #####################
1408
+
1409
+
1410
class ServerInterceptor(abc.ABC):
    """Affords intercepting incoming RPCs on the service-side."""

    @abc.abstractmethod
    def intercept_service(self, continuation, handler_call_details):
        """Intercepts incoming RPCs before handing them over to a handler.

        State can be passed from an interceptor to downstream interceptors
        via contextvars. The first interceptor is called from an empty
        contextvars.Context, and the same Context is used for downstream
        interceptors and for the final handler call. Note that there are no
        guarantees that interceptors and handlers will be called from the
        same thread.

        Args:
          continuation: A function taking a HandlerCallDetails that invokes
            the next interceptor in the chain, if any, or the RPC handler
            lookup logic, and returns an RpcMethodHandler if the RPC is
            considered serviced, or None otherwise.
          handler_call_details: A HandlerCallDetails describing the RPC.

        Returns:
          An RpcMethodHandler with which the RPC may be serviced if the
          interceptor chooses to service this RPC, or None otherwise.
        """
        raise NotImplementedError()
1437
+
1438
+
1439
+ ############################# Server Interface ###############################
1440
+
1441
+
1442
class Server(abc.ABC):
    """Services RPCs."""

    @abc.abstractmethod
    def add_generic_rpc_handlers(self, generic_rpc_handlers):
        """Registers GenericRpcHandlers with this Server.

        This method is only safe to call before the server is started.

        Args:
          generic_rpc_handlers: An iterable of GenericRpcHandlers that will
            be used to service RPCs.
        """
        raise NotImplementedError()

    def add_registered_method_handlers(self, service_name, method_handlers):
        """Registers per-method RpcMethodHandlers with this Server.

        This method is only safe to call before the server is started.

        If the same method has both a generic and a registered handler,
        the registered handler takes precedence.

        Args:
          service_name: The service name.
          method_handlers: A dictionary mapping method names to the
            corresponding RpcMethodHandler.
        """

    @abc.abstractmethod
    def add_insecure_port(self, address):
        """Opens an insecure port for accepting RPCs.

        This method may only be called before starting the server.

        Args:
          address: The address for which to open a port. If the port is 0,
            or not specified in the address, the gRPC runtime chooses one.

        Returns:
          An integer port on which the server will accept RPC requests.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def add_secure_port(self, address, server_credentials):
        """Opens a secure port for accepting RPCs.

        This method may only be called before starting the server.

        Args:
          address: The address for which to open a port. If the port is 0,
            or not specified in the address, the gRPC runtime chooses one.
          server_credentials: A ServerCredentials object.

        Returns:
          An integer port on which the server will accept RPC requests.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def start(self):
        """Starts this Server.

        This method may only be called once (i.e. it is not idempotent).
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def stop(self, grace):
        """Stops this Server.

        Service of new RPCs is stopped immediately in all cases.

        If a grace period is specified, this method waits until all active
        RPCs finish or the grace period elapses; RPCs still running at the
        end of the grace period are aborted. If `grace` is None, all
        existing RPCs are aborted immediately and this method blocks until
        the last RPC handler terminates.

        This method is idempotent and may be called at any time. A smaller
        grace value in a subsequent call stops the server sooner (None
        stops it immediately); a larger grace value in a subsequent call
        *will not* stop it later — the most restrictive grace wins.

        Args:
          grace: A duration of time in seconds, or None.

        Returns:
          A threading.Event that is set once this Server has completely
          stopped, i.e. when running RPCs have either completed or been
          aborted and all handlers have terminated.
        """
        raise NotImplementedError()

    def wait_for_termination(self, timeout=None):
        """Blocks the current thread until the server stops.

        This is an EXPERIMENTAL API.

        The wait consumes no computational resources while blocking and
        returns when either of the following holds:

        1) The server is stopped or terminated;
        2) A timeout occurs, if timeout is not `None`.

        The timeout argument works the same way as `threading.Event.wait()`:
        https://docs.python.org/3/library/threading.html#threading.Event.wait

        Args:
          timeout: A floating point number specifying a timeout for the
            operation in seconds.

        Returns:
          A bool indicating whether the operation timed out.
        """
        raise NotImplementedError()
1564
+
1565
+
1566
+ ################################# Functions ################################
1567
+
1568
+
1569
def unary_unary_rpc_method_handler(
    behavior, request_deserializer=None, response_serializer=None
):
    """Creates an RpcMethodHandler for a unary-unary RPC method.

    Args:
      behavior: The implementation of an RPC that accepts one request and
        returns one response.
      request_deserializer: An optional :term:`deserializer` for request
        deserialization.
      response_serializer: An optional :term:`serializer` for response
        serialization.

    Returns:
      An RpcMethodHandler object that is typically used by grpc.Server.
    """
    from grpc import _utilities  # pylint: disable=cyclic-import

    # Neither requests nor responses are streamed; behavior fills the
    # unary-unary slot and the remaining behavior slots stay unset.
    handler = _utilities.RpcMethodHandler(
        False,
        False,
        request_deserializer,
        response_serializer,
        behavior,
        None,
        None,
        None,
    )
    return handler
1595
+
1596
+
1597
def unary_stream_rpc_method_handler(
    behavior, request_deserializer=None, response_serializer=None
):
    """Creates an RpcMethodHandler for a unary-stream RPC method.

    Args:
      behavior: The implementation of an RPC that accepts one request and
        returns an iterator of response values.
      request_deserializer: An optional :term:`deserializer` for request
        deserialization.
      response_serializer: An optional :term:`serializer` for response
        serialization.

    Returns:
      An RpcMethodHandler object that is typically used by grpc.Server.
    """
    from grpc import _utilities  # pylint: disable=cyclic-import

    # Responses are streamed, requests are not; behavior fills the
    # unary-stream slot.
    handler = _utilities.RpcMethodHandler(
        False,
        True,
        request_deserializer,
        response_serializer,
        None,
        behavior,
        None,
        None,
    )
    return handler
1623
+
1624
+
1625
def stream_unary_rpc_method_handler(
    behavior, request_deserializer=None, response_serializer=None
):
    """Creates an RpcMethodHandler for a stream-unary RPC method.

    Args:
      behavior: The implementation of an RPC that accepts an iterator of
        request values and returns a single response value.
      request_deserializer: An optional :term:`deserializer` for request
        deserialization.
      response_serializer: An optional :term:`serializer` for response
        serialization.

    Returns:
      An RpcMethodHandler object that is typically used by grpc.Server.
    """
    from grpc import _utilities  # pylint: disable=cyclic-import

    # Requests are streamed, responses are not; behavior fills the
    # stream-unary slot.
    handler = _utilities.RpcMethodHandler(
        True,
        False,
        request_deserializer,
        response_serializer,
        None,
        None,
        behavior,
        None,
    )
    return handler
1651
+
1652
+
1653
def stream_stream_rpc_method_handler(
    behavior, request_deserializer=None, response_serializer=None
):
    """Creates an RpcMethodHandler for a stream-stream RPC method.

    Args:
      behavior: The implementation of an RPC that accepts an iterator of
        request values and returns an iterator of response values.
      request_deserializer: An optional :term:`deserializer` for request
        deserialization.
      response_serializer: An optional :term:`serializer` for response
        serialization.

    Returns:
      An RpcMethodHandler object that is typically used by grpc.Server.
    """
    from grpc import _utilities  # pylint: disable=cyclic-import

    # Both directions are streamed; behavior fills the stream-stream slot.
    handler = _utilities.RpcMethodHandler(
        True,
        True,
        request_deserializer,
        response_serializer,
        None,
        None,
        None,
        behavior,
    )
    return handler
1679
+
1680
+
1681
def method_handlers_generic_handler(service, method_handlers):
    """Creates a GenericRpcHandler from RpcMethodHandlers.

    Args:
      service: The name of the service implemented by the method_handlers.
      method_handlers: A dictionary mapping method names to corresponding
        RpcMethodHandler.

    Returns:
      A GenericRpcHandler. This is typically added to the grpc.Server
      object with add_generic_rpc_handlers() before starting the server.
    """
    from grpc import _utilities  # pylint: disable=cyclic-import

    generic_handler = _utilities.DictionaryGenericHandler(
        service, method_handlers
    )
    return generic_handler
1697
+
1698
+
1699
def ssl_channel_credentials(
    root_certificates=None, private_key=None, certificate_chain=None
):
    """Creates a ChannelCredentials for use with an SSL-enabled Channel.

    Args:
      root_certificates: The PEM-encoded root certificates as a byte
        string, or None to retrieve them from a default location chosen by
        the gRPC runtime.
      private_key: The PEM-encoded private key as a byte string, or None
        if no private key should be used.
      certificate_chain: The PEM-encoded certificate chain as a byte
        string, or None if no certificate chain should be used.

    Returns:
      A ChannelCredentials for use with an SSL-enabled Channel.
    """
    cygrpc_credentials = _cygrpc.SSLChannelCredentials(
        root_certificates, private_key, certificate_chain
    )
    return ChannelCredentials(cygrpc_credentials)
1721
+
1722
+
1723
def xds_channel_credentials(fallback_credentials=None):
    """Creates a ChannelCredentials for use with xDS. This is an
    EXPERIMENTAL API.

    Args:
      fallback_credentials: Credentials to use in case it is not possible
        to establish a secure connection via xDS. If no
        fallback_credentials argument is supplied, a default
        SSLChannelCredentials is used.
    """
    if fallback_credentials is None:
        fallback_credentials = ssl_channel_credentials()
    return ChannelCredentials(
        _cygrpc.XDSChannelCredentials(fallback_credentials._credentials)
    )
1740
+
1741
+
1742
def metadata_call_credentials(metadata_plugin, name=None):
    """Constructs CallCredentials from an AuthMetadataPlugin.

    Args:
      metadata_plugin: An AuthMetadataPlugin to use for authentication.
      name: An optional name for the plugin.

    Returns:
      A CallCredentials.
    """
    from grpc import _plugin_wrapping  # pylint: disable=cyclic-import

    credentials = _plugin_wrapping.metadata_plugin_call_credentials(
        metadata_plugin, name
    )
    return credentials
1757
+
1758
+
1759
def access_token_call_credentials(access_token):
    """Constructs CallCredentials from an access token.

    Args:
      access_token: A string placed directly in the http request
        authorization header, for example
        "authorization: Bearer <access_token>".

    Returns:
      A CallCredentials.
    """
    from grpc import _auth  # pylint: disable=cyclic-import
    from grpc import _plugin_wrapping  # pylint: disable=cyclic-import

    token_plugin = _auth.AccessTokenAuthMetadataPlugin(access_token)
    return _plugin_wrapping.metadata_plugin_call_credentials(
        token_plugin, None
    )
1776
+
1777
+
1778
def composite_call_credentials(*call_credentials):
    """Composes multiple CallCredentials into a new CallCredentials.

    Args:
      *call_credentials: At least two CallCredentials objects.

    Returns:
      A CallCredentials object composed of the given CallCredentials
      objects.
    """
    raw_credentials = tuple(
        creds._credentials for creds in call_credentials
    )
    return CallCredentials(
        _cygrpc.CompositeCallCredentials(raw_credentials)
    )
1795
+
1796
+
1797
def composite_channel_credentials(channel_credentials, *call_credentials):
    """Composes a ChannelCredentials and one or more CallCredentials.

    Args:
      channel_credentials: A ChannelCredentials object.
      *call_credentials: One or more CallCredentials objects.

    Returns:
      A ChannelCredentials composed of the given ChannelCredentials and
      CallCredentials objects.
    """
    raw_call_credentials = tuple(
        creds._credentials for creds in call_credentials
    )
    return ChannelCredentials(
        _cygrpc.CompositeChannelCredentials(
            raw_call_credentials,
            channel_credentials._credentials,
        )
    )
1817
+
1818
+
1819
def ssl_server_credentials(
    private_key_certificate_chain_pairs,
    root_certificates=None,
    require_client_auth=False,
):
    """Creates a ServerCredentials for use with an SSL-enabled Server.

    Args:
      private_key_certificate_chain_pairs: A list of pairs of the form
        [PEM-encoded private key, PEM-encoded certificate chain].
      root_certificates: An optional byte string of PEM-encoded client
        root certificates that the server will use to verify client
        authentication. If omitted, require_client_auth must also be
        False.
      require_client_auth: A boolean indicating whether or not to require
        clients to be authenticated. May only be True if
        root_certificates is not None.

    Returns:
      A ServerCredentials for use with an SSL-enabled Server. Typically,
      this object is an argument to add_secure_port() during server setup.
    """
    # Guard clauses: validate inputs before touching the cygrpc layer.
    if not private_key_certificate_chain_pairs:
        raise ValueError(
            "At least one private key-certificate chain pair is required!"
        )
    if require_client_auth and root_certificates is None:
        raise ValueError(
            "Illegal to require client auth without providing root"
            " certificates!"
        )
    pem_key_cert_pairs = [
        _cygrpc.SslPemKeyCertPair(key, pem)
        for key, pem in private_key_certificate_chain_pairs
    ]
    return ServerCredentials(
        _cygrpc.server_credentials_ssl(
            root_certificates, pem_key_cert_pairs, require_client_auth
        )
    )
1860
+
1861
+
1862
def xds_server_credentials(fallback_credentials):
    """Creates a ServerCredentials for use with xDS. This is an
    EXPERIMENTAL API.

    Args:
      fallback_credentials: Credentials to use in case it is not possible
        to establish a secure connection via xDS. No default value is
        provided.
    """
    raw_fallback = fallback_credentials._credentials
    return ServerCredentials(_cygrpc.xds_server_credentials(raw_fallback))
1873
+
1874
+
1875
def insecure_server_credentials():
    """Creates a credentials object directing the server to use no
    credentials. This is an EXPERIMENTAL API.

    This object cannot be used directly in a call to `add_secure_port`.
    Instead, it should be used to construct other credentials objects,
    e.g. with xds_server_credentials.
    """
    return ServerCredentials(_cygrpc.insecure_server_credentials())
1884
+
1885
+
1886
def ssl_server_certificate_configuration(
    private_key_certificate_chain_pairs, root_certificates=None
):
    """Creates a ServerCertificateConfiguration for use with a Server.

    Args:
      private_key_certificate_chain_pairs: A collection of pairs of the
        form [PEM-encoded private key, PEM-encoded certificate chain].
      root_certificates: An optional byte string of PEM-encoded client
        root certificates that the server will use to verify client
        authentication.

    Returns:
      A ServerCertificateConfiguration that can be returned in the
      certificate configuration fetching callback.
    """
    # Guard clause instead of if/else: reject an empty pair collection up
    # front, then build the configuration on the success path.
    if not private_key_certificate_chain_pairs:
        raise ValueError(
            "At least one private key-certificate chain pair is required!"
        )
    pem_key_cert_pairs = [
        _cygrpc.SslPemKeyCertPair(key, pem)
        for key, pem in private_key_certificate_chain_pairs
    ]
    return ServerCertificateConfiguration(
        _cygrpc.server_certificate_config_ssl(
            root_certificates, pem_key_cert_pairs
        )
    )
1916
+
1917
+
1918
def dynamic_ssl_server_credentials(
    initial_certificate_configuration,
    certificate_configuration_fetcher,
    require_client_authentication=False,
):
    """Creates a ServerCredentials for use with an SSL-enabled Server.

    Args:
      initial_certificate_configuration (ServerCertificateConfiguration):
        The certificate configuration with which the server will be
        initialized.
      certificate_configuration_fetcher (callable): A callable that takes
        no arguments and should return a ServerCertificateConfiguration to
        replace the server's current certificate, or None for no change
        (i.e., the server will continue its current certificate config).
        The library calls this callback on *every* new client connection
        before starting the TLS handshake, allowing the application to
        optionally return a new ServerCertificateConfiguration that the
        server will then use for the handshake.
      require_client_authentication: A boolean indicating whether or not
        to require clients to be authenticated.

    Returns:
      A ServerCredentials.
    """
    dynamic_credentials = _cygrpc.server_credentials_ssl_dynamic_cert_config(
        initial_certificate_configuration,
        certificate_configuration_fetcher,
        require_client_authentication,
    )
    return ServerCredentials(dynamic_credentials)
1950
+
1951
+
1952
@enum.unique
class LocalConnectionType(enum.Enum):
    """Types of local connection for local credential creation.

    Attributes:
      UDS: Unix domain socket connections.
      LOCAL_TCP: Local TCP connections.
    """

    # Values mirror the cygrpc-level local connection type constants.
    UDS = _cygrpc.LocalConnectionType.uds
    LOCAL_TCP = _cygrpc.LocalConnectionType.local_tcp
1963
+
1964
+
1965
def local_channel_credentials(local_connect_type=LocalConnectionType.LOCAL_TCP):
    """Creates a local ChannelCredentials used for local connections.

    This is an EXPERIMENTAL API.

    Local credentials are used by local TCP endpoints (e.g.
    localhost:10000) and also UDS connections.

    Connections created by local channel credentials are not encrypted,
    but are checked for being local or not. UDS connections are considered
    secure by providing peer authentication and data confidentiality,
    while TCP connections are considered insecure.

    It is allowed to transmit call credentials over connections created by
    local channel credentials.

    Local channel credentials are useful for 1) eliminating
    insecure_channel usage; 2) enabling unit testing for call credentials
    without setting up secrets.

    Args:
      local_connect_type: Local connection type (either
        grpc.LocalConnectionType.UDS or grpc.LocalConnectionType.LOCAL_TCP)

    Returns:
      A ChannelCredentials for use with a local Channel
    """
    cygrpc_credentials = _cygrpc.channel_credentials_local(
        local_connect_type.value
    )
    return ChannelCredentials(cygrpc_credentials)
1994
+
1995
+
1996
def local_server_credentials(local_connect_type=LocalConnectionType.LOCAL_TCP):
    """Creates a local ServerCredentials used for local connections.

    This is an EXPERIMENTAL API.

    Local credentials are used by local TCP endpoints (e.g.
    localhost:10000) and also UDS connections.

    Connections created by local server credentials are not encrypted,
    but are checked for being local or not. UDS connections are considered
    secure by providing peer authentication and data confidentiality,
    while TCP connections are considered insecure.

    It is allowed to transmit call credentials over connections created by
    local server credentials.

    Local server credentials are useful for 1) eliminating
    insecure_channel usage; 2) enabling unit testing for call credentials
    without setting up secrets.

    Args:
      local_connect_type: Local connection type (either
        grpc.LocalConnectionType.UDS or grpc.LocalConnectionType.LOCAL_TCP)

    Returns:
      A ServerCredentials for use with a local Server
    """
    cygrpc_credentials = _cygrpc.server_credentials_local(
        local_connect_type.value
    )
    return ServerCredentials(cygrpc_credentials)
2025
+
2026
+
2027
def alts_channel_credentials(service_accounts=None):
    """Creates a ChannelCredentials for use with an ALTS-enabled Channel.

    This is an EXPERIMENTAL API.
    The ALTS credentials API can only be used in a GCP environment as it
    relies on a handshaker service being available. For more info about
    ALTS see
    https://cloud.google.com/security/encryption-in-transit/application-layer-transport-security

    Args:
      service_accounts: A list of server identities accepted by the
        client. If target service accounts are provided and none of them
        matches the peer identity of the server, the handshake fails. The
        arg can be empty if the client has no information about trusted
        server identity.
    Returns:
      A ChannelCredentials for use with an ALTS-enabled Channel
    """
    target_service_accounts = service_accounts if service_accounts else []
    return ChannelCredentials(
        _cygrpc.channel_credentials_alts(target_service_accounts)
    )
2047
+
2048
+
2049
def alts_server_credentials():
    """Creates a ServerCredentials for use with an ALTS-enabled connection.

    This is an EXPERIMENTAL API.
    The ALTS credentials API can only be used in a GCP environment as it
    relies on a handshaker service being available. For more info about
    ALTS see
    https://cloud.google.com/security/encryption-in-transit/application-layer-transport-security

    Returns:
      A ServerCredentials for use with an ALTS-enabled Server
    """
    return ServerCredentials(_cygrpc.server_credentials_alts())
2061
+
2062
+
2063
def compute_engine_channel_credentials(call_credentials):
    """Creates a compute engine channel credential.

    This credential can only be used in a GCP environment as it relies on
    a handshaker service. For more info about ALTS, see
    https://cloud.google.com/security/encryption-in-transit/application-layer-transport-security

    This channel credential is expected to be used as part of a composite
    credential in conjunction with a call credentials that authenticates
    the VM's default service account. If used with any other sort of call
    credential, the connection may suddenly and unexpectedly begin failing
    RPCs.
    """
    raw_call_credentials = call_credentials._credentials
    return ChannelCredentials(
        _cygrpc.channel_credentials_compute_engine(raw_call_credentials)
    )
2080
+
2081
+
2082
def channel_ready_future(channel):
    """Creates a Future that tracks when a Channel is ready.

    Cancelling the Future does not affect the channel's state machine;
    it merely decouples the Future from the state machine.

    Args:
      channel: A Channel object.

    Returns:
      A Future object that matures when the channel connectivity is
      ChannelConnectivity.READY.
    """
    from grpc import _utilities  # pylint: disable=cyclic-import

    ready_future = _utilities.channel_ready_future(channel)
    return ready_future
2098
+
2099
+
2100
def insecure_channel(target, options=None, compression=None):
    """Creates an insecure Channel to a server.

    The returned Channel is thread-safe.

    Args:
      target: The server address
      options: An optional list of key-value pairs
        (:term:`channel_arguments` in gRPC Core runtime) to configure the
        channel.
      compression: An optional value indicating the compression method to
        be used over the lifetime of the channel.

    Returns:
      A Channel.
    """
    from grpc import _channel  # pylint: disable=cyclic-import

    channel_options = options if options is not None else ()
    return _channel.Channel(target, channel_options, None, compression)
2120
+
2121
+
2122
def secure_channel(target, credentials, options=None, compression=None):
    """Creates a secure Channel to a server.

    The returned Channel is thread-safe.

    Args:
      target: The server address.
      credentials: A ChannelCredentials instance.
      options: An optional list of key-value pairs
        (:term:`channel_arguments` in gRPC Core runtime) to configure the
        channel.
      compression: An optional value indicating the compression method to
        be used over the lifetime of the channel.

    Returns:
      A Channel.
    """
    from grpc import _channel  # pylint: disable=cyclic-import
    from grpc.experimental import _insecure_channel_credentials

    # Refuse the sentinel insecure credentials: callers must use
    # insecure_channel for that case.
    if credentials._credentials is _insecure_channel_credentials:
        raise ValueError(
            "secure_channel cannot be called with insecure credentials."
            " Call insecure_channel instead."
        )
    channel_options = options if options is not None else ()
    return _channel.Channel(
        target, channel_options, credentials._credentials, compression
    )
2152
+
2153
+
2154
def intercept_channel(channel, *interceptors):
    """Intercepts a channel through a set of interceptors.

    Args:
      channel: A Channel.
      interceptors: Zero or more objects of type
        UnaryUnaryClientInterceptor, UnaryStreamClientInterceptor,
        StreamUnaryClientInterceptor, or StreamStreamClientInterceptor.
        Interceptors are given control in the order they are listed.

    Returns:
      A Channel that intercepts each invocation via the provided
      interceptors.

    Raises:
      TypeError: If an interceptor does not derive from any of
        UnaryUnaryClientInterceptor, UnaryStreamClientInterceptor,
        StreamUnaryClientInterceptor, or StreamStreamClientInterceptor.
    """
    from grpc import _interceptor  # pylint: disable=cyclic-import

    intercepted = _interceptor.intercept_channel(channel, *interceptors)
    return intercepted
2179
+
2180
+
2181
def server(
    thread_pool,
    handlers=None,
    interceptors=None,
    options=None,
    maximum_concurrent_rpcs=None,
    compression=None,
    xds=False,
):
    """Creates a Server with which RPCs can be serviced.

    Args:
      thread_pool: A futures.ThreadPoolExecutor used by the Server to
        execute RPC handlers.
      handlers: An optional list of GenericRpcHandlers used for executing
        RPCs. More handlers may be added by calling
        add_generic_rpc_handlers any time before the server is started.
      interceptors: An optional list of ServerInterceptor objects that
        observe and optionally manipulate the incoming RPCs before handing
        them over to handlers. The interceptors are given control in the
        order they are specified. This is an EXPERIMENTAL API.
      options: An optional list of key-value pairs
        (:term:`channel_arguments` in gRPC runtime) to configure the
        channel.
      maximum_concurrent_rpcs: The maximum number of concurrent RPCs this
        server will service before returning RESOURCE_EXHAUSTED status, or
        None to indicate no limit.
      compression: An element of grpc.compression, e.g.
        grpc.compression.Gzip. This compression algorithm will be used for
        the lifetime of the server unless overridden.
      xds: If set to true, retrieves server configuration via xDS. This is
        an EXPERIMENTAL option.

    Returns:
      A Server object.
    """
    from grpc import _server  # pylint: disable=cyclic-import

    # Normalize the optional iterables: None becomes an empty tuple.
    return _server.create_server(
        thread_pool,
        handlers if handlers is not None else (),
        interceptors if interceptors is not None else (),
        options if options is not None else (),
        maximum_concurrent_rpcs,
        compression,
        xds,
    )
2227
+
2228
+
2229
@contextlib.contextmanager
def _create_servicer_context(rpc_event, state, request_deserializer):
    """Yields a _server._Context, finalizing its state on normal exit.

    NOTE(review): finalization runs only on the non-exception path — if
    the with-body raises, _finalize_state is skipped, matching the
    original control flow (presumably intentional; confirm before adding
    try/finally).
    """
    from grpc import _server  # pylint: disable=cyclic-import

    servicer_context = _server._Context(rpc_event, state, request_deserializer)
    yield servicer_context
    servicer_context._finalize_state()  # pylint: disable=protected-access
2236
+
2237
+
2238
@enum.unique
class Compression(enum.IntEnum):
    """Indicates the compression method to be used for an RPC.

    Attributes:
      NoCompression: Do not use a compression algorithm.
      Deflate: Use the "Deflate" compression algorithm.
      Gzip: Use the "Gzip" compression algorithm.
    """

    # Values mirror the constants exposed by the grpc._compression module.
    NoCompression = _compression.NoCompression
    Deflate = _compression.Deflate
    Gzip = _compression.Gzip
2251
+
2252
+
2253
+ ################################### __all__ #################################
2254
+
2255
# Public API surface of the grpc package, in its historical order.
__all__ = (
    # Futures and connectivity.
    "FutureTimeoutError",
    "FutureCancelledError",
    "Future",
    "ChannelConnectivity",
    "StatusCode",
    "Status",
    # RPC primitives.
    "RpcError",
    "RpcContext",
    "Call",
    # Credentials types.
    "ChannelCredentials",
    "CallCredentials",
    "AuthMetadataContext",
    "AuthMetadataPluginCallback",
    "AuthMetadataPlugin",
    "Compression",
    "ClientCallDetails",
    "ServerCertificateConfiguration",
    "ServerCredentials",
    "LocalConnectionType",
    # Multi-callables and client interceptors.
    "UnaryUnaryMultiCallable",
    "UnaryStreamMultiCallable",
    "StreamUnaryMultiCallable",
    "StreamStreamMultiCallable",
    "UnaryUnaryClientInterceptor",
    "UnaryStreamClientInterceptor",
    "StreamUnaryClientInterceptor",
    "StreamStreamClientInterceptor",
    # Service-side interfaces.
    "Channel",
    "ServicerContext",
    "RpcMethodHandler",
    "HandlerCallDetails",
    "GenericRpcHandler",
    "ServiceRpcHandler",
    "Server",
    "ServerInterceptor",
    # Factory functions.
    "unary_unary_rpc_method_handler",
    "unary_stream_rpc_method_handler",
    "stream_unary_rpc_method_handler",
    "stream_stream_rpc_method_handler",
    "method_handlers_generic_handler",
    "ssl_channel_credentials",
    "metadata_call_credentials",
    "access_token_call_credentials",
    "composite_call_credentials",
    "composite_channel_credentials",
    "compute_engine_channel_credentials",
    "local_channel_credentials",
    "local_server_credentials",
    "alts_channel_credentials",
    "alts_server_credentials",
    "ssl_server_credentials",
    "ssl_server_certificate_configuration",
    "dynamic_ssl_server_credentials",
    "channel_ready_future",
    "insecure_channel",
    "secure_channel",
    "intercept_channel",
    "server",
    "protos",
    "services",
    "protos_and_services",
    "xds_channel_credentials",
    "xds_server_credentials",
    "insecure_server_credentials",
)
2321
+
2322
+ ############################### Extension Shims ################################
2323
+
2324
# Here to maintain backwards compatibility; avoid using these in new code!
# Each optional extension package, when installed, is aliased under the
# legacy "grpc.<name>" module path. A missing package is simply skipped.
try:
    import grpc_tools

    sys.modules["grpc.tools"] = grpc_tools
except ImportError:
    pass
try:
    import grpc_health

    sys.modules["grpc.health"] = grpc_health
except ImportError:
    pass
try:
    import grpc_reflection

    sys.modules["grpc.reflection"] = grpc_reflection
except ImportError:
    pass

# Prevents an import-order issue when the package path has been renamed.
if sys.version_info >= (3, 6) and __name__ == "grpc":
    from grpc import aio  # pylint: disable=ungrouped-imports

    sys.modules["grpc.aio"] = aio
lib/python3.10/site-packages/grpc/_auth.py ADDED
@@ -0,0 +1,80 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """GRPCAuthMetadataPlugins for standard authentication."""
15
+
16
+ import inspect
17
+ from typing import Any, Optional
18
+
19
+ import grpc
20
+
21
+
22
+ def _sign_request(
23
+ callback: grpc.AuthMetadataPluginCallback,
24
+ token: Optional[str],
25
+ error: Optional[Exception],
26
+ ):
27
+ metadata = (("authorization", "Bearer {}".format(token)),)
28
+ callback(metadata, error)
29
+
30
+
31
class GoogleCallCredentials(grpc.AuthMetadataPlugin):
    """Metadata wrapper for GoogleCredentials from the oauth2client library."""

    # True when the wrapped credentials look like JWT credentials (see
    # __init__); controls whether additional_claims is passed when fetching
    # a token.
    _is_jwt: bool
    # The wrapped credentials object; must expose get_access_token().
    _credentials: Any

    # TODO(xuanwn): Give credentials an actual type.
    def __init__(self, credentials: Any):
        self._credentials = credentials
        # Hack to determine if these are JWT creds and we need to pass
        # additional_claims when getting a token
        self._is_jwt = (
            "additional_claims"
            in inspect.getfullargspec(credentials.get_access_token).args
        )

    def __call__(
        self,
        context: grpc.AuthMetadataContext,
        callback: grpc.AuthMetadataPluginCallback,
    ):
        """Fetch an access token and sign the request, reporting any failure.

        For JWT credentials the RPC's service URL is supplied as the ``aud``
        (audience) claim. Any exception during token acquisition is forwarded
        to the callback rather than raised, so the RPC fails cleanly.
        """
        try:
            if self._is_jwt:
                access_token = self._credentials.get_access_token(
                    additional_claims={
                        "aud": context.service_url  # pytype: disable=attribute-error
                    }
                ).access_token
            else:
                access_token = self._credentials.get_access_token().access_token
        except Exception as exception:  # pylint: disable=broad-except
            _sign_request(callback, None, exception)
        else:
            _sign_request(callback, access_token, None)
65
+
66
+
67
class AccessTokenAuthMetadataPlugin(grpc.AuthMetadataPlugin):
    """Metadata wrapper for raw access token credentials."""

    # The raw bearer token attached verbatim to every request.
    _access_token: str

    def __init__(self, access_token: str):
        """Remembers *access_token* for use on every subsequent call."""
        self._access_token = access_token

    def __call__(
        self,
        context: grpc.AuthMetadataContext,
        callback: grpc.AuthMetadataPluginCallback,
    ):
        """Signs the request immediately with the stored token; never fails."""
        _sign_request(callback, self._access_token, None)
lib/python3.10/site-packages/grpc/_channel.py ADDED
@@ -0,0 +1,2267 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Invocation-side implementation of gRPC Python."""
15
+
16
+ import copy
17
+ import functools
18
+ import logging
19
+ import os
20
+ import sys
21
+ import threading
22
+ import time
23
+ import types
24
+ from typing import (
25
+ Any,
26
+ Callable,
27
+ Dict,
28
+ Iterator,
29
+ List,
30
+ Optional,
31
+ Sequence,
32
+ Set,
33
+ Tuple,
34
+ Union,
35
+ )
36
+
37
+ import grpc # pytype: disable=pyi-error
38
+ from grpc import _common # pytype: disable=pyi-error
39
+ from grpc import _compression # pytype: disable=pyi-error
40
+ from grpc import _grpcio_metadata # pytype: disable=pyi-error
41
+ from grpc import _observability # pytype: disable=pyi-error
42
+ from grpc._cython import cygrpc
43
+ from grpc._typing import ChannelArgumentType
44
+ from grpc._typing import DeserializingFunction
45
+ from grpc._typing import IntegratedCallFactory
46
+ from grpc._typing import MetadataType
47
+ from grpc._typing import NullaryCallbackType
48
+ from grpc._typing import ResponseType
49
+ from grpc._typing import SerializingFunction
50
+ from grpc._typing import UserTag
51
+ import grpc.experimental # pytype: disable=pyi-error
52
+
53
# Module-level logger for invocation-side diagnostics.
_LOGGER = logging.getLogger(__name__)

# User-agent string advertised to servers, versioned with this distribution.
_USER_AGENT = "grpc-python/{}".format(_grpcio_metadata.__version__)

# Default (empty) flag set passed to cygrpc operations.
_EMPTY_FLAGS = 0

# NOTE(rbellevi): No guarantees are given about the maintenance of this
# environment variable.
_DEFAULT_SINGLE_THREADED_UNARY_STREAM = (
    os.getenv("GRPC_SINGLE_THREADED_UNARY_STREAM") is not None
)

# The sets of operations initially expected ("due") from the completion
# queue for each RPC cardinality. Streaming directions omit the operations
# that are instead enqueued incrementally as messages flow.
_UNARY_UNARY_INITIAL_DUE = (
    cygrpc.OperationType.send_initial_metadata,
    cygrpc.OperationType.send_message,
    cygrpc.OperationType.send_close_from_client,
    cygrpc.OperationType.receive_initial_metadata,
    cygrpc.OperationType.receive_message,
    cygrpc.OperationType.receive_status_on_client,
)
_UNARY_STREAM_INITIAL_DUE = (
    cygrpc.OperationType.send_initial_metadata,
    cygrpc.OperationType.send_message,
    cygrpc.OperationType.send_close_from_client,
    cygrpc.OperationType.receive_initial_metadata,
    cygrpc.OperationType.receive_status_on_client,
)
_STREAM_UNARY_INITIAL_DUE = (
    cygrpc.OperationType.send_initial_metadata,
    cygrpc.OperationType.receive_initial_metadata,
    cygrpc.OperationType.receive_message,
    cygrpc.OperationType.receive_status_on_client,
)
_STREAM_STREAM_INITIAL_DUE = (
    cygrpc.OperationType.send_initial_metadata,
    cygrpc.OperationType.receive_initial_metadata,
    cygrpc.OperationType.receive_status_on_client,
)

_CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE = (
    "Exception calling channel subscription callback!"
)

# repr()/str() templates for terminated RPCs; the non-OK variant also
# includes the core debug error string.
_OK_RENDEZVOUS_REPR_FORMAT = (
    '<{} of RPC that terminated with:\n\tstatus = {}\n\tdetails = "{}"\n>'
)

_NON_OK_RENDEZVOUS_REPR_FORMAT = (
    "<{} of RPC that terminated with:\n"
    "\tstatus = {}\n"
    '\tdetails = "{}"\n'
    '\tdebug_error_string = "{}"\n'
    ">"
)
107
+
108
+
109
+ def _deadline(timeout: Optional[float]) -> Optional[float]:
110
+ return None if timeout is None else time.time() + timeout
111
+
112
+
113
def _unknown_code_details(
    unknown_cygrpc_code: Optional[grpc.StatusCode], details: Optional[str]
) -> str:
    """Build the details string used when the server sent an unmapped code."""
    return f'Server sent unknown code {unknown_cygrpc_code} and details "{details}"'
119
+
120
+
121
class _RPCState(object):
    # Invocation-side record of everything known about one RPC. Unless noted
    # otherwise in __init__, all fields are read and written only while
    # holding `condition`.
    condition: threading.Condition
    due: Set[cygrpc.OperationType]
    initial_metadata: Optional[MetadataType]
    response: Any
    trailing_metadata: Optional[MetadataType]
    code: Optional[grpc.StatusCode]
    details: Optional[str]
    debug_error_string: Optional[str]
    cancelled: bool
    callbacks: List[NullaryCallbackType]
    fork_epoch: Optional[int]
    rpc_start_time: Optional[float]  # In relative seconds
    rpc_end_time: Optional[float]  # In relative seconds
    method: Optional[str]
    target: Optional[str]

    def __init__(
        self,
        due: Sequence[cygrpc.OperationType],
        initial_metadata: Optional[MetadataType],
        trailing_metadata: Optional[MetadataType],
        code: Optional[grpc.StatusCode],
        details: Optional[str],
    ):
        # `condition` guards all members of _RPCState. `notify_all` is called on
        # `condition` when the state of the RPC has changed.
        self.condition = threading.Condition()

        # The cygrpc.OperationType objects representing events due from the RPC's
        # completion queue. If an operation is in `due`, it is guaranteed that
        # `operate()` has been called on a corresponding operation. But the
        # converse is not true. That is, in the case of failed `operate()`
        # calls, there may briefly be events in `due` that do not correspond to
        # operations submitted to Core.
        self.due = set(due)
        self.initial_metadata = initial_metadata
        self.response = None
        self.trailing_metadata = trailing_metadata
        self.code = code
        self.details = details
        self.debug_error_string = None
        # The following three fields are used for observability.
        # Updates to those fields do not trigger self.condition.
        self.rpc_start_time = None
        self.rpc_end_time = None
        self.method = None
        self.target = None

        # The semantics of grpc.Future.cancel and grpc.Future.cancelled are
        # slightly wonky, so they have to be tracked separately from the rest of the
        # result of the RPC. This field tracks whether cancellation was requested
        # prior to termination of the RPC.
        self.cancelled = False
        self.callbacks = []
        self.fork_epoch = cygrpc.get_fork_epoch()

    def reset_postfork_child(self):
        # After fork, the inherited Condition's underlying lock may be held by
        # a thread that does not exist in the child; replace it wholesale.
        self.condition = threading.Condition()
180
+
181
+
182
def _abort(state: _RPCState, code: grpc.StatusCode, details: str) -> None:
    """Locally terminate *state* with *code*/*details* if not already done.

    A no-op when the RPC has already reached a terminal code. Metadata
    fields are normalized to empty tuples so readers never observe None
    on a terminated RPC. Caller must hold state.condition.
    """
    if state.code is not None:
        return
    state.code = code
    state.details = details
    if state.initial_metadata is None:
        state.initial_metadata = ()
    state.trailing_metadata = ()
189
+
190
+
191
def _handle_event(
    event: cygrpc.BaseEvent,
    state: _RPCState,
    response_deserializer: Optional[DeserializingFunction],
) -> List[NullaryCallbackType]:
    """Fold a completion-queue event into *state* and collect done-callbacks.

    Caller must hold state.condition. Each batch operation is removed from
    `state.due` and dispatched on its type: initial metadata and messages are
    recorded, and receipt of status finalizes the RPC. Returns the callbacks
    to run (outside the lock) once the RPC has terminated; empty otherwise.
    """
    callbacks = []
    for batch_operation in event.batch_operations:
        operation_type = batch_operation.type()
        state.due.remove(operation_type)
        if operation_type == cygrpc.OperationType.receive_initial_metadata:
            state.initial_metadata = batch_operation.initial_metadata()
        elif operation_type == cygrpc.OperationType.receive_message:
            serialized_response = batch_operation.message()
            if serialized_response is not None:
                response = _common.deserialize(
                    serialized_response, response_deserializer
                )
                if response is None:
                    # Deserializer returned nothing: treat as an INTERNAL
                    # error rather than surfacing a None response.
                    details = "Exception deserializing response!"
                    _abort(state, grpc.StatusCode.INTERNAL, details)
                else:
                    state.response = response
        elif operation_type == cygrpc.OperationType.receive_status_on_client:
            state.trailing_metadata = batch_operation.trailing_metadata()
            if state.code is None:
                # Map the core status code; fall back to UNKNOWN when the
                # server sent a code we do not recognize.
                code = _common.CYGRPC_STATUS_CODE_TO_STATUS_CODE.get(
                    batch_operation.code()
                )
                if code is None:
                    state.code = grpc.StatusCode.UNKNOWN
                    state.details = _unknown_code_details(
                        code, batch_operation.details()
                    )
                else:
                    state.code = code
                    state.details = batch_operation.details()
                state.debug_error_string = batch_operation.error_string()
            state.rpc_end_time = time.perf_counter()
            _observability.maybe_record_rpc_latency(state)
            # Hand the accumulated done-callbacks to the caller and null the
            # list so late add_callback() attempts can detect termination.
            callbacks.extend(state.callbacks)
            state.callbacks = None
    return callbacks
233
+
234
+
235
def _event_handler(
    state: _RPCState, response_deserializer: Optional[DeserializingFunction]
) -> UserTag:
    """Create the tag callback that drains completion-queue events for an RPC.

    The returned callable updates *state* under its condition lock, wakes any
    waiters, then runs user done-callbacks outside the lock. Its return value
    tells the channel spin thread whether this call is fully settled (nothing
    left in `due` and no fork occurred since the call started).
    """

    def handle_event(event):
        with state.condition:
            pending = _handle_event(event, state, response_deserializer)
            state.condition.notify_all()
            done = not state.due
        for pending_callback in pending:
            try:
                pending_callback()
            except Exception as e:  # pylint: disable=broad-except
                # NOTE(rbellevi): We suppress but log errors here so as not to
                # kill the channel spin thread.
                logging.error(
                    "Exception in callback %s: %s",
                    repr(pending_callback.func),
                    repr(e),
                )
        return done and state.fork_epoch >= cygrpc.get_fork_epoch()

    return handle_event
255
+
256
+
257
+ # TODO(xuanwn): Create a base class for IntegratedCall and SegregatedCall.
258
+ # pylint: disable=too-many-statements
259
+ def _consume_request_iterator(
260
+ request_iterator: Iterator,
261
+ state: _RPCState,
262
+ call: Union[cygrpc.IntegratedCall, cygrpc.SegregatedCall],
263
+ request_serializer: SerializingFunction,
264
+ event_handler: Optional[UserTag],
265
+ ) -> None:
266
+ """Consume a request supplied by the user."""
267
+
268
+ def consume_request_iterator(): # pylint: disable=too-many-branches
269
+ # Iterate over the request iterator until it is exhausted or an error
270
+ # condition is encountered.
271
+ while True:
272
+ return_from_user_request_generator_invoked = False
273
+ try:
274
+ # The thread may die in user-code. Do not block fork for this.
275
+ cygrpc.enter_user_request_generator()
276
+ request = next(request_iterator)
277
+ except StopIteration:
278
+ break
279
+ except Exception: # pylint: disable=broad-except
280
+ cygrpc.return_from_user_request_generator()
281
+ return_from_user_request_generator_invoked = True
282
+ code = grpc.StatusCode.UNKNOWN
283
+ details = "Exception iterating requests!"
284
+ _LOGGER.exception(details)
285
+ call.cancel(
286
+ _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[code], details
287
+ )
288
+ _abort(state, code, details)
289
+ return
290
+ finally:
291
+ if not return_from_user_request_generator_invoked:
292
+ cygrpc.return_from_user_request_generator()
293
+ serialized_request = _common.serialize(request, request_serializer)
294
+ with state.condition:
295
+ if state.code is None and not state.cancelled:
296
+ if serialized_request is None:
297
+ code = grpc.StatusCode.INTERNAL
298
+ details = "Exception serializing request!"
299
+ call.cancel(
300
+ _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[code],
301
+ details,
302
+ )
303
+ _abort(state, code, details)
304
+ return
305
+ else:
306
+ state.due.add(cygrpc.OperationType.send_message)
307
+ operations = (
308
+ cygrpc.SendMessageOperation(
309
+ serialized_request, _EMPTY_FLAGS
310
+ ),
311
+ )
312
+ operating = call.operate(operations, event_handler)
313
+ if not operating:
314
+ state.due.remove(cygrpc.OperationType.send_message)
315
+ return
316
+
317
+ def _done():
318
+ return (
319
+ state.code is not None
320
+ or cygrpc.OperationType.send_message
321
+ not in state.due
322
+ )
323
+
324
+ _common.wait(
325
+ state.condition.wait,
326
+ _done,
327
+ spin_cb=functools.partial(
328
+ cygrpc.block_if_fork_in_progress, state
329
+ ),
330
+ )
331
+ if state.code is not None:
332
+ return
333
+ else:
334
+ return
335
+ with state.condition:
336
+ if state.code is None:
337
+ state.due.add(cygrpc.OperationType.send_close_from_client)
338
+ operations = (
339
+ cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS),
340
+ )
341
+ operating = call.operate(operations, event_handler)
342
+ if not operating:
343
+ state.due.remove(
344
+ cygrpc.OperationType.send_close_from_client
345
+ )
346
+
347
+ consumption_thread = cygrpc.ForkManagedThread(
348
+ target=consume_request_iterator
349
+ )
350
+ consumption_thread.setDaemon(True)
351
+ consumption_thread.start()
352
+
353
+
354
def _rpc_state_string(class_name: str, rpc_state: _RPCState) -> str:
    """Calculates error string for RPC.

    Produces a bare placeholder while the RPC is in flight, a short
    template for an OK termination, and the long template (including the
    core debug error string) for any other terminal status.
    """
    with rpc_state.condition:
        terminal_code = rpc_state.code
        if terminal_code is None:
            return "<{} object>".format(class_name)
        if terminal_code is grpc.StatusCode.OK:
            return _OK_RENDEZVOUS_REPR_FORMAT.format(
                class_name, terminal_code, rpc_state.details
            )
        return _NON_OK_RENDEZVOUS_REPR_FORMAT.format(
            class_name,
            terminal_code,
            rpc_state.details,
            rpc_state.debug_error_string,
        )
370
+
371
+
372
class _InactiveRpcError(grpc.RpcError, grpc.Call, grpc.Future):
    """An RPC error not tied to the execution of a particular RPC.

    The RPC represented by the state object must not be in-progress or
    cancelled.

    Attributes:
      _state: An instance of _RPCState.
    """

    _state: _RPCState

    def __init__(self, state: _RPCState):
        # Deep-copy the terminal state under its lock so this exception is a
        # frozen snapshot, independent of the source state's lifetime.
        with state.condition:
            self._state = _RPCState(
                (),
                copy.deepcopy(state.initial_metadata),
                copy.deepcopy(state.trailing_metadata),
                state.code,
                copy.deepcopy(state.details),
            )
            self._state.response = copy.copy(state.response)
            self._state.debug_error_string = copy.copy(state.debug_error_string)

    def initial_metadata(self) -> Optional[MetadataType]:
        return self._state.initial_metadata

    def trailing_metadata(self) -> Optional[MetadataType]:
        return self._state.trailing_metadata

    def code(self) -> Optional[grpc.StatusCode]:
        return self._state.code

    def details(self) -> Optional[str]:
        return _common.decode(self._state.details)

    def debug_error_string(self) -> Optional[str]:
        return _common.decode(self._state.debug_error_string)

    def _repr(self) -> str:
        return _rpc_state_string(self.__class__.__name__, self._state)

    def __repr__(self) -> str:
        return self._repr()

    def __str__(self) -> str:
        return self._repr()

    def cancel(self) -> bool:
        """See grpc.Future.cancel."""
        return False

    def cancelled(self) -> bool:
        """See grpc.Future.cancelled."""
        return False

    def running(self) -> bool:
        """See grpc.Future.running."""
        return False

    def done(self) -> bool:
        """See grpc.Future.done."""
        return True

    def result(
        self, timeout: Optional[float] = None
    ) -> Any:  # pylint: disable=unused-argument
        """See grpc.Future.result."""
        # This object IS the failure: as a Future it has no result, so
        # result() raises the exception itself.
        raise self

    def exception(
        self, timeout: Optional[float] = None  # pylint: disable=unused-argument
    ) -> Optional[Exception]:
        """See grpc.Future.exception."""
        return self

    def traceback(
        self, timeout: Optional[float] = None  # pylint: disable=unused-argument
    ) -> Optional[types.TracebackType]:
        """See grpc.Future.traceback."""
        # Raise-and-catch to materialize a traceback object for this error.
        try:
            raise self
        except grpc.RpcError:
            return sys.exc_info()[2]

    def add_done_callback(
        self,
        fn: Callable[[grpc.Future], None],
        timeout: Optional[float] = None,  # pylint: disable=unused-argument
    ) -> None:
        """See grpc.Future.add_done_callback."""
        # Already terminal, so the callback runs immediately and inline.
        fn(self)
464
+
465
+
466
class _Rendezvous(grpc.RpcError, grpc.RpcContext):
    """An RPC iterator.

    Attributes:
      _state: An instance of _RPCState.
      _call: An instance of SegregatedCall or IntegratedCall.
        In either case, the _call object is expected to have operate, cancel,
        and next_event methods.
      _response_deserializer: A callable taking bytes and return a Python
        object.
      _deadline: A float representing the deadline of the RPC in seconds. Or
        possibly None, to represent an RPC with no deadline at all.
    """

    _state: _RPCState
    _call: Union[cygrpc.SegregatedCall, cygrpc.IntegratedCall]
    _response_deserializer: Optional[DeserializingFunction]
    _deadline: Optional[float]

    def __init__(
        self,
        state: _RPCState,
        call: Union[cygrpc.SegregatedCall, cygrpc.IntegratedCall],
        response_deserializer: Optional[DeserializingFunction],
        deadline: Optional[float],
    ):
        super(_Rendezvous, self).__init__()
        self._state = state
        self._call = call
        self._response_deserializer = response_deserializer
        self._deadline = deadline

    def is_active(self) -> bool:
        """See grpc.RpcContext.is_active"""
        # Active means no terminal status code has been recorded yet.
        with self._state.condition:
            return self._state.code is None

    def time_remaining(self) -> Optional[float]:
        """See grpc.RpcContext.time_remaining"""
        with self._state.condition:
            if self._deadline is None:
                return None
            else:
                # Clamp at zero so an expired deadline never reads negative.
                return max(self._deadline - time.time(), 0)

    def cancel(self) -> bool:
        """See grpc.RpcContext.cancel"""
        with self._state.condition:
            if self._state.code is None:
                code = grpc.StatusCode.CANCELLED
                details = "Locally cancelled by application!"
                self._call.cancel(
                    _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[code], details
                )
                self._state.cancelled = True
                _abort(self._state, code, details)
                self._state.condition.notify_all()
                return True
            else:
                # Already terminated; cancellation has no effect.
                return False

    def add_callback(self, callback: NullaryCallbackType) -> bool:
        """See grpc.RpcContext.add_callback"""
        with self._state.condition:
            # callbacks is set to None upon termination, so a None list
            # means the callback can no longer be registered.
            if self._state.callbacks is None:
                return False
            else:
                self._state.callbacks.append(callback)
                return True

    def __iter__(self):
        return self

    def next(self):
        return self._next()

    def __next__(self):
        return self._next()

    def _next(self):
        # Subclasses supply the blocking/non-blocking iteration strategy.
        raise NotImplementedError()

    def debug_error_string(self) -> Optional[str]:
        raise NotImplementedError()

    def _repr(self) -> str:
        return _rpc_state_string(self.__class__.__name__, self._state)

    def __repr__(self) -> str:
        return self._repr()

    def __str__(self) -> str:
        return self._repr()

    def __del__(self) -> None:
        # Ensure core resources are released if the application dropped the
        # rendezvous while the RPC was still in flight.
        with self._state.condition:
            if self._state.code is None:
                self._state.code = grpc.StatusCode.CANCELLED
                self._state.details = "Cancelled upon garbage collection!"
                self._state.cancelled = True
                self._call.cancel(
                    _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[self._state.code],
                    self._state.details,
                )
                self._state.condition.notify_all()
571
+
572
+
573
+ class _SingleThreadedRendezvous(
574
+ _Rendezvous, grpc.Call, grpc.Future
575
+ ): # pylint: disable=too-many-ancestors
576
+ """An RPC iterator operating entirely on a single thread.
577
+
578
+ The __next__ method of _SingleThreadedRendezvous does not depend on the
579
+ existence of any other thread, including the "channel spin thread".
580
+ However, this means that its interface is entirely synchronous. So this
581
+ class cannot completely fulfill the grpc.Future interface. The result,
582
+ exception, and traceback methods will never block and will instead raise
583
+ an exception if calling the method would result in blocking.
584
+
585
+ This means that these methods are safe to call from add_done_callback
586
+ handlers.
587
+ """
588
+
589
+ _state: _RPCState
590
+
591
+ def _is_complete(self) -> bool:
592
+ return self._state.code is not None
593
+
594
+ def cancelled(self) -> bool:
595
+ with self._state.condition:
596
+ return self._state.cancelled
597
+
598
+ def running(self) -> bool:
599
+ with self._state.condition:
600
+ return self._state.code is None
601
+
602
+ def done(self) -> bool:
603
+ with self._state.condition:
604
+ return self._state.code is not None
605
+
606
+ def result(self, timeout: Optional[float] = None) -> Any:
607
+ """Returns the result of the computation or raises its exception.
608
+
609
+ This method will never block. Instead, it will raise an exception
610
+ if calling this method would otherwise result in blocking.
611
+
612
+ Since this method will never block, any `timeout` argument passed will
613
+ be ignored.
614
+ """
615
+ del timeout
616
+ with self._state.condition:
617
+ if not self._is_complete():
618
+ raise grpc.experimental.UsageError(
619
+ "_SingleThreadedRendezvous only supports result() when the"
620
+ " RPC is complete."
621
+ )
622
+ if self._state.code is grpc.StatusCode.OK:
623
+ return self._state.response
624
+ elif self._state.cancelled:
625
+ raise grpc.FutureCancelledError()
626
+ else:
627
+ raise self
628
+
629
+ def exception(self, timeout: Optional[float] = None) -> Optional[Exception]:
630
+ """Return the exception raised by the computation.
631
+
632
+ This method will never block. Instead, it will raise an exception
633
+ if calling this method would otherwise result in blocking.
634
+
635
+ Since this method will never block, any `timeout` argument passed will
636
+ be ignored.
637
+ """
638
+ del timeout
639
+ with self._state.condition:
640
+ if not self._is_complete():
641
+ raise grpc.experimental.UsageError(
642
+ "_SingleThreadedRendezvous only supports exception() when"
643
+ " the RPC is complete."
644
+ )
645
+ if self._state.code is grpc.StatusCode.OK:
646
+ return None
647
+ elif self._state.cancelled:
648
+ raise grpc.FutureCancelledError()
649
+ else:
650
+ return self
651
+
652
+ def traceback(
653
+ self, timeout: Optional[float] = None
654
+ ) -> Optional[types.TracebackType]:
655
+ """Access the traceback of the exception raised by the computation.
656
+
657
+ This method will never block. Instead, it will raise an exception
658
+ if calling this method would otherwise result in blocking.
659
+
660
+ Since this method will never block, any `timeout` argument passed will
661
+ be ignored.
662
+ """
663
+ del timeout
664
+ with self._state.condition:
665
+ if not self._is_complete():
666
+ raise grpc.experimental.UsageError(
667
+ "_SingleThreadedRendezvous only supports traceback() when"
668
+ " the RPC is complete."
669
+ )
670
+ if self._state.code is grpc.StatusCode.OK:
671
+ return None
672
+ elif self._state.cancelled:
673
+ raise grpc.FutureCancelledError()
674
+ else:
675
+ try:
676
+ raise self
677
+ except grpc.RpcError:
678
+ return sys.exc_info()[2]
679
+
680
+ def add_done_callback(self, fn: Callable[[grpc.Future], None]) -> None:
681
+ with self._state.condition:
682
+ if self._state.code is None:
683
+ self._state.callbacks.append(functools.partial(fn, self))
684
+ return
685
+
686
+ fn(self)
687
+
688
+ def initial_metadata(self) -> Optional[MetadataType]:
689
+ """See grpc.Call.initial_metadata"""
690
+ with self._state.condition:
691
+ # NOTE(gnossen): Based on our initial call batch, we are guaranteed
692
+ # to receive initial metadata before any messages.
693
+ while self._state.initial_metadata is None:
694
+ self._consume_next_event()
695
+ return self._state.initial_metadata
696
+
697
+ def trailing_metadata(self) -> Optional[MetadataType]:
698
+ """See grpc.Call.trailing_metadata"""
699
+ with self._state.condition:
700
+ if self._state.trailing_metadata is None:
701
+ raise grpc.experimental.UsageError(
702
+ "Cannot get trailing metadata until RPC is completed."
703
+ )
704
+ return self._state.trailing_metadata
705
+
706
+ def code(self) -> Optional[grpc.StatusCode]:
707
+ """See grpc.Call.code"""
708
+ with self._state.condition:
709
+ if self._state.code is None:
710
+ raise grpc.experimental.UsageError(
711
+ "Cannot get code until RPC is completed."
712
+ )
713
+ return self._state.code
714
+
715
+ def details(self) -> Optional[str]:
716
+ """See grpc.Call.details"""
717
+ with self._state.condition:
718
+ if self._state.details is None:
719
+ raise grpc.experimental.UsageError(
720
+ "Cannot get details until RPC is completed."
721
+ )
722
+ return _common.decode(self._state.details)
723
+
724
    def _consume_next_event(self) -> Optional[cygrpc.BaseEvent]:
        """Blocks on the completion queue for one event and processes it.

        Runs any callbacks produced by the event while still holding the
        state condition, then returns the raw event for inspection.
        """
        event = self._call.next_event()
        with self._state.condition:
            callbacks = _handle_event(
                event, self._state, self._response_deserializer
            )
            for callback in callbacks:
                # NOTE(gnossen): We intentionally allow exceptions to bubble up
                # to the user when running on a single thread.
                callback()
        return event
735
+
736
    def _next_response(self) -> Any:
        """Drains completion-queue events until a response message arrives.

        Raises StopIteration when the RPC completed with OK and no further
        messages remain, or raises this rendezvous (an RpcError) on a
        non-OK terminal status.
        """
        while True:
            self._consume_next_event()
            with self._state.condition:
                if self._state.response is not None:
                    response = self._state.response
                    # Clear the slot so the next iteration waits for a
                    # fresh message rather than re-delivering this one.
                    self._state.response = None
                    return response
                elif (
                    cygrpc.OperationType.receive_message not in self._state.due
                ):
                    # No receive is outstanding: the stream has ended.
                    if self._state.code is grpc.StatusCode.OK:
                        raise StopIteration()
                    elif self._state.code is not None:
                        raise self
751
+
752
    def _next(self) -> Any:
        """Enqueues a single RECV_MESSAGE operation and waits for its result.

        Raises StopIteration if the RPC already completed with OK, or this
        rendezvous (an RpcError) on any other terminal status.
        """
        with self._state.condition:
            if self._state.code is None:
                # We tentatively add the operation as expected and remove
                # it if the enqueue operation fails. This allows us to guarantee that
                # if an event has been submitted to the core completion queue,
                # it is in `due`. If we waited until after a successful
                # enqueue operation then a signal could interrupt this
                # thread between the enqueue operation and the addition of the
                # operation to `due`. This would cause an exception on the
                # channel spin thread when the operation completes and no
                # corresponding operation would be present in state.due.
                # Note that, since `condition` is held through this block, there is
                # no data race on `due`.
                self._state.due.add(cygrpc.OperationType.receive_message)
                operating = self._call.operate(
                    (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),), None
                )
                if not operating:
                    self._state.due.remove(cygrpc.OperationType.receive_message)
            elif self._state.code is grpc.StatusCode.OK:
                raise StopIteration()
            else:
                raise self
        return self._next_response()
777
+
778
+ def debug_error_string(self) -> Optional[str]:
779
+ with self._state.condition:
780
+ if self._state.debug_error_string is None:
781
+ raise grpc.experimental.UsageError(
782
+ "Cannot get debug error string until RPC is completed."
783
+ )
784
+ return _common.decode(self._state.debug_error_string)
785
+
786
+
787
class _MultiThreadedRendezvous(
    _Rendezvous, grpc.Call, grpc.Future
):  # pylint: disable=too-many-ancestors
    """An RPC iterator that depends on a channel spin thread.

    This iterator relies upon a per-channel thread running in the background,
    dequeueing events from the completion queue, and notifying threads waiting
    on the threading.Condition object in the _RPCState object.

    This extra thread allows _MultiThreadedRendezvous to fulfill the grpc.Future interface
    and to mediate a bidirection streaming RPC.
    """

    # Shared mutable RPC state; every access below is guarded by
    # _state.condition and updated by the channel spin thread.
    _state: _RPCState

    def initial_metadata(self) -> Optional[MetadataType]:
        """See grpc.Call.initial_metadata"""
        with self._state.condition:

            def _done():
                return self._state.initial_metadata is not None

            # Block until the spin thread delivers initial metadata.
            _common.wait(self._state.condition.wait, _done)
            return self._state.initial_metadata

    def trailing_metadata(self) -> Optional[MetadataType]:
        """See grpc.Call.trailing_metadata"""
        with self._state.condition:

            def _done():
                return self._state.trailing_metadata is not None

            _common.wait(self._state.condition.wait, _done)
            return self._state.trailing_metadata

    def code(self) -> Optional[grpc.StatusCode]:
        """See grpc.Call.code"""
        with self._state.condition:

            def _done():
                return self._state.code is not None

            _common.wait(self._state.condition.wait, _done)
            return self._state.code

    def details(self) -> Optional[str]:
        """See grpc.Call.details"""
        with self._state.condition:

            def _done():
                return self._state.details is not None

            _common.wait(self._state.condition.wait, _done)
            return _common.decode(self._state.details)

    def debug_error_string(self) -> Optional[str]:
        """Blocks until termination, then returns the debug error string."""
        with self._state.condition:

            def _done():
                return self._state.debug_error_string is not None

            _common.wait(self._state.condition.wait, _done)
            return _common.decode(self._state.debug_error_string)

    def cancelled(self) -> bool:
        """See grpc.Future.cancelled; non-blocking."""
        with self._state.condition:
            return self._state.cancelled

    def running(self) -> bool:
        """See grpc.Future.running; True while no terminal code is set."""
        with self._state.condition:
            return self._state.code is None

    def done(self) -> bool:
        """See grpc.Future.done; True once a terminal code is set."""
        with self._state.condition:
            return self._state.code is not None

    def _is_complete(self) -> bool:
        # Predicate for _common.wait; caller must hold _state.condition.
        return self._state.code is not None

    def result(self, timeout: Optional[float] = None) -> Any:
        """Returns the result of the computation or raises its exception.

        See grpc.Future.result for the full API contract.
        """
        with self._state.condition:
            timed_out = _common.wait(
                self._state.condition.wait, self._is_complete, timeout=timeout
            )
            if timed_out:
                raise grpc.FutureTimeoutError()
            else:
                if self._state.code is grpc.StatusCode.OK:
                    return self._state.response
                elif self._state.cancelled:
                    raise grpc.FutureCancelledError()
                else:
                    # This object doubles as the RpcError for the failed call.
                    raise self

    def exception(self, timeout: Optional[float] = None) -> Optional[Exception]:
        """Return the exception raised by the computation.

        See grpc.Future.exception for the full API contract.
        """
        with self._state.condition:
            timed_out = _common.wait(
                self._state.condition.wait, self._is_complete, timeout=timeout
            )
            if timed_out:
                raise grpc.FutureTimeoutError()
            else:
                if self._state.code is grpc.StatusCode.OK:
                    return None
                elif self._state.cancelled:
                    raise grpc.FutureCancelledError()
                else:
                    return self

    def traceback(
        self, timeout: Optional[float] = None
    ) -> Optional[types.TracebackType]:
        """Access the traceback of the exception raised by the computation.

        See grpc.future.traceback for the full API contract.
        """
        with self._state.condition:
            timed_out = _common.wait(
                self._state.condition.wait, self._is_complete, timeout=timeout
            )
            if timed_out:
                raise grpc.FutureTimeoutError()
            else:
                if self._state.code is grpc.StatusCode.OK:
                    return None
                elif self._state.cancelled:
                    raise grpc.FutureCancelledError()
                else:
                    # Raise and immediately catch to materialize a traceback.
                    try:
                        raise self
                    except grpc.RpcError:
                        return sys.exc_info()[2]

    def add_done_callback(self, fn: Callable[[grpc.Future], None]) -> None:
        """See grpc.Future.add_done_callback; runs fn now if already done."""
        with self._state.condition:
            if self._state.code is None:
                self._state.callbacks.append(functools.partial(fn, self))
                return

        # RPC already terminated: invoke outside the lock.
        fn(self)

    def _next(self) -> Any:
        """Requests the next response message and waits for the spin thread.

        Raises StopIteration on OK completion, or this rendezvous (an
        RpcError) on any other terminal status.
        """
        with self._state.condition:
            if self._state.code is None:
                event_handler = _event_handler(
                    self._state, self._response_deserializer
                )
                # Tentatively record the op in `due` before enqueueing; see
                # the single-threaded variant for the race this prevents.
                self._state.due.add(cygrpc.OperationType.receive_message)
                operating = self._call.operate(
                    (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),),
                    event_handler,
                )
                if not operating:
                    self._state.due.remove(cygrpc.OperationType.receive_message)
            elif self._state.code is grpc.StatusCode.OK:
                raise StopIteration()
            else:
                raise self

            def _response_ready():
                # Either a message arrived, or the receive completed with a
                # terminal status and nothing more is outstanding.
                return self._state.response is not None or (
                    cygrpc.OperationType.receive_message not in self._state.due
                    and self._state.code is not None
                )

            _common.wait(self._state.condition.wait, _response_ready)
            if self._state.response is not None:
                response = self._state.response
                self._state.response = None
                return response
            elif cygrpc.OperationType.receive_message not in self._state.due:
                if self._state.code is grpc.StatusCode.OK:
                    raise StopIteration()
                elif self._state.code is not None:
                    raise self
970
+
971
+
972
def _start_unary_request(
    request: Any,
    timeout: Optional[float],
    request_serializer: SerializingFunction,
) -> Tuple[Optional[float], Optional[bytes], Optional[grpc.RpcError]]:
    """Serializes a unary request and computes its absolute deadline.

    Returns a (deadline, serialized_request, error) triple in which exactly
    one of serialized_request and error is None.
    """
    deadline = _deadline(timeout)
    serialized_request = _common.serialize(request, request_serializer)
    if serialized_request is not None:
        return deadline, serialized_request, None
    # Serialization failed: surface an INTERNAL error instead of a request.
    state = _RPCState(
        (),
        (),
        (),
        grpc.StatusCode.INTERNAL,
        "Exception serializing request!",
    )
    return deadline, None, _InactiveRpcError(state)
991
+
992
+
993
def _end_unary_response_blocking(
    state: _RPCState,
    call: cygrpc.SegregatedCall,
    with_call: bool,
    deadline: Optional[float],
) -> Union[ResponseType, Tuple[ResponseType, grpc.Call]]:
    """Converts a terminal RPC state into a return value or raised error."""
    if state.code is not grpc.StatusCode.OK:
        raise _InactiveRpcError(state)  # pytype: disable=not-instantiable
    if not with_call:
        return state.response
    # Callers of with_call receive the response paired with a grpc.Call.
    rendezvous = _MultiThreadedRendezvous(state, call, None, deadline)
    return state.response, rendezvous
1007
+
1008
+
1009
def _stream_unary_invocation_operations(
    metadata: Optional[MetadataType], initial_metadata_flags: int
) -> Sequence[Sequence[cygrpc.Operation]]:
    """Builds the two operation batches used to invoke a stream-unary RPC."""
    invocation_batch = (
        cygrpc.SendInitialMetadataOperation(
            metadata, initial_metadata_flags
        ),
        cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),
        cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
    )
    metadata_batch = (cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),)
    return (invocation_batch, metadata_batch)
1022
+
1023
+
1024
def _stream_unary_invocation_operations_and_tags(
    metadata: Optional[MetadataType], initial_metadata_flags: int
) -> Sequence[Tuple[Sequence[cygrpc.Operation], Optional[UserTag]]]:
    """Pairs each stream-unary invocation batch with a null completion tag."""
    batches = _stream_unary_invocation_operations(
        metadata, initial_metadata_flags
    )
    return tuple((batch, None) for batch in batches)
1036
+
1037
+
1038
def _determine_deadline(user_deadline: Optional[float]) -> Optional[float]:
    """Combines the caller's deadline with any propagated parent deadline.

    Returns the earlier of the two when both are set, whichever one is set
    when only one is, or None when neither is.
    """
    parent_deadline = cygrpc.get_deadline_from_context()
    if parent_deadline is None:
        return user_deadline
    if user_deadline is None:
        return parent_deadline
    return min(parent_deadline, user_deadline)
1048
+
1049
+
1050
class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
    """Client-side invoker for unary-request/unary-response RPCs.

    Supports blocking (__call__), with_call, and future invocation styles.
    """

    _channel: cygrpc.Channel
    _managed_call: IntegratedCallFactory
    _method: bytes
    _target: bytes
    _request_serializer: Optional[SerializingFunction]
    _response_deserializer: Optional[DeserializingFunction]
    _context: Any
    _registered_call_handle: Optional[int]

    # NOTE(review): "_registered_call_handle" is assigned in __init__ but is
    # not listed here; presumably the grpc.UnaryUnaryMultiCallable base class
    # defines no __slots__, so instances still carry a __dict__ — confirm.
    __slots__ = [
        "_channel",
        "_managed_call",
        "_method",
        "_target",
        "_request_serializer",
        "_response_deserializer",
        "_context",
    ]

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        channel: cygrpc.Channel,
        managed_call: IntegratedCallFactory,
        method: bytes,
        target: bytes,
        request_serializer: Optional[SerializingFunction],
        response_deserializer: Optional[DeserializingFunction],
        _registered_call_handle: Optional[int],
    ):
        self._channel = channel
        self._managed_call = managed_call
        self._method = method
        self._target = target
        self._request_serializer = request_serializer
        self._response_deserializer = response_deserializer
        # Census context is captured once per multicallable, not per call.
        self._context = cygrpc.build_census_context()
        self._registered_call_handle = _registered_call_handle

    def _prepare(
        self,
        request: Any,
        timeout: Optional[float],
        metadata: Optional[MetadataType],
        wait_for_ready: Optional[bool],
        compression: Optional[grpc.Compression],
    ) -> Tuple[
        Optional[_RPCState],
        Optional[Sequence[cygrpc.Operation]],
        Optional[float],
        Optional[grpc.RpcError],
    ]:
        """Serializes the request and assembles the unary-unary op batch.

        Returns (state, operations, deadline, error); on serialization
        failure the first three are None and error is set.
        """
        deadline, serialized_request, rendezvous = _start_unary_request(
            request, timeout, self._request_serializer
        )
        initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
            wait_for_ready
        )
        augmented_metadata = _compression.augment_metadata(
            metadata, compression
        )
        if serialized_request is None:
            return None, None, None, rendezvous
        else:
            state = _RPCState(_UNARY_UNARY_INITIAL_DUE, None, None, None, None)
            # Single batch: send everything and receive everything in one go.
            operations = (
                cygrpc.SendInitialMetadataOperation(
                    augmented_metadata, initial_metadata_flags
                ),
                cygrpc.SendMessageOperation(serialized_request, _EMPTY_FLAGS),
                cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS),
                cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),
                cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),
                cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
            )
            return state, operations, deadline, None

    def _blocking(
        self,
        request: Any,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> Tuple[_RPCState, cygrpc.SegregatedCall]:
        """Runs the RPC on a segregated call, blocking for its single event."""
        state, operations, deadline, rendezvous = self._prepare(
            request, timeout, metadata, wait_for_ready, compression
        )
        if state is None:
            raise rendezvous  # pylint: disable-msg=raising-bad-type
        else:
            state.rpc_start_time = time.perf_counter()
            state.method = _common.decode(self._method)
            state.target = _common.decode(self._target)
            call = self._channel.segregated_call(
                cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
                self._method,
                None,
                _determine_deadline(deadline),
                metadata,
                None if credentials is None else credentials._credentials,
                (
                    (
                        operations,
                        None,
                    ),
                ),
                self._context,
                self._registered_call_handle,
            )
            # The single combined batch yields exactly one completion event.
            event = call.next_event()
            _handle_event(event, state, self._response_deserializer)
            return state, call

    def __call__(
        self,
        request: Any,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> Any:
        """Blocking invocation; returns the response message only."""
        (
            state,
            call,
        ) = self._blocking(
            request, timeout, metadata, credentials, wait_for_ready, compression
        )
        return _end_unary_response_blocking(state, call, False, None)

    def with_call(
        self,
        request: Any,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> Tuple[Any, grpc.Call]:
        """Blocking invocation; returns (response, grpc.Call)."""
        (
            state,
            call,
        ) = self._blocking(
            request, timeout, metadata, credentials, wait_for_ready, compression
        )
        return _end_unary_response_blocking(state, call, True, None)

    def future(
        self,
        request: Any,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _MultiThreadedRendezvous:
        """Asynchronous invocation via the channel's managed (spin-thread) call."""
        state, operations, deadline, rendezvous = self._prepare(
            request, timeout, metadata, wait_for_ready, compression
        )
        if state is None:
            raise rendezvous  # pylint: disable-msg=raising-bad-type
        else:
            event_handler = _event_handler(state, self._response_deserializer)
            state.rpc_start_time = time.perf_counter()
            state.method = _common.decode(self._method)
            state.target = _common.decode(self._target)
            call = self._managed_call(
                cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
                self._method,
                None,
                deadline,
                metadata,
                None if credentials is None else credentials._credentials,
                (operations,),
                event_handler,
                self._context,
                self._registered_call_handle,
            )
            return _MultiThreadedRendezvous(
                state, call, self._response_deserializer, deadline
            )
1234
+
1235
+
1236
class _SingleThreadedUnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
    """Unary-stream invoker that spins the completion queue on the caller's
    thread instead of relying on a per-channel background thread."""

    _channel: cygrpc.Channel
    _method: bytes
    _target: bytes
    _request_serializer: Optional[SerializingFunction]
    _response_deserializer: Optional[DeserializingFunction]
    _context: Any
    _registered_call_handle: Optional[int]

    # NOTE(review): "_registered_call_handle" is set in __init__ but absent
    # here; presumably the base class defines no __slots__ so instances keep
    # a __dict__ — confirm.
    __slots__ = [
        "_channel",
        "_method",
        "_target",
        "_request_serializer",
        "_response_deserializer",
        "_context",
    ]

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        channel: cygrpc.Channel,
        method: bytes,
        target: bytes,
        request_serializer: SerializingFunction,
        response_deserializer: DeserializingFunction,
        _registered_call_handle: Optional[int],
    ):
        self._channel = channel
        self._method = method
        self._target = target
        self._request_serializer = request_serializer
        self._response_deserializer = response_deserializer
        # Census context is captured once per multicallable, not per call.
        self._context = cygrpc.build_census_context()
        self._registered_call_handle = _registered_call_handle

    def __call__(  # pylint: disable=too-many-locals
        self,
        request: Any,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _SingleThreadedRendezvous:
        """Starts the RPC and returns a single-threaded response iterator."""
        deadline = _deadline(timeout)
        serialized_request = _common.serialize(
            request, self._request_serializer
        )
        if serialized_request is None:
            # Serialization failure is surfaced as an INTERNAL error.
            state = _RPCState(
                (),
                (),
                (),
                grpc.StatusCode.INTERNAL,
                "Exception serializing request!",
            )
            raise _InactiveRpcError(state)

        state = _RPCState(_UNARY_STREAM_INITIAL_DUE, None, None, None, None)
        call_credentials = (
            None if credentials is None else credentials._credentials
        )
        initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
            wait_for_ready
        )
        augmented_metadata = _compression.augment_metadata(
            metadata, compression
        )
        # Three batches: send the request, receive final status, and receive
        # initial metadata; response messages are requested lazily by the
        # rendezvous as the caller iterates.
        operations = (
            (
                cygrpc.SendInitialMetadataOperation(
                    augmented_metadata, initial_metadata_flags
                ),
                cygrpc.SendMessageOperation(serialized_request, _EMPTY_FLAGS),
                cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS),
            ),
            (cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),),
            (cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),),
        )
        operations_and_tags = tuple((ops, None) for ops in operations)
        state.rpc_start_time = time.perf_counter()
        state.method = _common.decode(self._method)
        state.target = _common.decode(self._target)
        call = self._channel.segregated_call(
            cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
            self._method,
            None,
            _determine_deadline(deadline),
            metadata,
            call_credentials,
            operations_and_tags,
            self._context,
            self._registered_call_handle,
        )
        return _SingleThreadedRendezvous(
            state, call, self._response_deserializer, deadline
        )
1334
+
1335
+
1336
class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
    """Client-side invoker for unary-request/stream-response RPCs, backed by
    the channel's managed (spin-thread) call machinery."""

    _channel: cygrpc.Channel
    _managed_call: IntegratedCallFactory
    _method: bytes
    _target: bytes
    _request_serializer: Optional[SerializingFunction]
    _response_deserializer: Optional[DeserializingFunction]
    _context: Any
    _registered_call_handle: Optional[int]

    # NOTE(review): "_registered_call_handle" is set in __init__ but absent
    # here; presumably the base class defines no __slots__ so instances keep
    # a __dict__ — confirm.
    __slots__ = [
        "_channel",
        "_managed_call",
        "_method",
        "_target",
        "_request_serializer",
        "_response_deserializer",
        "_context",
    ]

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        channel: cygrpc.Channel,
        managed_call: IntegratedCallFactory,
        method: bytes,
        target: bytes,
        request_serializer: SerializingFunction,
        response_deserializer: DeserializingFunction,
        _registered_call_handle: Optional[int],
    ):
        self._channel = channel
        self._managed_call = managed_call
        self._method = method
        self._target = target
        self._request_serializer = request_serializer
        self._response_deserializer = response_deserializer
        # Census context is captured once per multicallable, not per call.
        self._context = cygrpc.build_census_context()
        self._registered_call_handle = _registered_call_handle

    def __call__(  # pylint: disable=too-many-locals
        self,
        request: Any,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _MultiThreadedRendezvous:
        """Starts the RPC and returns a spin-thread-backed response iterator."""
        deadline, serialized_request, rendezvous = _start_unary_request(
            request, timeout, self._request_serializer
        )
        initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
            wait_for_ready
        )
        if serialized_request is None:
            raise rendezvous  # pylint: disable-msg=raising-bad-type
        else:
            augmented_metadata = _compression.augment_metadata(
                metadata, compression
            )
            state = _RPCState(_UNARY_STREAM_INITIAL_DUE, None, None, None, None)
            # Two batches: send request + receive status, and receive
            # initial metadata; messages are requested by the rendezvous.
            operations = (
                (
                    cygrpc.SendInitialMetadataOperation(
                        augmented_metadata, initial_metadata_flags
                    ),
                    cygrpc.SendMessageOperation(
                        serialized_request, _EMPTY_FLAGS
                    ),
                    cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS),
                    cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
                ),
                (cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),),
            )
            state.rpc_start_time = time.perf_counter()
            state.method = _common.decode(self._method)
            state.target = _common.decode(self._target)
            call = self._managed_call(
                cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
                self._method,
                None,
                _determine_deadline(deadline),
                metadata,
                None if credentials is None else credentials._credentials,
                operations,
                _event_handler(state, self._response_deserializer),
                self._context,
                self._registered_call_handle,
            )
            return _MultiThreadedRendezvous(
                state, call, self._response_deserializer, deadline
            )
1429
+
1430
+
1431
class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
    """Client-side invoker for stream-request/unary-response RPCs.

    Supports blocking (__call__), with_call, and future invocation styles.
    """

    _channel: cygrpc.Channel
    _managed_call: IntegratedCallFactory
    _method: bytes
    _target: bytes
    _request_serializer: Optional[SerializingFunction]
    _response_deserializer: Optional[DeserializingFunction]
    _context: Any
    _registered_call_handle: Optional[int]

    # NOTE(review): "_registered_call_handle" is set in __init__ but absent
    # here; presumably the base class defines no __slots__ so instances keep
    # a __dict__ — confirm.
    __slots__ = [
        "_channel",
        "_managed_call",
        "_method",
        "_target",
        "_request_serializer",
        "_response_deserializer",
        "_context",
    ]

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        channel: cygrpc.Channel,
        managed_call: IntegratedCallFactory,
        method: bytes,
        target: bytes,
        request_serializer: Optional[SerializingFunction],
        response_deserializer: Optional[DeserializingFunction],
        _registered_call_handle: Optional[int],
    ):
        self._channel = channel
        self._managed_call = managed_call
        self._method = method
        self._target = target
        self._request_serializer = request_serializer
        self._response_deserializer = response_deserializer
        # Census context is captured once per multicallable, not per call.
        self._context = cygrpc.build_census_context()
        self._registered_call_handle = _registered_call_handle

    def _blocking(
        self,
        request_iterator: Iterator,
        timeout: Optional[float],
        metadata: Optional[MetadataType],
        credentials: Optional[grpc.CallCredentials],
        wait_for_ready: Optional[bool],
        compression: Optional[grpc.Compression],
    ) -> Tuple[_RPCState, cygrpc.SegregatedCall]:
        """Starts the RPC, streams the requests, and spins until completion."""
        deadline = _deadline(timeout)
        state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None)
        initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
            wait_for_ready
        )
        augmented_metadata = _compression.augment_metadata(
            metadata, compression
        )
        state.rpc_start_time = time.perf_counter()
        state.method = _common.decode(self._method)
        state.target = _common.decode(self._target)
        call = self._channel.segregated_call(
            cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
            self._method,
            None,
            _determine_deadline(deadline),
            augmented_metadata,
            None if credentials is None else credentials._credentials,
            _stream_unary_invocation_operations_and_tags(
                augmented_metadata, initial_metadata_flags
            ),
            self._context,
            self._registered_call_handle,
        )
        # Requests are pumped on a separate consumer; this thread drains the
        # completion queue until every outstanding operation has finished.
        _consume_request_iterator(
            request_iterator, state, call, self._request_serializer, None
        )
        while True:
            event = call.next_event()
            with state.condition:
                _handle_event(event, state, self._response_deserializer)
                state.condition.notify_all()
                if not state.due:
                    break
        return state, call

    def __call__(
        self,
        request_iterator: Iterator,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> Any:
        """Blocking invocation; returns the response message only."""
        (
            state,
            call,
        ) = self._blocking(
            request_iterator,
            timeout,
            metadata,
            credentials,
            wait_for_ready,
            compression,
        )
        return _end_unary_response_blocking(state, call, False, None)

    def with_call(
        self,
        request_iterator: Iterator,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> Tuple[Any, grpc.Call]:
        """Blocking invocation; returns (response, grpc.Call)."""
        (
            state,
            call,
        ) = self._blocking(
            request_iterator,
            timeout,
            metadata,
            credentials,
            wait_for_ready,
            compression,
        )
        return _end_unary_response_blocking(state, call, True, None)

    def future(
        self,
        request_iterator: Iterator,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _MultiThreadedRendezvous:
        """Asynchronous invocation via the channel's managed (spin-thread) call."""
        deadline = _deadline(timeout)
        state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None)
        event_handler = _event_handler(state, self._response_deserializer)
        initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
            wait_for_ready
        )
        augmented_metadata = _compression.augment_metadata(
            metadata, compression
        )
        state.rpc_start_time = time.perf_counter()
        state.method = _common.decode(self._method)
        state.target = _common.decode(self._target)
        call = self._managed_call(
            cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
            self._method,
            None,
            deadline,
            augmented_metadata,
            None if credentials is None else credentials._credentials,
            _stream_unary_invocation_operations(
                metadata, initial_metadata_flags
            ),
            event_handler,
            self._context,
            self._registered_call_handle,
        )
        _consume_request_iterator(
            request_iterator,
            state,
            call,
            self._request_serializer,
            event_handler,
        )
        return _MultiThreadedRendezvous(
            state, call, self._response_deserializer, deadline
        )
1605
+
1606
+
1607
class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable):
    """Client-side invoker for bidirectional streaming RPCs, backed by the
    channel's managed (spin-thread) call machinery."""

    _channel: cygrpc.Channel
    _managed_call: IntegratedCallFactory
    _method: bytes
    _target: bytes
    _request_serializer: Optional[SerializingFunction]
    _response_deserializer: Optional[DeserializingFunction]
    _context: Any
    _registered_call_handle: Optional[int]

    # NOTE(review): "_registered_call_handle" is set in __init__ but absent
    # here; presumably the base class defines no __slots__ so instances keep
    # a __dict__ — confirm.
    __slots__ = [
        "_channel",
        "_managed_call",
        "_method",
        "_target",
        "_request_serializer",
        "_response_deserializer",
        "_context",
    ]

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        channel: cygrpc.Channel,
        managed_call: IntegratedCallFactory,
        method: bytes,
        target: bytes,
        request_serializer: Optional[SerializingFunction],
        response_deserializer: Optional[DeserializingFunction],
        _registered_call_handle: Optional[int],
    ):
        self._channel = channel
        self._managed_call = managed_call
        self._method = method
        self._target = target
        self._request_serializer = request_serializer
        self._response_deserializer = response_deserializer
        # Census context is captured once per multicallable, not per call.
        self._context = cygrpc.build_census_context()
        self._registered_call_handle = _registered_call_handle

    def __call__(
        self,
        request_iterator: Iterator,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _MultiThreadedRendezvous:
        """Starts the RPC, pumps requests in the background, and returns a
        spin-thread-backed response iterator."""
        deadline = _deadline(timeout)
        state = _RPCState(_STREAM_STREAM_INITIAL_DUE, None, None, None, None)
        initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
            wait_for_ready
        )
        augmented_metadata = _compression.augment_metadata(
            metadata, compression
        )
        # Two batches: send initial metadata + receive final status, and
        # receive initial metadata; messages flow via the rendezvous and the
        # request-consumer thread.
        operations = (
            (
                cygrpc.SendInitialMetadataOperation(
                    augmented_metadata, initial_metadata_flags
                ),
                cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
            ),
            (cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),),
        )
        event_handler = _event_handler(state, self._response_deserializer)
        state.rpc_start_time = time.perf_counter()
        state.method = _common.decode(self._method)
        state.target = _common.decode(self._target)
        call = self._managed_call(
            cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
            self._method,
            None,
            _determine_deadline(deadline),
            augmented_metadata,
            None if credentials is None else credentials._credentials,
            operations,
            event_handler,
            self._context,
            self._registered_call_handle,
        )
        _consume_request_iterator(
            request_iterator,
            state,
            call,
            self._request_serializer,
            event_handler,
        )
        return _MultiThreadedRendezvous(
            state, call, self._response_deserializer, deadline
        )
1699
+
1700
+
1701
class _InitialMetadataFlags(int):
    """Immutable bitfield of initial-metadata flags."""

    def __new__(cls, value: int = _EMPTY_FLAGS):
        # Mask off any bits that are not valid initial-metadata flags.
        masked = value & cygrpc.InitialMetadataFlags.used_mask
        return super(_InitialMetadataFlags, cls).__new__(cls, masked)

    def with_wait_for_ready(self, wait_for_ready: Optional[bool]) -> int:
        """Returns flags with wait_for_ready explicitly applied.

        When wait_for_ready is None the flags are returned unchanged;
        otherwise a new instance is built with the wait-for-ready bit set or
        cleared and the explicitly-set marker recorded.
        """
        if wait_for_ready is None:
            return self
        if wait_for_ready:
            updated = (
                self
                | cygrpc.InitialMetadataFlags.wait_for_ready
                | cygrpc.InitialMetadataFlags.wait_for_ready_explicitly_set
            )
        else:
            updated = (
                self & ~cygrpc.InitialMetadataFlags.wait_for_ready
                | cygrpc.InitialMetadataFlags.wait_for_ready_explicitly_set
            )
        return self.__class__(updated)
1722
+
1723
+
1724
class _ChannelCallState(object):
    """Bookkeeping for managed calls in flight on a channel.

    Tracks the number of outstanding managed calls so the channel spin
    thread knows when it may exit.
    """

    channel: cygrpc.Channel
    managed_calls: int
    threading: bool

    def __init__(self, channel: cygrpc.Channel):
        # Guards managed_calls across user threads and the spin thread.
        self.lock = threading.Lock()
        self.channel = channel
        self.managed_calls = 0
        self.threading = False

    def reset_postfork_child(self) -> None:
        # A forked child starts with no calls in flight.
        self.managed_calls = 0

    def __del__(self):
        try:
            self.channel.close(
                cygrpc.StatusCode.cancelled, "Channel deallocated!"
            )
        except (TypeError, AttributeError):
            # During interpreter shutdown the cygrpc machinery may already
            # be partially torn down; ignore those failures.
            pass
1745
+
1746
+
1747
def _run_channel_spin_thread(state: _ChannelCallState) -> None:
    """Starts the daemon thread that drains the channel's call events.

    The thread exits once every managed call has completed.
    """

    def channel_spin():
        while True:
            cygrpc.block_if_fork_in_progress(state)
            event = state.channel.next_call_event()
            if event.completion_type != cygrpc.CompletionType.queue_timeout:
                if event.tag(event):
                    # A call finished; exit once no managed calls remain.
                    with state.lock:
                        state.managed_calls -= 1
                        if state.managed_calls == 0:
                            return

    spin_thread = cygrpc.ForkManagedThread(target=channel_spin)
    spin_thread.setDaemon(True)
    spin_thread.start()
1764
+
1765
+
1766
def _channel_managed_call_management(state: _ChannelCallState):
    """Returns a factory for calls whose events are drained by the channel
    spin thread."""

    # pylint: disable=too-many-arguments
    def create(
        flags: int,
        method: bytes,
        host: Optional[str],
        deadline: Optional[float],
        metadata: Optional[MetadataType],
        credentials: Optional[cygrpc.CallCredentials],
        operations: Sequence[Sequence[cygrpc.Operation]],
        event_handler: UserTag,
        context: Any,
        _registered_call_handle: Optional[int],
    ) -> cygrpc.IntegratedCall:
        """Creates a cygrpc.IntegratedCall.

        Args:
          flags: An integer bitfield of call flags.
          method: The RPC method.
          host: A host string for the created call.
          deadline: A float to be the deadline of the created call or None if
            the call is to have an infinite deadline.
          metadata: The metadata for the call or None.
          credentials: A cygrpc.CallCredentials or None.
          operations: A sequence of sequences of cygrpc.Operations to be
            started on the call.
          event_handler: A behavior to call to handle the events resultant from
            the operations on the call.
          context: Context object for distributed tracing.
          _registered_call_handle: An int representing the call handle of the
            method, or None if the method is not registered.
        Returns:
          A cygrpc.IntegratedCall with which to conduct an RPC.
        """
        # Every operation set shares the same event handler as its tag.
        operations_and_tags = tuple(
            (
                operation,
                event_handler,
            )
            for operation in operations
        )
        with state.lock:
            call = state.channel.integrated_call(
                flags,
                method,
                host,
                deadline,
                metadata,
                credentials,
                operations_and_tags,
                context,
                _registered_call_handle,
            )
            # The first managed call starts the spin thread that drains
            # completion events for all subsequent managed calls.
            if state.managed_calls == 0:
                state.managed_calls = 1
                _run_channel_spin_thread(state)
            else:
                state.managed_calls += 1
        return call

    return create
1827
+
1828
+
1829
class _ChannelConnectivityState(object):
    """Tracks connectivity polling and subscriber callbacks for a channel."""

    lock: threading.RLock
    channel: grpc.Channel
    polling: bool
    connectivity: grpc.ChannelConnectivity
    try_to_connect: bool
    # TODO(xuanwn): Refactor this: https://github.com/grpc/grpc/issues/31704
    # Each entry is a two-item list: [callback, connectivity last delivered
    # to that callback (or None)].
    callbacks_and_connectivities: List[
        Sequence[
            Union[
                Callable[[grpc.ChannelConnectivity], None],
                Optional[grpc.ChannelConnectivity],
            ]
        ]
    ]
    delivering: bool

    def __init__(self, channel: grpc.Channel):
        # RLock: subscription and delivery paths may re-enter under the
        # same thread.
        self.lock = threading.RLock()
        self.channel = channel
        self.polling = False
        self.connectivity = None
        self.try_to_connect = False
        self.callbacks_and_connectivities = []
        self.delivering = False

    def reset_postfork_child(self) -> None:
        # A forked child starts with no polling thread, no known
        # connectivity, and no subscribers.
        self.polling = False
        self.connectivity = None
        self.try_to_connect = False
        self.callbacks_and_connectivities = []
        self.delivering = False
1861
+
1862
+
1863
+ def _deliveries(
1864
+ state: _ChannelConnectivityState,
1865
+ ) -> List[Callable[[grpc.ChannelConnectivity], None]]:
1866
+ callbacks_needing_update = []
1867
+ for callback_and_connectivity in state.callbacks_and_connectivities:
1868
+ (
1869
+ callback,
1870
+ callback_connectivity,
1871
+ ) = callback_and_connectivity
1872
+ if callback_connectivity is not state.connectivity:
1873
+ callbacks_needing_update.append(callback)
1874
+ callback_and_connectivity[1] = state.connectivity
1875
+ return callbacks_needing_update
1876
+
1877
+
1878
def _deliver(
    state: _ChannelConnectivityState,
    initial_connectivity: grpc.ChannelConnectivity,
    initial_callbacks: Sequence[Callable[[grpc.ChannelConnectivity], None]],
) -> None:
    """Delivery-thread loop: invokes callbacks until none are stale."""
    connectivity = initial_connectivity
    callbacks = initial_callbacks
    while True:
        # Callbacks run outside the lock so they may subscribe or
        # unsubscribe without deadlocking.
        for callback in callbacks:
            cygrpc.block_if_fork_in_progress(state)
            try:
                callback(connectivity)
            except Exception:  # pylint: disable=broad-except
                _LOGGER.exception(
                    _CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE
                )
        with state.lock:
            callbacks = _deliveries(state)
            if callbacks:
                connectivity = state.connectivity
            else:
                # No stale subscribers remain; the delivery thread exits.
                state.delivering = False
                return
1901
+
1902
+
1903
def _spawn_delivery(
    state: _ChannelConnectivityState,
    callbacks: Sequence[Callable[[grpc.ChannelConnectivity], None]],
) -> None:
    """Kicks off a daemon thread that delivers connectivity updates."""
    delivery_thread = cygrpc.ForkManagedThread(
        target=_deliver,
        args=(state, state.connectivity, callbacks),
    )
    delivery_thread.setDaemon(True)
    delivery_thread.start()
    state.delivering = True
1918
+
1919
+
1920
# NOTE(https://github.com/grpc/grpc/issues/3064): We'd rather not poll.
def _poll_connectivity(
    state: _ChannelConnectivityState,
    channel: grpc.Channel,
    initial_try_to_connect: bool,
) -> None:
    """Polling-thread loop watching connectivity and notifying subscribers.

    Exits when no subscribers remain and no try_to_connect request is
    pending.
    """
    try_to_connect = initial_try_to_connect
    connectivity = channel.check_connectivity_state(try_to_connect)
    with state.lock:
        state.connectivity = (
            _common.CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[
                connectivity
            ]
        )
        callbacks = tuple(
            callback for callback, _ in state.callbacks_and_connectivities
        )
        # Mark every subscriber as having seen the initial connectivity.
        for callback_and_connectivity in state.callbacks_and_connectivities:
            callback_and_connectivity[1] = state.connectivity
        if callbacks:
            _spawn_delivery(state, callbacks)
    while True:
        event = channel.watch_connectivity_state(
            connectivity, time.time() + 0.2
        )
        cygrpc.block_if_fork_in_progress(state)
        with state.lock:
            if (
                not state.callbacks_and_connectivities
                and not state.try_to_connect
            ):
                # Nobody is listening and nothing pending: stop polling.
                state.polling = False
                state.connectivity = None
                break
            try_to_connect = state.try_to_connect
            state.try_to_connect = False
        if event.success or try_to_connect:
            connectivity = channel.check_connectivity_state(try_to_connect)
            with state.lock:
                state.connectivity = (
                    _common.CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[
                        connectivity
                    ]
                )
                # Only spawn a new delivery thread when none is running.
                if not state.delivering:
                    callbacks = _deliveries(state)
                    if callbacks:
                        _spawn_delivery(state, callbacks)
1968
+
1969
+
1970
def _subscribe(
    state: _ChannelConnectivityState,
    callback: Callable[[grpc.ChannelConnectivity], None],
    try_to_connect: bool,
) -> None:
    """Registers a connectivity callback, starting polling as needed."""
    with state.lock:
        if not state.callbacks_and_connectivities and not state.polling:
            # First subscriber: start the polling thread.
            polling_thread = cygrpc.ForkManagedThread(
                target=_poll_connectivity,
                args=(state, state.channel, bool(try_to_connect)),
            )
            polling_thread.setDaemon(True)
            polling_thread.start()
            state.polling = True
            state.callbacks_and_connectivities.append([callback, None])
        elif not state.delivering and state.connectivity is not None:
            # Connectivity already known and no delivery in flight:
            # notify the new subscriber immediately.
            _spawn_delivery(state, (callback,))
            state.try_to_connect |= bool(try_to_connect)
            state.callbacks_and_connectivities.append(
                [callback, state.connectivity]
            )
        else:
            # A running delivery or the next poll will pick this
            # subscriber up.
            state.try_to_connect |= bool(try_to_connect)
            state.callbacks_and_connectivities.append([callback, None])
1994
+
1995
+
1996
+ def _unsubscribe(
1997
+ state: _ChannelConnectivityState,
1998
+ callback: Callable[[grpc.ChannelConnectivity], None],
1999
+ ) -> None:
2000
+ with state.lock:
2001
+ for index, (subscribed_callback, unused_connectivity) in enumerate(
2002
+ state.callbacks_and_connectivities
2003
+ ):
2004
+ if callback == subscribed_callback:
2005
+ state.callbacks_and_connectivities.pop(index)
2006
+ break
2007
+
2008
+
2009
def _augment_options(
    base_options: Sequence[ChannelArgumentType],
    compression: Optional[grpc.Compression],
) -> Sequence[ChannelArgumentType]:
    """Appends compression and user-agent channel args to the base options."""
    compression_option = _compression.create_channel_option(compression)
    user_agent_option = (
        (
            cygrpc.ChannelArgKey.primary_user_agent_string,
            _USER_AGENT,
        ),
    )
    return tuple(base_options) + compression_option + user_agent_option
2024
+
2025
+
2026
def _separate_channel_options(
    options: Sequence[ChannelArgumentType],
) -> Tuple[Sequence[ChannelArgumentType], Sequence[ChannelArgumentType]]:
    """Separates core channel options from Python channel options."""
    single_threaded_key = (
        grpc.experimental.ChannelOptions.SingleThreadedUnaryStream
    )
    python_options = []
    core_options = []
    for pair in options:
        bucket = (
            python_options if pair[0] == single_threaded_key else core_options
        )
        bucket.append(pair)
    return python_options, core_options
2041
+
2042
+
2043
class Channel(grpc.Channel):
    """A cygrpc.Channel-backed implementation of grpc.Channel."""

    _single_threaded_unary_stream: bool
    _channel: cygrpc.Channel
    _call_state: _ChannelCallState
    _connectivity_state: _ChannelConnectivityState
    _target: str
    _registered_call_handles: Dict[str, int]

    def __init__(
        self,
        target: str,
        options: Sequence[ChannelArgumentType],
        credentials: Optional[grpc.ChannelCredentials],
        compression: Optional[grpc.Compression],
    ):
        """Constructor.

        Args:
          target: The target to which to connect.
          options: Configuration options for the channel.
          credentials: A cygrpc.ChannelCredentials or None.
          compression: An optional value indicating the compression method to be
            used over the lifetime of the channel.
        """
        # Python-only options configure this wrapper; core options are
        # forwarded to the underlying cygrpc.Channel.
        python_options, core_options = _separate_channel_options(options)
        self._single_threaded_unary_stream = (
            _DEFAULT_SINGLE_THREADED_UNARY_STREAM
        )
        self._process_python_options(python_options)
        self._channel = cygrpc.Channel(
            _common.encode(target),
            _augment_options(core_options, compression),
            credentials,
        )
        self._target = target
        self._call_state = _ChannelCallState(self._channel)
        self._connectivity_state = _ChannelConnectivityState(self._channel)
        cygrpc.fork_register_channel(self)
        if cygrpc.g_gevent_activated:
            cygrpc.gevent_increment_channel_count()

    def _get_registered_call_handle(self, method: str) -> int:
        """
        Get the registered call handle for a method.

        This is a semi-private method. It is intended for use only by gRPC generated code.

        This method is not thread-safe.

        Args:
          method: Required, the method name for the RPC.

        Returns:
          The registered call handle pointer in the form of a Python Long.
        """
        return self._channel.get_registered_call_handle(_common.encode(method))

    def _process_python_options(
        self, python_options: Sequence[ChannelArgumentType]
    ) -> None:
        """Sets channel attributes according to python-only channel options."""
        for pair in python_options:
            if (
                pair[0]
                == grpc.experimental.ChannelOptions.SingleThreadedUnaryStream
            ):
                self._single_threaded_unary_stream = True

    def subscribe(
        self,
        callback: Callable[[grpc.ChannelConnectivity], None],
        try_to_connect: Optional[bool] = None,
    ) -> None:
        """Registers *callback* for connectivity-state updates."""
        _subscribe(self._connectivity_state, callback, try_to_connect)

    def unsubscribe(
        self, callback: Callable[[grpc.ChannelConnectivity], None]
    ) -> None:
        """Removes a callback previously registered with subscribe()."""
        _unsubscribe(self._connectivity_state, callback)

    # pylint: disable=arguments-differ
    def unary_unary(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> grpc.UnaryUnaryMultiCallable:
        """Creates a callable for a unary-request/unary-response method."""
        _registered_call_handle = None
        if _registered_method:
            _registered_call_handle = self._get_registered_call_handle(method)
        return _UnaryUnaryMultiCallable(
            self._channel,
            _channel_managed_call_management(self._call_state),
            _common.encode(method),
            _common.encode(self._target),
            request_serializer,
            response_deserializer,
            _registered_call_handle,
        )

    # pylint: disable=arguments-differ
    def unary_stream(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> grpc.UnaryStreamMultiCallable:
        """Creates a callable for a unary-request/stream-response method."""
        _registered_call_handle = None
        if _registered_method:
            _registered_call_handle = self._get_registered_call_handle(method)
        # NOTE(rbellevi): Benchmarks have shown that running a unary-stream RPC
        # on a single Python thread results in an appreciable speed-up. However,
        # due to slight differences in capability, the multi-threaded variant
        # remains the default.
        if self._single_threaded_unary_stream:
            return _SingleThreadedUnaryStreamMultiCallable(
                self._channel,
                _common.encode(method),
                _common.encode(self._target),
                request_serializer,
                response_deserializer,
                _registered_call_handle,
            )
        else:
            return _UnaryStreamMultiCallable(
                self._channel,
                _channel_managed_call_management(self._call_state),
                _common.encode(method),
                _common.encode(self._target),
                request_serializer,
                response_deserializer,
                _registered_call_handle,
            )

    # pylint: disable=arguments-differ
    def stream_unary(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> grpc.StreamUnaryMultiCallable:
        """Creates a callable for a stream-request/unary-response method."""
        _registered_call_handle = None
        if _registered_method:
            _registered_call_handle = self._get_registered_call_handle(method)
        return _StreamUnaryMultiCallable(
            self._channel,
            _channel_managed_call_management(self._call_state),
            _common.encode(method),
            _common.encode(self._target),
            request_serializer,
            response_deserializer,
            _registered_call_handle,
        )

    # pylint: disable=arguments-differ
    def stream_stream(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> grpc.StreamStreamMultiCallable:
        """Creates a callable for a stream-request/stream-response method."""
        _registered_call_handle = None
        if _registered_method:
            _registered_call_handle = self._get_registered_call_handle(method)
        return _StreamStreamMultiCallable(
            self._channel,
            _channel_managed_call_management(self._call_state),
            _common.encode(method),
            _common.encode(self._target),
            request_serializer,
            response_deserializer,
            _registered_call_handle,
        )

    def _unsubscribe_all(self) -> None:
        """Drops every connectivity subscription."""
        state = self._connectivity_state
        if state:
            with state.lock:
                del state.callbacks_and_connectivities[:]

    def _close(self) -> None:
        """Cancels outstanding calls and closes the underlying channel."""
        self._unsubscribe_all()
        self._channel.close(cygrpc.StatusCode.cancelled, "Channel closed!")
        cygrpc.fork_unregister_channel(self)
        if cygrpc.g_gevent_activated:
            cygrpc.gevent_decrement_channel_count()

    def _close_on_fork(self) -> None:
        """Fork handler: closes the channel in the child process."""
        self._unsubscribe_all()
        self._channel.close_on_fork(
            cygrpc.StatusCode.cancelled, "Channel closed due to fork"
        )

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close on context exit; never suppress the caller's exception.
        self._close()
        return False

    def close(self) -> None:
        self._close()

    def __del__(self):
        # TODO(https://github.com/grpc/grpc/issues/12531): Several releases
        # after 1.12 (1.16 or thereabouts?) add a "self._channel.close" call
        # here (or more likely, call self._close() here). We don't do this today
        # because many valid use cases today allow the channel to be deleted
        # immediately after stubs are created. After a sufficient period of time
        # has passed for all users to be trusted to freeze out to their channels
        # for as long as they are in use and to close them after using them,
        # then deletion of this grpc._channel.Channel instance can be made to
        # effect closure of the underlying cygrpc.Channel instance.
        try:
            self._unsubscribe_all()
        except:  # pylint: disable=bare-except
            # Exceptions in __del__ are ignored by Python anyway, but they can
            # keep spamming logs. Just silence them.
            pass
lib/python3.10/site-packages/grpc/_common.py ADDED
@@ -0,0 +1,183 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Shared implementation."""
15
+
16
+ import logging
17
+ import time
18
+ from typing import Any, AnyStr, Callable, Optional, Union
19
+
20
+ import grpc
21
+ from grpc._cython import cygrpc
22
+ from grpc._typing import DeserializingFunction
23
+ from grpc._typing import SerializingFunction
24
+
25
+ _LOGGER = logging.getLogger(__name__)
26
+
27
+ CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY = {
28
+ cygrpc.ConnectivityState.idle: grpc.ChannelConnectivity.IDLE,
29
+ cygrpc.ConnectivityState.connecting: grpc.ChannelConnectivity.CONNECTING,
30
+ cygrpc.ConnectivityState.ready: grpc.ChannelConnectivity.READY,
31
+ cygrpc.ConnectivityState.transient_failure: grpc.ChannelConnectivity.TRANSIENT_FAILURE,
32
+ cygrpc.ConnectivityState.shutdown: grpc.ChannelConnectivity.SHUTDOWN,
33
+ }
34
+
35
+ CYGRPC_STATUS_CODE_TO_STATUS_CODE = {
36
+ cygrpc.StatusCode.ok: grpc.StatusCode.OK,
37
+ cygrpc.StatusCode.cancelled: grpc.StatusCode.CANCELLED,
38
+ cygrpc.StatusCode.unknown: grpc.StatusCode.UNKNOWN,
39
+ cygrpc.StatusCode.invalid_argument: grpc.StatusCode.INVALID_ARGUMENT,
40
+ cygrpc.StatusCode.deadline_exceeded: grpc.StatusCode.DEADLINE_EXCEEDED,
41
+ cygrpc.StatusCode.not_found: grpc.StatusCode.NOT_FOUND,
42
+ cygrpc.StatusCode.already_exists: grpc.StatusCode.ALREADY_EXISTS,
43
+ cygrpc.StatusCode.permission_denied: grpc.StatusCode.PERMISSION_DENIED,
44
+ cygrpc.StatusCode.unauthenticated: grpc.StatusCode.UNAUTHENTICATED,
45
+ cygrpc.StatusCode.resource_exhausted: grpc.StatusCode.RESOURCE_EXHAUSTED,
46
+ cygrpc.StatusCode.failed_precondition: grpc.StatusCode.FAILED_PRECONDITION,
47
+ cygrpc.StatusCode.aborted: grpc.StatusCode.ABORTED,
48
+ cygrpc.StatusCode.out_of_range: grpc.StatusCode.OUT_OF_RANGE,
49
+ cygrpc.StatusCode.unimplemented: grpc.StatusCode.UNIMPLEMENTED,
50
+ cygrpc.StatusCode.internal: grpc.StatusCode.INTERNAL,
51
+ cygrpc.StatusCode.unavailable: grpc.StatusCode.UNAVAILABLE,
52
+ cygrpc.StatusCode.data_loss: grpc.StatusCode.DATA_LOSS,
53
+ }
54
+ STATUS_CODE_TO_CYGRPC_STATUS_CODE = {
55
+ grpc_code: cygrpc_code
56
+ for cygrpc_code, grpc_code in CYGRPC_STATUS_CODE_TO_STATUS_CODE.items()
57
+ }
58
+
59
+ MAXIMUM_WAIT_TIMEOUT = 0.1
60
+
61
+ _ERROR_MESSAGE_PORT_BINDING_FAILED = (
62
+ "Failed to bind to address %s; set "
63
+ "GRPC_VERBOSITY=debug environment variable to see detailed error message."
64
+ )
65
+
66
+
67
def encode(s: AnyStr) -> bytes:
    """Coerces *s* to bytes, UTF-8 encoding text and passing bytes through."""
    return s if isinstance(s, bytes) else s.encode("utf8")
72
+
73
+
74
def decode(b: AnyStr) -> str:
    """Coerces *b* to str, decoding bytes as UTF-8 with lossy replacement."""
    if not isinstance(b, bytes):
        return b
    return b.decode("utf-8", "replace")
78
+
79
+
80
def _transform(
    message: Any,
    transformer: Union[SerializingFunction, DeserializingFunction, None],
    exception_message: str,
) -> Any:
    """Applies *transformer* to *message*.

    Returns the message unchanged when no transformer is given; logs
    *exception_message* and returns None if the transformer raises.
    """
    if transformer is None:
        return message
    try:
        return transformer(message)
    except Exception:  # pylint: disable=broad-except
        _LOGGER.exception(exception_message)
        return None
93
+
94
+
95
def serialize(message: Any, serializer: Optional[SerializingFunction]) -> bytes:
    """Serializes *message*; on serializer failure logs and yields None."""
    return _transform(message, serializer, "Exception serializing message!")
97
+
98
+
99
def deserialize(
    serialized_message: bytes, deserializer: Optional[DeserializingFunction]
) -> Any:
    """Deserializes bytes; on deserializer failure logs and yields None."""
    return _transform(
        serialized_message, deserializer, "Exception deserializing message!"
    )
105
+
106
+
107
def fully_qualified_method(group: str, method: str) -> str:
    """Builds the canonical "/<group>/<method>" RPC path."""
    return f"/{group}/{method}"
109
+
110
+
111
def _wait_once(
    wait_fn: Callable[..., bool],
    timeout: float,
    spin_cb: Optional[Callable[[], None]],
):
    """Performs one bounded wait, then runs the spin callback, if any."""
    wait_fn(timeout=timeout)
    if spin_cb is not None:
        spin_cb()
119
+
120
+
121
def wait(
    wait_fn: Callable[..., bool],
    wait_complete_fn: Callable[[], bool],
    timeout: Optional[float] = None,
    spin_cb: Optional[Callable[[], None]] = None,
) -> bool:
    """Blocks waiting for an event without blocking the thread indefinitely.

    See https://github.com/grpc/grpc/issues/19464 for full context. CPython's
    `threading.Event.wait` and `threading.Condition.wait` methods, if invoked
    without a timeout kwarg, may block the calling thread indefinitely. If the
    call is made from the main thread, this means that signal handlers may not
    run for an arbitrarily long period of time.

    This wrapper calls the supplied wait function with an arbitrary short
    timeout to ensure that no signal handler has to wait longer than
    MAXIMUM_WAIT_TIMEOUT before executing.

    Args:
      wait_fn: A callable acceptable a single float-valued kwarg named
        `timeout`. This function is expected to be one of `threading.Event.wait`
        or `threading.Condition.wait`.
      wait_complete_fn: A callable taking no arguments and returning a bool.
        When this function returns true, it indicates that waiting should cease.
      timeout: An optional float-valued number of seconds after which the wait
        should cease.
      spin_cb: An optional Callable taking no arguments and returning nothing.
        This callback will be called on each iteration of the spin. This may be
        used for, e.g. work related to forking.

    Returns:
      True if a timeout was supplied and it was reached. False otherwise.
    """
    if timeout is not None:
        deadline = time.time() + timeout
        while not wait_complete_fn():
            remaining = min(deadline - time.time(), MAXIMUM_WAIT_TIMEOUT)
            if remaining < 0:
                return True
            _wait_once(wait_fn, remaining, spin_cb)
    else:
        # No overall deadline: spin in short bounded waits forever.
        while not wait_complete_fn():
            _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
    return False
165
+
166
+
167
def validate_port_binding_result(address: str, port: int) -> int:
    """Validates if the port binding succeed.

    If the port returned by Core is 0, the binding is failed. However, in that
    case, the Core API doesn't return a detailed failing reason. The best we
    can do is raising an exception to prevent further confusion.

    Args:
      address: The address string to be bound.
      port: An int returned by core
    """
    if port != 0:
        return port
    # Core surfaces no failure detail here; raising is the best we can do
    # to prevent further confusion.
    raise RuntimeError(_ERROR_MESSAGE_PORT_BINDING_FAILED % address)
lib/python3.10/site-packages/grpc/_compression.py ADDED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 The gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ from __future__ import annotations
16
+
17
+ from typing import Optional
18
+
19
+ import grpc
20
+ from grpc._cython import cygrpc
21
+ from grpc._typing import MetadataType
22
+
23
+ NoCompression = cygrpc.CompressionAlgorithm.none
24
+ Deflate = cygrpc.CompressionAlgorithm.deflate
25
+ Gzip = cygrpc.CompressionAlgorithm.gzip
26
+
27
+ _METADATA_STRING_MAPPING = {
28
+ NoCompression: "identity",
29
+ Deflate: "deflate",
30
+ Gzip: "gzip",
31
+ }
32
+
33
+
34
def _compression_algorithm_to_metadata_value(
    compression: grpc.Compression,
) -> str:
    """Maps a compression algorithm to its metadata string (e.g. "gzip")."""
    return _METADATA_STRING_MAPPING[compression]
38
+
39
+
40
def compression_algorithm_to_metadata(compression: grpc.Compression):
    """Builds the (key, value) metadata entry requesting *compression*."""
    return (
        cygrpc.GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY,
        _compression_algorithm_to_metadata_value(compression),
    )
45
+
46
+
47
def create_channel_option(compression: Optional[grpc.Compression]):
    """Returns channel args selecting the default compression algorithm.

    Yields an empty tuple when no compression override was requested.
    """
    if not compression:
        return ()
    return (
        (cygrpc.GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM, int(compression)),
    )
53
+
54
+
55
def augment_metadata(
    metadata: Optional[MetadataType], compression: Optional[grpc.Compression]
):
    """Appends a compression-request entry to the caller's metadata.

    Returns None when there is neither metadata nor compression to send.
    """
    if not metadata and not compression:
        return None
    combined = tuple(metadata) if metadata else ()
    if compression:
        combined += (compression_algorithm_to_metadata(compression),)
    return combined
65
+
66
+
67
+ __all__ = (
68
+ "NoCompression",
69
+ "Deflate",
70
+ "Gzip",
71
+ )
lib/python3.10/site-packages/grpc/_cython/__init__.py ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
lib/python3.10/site-packages/grpc/_cython/_credentials/roots.pem ADDED
The diff for this file is too large to render. See raw diff
 
lib/python3.10/site-packages/grpc/_cython/_cygrpc/__init__.py ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
lib/python3.10/site-packages/grpc/_grpcio_metadata.py ADDED
@@ -0,0 +1 @@
 
 
1
+ __version__ = """1.71.0"""
lib/python3.10/site-packages/grpc/_interceptor.py ADDED
@@ -0,0 +1,813 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2017 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Implementation of gRPC Python interceptors."""
15
+
16
+ import collections
17
+ import sys
18
+ import types
19
+ from typing import Any, Callable, Optional, Sequence, Tuple, Union
20
+
21
+ import grpc
22
+
23
+ from ._typing import DeserializingFunction
24
+ from ._typing import DoneCallbackType
25
+ from ._typing import MetadataType
26
+ from ._typing import RequestIterableType
27
+ from ._typing import SerializingFunction
28
+
29
+
30
+ class _ServicePipeline(object):
31
+ interceptors: Tuple[grpc.ServerInterceptor]
32
+
33
+ def __init__(self, interceptors: Sequence[grpc.ServerInterceptor]):
34
+ self.interceptors = tuple(interceptors)
35
+
36
+ def _continuation(self, thunk: Callable, index: int) -> Callable:
37
+ return lambda context: self._intercept_at(thunk, index, context)
38
+
39
+ def _intercept_at(
40
+ self, thunk: Callable, index: int, context: grpc.HandlerCallDetails
41
+ ) -> grpc.RpcMethodHandler:
42
+ if index < len(self.interceptors):
43
+ interceptor = self.interceptors[index]
44
+ thunk = self._continuation(thunk, index + 1)
45
+ return interceptor.intercept_service(thunk, context)
46
+ else:
47
+ return thunk(context)
48
+
49
+ def execute(
50
+ self, thunk: Callable, context: grpc.HandlerCallDetails
51
+ ) -> grpc.RpcMethodHandler:
52
+ return self._intercept_at(thunk, 0, context)
53
+
54
+
55
+ def service_pipeline(
56
+ interceptors: Optional[Sequence[grpc.ServerInterceptor]],
57
+ ) -> Optional[_ServicePipeline]:
58
+ return _ServicePipeline(interceptors) if interceptors else None
59
+
60
+
61
+ class _ClientCallDetails(
62
+ collections.namedtuple(
63
+ "_ClientCallDetails",
64
+ (
65
+ "method",
66
+ "timeout",
67
+ "metadata",
68
+ "credentials",
69
+ "wait_for_ready",
70
+ "compression",
71
+ ),
72
+ ),
73
+ grpc.ClientCallDetails,
74
+ ):
75
+ pass
76
+
77
+
78
+ def _unwrap_client_call_details(
79
+ call_details: grpc.ClientCallDetails,
80
+ default_details: grpc.ClientCallDetails,
81
+ ) -> Tuple[
82
+ str, float, MetadataType, grpc.CallCredentials, bool, grpc.Compression
83
+ ]:
84
+ try:
85
+ method = call_details.method # pytype: disable=attribute-error
86
+ except AttributeError:
87
+ method = default_details.method # pytype: disable=attribute-error
88
+
89
+ try:
90
+ timeout = call_details.timeout # pytype: disable=attribute-error
91
+ except AttributeError:
92
+ timeout = default_details.timeout # pytype: disable=attribute-error
93
+
94
+ try:
95
+ metadata = call_details.metadata # pytype: disable=attribute-error
96
+ except AttributeError:
97
+ metadata = default_details.metadata # pytype: disable=attribute-error
98
+
99
+ try:
100
+ credentials = (
101
+ call_details.credentials
102
+ ) # pytype: disable=attribute-error
103
+ except AttributeError:
104
+ credentials = (
105
+ default_details.credentials
106
+ ) # pytype: disable=attribute-error
107
+
108
+ try:
109
+ wait_for_ready = (
110
+ call_details.wait_for_ready
111
+ ) # pytype: disable=attribute-error
112
+ except AttributeError:
113
+ wait_for_ready = (
114
+ default_details.wait_for_ready
115
+ ) # pytype: disable=attribute-error
116
+
117
+ try:
118
+ compression = (
119
+ call_details.compression
120
+ ) # pytype: disable=attribute-error
121
+ except AttributeError:
122
+ compression = (
123
+ default_details.compression
124
+ ) # pytype: disable=attribute-error
125
+
126
+ return method, timeout, metadata, credentials, wait_for_ready, compression
127
+
128
+
129
+ class _FailureOutcome(
130
+ grpc.RpcError, grpc.Future, grpc.Call
131
+ ): # pylint: disable=too-many-ancestors
132
+ _exception: Exception
133
+ _traceback: types.TracebackType
134
+
135
+ def __init__(self, exception: Exception, traceback: types.TracebackType):
136
+ super(_FailureOutcome, self).__init__()
137
+ self._exception = exception
138
+ self._traceback = traceback
139
+
140
+ def initial_metadata(self) -> Optional[MetadataType]:
141
+ return None
142
+
143
+ def trailing_metadata(self) -> Optional[MetadataType]:
144
+ return None
145
+
146
+ def code(self) -> Optional[grpc.StatusCode]:
147
+ return grpc.StatusCode.INTERNAL
148
+
149
+ def details(self) -> Optional[str]:
150
+ return "Exception raised while intercepting the RPC"
151
+
152
+ def cancel(self) -> bool:
153
+ return False
154
+
155
+ def cancelled(self) -> bool:
156
+ return False
157
+
158
+ def is_active(self) -> bool:
159
+ return False
160
+
161
+ def time_remaining(self) -> Optional[float]:
162
+ return None
163
+
164
+ def running(self) -> bool:
165
+ return False
166
+
167
+ def done(self) -> bool:
168
+ return True
169
+
170
+ def result(self, ignored_timeout: Optional[float] = None):
171
+ raise self._exception
172
+
173
+ def exception(
174
+ self, ignored_timeout: Optional[float] = None
175
+ ) -> Optional[Exception]:
176
+ return self._exception
177
+
178
+ def traceback(
179
+ self, ignored_timeout: Optional[float] = None
180
+ ) -> Optional[types.TracebackType]:
181
+ return self._traceback
182
+
183
+ def add_callback(self, unused_callback) -> bool:
184
+ return False
185
+
186
+ def add_done_callback(self, fn: DoneCallbackType) -> None:
187
+ fn(self)
188
+
189
+ def __iter__(self):
190
+ return self
191
+
192
+ def __next__(self):
193
+ raise self._exception
194
+
195
+ def next(self):
196
+ return self.__next__()
197
+
198
+
199
+ class _UnaryOutcome(grpc.Call, grpc.Future):
200
+ _response: Any
201
+ _call: grpc.Call
202
+
203
+ def __init__(self, response: Any, call: grpc.Call):
204
+ self._response = response
205
+ self._call = call
206
+
207
+ def initial_metadata(self) -> Optional[MetadataType]:
208
+ return self._call.initial_metadata()
209
+
210
+ def trailing_metadata(self) -> Optional[MetadataType]:
211
+ return self._call.trailing_metadata()
212
+
213
+ def code(self) -> Optional[grpc.StatusCode]:
214
+ return self._call.code()
215
+
216
+ def details(self) -> Optional[str]:
217
+ return self._call.details()
218
+
219
+ def is_active(self) -> bool:
220
+ return self._call.is_active()
221
+
222
+ def time_remaining(self) -> Optional[float]:
223
+ return self._call.time_remaining()
224
+
225
+ def cancel(self) -> bool:
226
+ return self._call.cancel()
227
+
228
+ def add_callback(self, callback) -> bool:
229
+ return self._call.add_callback(callback)
230
+
231
+ def cancelled(self) -> bool:
232
+ return False
233
+
234
+ def running(self) -> bool:
235
+ return False
236
+
237
+ def done(self) -> bool:
238
+ return True
239
+
240
+ def result(self, ignored_timeout: Optional[float] = None):
241
+ return self._response
242
+
243
+ def exception(self, ignored_timeout: Optional[float] = None):
244
+ return None
245
+
246
+ def traceback(self, ignored_timeout: Optional[float] = None):
247
+ return None
248
+
249
+ def add_done_callback(self, fn: DoneCallbackType) -> None:
250
+ fn(self)
251
+
252
+
253
+ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
254
+ _thunk: Callable
255
+ _method: str
256
+ _interceptor: grpc.UnaryUnaryClientInterceptor
257
+
258
+ def __init__(
259
+ self,
260
+ thunk: Callable,
261
+ method: str,
262
+ interceptor: grpc.UnaryUnaryClientInterceptor,
263
+ ):
264
+ self._thunk = thunk
265
+ self._method = method
266
+ self._interceptor = interceptor
267
+
268
+ def __call__(
269
+ self,
270
+ request: Any,
271
+ timeout: Optional[float] = None,
272
+ metadata: Optional[MetadataType] = None,
273
+ credentials: Optional[grpc.CallCredentials] = None,
274
+ wait_for_ready: Optional[bool] = None,
275
+ compression: Optional[grpc.Compression] = None,
276
+ ) -> Any:
277
+ response, ignored_call = self._with_call(
278
+ request,
279
+ timeout=timeout,
280
+ metadata=metadata,
281
+ credentials=credentials,
282
+ wait_for_ready=wait_for_ready,
283
+ compression=compression,
284
+ )
285
+ return response
286
+
287
+ def _with_call(
288
+ self,
289
+ request: Any,
290
+ timeout: Optional[float] = None,
291
+ metadata: Optional[MetadataType] = None,
292
+ credentials: Optional[grpc.CallCredentials] = None,
293
+ wait_for_ready: Optional[bool] = None,
294
+ compression: Optional[grpc.Compression] = None,
295
+ ) -> Tuple[Any, grpc.Call]:
296
+ client_call_details = _ClientCallDetails(
297
+ self._method,
298
+ timeout,
299
+ metadata,
300
+ credentials,
301
+ wait_for_ready,
302
+ compression,
303
+ )
304
+
305
+ def continuation(new_details, request):
306
+ (
307
+ new_method,
308
+ new_timeout,
309
+ new_metadata,
310
+ new_credentials,
311
+ new_wait_for_ready,
312
+ new_compression,
313
+ ) = _unwrap_client_call_details(new_details, client_call_details)
314
+ try:
315
+ response, call = self._thunk(new_method).with_call(
316
+ request,
317
+ timeout=new_timeout,
318
+ metadata=new_metadata,
319
+ credentials=new_credentials,
320
+ wait_for_ready=new_wait_for_ready,
321
+ compression=new_compression,
322
+ )
323
+ return _UnaryOutcome(response, call)
324
+ except grpc.RpcError as rpc_error:
325
+ return rpc_error
326
+ except Exception as exception: # pylint:disable=broad-except
327
+ return _FailureOutcome(exception, sys.exc_info()[2])
328
+
329
+ call = self._interceptor.intercept_unary_unary(
330
+ continuation, client_call_details, request
331
+ )
332
+ return call.result(), call
333
+
334
+ def with_call(
335
+ self,
336
+ request: Any,
337
+ timeout: Optional[float] = None,
338
+ metadata: Optional[MetadataType] = None,
339
+ credentials: Optional[grpc.CallCredentials] = None,
340
+ wait_for_ready: Optional[bool] = None,
341
+ compression: Optional[grpc.Compression] = None,
342
+ ) -> Tuple[Any, grpc.Call]:
343
+ return self._with_call(
344
+ request,
345
+ timeout=timeout,
346
+ metadata=metadata,
347
+ credentials=credentials,
348
+ wait_for_ready=wait_for_ready,
349
+ compression=compression,
350
+ )
351
+
352
+ def future(
353
+ self,
354
+ request: Any,
355
+ timeout: Optional[float] = None,
356
+ metadata: Optional[MetadataType] = None,
357
+ credentials: Optional[grpc.CallCredentials] = None,
358
+ wait_for_ready: Optional[bool] = None,
359
+ compression: Optional[grpc.Compression] = None,
360
+ ) -> Any:
361
+ client_call_details = _ClientCallDetails(
362
+ self._method,
363
+ timeout,
364
+ metadata,
365
+ credentials,
366
+ wait_for_ready,
367
+ compression,
368
+ )
369
+
370
+ def continuation(new_details, request):
371
+ (
372
+ new_method,
373
+ new_timeout,
374
+ new_metadata,
375
+ new_credentials,
376
+ new_wait_for_ready,
377
+ new_compression,
378
+ ) = _unwrap_client_call_details(new_details, client_call_details)
379
+ return self._thunk(new_method).future(
380
+ request,
381
+ timeout=new_timeout,
382
+ metadata=new_metadata,
383
+ credentials=new_credentials,
384
+ wait_for_ready=new_wait_for_ready,
385
+ compression=new_compression,
386
+ )
387
+
388
+ try:
389
+ return self._interceptor.intercept_unary_unary(
390
+ continuation, client_call_details, request
391
+ )
392
+ except Exception as exception: # pylint:disable=broad-except
393
+ return _FailureOutcome(exception, sys.exc_info()[2])
394
+
395
+
396
+ class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
397
+ _thunk: Callable
398
+ _method: str
399
+ _interceptor: grpc.UnaryStreamClientInterceptor
400
+
401
+ def __init__(
402
+ self,
403
+ thunk: Callable,
404
+ method: str,
405
+ interceptor: grpc.UnaryStreamClientInterceptor,
406
+ ):
407
+ self._thunk = thunk
408
+ self._method = method
409
+ self._interceptor = interceptor
410
+
411
+ def __call__(
412
+ self,
413
+ request: Any,
414
+ timeout: Optional[float] = None,
415
+ metadata: Optional[MetadataType] = None,
416
+ credentials: Optional[grpc.CallCredentials] = None,
417
+ wait_for_ready: Optional[bool] = None,
418
+ compression: Optional[grpc.Compression] = None,
419
+ ):
420
+ client_call_details = _ClientCallDetails(
421
+ self._method,
422
+ timeout,
423
+ metadata,
424
+ credentials,
425
+ wait_for_ready,
426
+ compression,
427
+ )
428
+
429
+ def continuation(new_details, request):
430
+ (
431
+ new_method,
432
+ new_timeout,
433
+ new_metadata,
434
+ new_credentials,
435
+ new_wait_for_ready,
436
+ new_compression,
437
+ ) = _unwrap_client_call_details(new_details, client_call_details)
438
+ return self._thunk(new_method)(
439
+ request,
440
+ timeout=new_timeout,
441
+ metadata=new_metadata,
442
+ credentials=new_credentials,
443
+ wait_for_ready=new_wait_for_ready,
444
+ compression=new_compression,
445
+ )
446
+
447
+ try:
448
+ return self._interceptor.intercept_unary_stream(
449
+ continuation, client_call_details, request
450
+ )
451
+ except Exception as exception: # pylint:disable=broad-except
452
+ return _FailureOutcome(exception, sys.exc_info()[2])
453
+
454
+
455
+ class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
456
+ _thunk: Callable
457
+ _method: str
458
+ _interceptor: grpc.StreamUnaryClientInterceptor
459
+
460
+ def __init__(
461
+ self,
462
+ thunk: Callable,
463
+ method: str,
464
+ interceptor: grpc.StreamUnaryClientInterceptor,
465
+ ):
466
+ self._thunk = thunk
467
+ self._method = method
468
+ self._interceptor = interceptor
469
+
470
+ def __call__(
471
+ self,
472
+ request_iterator: RequestIterableType,
473
+ timeout: Optional[float] = None,
474
+ metadata: Optional[MetadataType] = None,
475
+ credentials: Optional[grpc.CallCredentials] = None,
476
+ wait_for_ready: Optional[bool] = None,
477
+ compression: Optional[grpc.Compression] = None,
478
+ ) -> Any:
479
+ response, ignored_call = self._with_call(
480
+ request_iterator,
481
+ timeout=timeout,
482
+ metadata=metadata,
483
+ credentials=credentials,
484
+ wait_for_ready=wait_for_ready,
485
+ compression=compression,
486
+ )
487
+ return response
488
+
489
+ def _with_call(
490
+ self,
491
+ request_iterator: RequestIterableType,
492
+ timeout: Optional[float] = None,
493
+ metadata: Optional[MetadataType] = None,
494
+ credentials: Optional[grpc.CallCredentials] = None,
495
+ wait_for_ready: Optional[bool] = None,
496
+ compression: Optional[grpc.Compression] = None,
497
+ ) -> Tuple[Any, grpc.Call]:
498
+ client_call_details = _ClientCallDetails(
499
+ self._method,
500
+ timeout,
501
+ metadata,
502
+ credentials,
503
+ wait_for_ready,
504
+ compression,
505
+ )
506
+
507
+ def continuation(new_details, request_iterator):
508
+ (
509
+ new_method,
510
+ new_timeout,
511
+ new_metadata,
512
+ new_credentials,
513
+ new_wait_for_ready,
514
+ new_compression,
515
+ ) = _unwrap_client_call_details(new_details, client_call_details)
516
+ try:
517
+ response, call = self._thunk(new_method).with_call(
518
+ request_iterator,
519
+ timeout=new_timeout,
520
+ metadata=new_metadata,
521
+ credentials=new_credentials,
522
+ wait_for_ready=new_wait_for_ready,
523
+ compression=new_compression,
524
+ )
525
+ return _UnaryOutcome(response, call)
526
+ except grpc.RpcError as rpc_error:
527
+ return rpc_error
528
+ except Exception as exception: # pylint:disable=broad-except
529
+ return _FailureOutcome(exception, sys.exc_info()[2])
530
+
531
+ call = self._interceptor.intercept_stream_unary(
532
+ continuation, client_call_details, request_iterator
533
+ )
534
+ return call.result(), call
535
+
536
+ def with_call(
537
+ self,
538
+ request_iterator: RequestIterableType,
539
+ timeout: Optional[float] = None,
540
+ metadata: Optional[MetadataType] = None,
541
+ credentials: Optional[grpc.CallCredentials] = None,
542
+ wait_for_ready: Optional[bool] = None,
543
+ compression: Optional[grpc.Compression] = None,
544
+ ) -> Tuple[Any, grpc.Call]:
545
+ return self._with_call(
546
+ request_iterator,
547
+ timeout=timeout,
548
+ metadata=metadata,
549
+ credentials=credentials,
550
+ wait_for_ready=wait_for_ready,
551
+ compression=compression,
552
+ )
553
+
554
+ def future(
555
+ self,
556
+ request_iterator: RequestIterableType,
557
+ timeout: Optional[float] = None,
558
+ metadata: Optional[MetadataType] = None,
559
+ credentials: Optional[grpc.CallCredentials] = None,
560
+ wait_for_ready: Optional[bool] = None,
561
+ compression: Optional[grpc.Compression] = None,
562
+ ) -> Any:
563
+ client_call_details = _ClientCallDetails(
564
+ self._method,
565
+ timeout,
566
+ metadata,
567
+ credentials,
568
+ wait_for_ready,
569
+ compression,
570
+ )
571
+
572
+ def continuation(new_details, request_iterator):
573
+ (
574
+ new_method,
575
+ new_timeout,
576
+ new_metadata,
577
+ new_credentials,
578
+ new_wait_for_ready,
579
+ new_compression,
580
+ ) = _unwrap_client_call_details(new_details, client_call_details)
581
+ return self._thunk(new_method).future(
582
+ request_iterator,
583
+ timeout=new_timeout,
584
+ metadata=new_metadata,
585
+ credentials=new_credentials,
586
+ wait_for_ready=new_wait_for_ready,
587
+ compression=new_compression,
588
+ )
589
+
590
+ try:
591
+ return self._interceptor.intercept_stream_unary(
592
+ continuation, client_call_details, request_iterator
593
+ )
594
+ except Exception as exception: # pylint:disable=broad-except
595
+ return _FailureOutcome(exception, sys.exc_info()[2])
596
+
597
+
598
+ class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable):
599
+ _thunk: Callable
600
+ _method: str
601
+ _interceptor: grpc.StreamStreamClientInterceptor
602
+
603
+ def __init__(
604
+ self,
605
+ thunk: Callable,
606
+ method: str,
607
+ interceptor: grpc.StreamStreamClientInterceptor,
608
+ ):
609
+ self._thunk = thunk
610
+ self._method = method
611
+ self._interceptor = interceptor
612
+
613
+ def __call__(
614
+ self,
615
+ request_iterator: RequestIterableType,
616
+ timeout: Optional[float] = None,
617
+ metadata: Optional[MetadataType] = None,
618
+ credentials: Optional[grpc.CallCredentials] = None,
619
+ wait_for_ready: Optional[bool] = None,
620
+ compression: Optional[grpc.Compression] = None,
621
+ ):
622
+ client_call_details = _ClientCallDetails(
623
+ self._method,
624
+ timeout,
625
+ metadata,
626
+ credentials,
627
+ wait_for_ready,
628
+ compression,
629
+ )
630
+
631
+ def continuation(new_details, request_iterator):
632
+ (
633
+ new_method,
634
+ new_timeout,
635
+ new_metadata,
636
+ new_credentials,
637
+ new_wait_for_ready,
638
+ new_compression,
639
+ ) = _unwrap_client_call_details(new_details, client_call_details)
640
+ return self._thunk(new_method)(
641
+ request_iterator,
642
+ timeout=new_timeout,
643
+ metadata=new_metadata,
644
+ credentials=new_credentials,
645
+ wait_for_ready=new_wait_for_ready,
646
+ compression=new_compression,
647
+ )
648
+
649
+ try:
650
+ return self._interceptor.intercept_stream_stream(
651
+ continuation, client_call_details, request_iterator
652
+ )
653
+ except Exception as exception: # pylint:disable=broad-except
654
+ return _FailureOutcome(exception, sys.exc_info()[2])
655
+
656
+
657
+ class _Channel(grpc.Channel):
658
+ _channel: grpc.Channel
659
+ _interceptor: Union[
660
+ grpc.UnaryUnaryClientInterceptor,
661
+ grpc.UnaryStreamClientInterceptor,
662
+ grpc.StreamStreamClientInterceptor,
663
+ grpc.StreamUnaryClientInterceptor,
664
+ ]
665
+
666
+ def __init__(
667
+ self,
668
+ channel: grpc.Channel,
669
+ interceptor: Union[
670
+ grpc.UnaryUnaryClientInterceptor,
671
+ grpc.UnaryStreamClientInterceptor,
672
+ grpc.StreamStreamClientInterceptor,
673
+ grpc.StreamUnaryClientInterceptor,
674
+ ],
675
+ ):
676
+ self._channel = channel
677
+ self._interceptor = interceptor
678
+
679
+ def subscribe(
680
+ self, callback: Callable, try_to_connect: Optional[bool] = False
681
+ ):
682
+ self._channel.subscribe(callback, try_to_connect=try_to_connect)
683
+
684
+ def unsubscribe(self, callback: Callable):
685
+ self._channel.unsubscribe(callback)
686
+
687
+ # pylint: disable=arguments-differ
688
+ def unary_unary(
689
+ self,
690
+ method: str,
691
+ request_serializer: Optional[SerializingFunction] = None,
692
+ response_deserializer: Optional[DeserializingFunction] = None,
693
+ _registered_method: Optional[bool] = False,
694
+ ) -> grpc.UnaryUnaryMultiCallable:
695
+ # pytype: disable=wrong-arg-count
696
+ thunk = lambda m: self._channel.unary_unary(
697
+ m,
698
+ request_serializer,
699
+ response_deserializer,
700
+ _registered_method,
701
+ )
702
+ # pytype: enable=wrong-arg-count
703
+ if isinstance(self._interceptor, grpc.UnaryUnaryClientInterceptor):
704
+ return _UnaryUnaryMultiCallable(thunk, method, self._interceptor)
705
+ else:
706
+ return thunk(method)
707
+
708
+ # pylint: disable=arguments-differ
709
+ def unary_stream(
710
+ self,
711
+ method: str,
712
+ request_serializer: Optional[SerializingFunction] = None,
713
+ response_deserializer: Optional[DeserializingFunction] = None,
714
+ _registered_method: Optional[bool] = False,
715
+ ) -> grpc.UnaryStreamMultiCallable:
716
+ # pytype: disable=wrong-arg-count
717
+ thunk = lambda m: self._channel.unary_stream(
718
+ m,
719
+ request_serializer,
720
+ response_deserializer,
721
+ _registered_method,
722
+ )
723
+ # pytype: enable=wrong-arg-count
724
+ if isinstance(self._interceptor, grpc.UnaryStreamClientInterceptor):
725
+ return _UnaryStreamMultiCallable(thunk, method, self._interceptor)
726
+ else:
727
+ return thunk(method)
728
+
729
+ # pylint: disable=arguments-differ
730
+ def stream_unary(
731
+ self,
732
+ method: str,
733
+ request_serializer: Optional[SerializingFunction] = None,
734
+ response_deserializer: Optional[DeserializingFunction] = None,
735
+ _registered_method: Optional[bool] = False,
736
+ ) -> grpc.StreamUnaryMultiCallable:
737
+ # pytype: disable=wrong-arg-count
738
+ thunk = lambda m: self._channel.stream_unary(
739
+ m,
740
+ request_serializer,
741
+ response_deserializer,
742
+ _registered_method,
743
+ )
744
+ # pytype: enable=wrong-arg-count
745
+ if isinstance(self._interceptor, grpc.StreamUnaryClientInterceptor):
746
+ return _StreamUnaryMultiCallable(thunk, method, self._interceptor)
747
+ else:
748
+ return thunk(method)
749
+
750
+ # pylint: disable=arguments-differ
751
+ def stream_stream(
752
+ self,
753
+ method: str,
754
+ request_serializer: Optional[SerializingFunction] = None,
755
+ response_deserializer: Optional[DeserializingFunction] = None,
756
+ _registered_method: Optional[bool] = False,
757
+ ) -> grpc.StreamStreamMultiCallable:
758
+ # pytype: disable=wrong-arg-count
759
+ thunk = lambda m: self._channel.stream_stream(
760
+ m,
761
+ request_serializer,
762
+ response_deserializer,
763
+ _registered_method,
764
+ )
765
+ # pytype: enable=wrong-arg-count
766
+ if isinstance(self._interceptor, grpc.StreamStreamClientInterceptor):
767
+ return _StreamStreamMultiCallable(thunk, method, self._interceptor)
768
+ else:
769
+ return thunk(method)
770
+
771
+ def _close(self):
772
+ self._channel.close()
773
+
774
+ def __enter__(self):
775
+ return self
776
+
777
+ def __exit__(self, exc_type, exc_val, exc_tb):
778
+ self._close()
779
+ return False
780
+
781
+ def close(self):
782
+ self._channel.close()
783
+
784
+
785
+ def intercept_channel(
786
+ channel: grpc.Channel,
787
+ *interceptors: Optional[
788
+ Sequence[
789
+ Union[
790
+ grpc.UnaryUnaryClientInterceptor,
791
+ grpc.UnaryStreamClientInterceptor,
792
+ grpc.StreamStreamClientInterceptor,
793
+ grpc.StreamUnaryClientInterceptor,
794
+ ]
795
+ ]
796
+ ],
797
+ ) -> grpc.Channel:
798
+ for interceptor in reversed(list(interceptors)):
799
+ if (
800
+ not isinstance(interceptor, grpc.UnaryUnaryClientInterceptor)
801
+ and not isinstance(interceptor, grpc.UnaryStreamClientInterceptor)
802
+ and not isinstance(interceptor, grpc.StreamUnaryClientInterceptor)
803
+ and not isinstance(interceptor, grpc.StreamStreamClientInterceptor)
804
+ ):
805
+ raise TypeError(
806
+ "interceptor must be "
807
+ "grpc.UnaryUnaryClientInterceptor or "
808
+ "grpc.UnaryStreamClientInterceptor or "
809
+ "grpc.StreamUnaryClientInterceptor or "
810
+ "grpc.StreamStreamClientInterceptor or "
811
+ )
812
+ channel = _Channel(channel, interceptor)
813
+ return channel
lib/python3.10/site-packages/grpc/_observability.py ADDED
@@ -0,0 +1,299 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2023 The gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ from __future__ import annotations
16
+
17
+ import abc
18
+ import contextlib
19
+ import logging
20
+ import threading
21
+ from typing import Any, Generator, Generic, List, Optional, TypeVar
22
+
23
+ from grpc._cython import cygrpc as _cygrpc
24
+ from grpc._typing import ChannelArgumentType
25
+
26
+ _LOGGER = logging.getLogger(__name__)
27
+
28
+ _channel = Any # _channel.py imports this module.
29
+ ClientCallTracerCapsule = TypeVar("ClientCallTracerCapsule")
30
+ ServerCallTracerFactoryCapsule = TypeVar("ServerCallTracerFactoryCapsule")
31
+
32
+ _plugin_lock: threading.RLock = threading.RLock()
33
+ _OBSERVABILITY_PLUGIN: Optional["ObservabilityPlugin"] = None
34
+ _SERVICES_TO_EXCLUDE: List[bytes] = [
35
+ b"google.monitoring.v3.MetricService",
36
+ b"google.devtools.cloudtrace.v2.TraceService",
37
+ ]
38
+
39
+
40
+ class ServerCallTracerFactory:
41
+ """An encapsulation of a ServerCallTracerFactory.
42
+
43
+ Instances of this class can be passed to a Channel as values for the
44
+ grpc.experimental.server_call_tracer_factory option
45
+ """
46
+
47
+ def __init__(self, address):
48
+ self._address = address
49
+
50
+ def __int__(self):
51
+ return self._address
52
+
53
+
54
+ class ObservabilityPlugin(
55
+ Generic[ClientCallTracerCapsule, ServerCallTracerFactoryCapsule],
56
+ metaclass=abc.ABCMeta,
57
+ ):
58
+ """Abstract base class for observability plugin.
59
+
60
+ *This is a semi-private class that was intended for the exclusive use of
61
+ the gRPC team.*
62
+
63
+ The ClientCallTracerCapsule and ClientCallTracerCapsule created by this
64
+ plugin should be injected to gRPC core using observability_init at the
65
+ start of a program, before any channels/servers are built.
66
+
67
+ Any future methods added to this interface cannot have the
68
+ @abc.abstractmethod annotation.
69
+
70
+ Attributes:
71
+ _stats_enabled: A bool indicates whether tracing is enabled.
72
+ _tracing_enabled: A bool indicates whether stats(metrics) is enabled.
73
+ _registered_methods: A set which stores the registered method names in
74
+ bytes.
75
+ """
76
+
77
+ _tracing_enabled: bool = False
78
+ _stats_enabled: bool = False
79
+
80
+ @abc.abstractmethod
81
+ def create_client_call_tracer(
82
+ self, method_name: bytes, target: bytes
83
+ ) -> ClientCallTracerCapsule:
84
+ """Creates a ClientCallTracerCapsule.
85
+
86
+ After register the plugin, if tracing or stats is enabled, this method
87
+ will be called after a call was created, the ClientCallTracer created
88
+ by this method will be saved to call context.
89
+
90
+ The ClientCallTracer is an object which implements `grpc_core::ClientCallTracer`
91
+ interface and wrapped in a PyCapsule using `client_call_tracer` as name.
92
+
93
+ Args:
94
+ method_name: The method name of the call in byte format.
95
+ target: The channel target of the call in byte format.
96
+ registered_method: Whether this method is pre-registered.
97
+
98
+ Returns:
99
+ A PyCapsule which stores a ClientCallTracer object.
100
+ """
101
+ raise NotImplementedError()
102
+
103
+ @abc.abstractmethod
104
+ def save_trace_context(
105
+ self, trace_id: str, span_id: str, is_sampled: bool
106
+ ) -> None:
107
+ """Saves the trace_id and span_id related to the current span.
108
+
109
+ After register the plugin, if tracing is enabled, this method will be
110
+ called after the server finished sending response.
111
+
112
+ This method can be used to propagate census context.
113
+
114
+ Args:
115
+ trace_id: The identifier for the trace associated with the span as a
116
+ 32-character hexadecimal encoded string,
117
+ e.g. 26ed0036f2eff2b7317bccce3e28d01f
118
+ span_id: The identifier for the span as a 16-character hexadecimal encoded
119
+ string. e.g. 113ec879e62583bc
120
+ is_sampled: A bool indicates whether the span is sampled.
121
+ """
122
+ raise NotImplementedError()
123
+
124
+ @abc.abstractmethod
125
+ def create_server_call_tracer_factory(
126
+ self,
127
+ *,
128
+ xds: bool = False,
129
+ ) -> Optional[ServerCallTracerFactoryCapsule]:
130
+ """Creates a ServerCallTracerFactoryCapsule.
131
+
132
+ This method will be called at server initialization time to create a
133
+ ServerCallTracerFactory, which will be registered to gRPC core.
134
+
135
+ The ServerCallTracerFactory is an object which implements
136
+ `grpc_core::ServerCallTracerFactory` interface and wrapped in a PyCapsule
137
+ using `server_call_tracer_factory` as name.
138
+
139
+ Args:
140
+ xds: Whether the server is xds server.
141
+ Returns:
142
+ A PyCapsule which stores a ServerCallTracerFactory object. Or None if
143
+ plugin decides not to create ServerCallTracerFactory.
144
+ """
145
+ raise NotImplementedError()
146
+
147
    @abc.abstractmethod
    def record_rpc_latency(
        self, method: str, target: str, rpc_latency: float, status_code: Any
    ) -> None:
        """Record the latency of the RPC.

        After registering the plugin, if stats is enabled, this method will be
        called at the end of each RPC.

        Args:
          method: The fully-qualified name of the RPC method being invoked.
          target: The target name of the RPC method being invoked.
          rpc_latency: The latency for the RPC in seconds, equals to the time between
            when the client invokes the RPC and when the client receives the status.
          status_code: An element of grpc.StatusCode in string format representing the
            final status for the RPC.
        """
        # NOTE(review): maybe_record_rpc_latency in this module passes a value
        # multiplied by 1000 (milliseconds) — confirm which unit is intended.
        raise NotImplementedError()
165
+
166
+ def set_tracing(self, enable: bool) -> None:
167
+ """Enable or disable tracing.
168
+
169
+ Args:
170
+ enable: A bool indicates whether tracing should be enabled.
171
+ """
172
+ self._tracing_enabled = enable
173
+
174
+ def set_stats(self, enable: bool) -> None:
175
+ """Enable or disable stats(metrics).
176
+
177
+ Args:
178
+ enable: A bool indicates whether stats should be enabled.
179
+ """
180
+ self._stats_enabled = enable
181
+
182
    def save_registered_method(self, method_name: bytes) -> None:
        """Saves the method name to the registered_method list.

        When exporting metrics, method names for unregistered methods will be
        replaced with 'other' by default.

        Args:
          method_name: The method name in bytes.
        """
        # Not abstract, but still unimplemented here: concrete plugins that
        # track registered methods must override this.
        raise NotImplementedError()
192
+
193
    @property
    def tracing_enabled(self) -> bool:
        # Reflects the last value passed to set_tracing().
        return self._tracing_enabled
196
+
197
    @property
    def stats_enabled(self) -> bool:
        # Reflects the last value passed to set_stats().
        return self._stats_enabled
200
+
201
    @property
    def observability_enabled(self) -> bool:
        # Observability is active when either tracing or stats is enabled.
        return self.tracing_enabled or self.stats_enabled
204
+
205
+
206
@contextlib.contextmanager
def get_plugin() -> Generator[Optional[ObservabilityPlugin], None, None]:
    """Context manager yielding the module-level ObservabilityPlugin.

    Yields:
      The ObservabilityPlugin currently registered with the _observability
      module, or None if no plugin exists at the time of calling this method.
      The module plugin lock is held for the duration of the context.
    """
    with _plugin_lock:
        yield _OBSERVABILITY_PLUGIN
216
+
217
+
218
def set_plugin(observability_plugin: Optional[ObservabilityPlugin]) -> None:
    """Install (or clear, when None) the module-level ObservabilityPlugin.

    Args:
      observability_plugin: The ObservabilityPlugin to save.

    Raises:
      ValueError: If an ObservabilityPlugin was already registered at the
        time of calling this method.
    """
    global _OBSERVABILITY_PLUGIN  # pylint: disable=global-statement
    with _plugin_lock:
        # Only installing a second plugin is an error; clearing is always OK.
        if observability_plugin and _OBSERVABILITY_PLUGIN:
            raise ValueError("observability_plugin was already set!")
        _OBSERVABILITY_PLUGIN = observability_plugin
233
+
234
+
235
def observability_init(observability_plugin: ObservabilityPlugin) -> None:
    """Initialize observability with the provided ObservabilityPlugin.

    This method has to be called at the start of a program, before any
    channels/servers are built.

    Args:
      observability_plugin: The ObservabilityPlugin to use.

    Raises:
      ValueError: If an ObservabilityPlugin was already registered at the
        time of calling this method.
    """
    set_plugin(observability_plugin)
249
+
250
+
251
def observability_deinit() -> None:
    """Clear the observability context.

    Drops both the registered ObservabilityPlugin and the core-side
    ServerCallTracerFactory. Must be called after exiting the observability
    context so that it is possible to re-initialize again.
    """
    set_plugin(None)
    _cygrpc.clear_server_call_tracer_factory()
260
+
261
+
262
def maybe_record_rpc_latency(state: "_channel._RPCState") -> None:
    """Record the latency of the RPC, if a plugin is registered and stats is enabled.

    This method will be called at the end of each RPC.

    Args:
      state: a grpc._channel._RPCState object which contains the stats related
        to the RPC.
    """
    # TODO(xuanwn): use channel args to exclude those metrics.
    encoded_method = state.method.encode("utf8")
    if any(prefix in encoded_method for prefix in _SERVICES_TO_EXCLUDE):
        return
    with get_plugin() as plugin:
        if not (plugin and plugin.stats_enabled):
            return
        # NOTE(review): this hands milliseconds to record_rpc_latency, whose
        # docstring says seconds — confirm the intended unit.
        rpc_latency_ms = (state.rpc_end_time - state.rpc_start_time) * 1000
        plugin.record_rpc_latency(
            state.method, state.target, rpc_latency_ms, state.code
        )
282
+
283
+
284
def create_server_call_tracer_factory_option(xds: bool) -> ChannelArgumentType:
    """Build the channel-argument tuple carrying a server call tracer factory.

    Returns an empty tuple when no plugin is registered, stats is disabled,
    or the plugin declines to produce a factory.
    """
    with get_plugin() as plugin:
        if not (plugin and plugin.stats_enabled):
            return ()
        factory_address = _cygrpc.get_server_call_tracer_factory_address(
            plugin, xds
        )
        if not factory_address:
            return ()
        return (
            (
                "grpc.experimental.server_call_tracer_factory",
                ServerCallTracerFactory(factory_address),
            ),
        )
lib/python3.10/site-packages/grpc/_plugin_wrapping.py ADDED
@@ -0,0 +1,136 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import collections
16
+ import logging
17
+ import threading
18
+ from typing import Callable, Optional, Type
19
+
20
+ import grpc
21
+ from grpc import _common
22
+ from grpc._cython import cygrpc
23
+ from grpc._typing import MetadataType
24
+
25
+ _LOGGER = logging.getLogger(__name__)
26
+
27
+
28
class _AuthMetadataContext(
    collections.namedtuple(
        "AuthMetadataContext",
        (
            "service_url",
            "method_name",
        ),
    ),
    grpc.AuthMetadataContext,
):
    # Immutable (service_url, method_name) pair; the namedtuple fields
    # satisfy the grpc.AuthMetadataContext interface.
    pass
39
+
40
+
41
+ class _CallbackState(object):
42
+ def __init__(self):
43
+ self.lock = threading.Lock()
44
+ self.called = False
45
+ self.exception = None
46
+
47
+
48
class _AuthMetadataPluginCallback(grpc.AuthMetadataPluginCallback):
    """Bridges a user AuthMetadataPlugin's callback to the cygrpc callback.

    Enforces the one-shot contract: the plugin must invoke the callback at
    most once, and never after it has raised.
    """

    _state: _CallbackState
    _callback: Callable

    def __init__(self, state: _CallbackState, callback: Callable):
        self._state = state
        self._callback = callback

    def __call__(
        self, metadata: MetadataType, error: Optional[Type[BaseException]]
    ):
        with self._state.lock:
            # Refuse invocation after the plugin raised, and double invocation.
            if self._state.exception is not None:
                raise RuntimeError(
                    'AuthMetadataPluginCallback raised exception "{}"!'.format(
                        self._state.exception
                    )
                )
            if self._state.called:
                raise RuntimeError(
                    "AuthMetadataPluginCallback invoked more than once!"
                )
            self._state.called = True
        if error is None:
            self._callback(metadata, cygrpc.StatusCode.ok, None)
        else:
            self._callback(
                None, cygrpc.StatusCode.internal, _common.encode(str(error))
            )
79
+
80
+
81
class _Plugin(object):
    """Adapts a grpc.AuthMetadataPlugin for invocation by gRPC core."""

    _metadata_plugin: grpc.AuthMetadataPlugin

    def __init__(self, metadata_plugin: grpc.AuthMetadataPlugin):
        self._metadata_plugin = metadata_plugin
        self._stored_ctx = None

        try:
            import contextvars  # pylint: disable=wrong-import-position

            # The plugin may be invoked on a thread created by Core, which will not
            # have the context propagated. This context is stored and installed in
            # the thread invoking the plugin.
            self._stored_ctx = contextvars.copy_context()
        except ImportError:
            # Support versions predating contextvars.
            pass

    def __call__(self, service_url: str, method_name: str, callback: Callable):
        # NOTE(review): _stored_ctx is captured above but not used in this
        # __call__ — confirm whether the plugin should run via
        # self._stored_ctx.run(...).
        context = _AuthMetadataContext(
            _common.decode(service_url), _common.decode(method_name)
        )
        callback_state = _CallbackState()
        try:
            # Hand the user plugin a one-shot callback wrapper.
            self._metadata_plugin(
                context, _AuthMetadataPluginCallback(callback_state, callback)
            )
        except Exception as exception:  # pylint: disable=broad-except
            _LOGGER.exception(
                'AuthMetadataPluginCallback "%s" raised exception!',
                self._metadata_plugin,
            )
            with callback_state.lock:
                callback_state.exception = exception
                # If the plugin already completed the callback before raising,
                # there is nothing left to report.
                if callback_state.called:
                    return
            callback(
                None, cygrpc.StatusCode.internal, _common.encode(str(exception))
            )
120
+
121
+
122
def metadata_plugin_call_credentials(
    metadata_plugin: grpc.AuthMetadataPlugin, name: Optional[str]
) -> grpc.CallCredentials:
    """Wrap an AuthMetadataPlugin into grpc.CallCredentials.

    When no explicit name is supplied, falls back to the plugin's __name__
    attribute and then to its class name.
    """
    if name is not None:
        effective_name = name
    else:
        try:
            effective_name = metadata_plugin.__name__
        except AttributeError:
            effective_name = type(metadata_plugin).__name__
    return grpc.CallCredentials(
        cygrpc.MetadataPluginCallCredentials(
            _Plugin(metadata_plugin), _common.encode(effective_name)
        )
    )
lib/python3.10/site-packages/grpc/_runtime_protos.py ADDED
@@ -0,0 +1,165 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 The gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import sys
16
+ import types
17
+ from typing import Tuple, Union
18
+
19
+ _REQUIRED_SYMBOLS = ("_protos", "_services", "_protos_and_services")
20
+ _MINIMUM_VERSION = (3, 5, 0)
21
+
22
+ _UNINSTALLED_TEMPLATE = (
23
+ "Install the grpcio-tools package (1.32.0+) to use the {} function."
24
+ )
25
+ _VERSION_ERROR_TEMPLATE = (
26
+ "The {} function is only on available on Python 3.X interpreters."
27
+ )
28
+
29
+
30
def _has_runtime_proto_symbols(mod: types.ModuleType) -> bool:
    """Return True iff `mod` exposes every symbol in _REQUIRED_SYMBOLS."""
    for symbol in _REQUIRED_SYMBOLS:
        if not hasattr(mod, symbol):
            return False
    return True
32
+
33
+
34
+ def _is_grpc_tools_importable() -> bool:
35
+ try:
36
+ import grpc_tools # pylint: disable=unused-import # pytype: disable=import-error
37
+
38
+ return True
39
+ except ImportError as e:
40
+ # NOTE: It's possible that we're encountering a transitive ImportError, so
41
+ # we check for that and re-raise if so.
42
+ if "grpc_tools" not in e.args[0]:
43
+ raise
44
+ return False
45
+
46
+
47
def _call_with_lazy_import(
    fn_name: str, protobuf_path: str
) -> Union[types.ModuleType, Tuple[types.ModuleType, types.ModuleType]]:
    """Calls one of the three functions, lazily importing grpc_tools.

    Args:
      fn_name: The name of the function to import from grpc_tools.protoc.
      protobuf_path: The path to import.

    Returns:
      The appropriate module object.

    Raises:
      NotImplementedError: When the interpreter is too old, or grpc_tools is
        missing or predates the runtime-proto symbols.
    """
    if sys.version_info < _MINIMUM_VERSION:
        raise NotImplementedError(_VERSION_ERROR_TEMPLATE.format(fn_name))
    if not _is_grpc_tools_importable():
        raise NotImplementedError(_UNINSTALLED_TEMPLATE.format(fn_name))
    import grpc_tools.protoc  # pytype: disable=import-error

    if not _has_runtime_proto_symbols(grpc_tools.protoc):
        raise NotImplementedError(_UNINSTALLED_TEMPLATE.format(fn_name))
    target_fn = getattr(grpc_tools.protoc, "_" + fn_name)
    return target_fn(protobuf_path)
71
+
72
+
73
def protos(protobuf_path):  # pylint: disable=unused-argument
    """Returns a module generated by the indicated .proto file.

    THIS IS AN EXPERIMENTAL API.

    Use this function to retrieve classes corresponding to message
    definitions in the .proto file.

    To inspect the contents of the returned module, use the dir function.
    For example:

    ```
    protos = grpc.protos("foo.proto")
    print(dir(protos))
    ```

    The returned module object corresponds to the _pb2.py file generated
    by protoc. The path is expected to be relative to an entry on sys.path
    and all transitive dependencies of the file should also be resolvable
    from an entry on sys.path.

    To completely disable the machinery behind this function, set the
    GRPC_PYTHON_DISABLE_DYNAMIC_STUBS environment variable to "true".

    Args:
      protobuf_path: The path to the .proto file on the filesystem. This path
        must be resolvable from an entry on sys.path and so must all of its
        transitive dependencies.

    Returns:
      A module object corresponding to the message code for the indicated
      .proto file. Equivalent to a generated _pb2.py file.

    Raises:
      NotImplementedError: If grpcio-tools is not installed or is too old.
    """
    return _call_with_lazy_import("protos", protobuf_path)
107
+
108
+
109
def services(protobuf_path):  # pylint: disable=unused-argument
    """Returns a module generated by the indicated .proto file.

    THIS IS AN EXPERIMENTAL API.

    Use this function to retrieve classes and functions corresponding to
    service definitions in the .proto file, including both stub and servicer
    definitions.

    To inspect the contents of the returned module, use the dir function.
    For example:

    ```
    services = grpc.services("foo.proto")
    print(dir(services))
    ```

    The returned module object corresponds to the _pb2_grpc.py file generated
    by protoc. The path is expected to be relative to an entry on sys.path
    and all transitive dependencies of the file should also be resolvable
    from an entry on sys.path.

    To completely disable the machinery behind this function, set the
    GRPC_PYTHON_DISABLE_DYNAMIC_STUBS environment variable to "true".

    Args:
      protobuf_path: The path to the .proto file on the filesystem. This path
        must be resolvable from an entry on sys.path and so must all of its
        transitive dependencies.

    Returns:
      A module object corresponding to the stub/service code for the indicated
      .proto file. Equivalent to a generated _pb2_grpc.py file.

    Raises:
      NotImplementedError: If grpcio-tools is not installed or is too old.
    """
    return _call_with_lazy_import("services", protobuf_path)
144
+
145
+
146
def protos_and_services(protobuf_path):  # pylint: disable=unused-argument
    """Returns a 2-tuple of modules corresponding to protos and services.

    THIS IS AN EXPERIMENTAL API.

    The return value of this function is equivalent to a call to protos and a
    call to services.

    To completely disable the machinery behind this function, set the
    GRPC_PYTHON_DISABLE_DYNAMIC_STUBS environment variable to "true".

    Args:
      protobuf_path: The path to the .proto file on the filesystem. This path
        must be resolvable from an entry on sys.path and so must all of its
        transitive dependencies.

    Returns:
      A 2-tuple of module objects corresponding to (protos(path), services(path)).

    Raises:
      NotImplementedError: If grpcio-tools is not installed or is too old.
    """
    return _call_with_lazy_import("protos_and_services", protobuf_path)
lib/python3.10/site-packages/grpc/_server.py ADDED
@@ -0,0 +1,1528 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Service-side implementation of gRPC Python."""
15
+
16
+ from __future__ import annotations
17
+
18
+ import abc
19
+ import collections
20
+ from concurrent import futures
21
+ import contextvars
22
+ import enum
23
+ import logging
24
+ import threading
25
+ import time
26
+ import traceback
27
+ from typing import (
28
+ Any,
29
+ Callable,
30
+ Dict,
31
+ Iterable,
32
+ Iterator,
33
+ List,
34
+ Mapping,
35
+ Optional,
36
+ Sequence,
37
+ Set,
38
+ Tuple,
39
+ Union,
40
+ )
41
+
42
+ import grpc # pytype: disable=pyi-error
43
+ from grpc import _common # pytype: disable=pyi-error
44
+ from grpc import _compression # pytype: disable=pyi-error
45
+ from grpc import _interceptor # pytype: disable=pyi-error
46
+ from grpc import _observability # pytype: disable=pyi-error
47
+ from grpc._cython import cygrpc
48
+ from grpc._typing import ArityAgnosticMethodHandler
49
+ from grpc._typing import ChannelArgumentType
50
+ from grpc._typing import DeserializingFunction
51
+ from grpc._typing import MetadataType
52
+ from grpc._typing import NullaryCallbackType
53
+ from grpc._typing import ResponseType
54
+ from grpc._typing import SerializingFunction
55
+ from grpc._typing import ServerCallbackTag
56
+ from grpc._typing import ServerTagCallbackType
57
+
58
_LOGGER = logging.getLogger(__name__)

# Completion-queue tags for server lifecycle events.
_SHUTDOWN_TAG = "shutdown"
_REQUEST_CALL_TAG = "request_call"

# Tokens naming the in-flight batch operations tracked in _RPCState.due.
_RECEIVE_CLOSE_ON_SERVER_TOKEN = "receive_close_on_server"
_SEND_INITIAL_METADATA_TOKEN = "send_initial_metadata"
_RECEIVE_MESSAGE_TOKEN = "receive_message"
_SEND_MESSAGE_TOKEN = "send_message"
_SEND_INITIAL_METADATA_AND_SEND_MESSAGE_TOKEN = (
    "send_initial_metadata * send_message"
)
_SEND_STATUS_FROM_SERVER_TOKEN = "send_status_from_server"
_SEND_INITIAL_METADATA_AND_SEND_STATUS_FROM_SERVER_TOKEN = (
    "send_initial_metadata * send_status_from_server"
)

# Client-side view of the RPC as tracked in _RPCState.client.
_OPEN = "open"
_CLOSED = "closed"
_CANCELLED = "cancelled"

# No special flags for core batch operations.
_EMPTY_FLAGS = 0

# How often to poll for a garbage-collected (deallocated) server.
_DEALLOCATED_SERVER_CHECK_PERIOD_S = 1.0
# Effectively-infinite timeout used where core requires a finite deadline.
_INF_TIMEOUT = 1e9
83
+
84
+
85
+ def _serialized_request(request_event: cygrpc.BaseEvent) -> bytes:
86
+ return request_event.batch_operations[0].message()
87
+
88
+
89
def _application_code(code: grpc.StatusCode) -> cygrpc.StatusCode:
    """Map a grpc.StatusCode to its cygrpc equivalent, defaulting to unknown."""
    mapped = _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE.get(code)
    if mapped is None:
        return cygrpc.StatusCode.unknown
    return mapped
92
+
93
+
94
def _completion_code(state: _RPCState) -> cygrpc.StatusCode:
    """Status code to send at completion: ok unless the handler set a code."""
    if state.code is None:
        return cygrpc.StatusCode.ok
    return _application_code(state.code)
99
+
100
+
101
def _abortion_code(
    state: _RPCState, code: cygrpc.StatusCode
) -> cygrpc.StatusCode:
    """Status code for an abort: the handler's code if set, else `code`."""
    return code if state.code is None else _application_code(state.code)
108
+
109
+
110
+ def _details(state: _RPCState) -> bytes:
111
+ return b"" if state.details is None else state.details
112
+
113
+
114
class _HandlerCallDetails(
    collections.namedtuple(
        "_HandlerCallDetails",
        (
            "method",
            "invocation_metadata",
        ),
    ),
    grpc.HandlerCallDetails,
):
    # Immutable (method, invocation_metadata) pair passed to handler lookup;
    # the namedtuple fields satisfy the grpc.HandlerCallDetails interface.
    pass
125
+
126
+
127
class _Method(abc.ABC):
    """Lookup strategy that resolves the handler for an incoming call."""

    @abc.abstractmethod
    def name(self) -> Optional[str]:
        # The fully-qualified method name, or None when not known up front.
        raise NotImplementedError()

    @abc.abstractmethod
    def handler(
        self, handler_call_details: _HandlerCallDetails
    ) -> Optional[grpc.RpcMethodHandler]:
        # Resolve the RpcMethodHandler for the given call details, if any.
        raise NotImplementedError()
137
+
138
+
139
class _RegisteredMethod(_Method):
    """A pre-registered method bound to a fixed handler."""

    def __init__(
        self,
        name: str,
        registered_handler: Optional[grpc.RpcMethodHandler],
    ):
        self._name = name
        self._registered_handler = registered_handler

    def name(self) -> Optional[str]:
        return self._name

    def handler(
        self, handler_call_details: _HandlerCallDetails
    ) -> Optional[grpc.RpcMethodHandler]:
        # The handler was fixed at registration; call details are ignored.
        return self._registered_handler
155
+
156
+
157
class _GenericMethod(_Method):
    """Resolves handlers by querying the generic handlers in order."""

    def __init__(
        self,
        generic_handlers: List[grpc.GenericRpcHandler],
    ):
        self._generic_handlers = generic_handlers

    def name(self) -> Optional[str]:
        # Generic dispatch has no single method name.
        return None

    def handler(
        self, handler_call_details: _HandlerCallDetails
    ) -> Optional[grpc.RpcMethodHandler]:
        # If the same method have both generic and registered handler,
        # registered handler will take precedence.
        candidates = (
            generic_handler.service(handler_call_details)
            for generic_handler in self._generic_handlers
        )
        return next(
            (candidate for candidate in candidates if candidate is not None),
            None,
        )
177
+
178
+
179
class _RPCState(object):
    """Mutable, condition-guarded state for a single server-side RPC."""

    context: contextvars.Context
    condition: threading.Condition
    # Fixed: was `due = Set[str]`, which assigned the typing object as a
    # class attribute instead of declaring an annotation (PEP 526).
    due: Set[str]
    request: Any
    client: str
    initial_metadata_allowed: bool
    compression_algorithm: Optional[grpc.Compression]
    disable_next_compression: bool
    trailing_metadata: Optional[MetadataType]
    code: Optional[grpc.StatusCode]
    details: Optional[bytes]
    statused: bool
    rpc_errors: List[Exception]
    callbacks: Optional[List[NullaryCallbackType]]
    aborted: bool

    def __init__(self):
        self.context = contextvars.Context()
        self.condition = threading.Condition()
        self.due = set()  # tokens of in-flight core batch operations
        self.request = None
        self.client = _OPEN
        self.initial_metadata_allowed = True
        self.compression_algorithm = None
        self.disable_next_compression = False
        self.trailing_metadata = None
        self.code = None
        self.details = None
        self.statused = False
        self.rpc_errors = []
        # Becomes None once the RPC terminates and callbacks are handed off.
        self.callbacks = []
        self.aborted = False
212
+
213
+
214
def _raise_rpc_error(state: _RPCState) -> None:
    """Raise a fresh RpcError, recording it on the state first."""
    rpc_error = grpc.RpcError()
    state.rpc_errors.append(rpc_error)
    raise rpc_error
218
+
219
+
220
def _possibly_finish_call(
    state: _RPCState, token: str
) -> ServerTagCallbackType:
    """Retire `token`; hand back (state, callbacks) once the RPC is fully done.

    Must be called under state.condition. Returns (None, ()) while the RPC is
    still active or other batch operations remain outstanding.
    """
    state.due.remove(token)
    finished = not _is_rpc_state_active(state) and not state.due
    if not finished:
        return None, ()
    callbacks = state.callbacks
    state.callbacks = None  # marks the RPC as terminated for add_callback
    return state, callbacks
230
+
231
+
232
def _send_status_from_server(state: _RPCState, token: str) -> ServerCallbackTag:
    """Build the completion callback for a send-status batch operation."""

    def send_status_from_server(unused_send_status_from_server_event):
        # The status has been sent; just retire the token.
        with state.condition:
            return _possibly_finish_call(state, token)

    return send_status_from_server
238
+
239
+
240
def _get_initial_metadata(
    state: _RPCState, metadata: Optional[MetadataType]
) -> Optional[MetadataType]:
    """Prepend the compression-algorithm entry to `metadata` when one is set."""
    with state.condition:
        if not state.compression_algorithm:
            return metadata
        compression_metadata = (
            _compression.compression_algorithm_to_metadata(
                state.compression_algorithm
            ),
        )
        if metadata is None:
            return compression_metadata
        return compression_metadata + tuple(metadata)
256
+
257
+
258
def _get_initial_metadata_operation(
    state: _RPCState, metadata: Optional[MetadataType]
) -> cygrpc.Operation:
    """Build the SendInitialMetadata operation, folding in compression metadata."""
    return cygrpc.SendInitialMetadataOperation(
        _get_initial_metadata(state, metadata), _EMPTY_FLAGS
    )
265
+
266
+
267
def _abort(
    state: _RPCState, call: cygrpc.Call, code: cygrpc.StatusCode, details: bytes
) -> None:
    """Send terminal status for an aborted RPC, unless the client cancelled.

    Must be called under state.condition. The handler-set code/details on
    `state` take precedence over the `code`/`details` arguments.
    """
    if state.client is not _CANCELLED:
        effective_code = _abortion_code(state, code)
        effective_details = details if state.details is None else state.details
        if state.initial_metadata_allowed:
            # Initial metadata was never sent: send it and the status in one batch.
            operations = (
                _get_initial_metadata_operation(state, None),
                cygrpc.SendStatusFromServerOperation(
                    state.trailing_metadata,
                    effective_code,
                    effective_details,
                    _EMPTY_FLAGS,
                ),
            )
            token = _SEND_INITIAL_METADATA_AND_SEND_STATUS_FROM_SERVER_TOKEN
        else:
            operations = (
                cygrpc.SendStatusFromServerOperation(
                    state.trailing_metadata,
                    effective_code,
                    effective_details,
                    _EMPTY_FLAGS,
                ),
            )
            token = _SEND_STATUS_FROM_SERVER_TOKEN
        call.start_server_batch(
            operations, _send_status_from_server(state, token)
        )
        # Mark statused and register the outstanding token only after the
        # batch was successfully started.
        state.statused = True
        state.due.add(token)
299
+
300
+
301
def _receive_close_on_server(state: _RPCState) -> ServerCallbackTag:
    """Build the callback handling the client-close notification."""

    def receive_close_on_server(receive_close_on_server_event):
        with state.condition:
            # cancelled() distinguishes client cancellation from a clean close.
            if receive_close_on_server_event.batch_operations[0].cancelled():
                state.client = _CANCELLED
            elif state.client is _OPEN:
                state.client = _CLOSED
            state.condition.notify_all()
            return _possibly_finish_call(state, _RECEIVE_CLOSE_ON_SERVER_TOKEN)

    return receive_close_on_server
312
+
313
+
314
def _receive_message(
    state: _RPCState,
    call: cygrpc.Call,
    request_deserializer: Optional[DeserializingFunction],
) -> ServerCallbackTag:
    """Build the callback handling a received (possibly final) request message."""

    def receive_message(receive_message_event):
        serialized_request = _serialized_request(receive_message_event)
        if serialized_request is None:
            # No payload: the client has finished sending requests.
            with state.condition:
                if state.client is _OPEN:
                    state.client = _CLOSED
                state.condition.notify_all()
                return _possibly_finish_call(state, _RECEIVE_MESSAGE_TOKEN)
        else:
            request = _common.deserialize(
                serialized_request, request_deserializer
            )
            with state.condition:
                if request is None:
                    # Deserialization failed; abort rather than deliver garbage.
                    _abort(
                        state,
                        call,
                        cygrpc.StatusCode.internal,
                        b"Exception deserializing request!",
                    )
                else:
                    state.request = request
                state.condition.notify_all()
                return _possibly_finish_call(state, _RECEIVE_MESSAGE_TOKEN)

    return receive_message
345
+
346
+
347
def _send_initial_metadata(state: _RPCState) -> ServerCallbackTag:
    """Build the completion callback for a send-initial-metadata operation."""

    def send_initial_metadata(unused_send_initial_metadata_event):
        with state.condition:
            return _possibly_finish_call(state, _SEND_INITIAL_METADATA_TOKEN)

    return send_initial_metadata
353
+
354
+
355
def _send_message(state: _RPCState, token: str) -> ServerCallbackTag:
    """Build the completion callback for a send-message operation."""

    def send_message(unused_send_message_event):
        with state.condition:
            # Wake any thread waiting for the previous send to complete.
            state.condition.notify_all()
            return _possibly_finish_call(state, token)

    return send_message
362
+
363
+
364
+ class _Context(grpc.ServicerContext):
365
+ _rpc_event: cygrpc.BaseEvent
366
+ _state: _RPCState
367
+ request_deserializer: Optional[DeserializingFunction]
368
+
369
    def __init__(
        self,
        rpc_event: cygrpc.BaseEvent,
        state: _RPCState,
        request_deserializer: Optional[DeserializingFunction],
    ):
        """Bind the servicer context to one RPC's event and shared state."""
        self._rpc_event = rpc_event
        self._state = state
        self._request_deserializer = request_deserializer
378
+
379
    def is_active(self) -> bool:
        # True while the RPC has neither been cancelled nor statused.
        with self._state.condition:
            return _is_rpc_state_active(self._state)
382
+
383
+ def time_remaining(self) -> float:
384
+ return max(self._rpc_event.call_details.deadline - time.time(), 0)
385
+
386
    def cancel(self) -> None:
        # Cancels the underlying core call; termination surfaces via events.
        self._rpc_event.call.cancel()
388
+
389
+ def add_callback(self, callback: NullaryCallbackType) -> bool:
390
+ with self._state.condition:
391
+ if self._state.callbacks is None:
392
+ return False
393
+ else:
394
+ self._state.callbacks.append(callback)
395
+ return True
396
+
397
    def disable_next_message_compression(self) -> None:
        # One-shot flag consumed when the next response message is sent.
        with self._state.condition:
            self._state.disable_next_compression = True
400
+
401
    def invocation_metadata(self) -> Optional[MetadataType]:
        # Metadata sent by the client at invocation, as captured in the event.
        return self._rpc_event.invocation_metadata
403
+
404
    def peer(self) -> str:
        # Decoded address of the remote peer for this call.
        return _common.decode(self._rpc_event.call.peer())
406
+
407
    def peer_identities(self) -> Optional[Sequence[bytes]]:
        # Raw identity values from the call's auth context, if authenticated.
        return cygrpc.peer_identities(self._rpc_event.call)
409
+
410
+ def peer_identity_key(self) -> Optional[str]:
411
+ id_key = cygrpc.peer_identity_key(self._rpc_event.call)
412
+ return id_key if id_key is None else _common.decode(id_key)
413
+
414
+ def auth_context(self) -> Mapping[str, Sequence[bytes]]:
415
+ auth_context = cygrpc.auth_context(self._rpc_event.call)
416
+ auth_context_dict = {} if auth_context is None else auth_context
417
+ return {
418
+ _common.decode(key): value
419
+ for key, value in auth_context_dict.items()
420
+ }
421
+
422
+ def set_compression(self, compression: grpc.Compression) -> None:
423
+ with self._state.condition:
424
+ self._state.compression_algorithm = compression
425
+
426
+ def send_initial_metadata(self, initial_metadata: MetadataType) -> None:
427
+ with self._state.condition:
428
+ if self._state.client is _CANCELLED:
429
+ _raise_rpc_error(self._state)
430
+ else:
431
+ if self._state.initial_metadata_allowed:
432
+ operation = _get_initial_metadata_operation(
433
+ self._state, initial_metadata
434
+ )
435
+ self._rpc_event.call.start_server_batch(
436
+ (operation,), _send_initial_metadata(self._state)
437
+ )
438
+ self._state.initial_metadata_allowed = False
439
+ self._state.due.add(_SEND_INITIAL_METADATA_TOKEN)
440
+ else:
441
+ raise ValueError("Initial metadata no longer allowed!")
442
+
443
+ def set_trailing_metadata(self, trailing_metadata: MetadataType) -> None:
444
+ with self._state.condition:
445
+ self._state.trailing_metadata = trailing_metadata
446
+
447
+ def trailing_metadata(self) -> Optional[MetadataType]:
448
+ return self._state.trailing_metadata
449
+
450
+ def abort(self, code: grpc.StatusCode, details: str) -> None:
451
+ # treat OK like other invalid arguments: fail the RPC
452
+ if code == grpc.StatusCode.OK:
453
+ _LOGGER.error(
454
+ "abort() called with StatusCode.OK; returning UNKNOWN"
455
+ )
456
+ code = grpc.StatusCode.UNKNOWN
457
+ details = ""
458
+ with self._state.condition:
459
+ self._state.code = code
460
+ self._state.details = _common.encode(details)
461
+ self._state.aborted = True
462
+ raise Exception()
463
+
464
+ def abort_with_status(self, status: grpc.Status) -> None:
465
+ self._state.trailing_metadata = status.trailing_metadata
466
+ self.abort(status.code, status.details)
467
+
468
+ def set_code(self, code: grpc.StatusCode) -> None:
469
+ with self._state.condition:
470
+ self._state.code = code
471
+
472
+ def code(self) -> grpc.StatusCode:
473
+ return self._state.code
474
+
475
+ def set_details(self, details: str) -> None:
476
+ with self._state.condition:
477
+ self._state.details = _common.encode(details)
478
+
479
+ def details(self) -> bytes:
480
+ return self._state.details
481
+
482
+ def _finalize_state(self) -> None:
483
+ pass
484
+
485
+
486
class _RequestIterator(object):
    """Blocking iterator over a streaming RPC's request messages.

    Each iteration starts a ReceiveMessage batch on the call and waits on
    the shared condition until a message arrives, the client half-closes
    (StopIteration), or the RPC is cancelled (an RpcError is raised).
    """

    _state: _RPCState
    _call: cygrpc.Call
    _request_deserializer: Optional[DeserializingFunction]

    def __init__(
        self,
        state: _RPCState,
        call: cygrpc.Call,
        request_deserializer: Optional[DeserializingFunction],
    ):
        self._state = state
        self._call = call
        self._request_deserializer = request_deserializer

    def _raise_or_start_receive_message(self) -> None:
        """Begin receiving the next message, or raise if the RPC is over."""
        if self._state.client is _CANCELLED:
            _raise_rpc_error(self._state)
        elif not _is_rpc_state_active(self._state):
            raise StopIteration()
        else:
            self._call.start_server_batch(
                (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),),
                _receive_message(
                    self._state, self._call, self._request_deserializer
                ),
            )
            self._state.due.add(_RECEIVE_MESSAGE_TOKEN)

    def _look_for_request(self) -> Any:
        """Return a received request, or None if one is still in flight."""
        if self._state.client is _CANCELLED:
            _raise_rpc_error(self._state)
        elif (
            self._state.request is None
            and _RECEIVE_MESSAGE_TOKEN not in self._state.due
        ):
            # No message and none pending: the client closed the stream.
            raise StopIteration()
        else:
            request = self._state.request
            self._state.request = None
            return request

        raise AssertionError()  # should never run

    def _next(self) -> Any:
        with self._state.condition:
            self._raise_or_start_receive_message()
            while True:
                self._state.condition.wait()
                request = self._look_for_request()
                if request is not None:
                    return request

    # Fix: the return annotation must be a string. A bare `_RequestIterator`
    # would be evaluated while the class body executes — before the class
    # name is bound — raising NameError at import time (this module does not
    # use `from __future__ import annotations`).
    def __iter__(self) -> "_RequestIterator":
        return self

    def __next__(self) -> Any:
        return self._next()

    def next(self) -> Any:
        """Python 2-style alias for __next__."""
        return self._next()
547
+
548
+
549
def _unary_request(
    rpc_event: cygrpc.BaseEvent,
    state: _RPCState,
    request_deserializer: Optional[DeserializingFunction],
) -> Callable[[], Any]:
    """Return a thunk that blocks until exactly one request message arrives.

    The thunk yields the deserialized request, or None when the RPC died or
    the client closed without sending a message (the latter fails the RPC
    with UNIMPLEMENTED).
    """

    def unary_request():
        with state.condition:
            if not _is_rpc_state_active(state):
                return None
            rpc_event.call.start_server_batch(
                (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),),
                _receive_message(
                    state, rpc_event.call, request_deserializer
                ),
            )
            state.due.add(_RECEIVE_MESSAGE_TOKEN)
            while True:
                state.condition.wait()
                if state.request is not None:
                    request = state.request
                    state.request = None
                    return request
                if state.client is _CLOSED:
                    # Half-close before any message: a unary request is
                    # required, so fail with UNIMPLEMENTED.
                    details = '"{}" requires exactly one request message.'.format(
                        rpc_event.call_details.method
                    )
                    _abort(
                        state,
                        rpc_event.call,
                        cygrpc.StatusCode.unimplemented,
                        _common.encode(details),
                    )
                    return None
                if state.client is _CANCELLED:
                    return None
                # Otherwise the message is still in flight: keep waiting.

    return unary_request
588
+
589
+
590
def _call_behavior(
    rpc_event: cygrpc.BaseEvent,
    state: _RPCState,
    behavior: ArityAgnosticMethodHandler,
    argument: Any,
    request_deserializer: Optional[DeserializingFunction],
    send_response_callback: Optional[Callable[[ResponseType], None]] = None,
) -> Tuple[Union[ResponseType, Iterator[ResponseType]], bool]:
    """Invoke the application handler inside a servicer context.

    Returns (result, True) on success, where result is the handler's
    response (or response iterator). On failure the RPC is aborted and
    (None, False) is returned.
    """
    from grpc import _create_servicer_context  # pytype: disable=pyi-error

    with _create_servicer_context(
        rpc_event, state, request_deserializer
    ) as context:
        try:
            if send_response_callback is not None:
                # Experimental non-blocking handlers also take the callback.
                result = behavior(argument, context, send_response_callback)
            else:
                result = behavior(argument, context)
            return result, True
        except Exception as exception:  # pylint: disable=broad-except
            with state.condition:
                if state.aborted:
                    _abort(
                        state,
                        rpc_event.call,
                        cygrpc.StatusCode.unknown,
                        b"RPC Aborted",
                    )
                elif exception not in state.rpc_errors:
                    try:
                        details = "Exception calling application: {}".format(
                            exception
                        )
                    except Exception:  # pylint: disable=broad-except
                        # str(exception) itself may raise.
                        details = (
                            "Calling application raised unprintable Exception!"
                        )
                    _LOGGER.exception(
                        traceback.format_exception(
                            type(exception),
                            exception,
                            exception.__traceback__,
                        )
                    )
                    traceback.print_exc()
                    _LOGGER.exception(details)
                    _abort(
                        state,
                        rpc_event.call,
                        cygrpc.StatusCode.unknown,
                        _common.encode(details),
                    )
            return None, False
646
+
647
+
648
def _take_response_from_response_iterator(
    rpc_event: cygrpc.BaseEvent,
    state: _RPCState,
    response_iterator: Iterator[ResponseType],
) -> Tuple[ResponseType, bool]:
    """Pull one response from the handler's iterator.

    Returns (response, True) on success, (None, True) at end of stream,
    and (None, False) after aborting the RPC on handler failure.
    """
    try:
        return next(response_iterator), True
    except StopIteration:
        # Normal end of the response stream.
        return None, True
    except Exception as exception:  # pylint: disable=broad-except
        with state.condition:
            if state.aborted:
                _abort(
                    state,
                    rpc_event.call,
                    cygrpc.StatusCode.unknown,
                    b"RPC Aborted",
                )
            elif exception not in state.rpc_errors:
                details = "Exception iterating responses: {}".format(exception)
                _LOGGER.exception(details)
                _abort(
                    state,
                    rpc_event.call,
                    cygrpc.StatusCode.unknown,
                    _common.encode(details),
                )
        return None, False
676
+
677
+
678
def _serialize_response(
    rpc_event: cygrpc.BaseEvent,
    state: _RPCState,
    response: Any,
    response_serializer: Optional[SerializingFunction],
) -> Optional[bytes]:
    """Serialize a response, aborting the RPC with INTERNAL on failure."""
    serialized = _common.serialize(response, response_serializer)
    if serialized is not None:
        return serialized
    with state.condition:
        _abort(
            state,
            rpc_event.call,
            cygrpc.StatusCode.internal,
            b"Failed to serialize response!",
        )
    return None
696
+
697
+
698
def _get_send_message_op_flags_from_state(
    state: _RPCState,
) -> Union[int, cygrpc.WriteFlag]:
    """Write flags for the next SendMessage op, honoring the one-shot
    disable_next_compression request."""
    if state.disable_next_compression:
        return cygrpc.WriteFlag.no_compress
    return _EMPTY_FLAGS
705
+
706
+
707
def _reset_per_message_state(state: _RPCState) -> None:
    """Clear per-message flags after each outbound message is enqueued."""
    with state.condition:
        state.disable_next_compression = False
710
+
711
+
712
def _send_response(
    rpc_event: cygrpc.BaseEvent, state: _RPCState, serialized_response: bytes
) -> bool:
    """Send one serialized message, blocking until the send completes.

    Returns True iff the RPC is still alive afterwards.
    """
    with state.condition:
        if not _is_rpc_state_active(state):
            return False
        if state.initial_metadata_allowed:
            # First write on the call: piggyback the initial metadata.
            operations = (
                _get_initial_metadata_operation(state, None),
                cygrpc.SendMessageOperation(
                    serialized_response,
                    _get_send_message_op_flags_from_state(state),
                ),
            )
            state.initial_metadata_allowed = False
            token = _SEND_INITIAL_METADATA_AND_SEND_MESSAGE_TOKEN
        else:
            operations = (
                cygrpc.SendMessageOperation(
                    serialized_response,
                    _get_send_message_op_flags_from_state(state),
                ),
            )
            token = _SEND_MESSAGE_TOKEN
        rpc_event.call.start_server_batch(
            operations, _send_message(state, token)
        )
        state.due.add(token)
        _reset_per_message_state(state)
        # Block until the batch-completion callback removes our token.
        while token in state.due:
            state.condition.wait()
        return _is_rpc_state_active(state)
746
+
747
+
748
def _status(
    rpc_event: cygrpc.BaseEvent,
    state: _RPCState,
    serialized_response: Optional[bytes],
) -> None:
    """Send the final status (plus an optional last message) unless cancelled."""
    with state.condition:
        if state.client is _CANCELLED:
            return
        code = _completion_code(state)
        details = _details(state)
        operations = [
            cygrpc.SendStatusFromServerOperation(
                state.trailing_metadata, code, details, _EMPTY_FLAGS
            ),
        ]
        if state.initial_metadata_allowed:
            # Initial metadata was never sent; include it in this batch.
            operations.append(_get_initial_metadata_operation(state, None))
        if serialized_response is not None:
            operations.append(
                cygrpc.SendMessageOperation(
                    serialized_response,
                    _get_send_message_op_flags_from_state(state),
                )
            )
        rpc_event.call.start_server_batch(
            operations,
            _send_status_from_server(state, _SEND_STATUS_FROM_SERVER_TOKEN),
        )
        state.statused = True
        _reset_per_message_state(state)
        state.due.add(_SEND_STATUS_FROM_SERVER_TOKEN)
778
+
779
+
780
def _unary_response_in_pool(
    rpc_event: cygrpc.BaseEvent,
    state: _RPCState,
    behavior: ArityAgnosticMethodHandler,
    argument_thunk: Callable[[], Any],
    # Fix: was annotated Optional[SerializingFunction]; this parameter is a
    # request *deserializer*, matching _stream_response_in_pool's signature.
    request_deserializer: Optional[DeserializingFunction],
    response_serializer: Optional[SerializingFunction],
) -> None:
    """Run a single-response handler on a pool thread and send the result.

    Installs the Core call context for the duration of the handler, obtains
    the (single) request via argument_thunk, invokes the behavior, and sends
    the serialized response together with the final status.
    """
    cygrpc.install_context_from_request_call_event(rpc_event)

    try:
        argument = argument_thunk()
        if argument is not None:
            response, proceed = _call_behavior(
                rpc_event, state, behavior, argument, request_deserializer
            )
            if proceed:
                serialized_response = _serialize_response(
                    rpc_event, state, response, response_serializer
                )
                if serialized_response is not None:
                    _status(rpc_event, state, serialized_response)
    except Exception:  # pylint: disable=broad-except
        traceback.print_exc()
    finally:
        cygrpc.uninstall_context()
806
+
807
+
808
def _stream_response_in_pool(
    rpc_event: cygrpc.BaseEvent,
    state: _RPCState,
    behavior: ArityAgnosticMethodHandler,
    argument_thunk: Callable[[], Any],
    request_deserializer: Optional[DeserializingFunction],
    response_serializer: Optional[SerializingFunction],
) -> None:
    """Run a streaming-response handler on a pool thread and send its output."""
    cygrpc.install_context_from_request_call_event(rpc_event)

    def send_response(response: Any) -> None:
        # None marks end-of-stream: emit the final status instead.
        if response is None:
            _status(rpc_event, state, None)
            return
        serialized_response = _serialize_response(
            rpc_event, state, response, response_serializer
        )
        if serialized_response is not None:
            _send_response(rpc_event, state, serialized_response)

    try:
        argument = argument_thunk()
        if argument is not None:
            non_blocking = (
                hasattr(behavior, "experimental_non_blocking")
                and behavior.experimental_non_blocking
            )
            if non_blocking:
                # Experimental API: the handler drives sends via callback.
                _call_behavior(
                    rpc_event,
                    state,
                    behavior,
                    argument,
                    request_deserializer,
                    send_response_callback=send_response,
                )
            else:
                response_iterator, proceed = _call_behavior(
                    rpc_event, state, behavior, argument, request_deserializer
                )
                if proceed:
                    _send_message_callback_to_blocking_iterator_adapter(
                        rpc_event, state, send_response, response_iterator
                    )
    except Exception:  # pylint: disable=broad-except
        traceback.print_exc()
    finally:
        cygrpc.uninstall_context()
855
+
856
+
857
def _is_rpc_state_active(state: _RPCState) -> bool:
    """An RPC is active until the client cancels or status has been sent."""
    return state.client is not _CANCELLED and not state.statused
859
+
860
+
861
def _send_message_callback_to_blocking_iterator_adapter(
    rpc_event: cygrpc.BaseEvent,
    state: _RPCState,
    send_response_callback: Callable[[ResponseType], None],
    response_iterator: Iterator[ResponseType],
) -> None:
    """Drain a blocking response iterator through the send callback.

    Stops when the iterator signals failure or the RPC goes inactive; at
    normal exhaustion the callback is invoked with None to send status.
    """
    proceed = True
    while proceed:
        response, proceed = _take_response_from_response_iterator(
            rpc_event, state, response_iterator
        )
        if proceed:
            send_response_callback(response)
            proceed = _is_rpc_state_active(state)
877
+
878
+
879
def _select_thread_pool_for_behavior(
    behavior: ArityAgnosticMethodHandler,
    default_thread_pool: futures.ThreadPoolExecutor,
) -> futures.ThreadPoolExecutor:
    """Prefer the handler's experimental per-method thread pool when set."""
    custom_pool = getattr(behavior, "experimental_thread_pool", None)
    if isinstance(custom_pool, futures.ThreadPoolExecutor):
        return custom_pool
    return default_thread_pool
889
+
890
+
891
def _handle_unary_unary(
    rpc_event: cygrpc.BaseEvent,
    state: _RPCState,
    method_handler: grpc.RpcMethodHandler,
    default_thread_pool: futures.ThreadPoolExecutor,
) -> futures.Future:
    """Schedule a unary-unary RPC on the appropriate thread pool."""
    unary_request = _unary_request(
        rpc_event, state, method_handler.request_deserializer
    )
    thread_pool = _select_thread_pool_for_behavior(
        method_handler.unary_unary, default_thread_pool
    )
    return thread_pool.submit(
        state.context.run,
        _unary_response_in_pool,
        rpc_event,
        state,
        method_handler.unary_unary,
        unary_request,
        method_handler.request_deserializer,
        method_handler.response_serializer,
    )
913
+
914
+
915
def _handle_unary_stream(
    rpc_event: cygrpc.BaseEvent,
    state: _RPCState,
    method_handler: grpc.RpcMethodHandler,
    default_thread_pool: futures.ThreadPoolExecutor,
) -> futures.Future:
    """Schedule a unary-stream RPC on the appropriate thread pool."""
    unary_request = _unary_request(
        rpc_event, state, method_handler.request_deserializer
    )
    thread_pool = _select_thread_pool_for_behavior(
        method_handler.unary_stream, default_thread_pool
    )
    return thread_pool.submit(
        state.context.run,
        _stream_response_in_pool,
        rpc_event,
        state,
        method_handler.unary_stream,
        unary_request,
        method_handler.request_deserializer,
        method_handler.response_serializer,
    )
937
+
938
+
939
def _handle_stream_unary(
    rpc_event: cygrpc.BaseEvent,
    state: _RPCState,
    method_handler: grpc.RpcMethodHandler,
    default_thread_pool: futures.ThreadPoolExecutor,
) -> futures.Future:
    """Schedule a stream-unary RPC on the appropriate thread pool."""
    request_iterator = _RequestIterator(
        state, rpc_event.call, method_handler.request_deserializer
    )
    thread_pool = _select_thread_pool_for_behavior(
        method_handler.stream_unary, default_thread_pool
    )
    return thread_pool.submit(
        state.context.run,
        _unary_response_in_pool,
        rpc_event,
        state,
        method_handler.stream_unary,
        lambda: request_iterator,
        method_handler.request_deserializer,
        method_handler.response_serializer,
    )
961
+
962
+
963
def _handle_stream_stream(
    rpc_event: cygrpc.BaseEvent,
    state: _RPCState,
    method_handler: grpc.RpcMethodHandler,
    default_thread_pool: futures.ThreadPoolExecutor,
) -> futures.Future:
    """Schedule a stream-stream RPC on the appropriate thread pool."""
    request_iterator = _RequestIterator(
        state, rpc_event.call, method_handler.request_deserializer
    )
    thread_pool = _select_thread_pool_for_behavior(
        method_handler.stream_stream, default_thread_pool
    )
    return thread_pool.submit(
        state.context.run,
        _stream_response_in_pool,
        rpc_event,
        state,
        method_handler.stream_stream,
        lambda: request_iterator,
        method_handler.request_deserializer,
        method_handler.response_serializer,
    )
985
+
986
+
987
def _find_method_handler(
    rpc_event: cygrpc.BaseEvent,
    state: _RPCState,
    method_with_handler: _Method,
    interceptor_pipeline: Optional[_interceptor._ServicePipeline],
) -> Optional[grpc.RpcMethodHandler]:
    """Resolve the handler for this call, running any interceptor pipeline."""

    def query_handlers(
        handler_call_details: _HandlerCallDetails,
    ) -> Optional[grpc.RpcMethodHandler]:
        return method_with_handler.handler(handler_call_details)

    # Registered methods carry their own name; otherwise fall back to the
    # method string from the call details.
    method_name = method_with_handler.name() or _common.decode(
        rpc_event.call_details.method
    )

    handler_call_details = _HandlerCallDetails(
        method_name,
        rpc_event.invocation_metadata,
    )

    if interceptor_pipeline is None:
        return state.context.run(query_handlers, handler_call_details)
    return state.context.run(
        interceptor_pipeline.execute, query_handlers, handler_call_details
    )
1013
+
1014
+
1015
def _reject_rpc(
    rpc_event: cygrpc.BaseEvent,
    rpc_state: _RPCState,
    status: cygrpc.StatusCode,
    details: bytes,
):
    """Immediately fail an incoming RPC with the given status and details."""
    operations = (
        _get_initial_metadata_operation(rpc_state, None),
        cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),
        cygrpc.SendStatusFromServerOperation(
            None, status, details, _EMPTY_FLAGS
        ),
    )
    # The completion tag just hands back the state with no callbacks to run.
    rpc_event.call.start_server_batch(
        operations,
        lambda ignored_event: (
            rpc_state,
            (),
        ),
    )
1035
+
1036
+
1037
def _handle_with_method_handler(
    rpc_event: cygrpc.BaseEvent,
    state: _RPCState,
    method_handler: grpc.RpcMethodHandler,
    thread_pool: futures.ThreadPoolExecutor,
) -> futures.Future:
    """Arm close-detection on the call and dispatch by RPC cardinality."""
    with state.condition:
        rpc_event.call.start_server_batch(
            (cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),),
            _receive_close_on_server(state),
        )
        state.due.add(_RECEIVE_CLOSE_ON_SERVER_TOKEN)
        # Pick the handler entry point matching the method's streaming shape.
        if method_handler.request_streaming:
            handle = (
                _handle_stream_stream
                if method_handler.response_streaming
                else _handle_stream_unary
            )
        else:
            handle = (
                _handle_unary_stream
                if method_handler.response_streaming
                else _handle_unary_unary
            )
        return handle(rpc_event, state, method_handler, thread_pool)
1067
+
1068
+
1069
def _handle_call(
    rpc_event: cygrpc.BaseEvent,
    method_with_handler: _Method,
    interceptor_pipeline: Optional[_interceptor._ServicePipeline],
    thread_pool: futures.ThreadPoolExecutor,
    concurrency_exceeded: bool,
) -> Tuple[Optional[_RPCState], Optional[futures.Future]]:
    """Handles RPC based on provided handlers.

    When receiving a call event from Core, registered method will have its
    name as tag, we pass the tag as registered_method_name to this method,
    then we can find the handler in registered_method_handlers based on
    the method name.

    For call event with unregistered method, the method name will be included
    in rpc_event.call_details.method and we need to query the generics handlers
    to find the actual handler.
    """
    if not rpc_event.success:
        return None, None
    if not (rpc_event.call_details.method or method_with_handler.name()):
        # No method name at all: nothing to dispatch.
        return None, None
    rpc_state = _RPCState()
    try:
        method_handler = _find_method_handler(
            rpc_event,
            rpc_state,
            method_with_handler,
            interceptor_pipeline,
        )
    except Exception as exception:  # pylint: disable=broad-except
        details = "Exception servicing handler: {}".format(exception)
        _LOGGER.exception(details)
        _reject_rpc(
            rpc_event,
            rpc_state,
            cygrpc.StatusCode.unknown,
            b"Error in service handler!",
        )
        return rpc_state, None
    if method_handler is None:
        _reject_rpc(
            rpc_event,
            rpc_state,
            cygrpc.StatusCode.unimplemented,
            b"Method not found!",
        )
        return rpc_state, None
    if concurrency_exceeded:
        _reject_rpc(
            rpc_event,
            rpc_state,
            cygrpc.StatusCode.resource_exhausted,
            b"Concurrent RPC limit exceeded!",
        )
        return rpc_state, None
    return (
        rpc_state,
        _handle_with_method_handler(
            rpc_event, rpc_state, method_handler, thread_pool
        ),
    )
1133
+
1134
+
1135
+ @enum.unique
1136
+ class _ServerStage(enum.Enum):
1137
+ STOPPED = "stopped"
1138
+ STARTED = "started"
1139
+ GRACE = "grace"
1140
+
1141
+
1142
class _ServerState(object):
    """All mutable state shared between a server's public API, its serving
    thread, and in-flight RPCs. Guarded by ``lock`` unless noted."""

    lock: threading.RLock
    completion_queue: cygrpc.CompletionQueue
    server: cygrpc.Server
    generic_handlers: List[grpc.GenericRpcHandler]
    registered_method_handlers: Dict[str, grpc.RpcMethodHandler]
    interceptor_pipeline: Optional[_interceptor._ServicePipeline]
    thread_pool: futures.ThreadPoolExecutor
    stage: _ServerStage
    termination_event: threading.Event
    shutdown_events: List[threading.Event]
    maximum_concurrent_rpcs: Optional[int]
    active_rpc_count: int
    rpc_states: Set[_RPCState]
    due: Set[str]
    server_deallocated: bool

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        completion_queue: cygrpc.CompletionQueue,
        server: cygrpc.Server,
        generic_handlers: Sequence[grpc.GenericRpcHandler],
        interceptor_pipeline: Optional[_interceptor._ServicePipeline],
        thread_pool: futures.ThreadPoolExecutor,
        maximum_concurrent_rpcs: Optional[int],
    ):
        self.lock = threading.RLock()
        self.completion_queue = completion_queue
        self.server = server
        self.generic_handlers = list(generic_handlers)
        self.interceptor_pipeline = interceptor_pipeline
        self.thread_pool = thread_pool
        self.stage = _ServerStage.STOPPED
        self.termination_event = threading.Event()
        self.shutdown_events = [self.termination_event]
        self.maximum_concurrent_rpcs = maximum_concurrent_rpcs
        self.active_rpc_count = 0
        self.registered_method_handlers = {}

        # TODO(https://github.com/grpc/grpc/issues/6597): eliminate these fields.
        self.rpc_states = set()
        self.due = set()

        # A "volatile" flag to interrupt the daemon serving thread
        self.server_deallocated = False
1188
+
1189
+
1190
def _add_generic_handlers(
    state: _ServerState, generic_handlers: Iterable[grpc.GenericRpcHandler]
) -> None:
    """Append generic handlers under the server lock."""
    with state.lock:
        state.generic_handlers.extend(generic_handlers)
1195
+
1196
+
1197
def _add_registered_method_handlers(
    state: _ServerState, method_handlers: Dict[str, grpc.RpcMethodHandler]
) -> None:
    """Merge fully-qualified-method -> handler entries under the server lock."""
    with state.lock:
        state.registered_method_handlers.update(method_handlers)
1202
+
1203
+
1204
def _add_insecure_port(state: _ServerState, address: bytes) -> int:
    """Bind an insecure listening port; returns the bound port number."""
    with state.lock:
        return state.server.add_http2_port(address)
1207
+
1208
+
1209
def _add_secure_port(
    state: _ServerState,
    address: bytes,
    server_credentials: grpc.ServerCredentials,
) -> int:
    """Bind a TLS listening port; returns the bound port number."""
    with state.lock:
        return state.server.add_http2_port(
            address, server_credentials._credentials
        )
1218
+
1219
+
1220
def _request_call(state: _ServerState) -> None:
    """Ask Core for the next call to any non-registered method."""
    state.server.request_call(
        state.completion_queue, state.completion_queue, _REQUEST_CALL_TAG
    )
    state.due.add(_REQUEST_CALL_TAG)
1225
+
1226
+
1227
def _request_registered_call(state: _ServerState, method: str) -> None:
    """Ask Core for the next call to a registered method.

    The method name doubles as the completion-queue tag, which is how
    _process_event_and_continue later recognizes registered-method events.
    """
    registered_call_tag = method
    state.server.request_registered_call(
        state.completion_queue,
        state.completion_queue,
        method,
        registered_call_tag,
    )
    state.due.add(registered_call_tag)
1236
+
1237
+
1238
+ # TODO(https://github.com/grpc/grpc/issues/6597): delete this function.
1239
def _stop_serving(state: _ServerState) -> bool:
    """Destroy the server once nothing is outstanding.

    Returns True (and signals all shutdown events) only when there are no
    live RPC states and no pending completion-queue tags.
    """
    if state.rpc_states or state.due:
        return False
    state.server.destroy()
    for shutdown_event in state.shutdown_events:
        shutdown_event.set()
    state.stage = _ServerStage.STOPPED
    return True
1248
+
1249
+
1250
def _on_call_completed(state: _ServerState) -> None:
    """Decrement the active-RPC counter when an RPC's future resolves."""
    with state.lock:
        state.active_rpc_count -= 1
1253
+
1254
+
1255
+ # pylint: disable=too-many-branches
1256
def _process_event_and_continue(
    state: _ServerState, event: cygrpc.BaseEvent
) -> bool:
    """Dispatch one completion-queue event; return False when serving stops."""
    if event.tag is _SHUTDOWN_TAG:
        with state.lock:
            state.due.remove(_SHUTDOWN_TAG)
            # Serving continues only while teardown is incomplete.
            return not _stop_serving(state)
    if (
        event.tag is _REQUEST_CALL_TAG
        or event.tag in state.registered_method_handlers
    ):
        # A new incoming call (registered methods use their name as the tag).
        registered_method_name = None
        if event.tag in state.registered_method_handlers:
            registered_method_name = event.tag
            method_with_handler = _RegisteredMethod(
                registered_method_name,
                state.registered_method_handlers.get(
                    registered_method_name, None
                ),
            )
        else:
            method_with_handler = _GenericMethod(
                state.generic_handlers,
            )
        with state.lock:
            state.due.remove(event.tag)
            concurrency_exceeded = (
                state.maximum_concurrent_rpcs is not None
                and state.active_rpc_count >= state.maximum_concurrent_rpcs
            )
            rpc_state, rpc_future = _handle_call(
                event,
                method_with_handler,
                state.interceptor_pipeline,
                state.thread_pool,
                concurrency_exceeded,
            )
            if rpc_state is not None:
                state.rpc_states.add(rpc_state)
            if rpc_future is not None:
                state.active_rpc_count += 1
                rpc_future.add_done_callback(
                    lambda unused_future: _on_call_completed(state)
                )
            if state.stage is _ServerStage.STARTED:
                # Re-arm: request the next call of the same flavor.
                if registered_method_name in state.registered_method_handlers:
                    _request_registered_call(state, registered_method_name)
                else:
                    _request_call(state)
            elif _stop_serving(state):
                return False
        return True
    # Otherwise the tag is an RPC-level callback produced by a server batch.
    rpc_state, callbacks = event.tag(event)
    for callback in callbacks:
        try:
            callback()
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception("Exception calling callback!")
    if rpc_state is not None:
        with state.lock:
            state.rpc_states.remove(rpc_state)
            if _stop_serving(state):
                return False
    return True
1325
+
1326
+
1327
def _serve(state: _ServerState) -> None:
    """Daemon loop: poll the completion queue and dispatch events until done."""
    while True:
        timeout = time.time() + _DEALLOCATED_SERVER_CHECK_PERIOD_S
        event = state.completion_queue.poll(timeout)
        if state.server_deallocated:
            # The public _Server object was garbage-collected; wind down.
            _begin_shutdown_once(state)
        if event.completion_type != cygrpc.CompletionType.queue_timeout:
            if not _process_event_and_continue(state, event):
                return
        # We want to force the deletion of the previous event
        # ~before~ we poll again; if the event has a reference
        # to a shutdown Call object, this can induce spinlock.
        event = None
1340
+
1341
+
1342
def _begin_shutdown_once(state: _ServerState) -> None:
    """Kick off graceful shutdown; a no-op if it has already begun."""
    with state.lock:
        if state.stage is _ServerStage.STARTED:
            state.server.shutdown(state.completion_queue, _SHUTDOWN_TAG)
            state.stage = _ServerStage.GRACE
            state.due.add(_SHUTDOWN_TAG)
1348
+
1349
+
1350
def _stop(state: _ServerState, grace: Optional[float]) -> threading.Event:
    """Stop the server, returning an Event set on full termination.

    With a grace period, in-flight calls are cancelled by a helper thread
    after the period elapses and the Event is returned immediately. With
    grace=None, calls are cancelled at once and this blocks until the
    server has fully shut down.
    """
    with state.lock:
        if state.stage is _ServerStage.STOPPED:
            # Already stopped: hand back an already-set event.
            shutdown_event = threading.Event()
            shutdown_event.set()
            return shutdown_event
        _begin_shutdown_once(state)
        shutdown_event = threading.Event()
        state.shutdown_events.append(shutdown_event)
        if grace is None:
            state.server.cancel_all_calls()
        else:

            def cancel_all_calls_after_grace():
                shutdown_event.wait(timeout=grace)
                with state.lock:
                    state.server.cancel_all_calls()

            grace_thread = threading.Thread(target=cancel_all_calls_after_grace)
            grace_thread.start()
            return shutdown_event
    # grace is None: block until the server has actually terminated.
    shutdown_event.wait()
    return shutdown_event
1374
+
1375
+
1376
def _start(state: _ServerState) -> None:
    """Start the Core server and spawn the daemon serving thread."""
    with state.lock:
        if state.stage is not _ServerStage.STOPPED:
            raise ValueError("Cannot start already-started server!")
        state.server.start()
        state.stage = _ServerStage.STARTED
        # Request a call for each registered method so we can handle any of them.
        for method in state.registered_method_handlers:
            _request_registered_call(state, method)
        # Also request a call for non-registered method.
        _request_call(state)
        serving_thread = threading.Thread(target=_serve, args=(state,))
        serving_thread.daemon = True
        serving_thread.start()
1390
+
1391
+
1392
def _validate_generic_rpc_handlers(
    generic_rpc_handlers: Iterable[grpc.GenericRpcHandler],
) -> None:
    """Raise AttributeError for any handler lacking a ``service`` attribute."""
    for generic_rpc_handler in generic_rpc_handlers:
        if getattr(generic_rpc_handler, "service", None) is None:
            raise AttributeError(
                '"{}" must conform to grpc.GenericRpcHandler type but does '
                'not have "service" method!'.format(generic_rpc_handler)
            )
1402
+
1403
+
1404
def _augment_options(
    base_options: Sequence[ChannelArgumentType],
    compression: Optional[grpc.Compression],
    xds: bool,
) -> Sequence[ChannelArgumentType]:
    """Append compression and (for xDS) call-tracer-factory channel options."""
    compression_option = _compression.create_channel_option(compression)
    tracer_factory_option = (
        _observability.create_server_call_tracer_factory_option(xds)
    )
    return tuple(base_options) + compression_option + tracer_factory_option
1418
+
1419
+
1420
class _Server(grpc.Server):
    """Thread-pool-backed synchronous implementation of grpc.Server."""

    _state: _ServerState

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        thread_pool: futures.ThreadPoolExecutor,
        generic_handlers: Sequence[grpc.GenericRpcHandler],
        interceptors: Sequence[grpc.ServerInterceptor],
        options: Sequence[ChannelArgumentType],
        maximum_concurrent_rpcs: Optional[int],
        compression: Optional[grpc.Compression],
        xds: bool,
    ):
        completion_queue = cygrpc.CompletionQueue()
        server = cygrpc.Server(_augment_options(options, compression, xds), xds)
        server.register_completion_queue(completion_queue)
        self._state = _ServerState(
            completion_queue,
            server,
            generic_handlers,
            _interceptor.service_pipeline(interceptors),
            thread_pool,
            maximum_concurrent_rpcs,
        )
        # Kept directly so registered methods can be added before start().
        self._cy_server = server

    def add_generic_rpc_handlers(
        self, generic_rpc_handlers: Iterable[grpc.GenericRpcHandler]
    ) -> None:
        _validate_generic_rpc_handlers(generic_rpc_handlers)
        _add_generic_handlers(self._state, generic_rpc_handlers)

    def add_registered_method_handlers(
        self,
        service_name: str,
        method_handlers: Dict[str, grpc.RpcMethodHandler],
    ) -> None:
        """Register per-method handlers with Core under their full names."""
        # Can't register method once server started.
        with self._state.lock:
            if self._state.stage is _ServerStage.STARTED:
                return

        # TODO(xuanwn): We should validate method_handlers first.
        method_to_handlers = {
            _common.fully_qualified_method(service_name, method): method_handler
            for method, method_handler in method_handlers.items()
        }
        for fully_qualified_method in method_to_handlers:
            self._cy_server.register_method(fully_qualified_method)
        _add_registered_method_handlers(self._state, method_to_handlers)

    def add_insecure_port(self, address: str) -> int:
        bound_port = _add_insecure_port(self._state, _common.encode(address))
        return _common.validate_port_binding_result(address, bound_port)

    def add_secure_port(
        self, address: str, server_credentials: grpc.ServerCredentials
    ) -> int:
        bound_port = _add_secure_port(
            self._state, _common.encode(address), server_credentials
        )
        return _common.validate_port_binding_result(address, bound_port)

    def start(self) -> None:
        _start(self._state)

    def wait_for_termination(self, timeout: Optional[float] = None) -> bool:
        # NOTE(https://bugs.python.org/issue35935)
        # Remove this workaround once threading.Event.wait() is working with
        # CTRL+C across platforms.
        return _common.wait(
            self._state.termination_event.wait,
            self._state.termination_event.is_set,
            timeout=timeout,
        )

    def stop(self, grace: Optional[float]) -> threading.Event:
        return _stop(self._state, grace)

    def __del__(self):
        if hasattr(self, "_state"):
            # We can not grab a lock in __del__(), so set a flag to signal the
            # serving daemon thread (if it exists) to initiate shutdown.
            self._state.server_deallocated = True
1508
+
1509
+
1510
def create_server(
    thread_pool: futures.ThreadPoolExecutor,
    generic_rpc_handlers: Sequence[grpc.GenericRpcHandler],
    interceptors: Sequence[grpc.ServerInterceptor],
    options: Sequence[ChannelArgumentType],
    maximum_concurrent_rpcs: Optional[int],
    compression: Optional[grpc.Compression],
    xds: bool,
) -> _Server:
    """Validate the generic handlers and construct a _Server."""
    _validate_generic_rpc_handlers(generic_rpc_handlers)
    return _Server(
        thread_pool,
        generic_rpc_handlers,
        interceptors,
        options,
        maximum_concurrent_rpcs,
        compression,
        xds,
    )
lib/python3.10/site-packages/grpc/_simple_stubs.py ADDED
@@ -0,0 +1,588 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 The gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Functions that obviate explicit stubs and explicit channels."""
15
+
16
+ import collections
17
+ import datetime
18
+ import logging
19
+ import os
20
+ import threading
21
+ from typing import (
22
+ Any,
23
+ AnyStr,
24
+ Callable,
25
+ Dict,
26
+ Iterator,
27
+ Optional,
28
+ Sequence,
29
+ Tuple,
30
+ TypeVar,
31
+ Union,
32
+ )
33
+
34
+ import grpc
35
+ from grpc.experimental import experimental_api
36
+
37
RequestType = TypeVar("RequestType")
ResponseType = TypeVar("ResponseType")

OptionsType = Sequence[Tuple[str, str]]
# A cached channel is keyed by its full configuration: target address,
# channel options, credentials, and compression. Two calls with the same
# key share one underlying channel.
CacheKey = Tuple[
    str,
    OptionsType,
    Optional[grpc.ChannelCredentials],
    Optional[grpc.Compression],
]

_LOGGER = logging.getLogger(__name__)

# How long an unused channel remains cached before the background eviction
# thread closes it. Override with this env var (seconds, parsed as float).
_EVICTION_PERIOD_KEY = "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS"
if _EVICTION_PERIOD_KEY in os.environ:
    _EVICTION_PERIOD = datetime.timedelta(
        seconds=float(os.environ[_EVICTION_PERIOD_KEY])
    )
    _LOGGER.debug(
        "Setting managed channel eviction period to %s", _EVICTION_PERIOD
    )
else:
    _EVICTION_PERIOD = datetime.timedelta(minutes=10)

# Maximum number of channels kept in the cache; the least recently used
# channel is evicted once this is exceeded.
_MAXIMUM_CHANNELS_KEY = "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM"
if _MAXIMUM_CHANNELS_KEY in os.environ:
    _MAXIMUM_CHANNELS = int(os.environ[_MAXIMUM_CHANNELS_KEY])
    _LOGGER.debug("Setting maximum managed channels to %d", _MAXIMUM_CHANNELS)
else:
    _MAXIMUM_CHANNELS = 2**8

# Default per-RPC timeout (seconds) applied when the caller passes none.
_DEFAULT_TIMEOUT_KEY = "GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS"
if _DEFAULT_TIMEOUT_KEY in os.environ:
    _DEFAULT_TIMEOUT = float(os.environ[_DEFAULT_TIMEOUT_KEY])
    _LOGGER.debug("Setting default timeout seconds to %f", _DEFAULT_TIMEOUT)
else:
    _DEFAULT_TIMEOUT = 60.0
74
+
75
+
76
def _create_channel(
    target: str,
    options: Sequence[Tuple[str, str]],
    channel_credentials: Optional[grpc.ChannelCredentials],
    compression: Optional[grpc.Compression],
) -> grpc.Channel:
    """Create a new secure channel for *target* with the given configuration.

    Args:
      target: The server address.
      options: Channel arguments forwarded to gRPC Core.
      channel_credentials: Credentials applied to the whole channel.
      compression: Optional channel-wide compression method.

    Returns:
      A newly created grpc.Channel (never cached here; caching is the
      caller's responsibility).
    """
    # Use lazy %-style arguments so the message is only formatted when
    # DEBUG logging is actually enabled (the original eagerly built an
    # f-string on every call).
    _LOGGER.debug(
        "Creating secure channel with credentials '%s', "
        "options '%s' and compression '%s'",
        channel_credentials,
        options,
        compression,
    )
    return grpc.secure_channel(
        target,
        credentials=channel_credentials,
        options=options,
        compression=compression,
    )
92
+
93
+
94
class ChannelCache:
    """Process-wide singleton cache of managed channels.

    Channels are stored in insertion/use order (OrderedDict) together with
    their eviction deadline. A daemon thread evicts channels that are
    overdue or in excess of _MAXIMUM_CHANNELS.
    """

    # NOTE(rbellevi): Untyped due to reference cycle.
    _singleton = None
    _lock: threading.RLock = threading.RLock()
    _condition: threading.Condition = threading.Condition(lock=_lock)
    _eviction_ready: threading.Event = threading.Event()

    # Maps a channel configuration to (channel, eviction deadline).
    _mapping: Dict[CacheKey, Tuple[grpc.Channel, datetime.datetime]]
    _eviction_thread: threading.Thread

    def __init__(self):
        self._mapping = collections.OrderedDict()
        # Daemon thread: must not keep the process alive at interpreter exit.
        self._eviction_thread = threading.Thread(
            target=ChannelCache._perform_evictions, daemon=True
        )
        self._eviction_thread.start()

    @staticmethod
    def get():
        """Return the singleton cache, creating it (and its eviction
        thread) on first use. Blocks until the eviction thread is running."""
        with ChannelCache._lock:
            if ChannelCache._singleton is None:
                ChannelCache._singleton = ChannelCache()
        ChannelCache._eviction_ready.wait()
        return ChannelCache._singleton

    def _evict_locked(self, key: CacheKey):
        # Caller must hold ChannelCache._lock.
        channel, _ = self._mapping.pop(key)
        _LOGGER.debug(
            "Evicting channel %s with configuration %s.", channel, key
        )
        channel.close()
        del channel

    @staticmethod
    def _perform_evictions():
        """Eviction-thread main loop: close overdue or excess channels."""
        while True:
            with ChannelCache._lock:
                ChannelCache._eviction_ready.set()
                if not ChannelCache._singleton._mapping:
                    # Nothing cached: sleep until get_channel() notifies.
                    ChannelCache._condition.wait()
                elif len(ChannelCache._singleton._mapping) > _MAXIMUM_CHANNELS:
                    # Over capacity: evict the least recently used entry
                    # (the first key, since get_channel re-inserts on use).
                    key = next(iter(ChannelCache._singleton._mapping.keys()))
                    ChannelCache._singleton._evict_locked(key)
                    # And immediately reevaluate.
                else:
                    key, (_, eviction_time) = next(
                        iter(ChannelCache._singleton._mapping.items())
                    )
                    now = datetime.datetime.now()
                    if eviction_time <= now:
                        ChannelCache._singleton._evict_locked(key)
                        continue
                    else:
                        time_to_eviction = (eviction_time - now).total_seconds()
                        # NOTE: We aim to *eventually* coalesce to a state in
                        # which no overdue channels are in the cache and the
                        # length of the cache is no longer than
                        # _MAXIMUM_CHANNELS. We tolerate momentary states in
                        # which these two criteria are not met.
                        ChannelCache._condition.wait(timeout=time_to_eviction)

    def get_channel(
        self,
        target: str,
        options: Sequence[Tuple[str, str]],
        channel_credentials: Optional[grpc.ChannelCredentials],
        insecure: bool,
        compression: Optional[grpc.Compression],
        method: str,
        _registered_method: bool,
    ) -> Tuple[grpc.Channel, Optional[int]]:
        """Get a channel from cache or creates a new channel.

        This method also takes care of register method for channel,
        which means we'll register a new call handle if we're calling a
        non-registered method for an existing channel.

        Returns:
          A tuple with two items. The first item is the channel, second item is
          the call handle if the method is registered, None if it's not registered.
        """
        if insecure and channel_credentials:
            raise ValueError(
                "The insecure option is mutually exclusive with "
                + "the channel_credentials option. Please use one "
                + "or the other."
            )
        if insecure:
            channel_credentials = (
                grpc.experimental.insecure_channel_credentials()
            )
        elif channel_credentials is None:
            _LOGGER.debug("Defaulting to SSL channel credentials.")
            channel_credentials = grpc.ssl_channel_credentials()
        key = (target, options, channel_credentials, compression)
        with self._lock:
            channel_data = self._mapping.get(key, None)
            call_handle = None
            if channel_data is not None:
                channel = channel_data[0]
                # Register a new call handle if we're calling a registered method for an
                # existing channel and this method is not registered.
                if _registered_method:
                    call_handle = channel._get_registered_call_handle(method)
                # Pop and re-insert to move the entry to the end of the
                # OrderedDict (most recently used) with a fresh deadline.
                self._mapping.pop(key)
                self._mapping[key] = (
                    channel,
                    datetime.datetime.now() + _EVICTION_PERIOD,
                )
                return channel, call_handle
            else:
                channel = _create_channel(
                    target, options, channel_credentials, compression
                )
                if _registered_method:
                    call_handle = channel._get_registered_call_handle(method)
                self._mapping[key] = (
                    channel,
                    datetime.datetime.now() + _EVICTION_PERIOD,
                )
                # Wake the eviction thread when the cache transitions from
                # empty (it is blocked in wait()) or grows past capacity.
                if (
                    len(self._mapping) == 1
                    or len(self._mapping) >= _MAXIMUM_CHANNELS
                ):
                    self._condition.notify()
                return channel, call_handle

    def _test_only_channel_count(self) -> int:
        # Exposed for tests; takes the lock for a consistent count.
        with self._lock:
            return len(self._mapping)
224
+
225
+
226
@experimental_api
# pylint: disable=too-many-locals
def unary_unary(
    request: RequestType,
    target: str,
    method: str,
    request_serializer: Optional[Callable[[Any], bytes]] = None,
    response_deserializer: Optional[Callable[[bytes], Any]] = None,
    options: Sequence[Tuple[AnyStr, AnyStr]] = (),
    channel_credentials: Optional[grpc.ChannelCredentials] = None,
    insecure: bool = False,
    call_credentials: Optional[grpc.CallCredentials] = None,
    compression: Optional[grpc.Compression] = None,
    wait_for_ready: Optional[bool] = None,
    timeout: Optional[float] = _DEFAULT_TIMEOUT,
    metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
    _registered_method: Optional[bool] = False,
) -> ResponseType:
    """Invokes a unary-unary RPC without an explicitly specified channel.

    THIS IS AN EXPERIMENTAL API.

    This is backed by a per-process cache of channels. Channels are evicted
    from the cache after a fixed period by a background thread. Channels will
    also be evicted if more than a configured maximum accumulate.

    The default eviction period is 10 minutes. One may set the environment
    variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.

    The default maximum number of channels is 256. One may set the
    environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
    this.

    Args:
      request: The request value for the RPC.
      target: The server address.
      method: The name of the RPC method.
      request_serializer: Optional :term:`serializer` for serializing the request
        message. Request goes unserialized in case None is passed.
      response_deserializer: Optional :term:`deserializer` for deserializing the response
        message. Response goes undeserialized in case None is passed.
      options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
        runtime) to configure the channel.
      channel_credentials: A credential applied to the whole channel, e.g. the
        return value of grpc.ssl_channel_credentials() or
        grpc.insecure_channel_credentials().
      insecure: If True, specifies channel_credentials as
        :term:`grpc.insecure_channel_credentials()`. This option is mutually
        exclusive with the `channel_credentials` option.
      call_credentials: A call credential applied to each call individually,
        e.g. the output of grpc.metadata_call_credentials() or
        grpc.access_token_call_credentials().
      compression: An optional value indicating the compression method to be
        used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
      wait_for_ready: An optional flag indicating whether the RPC should fail
        immediately if the connection is not ready at the time the RPC is
        invoked, or if it should wait until the connection to the server
        becomes ready. When using this option, the user will likely also want
        to set a timeout. Defaults to True.
      timeout: An optional duration of time in seconds to allow for the RPC,
        after which an exception will be raised. If timeout is unspecified,
        defaults to a timeout controlled by the
        GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
        unset, defaults to 60 seconds. Supply a value of None to indicate that
        no timeout should be enforced.
      metadata: Optional metadata to send to the server.

    Returns:
      The response to the RPC.
    """
    channel, method_handle = ChannelCache.get().get_channel(
        target,
        options,
        channel_credentials,
        insecure,
        compression,
        method,
        _registered_method,
    )
    multicallable = channel.unary_unary(
        method, request_serializer, response_deserializer, method_handle
    )
    # Per the docstring, wait_for_ready defaults to True when unspecified.
    wait_for_ready = wait_for_ready if wait_for_ready is not None else True
    return multicallable(
        request,
        metadata=metadata,
        wait_for_ready=wait_for_ready,
        credentials=call_credentials,
        timeout=timeout,
    )
316
+
317
+
318
@experimental_api
# pylint: disable=too-many-locals
def unary_stream(
    request: RequestType,
    target: str,
    method: str,
    request_serializer: Optional[Callable[[Any], bytes]] = None,
    response_deserializer: Optional[Callable[[bytes], Any]] = None,
    options: Sequence[Tuple[AnyStr, AnyStr]] = (),
    channel_credentials: Optional[grpc.ChannelCredentials] = None,
    insecure: bool = False,
    call_credentials: Optional[grpc.CallCredentials] = None,
    compression: Optional[grpc.Compression] = None,
    wait_for_ready: Optional[bool] = None,
    timeout: Optional[float] = _DEFAULT_TIMEOUT,
    metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
    _registered_method: Optional[bool] = False,
) -> Iterator[ResponseType]:
    """Invokes a unary-stream RPC without an explicitly specified channel.

    THIS IS AN EXPERIMENTAL API.

    This is backed by a per-process cache of channels. Channels are evicted
    from the cache after a fixed period by a background thread. Channels will
    also be evicted if more than a configured maximum accumulate.

    The default eviction period is 10 minutes. One may set the environment
    variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.

    The default maximum number of channels is 256. One may set the
    environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
    this.

    Args:
      request: The request value for the RPC.
      target: The server address.
      method: The name of the RPC method.
      request_serializer: Optional :term:`serializer` for serializing the request
        message. Request goes unserialized in case None is passed.
      response_deserializer: Optional :term:`deserializer` for deserializing the response
        message. Response goes undeserialized in case None is passed.
      options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
        runtime) to configure the channel.
      channel_credentials: A credential applied to the whole channel, e.g. the
        return value of grpc.ssl_channel_credentials().
      insecure: If True, specifies channel_credentials as
        :term:`grpc.insecure_channel_credentials()`. This option is mutually
        exclusive with the `channel_credentials` option.
      call_credentials: A call credential applied to each call individually,
        e.g. the output of grpc.metadata_call_credentials() or
        grpc.access_token_call_credentials().
      compression: An optional value indicating the compression method to be
        used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
      wait_for_ready: An optional flag indicating whether the RPC should fail
        immediately if the connection is not ready at the time the RPC is
        invoked, or if it should wait until the connection to the server
        becomes ready. When using this option, the user will likely also want
        to set a timeout. Defaults to True.
      timeout: An optional duration of time in seconds to allow for the RPC,
        after which an exception will be raised. If timeout is unspecified,
        defaults to a timeout controlled by the
        GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
        unset, defaults to 60 seconds. Supply a value of None to indicate that
        no timeout should be enforced.
      metadata: Optional metadata to send to the server.

    Returns:
      An iterator of responses.
    """
    channel, method_handle = ChannelCache.get().get_channel(
        target,
        options,
        channel_credentials,
        insecure,
        compression,
        method,
        _registered_method,
    )
    multicallable = channel.unary_stream(
        method, request_serializer, response_deserializer, method_handle
    )
    # Per the docstring, wait_for_ready defaults to True when unspecified.
    wait_for_ready = wait_for_ready if wait_for_ready is not None else True
    return multicallable(
        request,
        metadata=metadata,
        wait_for_ready=wait_for_ready,
        credentials=call_credentials,
        timeout=timeout,
    )
407
+
408
+
409
@experimental_api
# pylint: disable=too-many-locals
def stream_unary(
    request_iterator: Iterator[RequestType],
    target: str,
    method: str,
    request_serializer: Optional[Callable[[Any], bytes]] = None,
    response_deserializer: Optional[Callable[[bytes], Any]] = None,
    options: Sequence[Tuple[AnyStr, AnyStr]] = (),
    channel_credentials: Optional[grpc.ChannelCredentials] = None,
    insecure: bool = False,
    call_credentials: Optional[grpc.CallCredentials] = None,
    compression: Optional[grpc.Compression] = None,
    wait_for_ready: Optional[bool] = None,
    timeout: Optional[float] = _DEFAULT_TIMEOUT,
    metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
    _registered_method: Optional[bool] = False,
) -> ResponseType:
    """Invokes a stream-unary RPC without an explicitly specified channel.

    THIS IS AN EXPERIMENTAL API.

    This is backed by a per-process cache of channels. Channels are evicted
    from the cache after a fixed period by a background thread. Channels will
    also be evicted if more than a configured maximum accumulate.

    The default eviction period is 10 minutes. One may set the environment
    variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.

    The default maximum number of channels is 256. One may set the
    environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
    this.

    Args:
      request_iterator: An iterator that yields request values for the RPC.
      target: The server address.
      method: The name of the RPC method.
      request_serializer: Optional :term:`serializer` for serializing the request
        message. Request goes unserialized in case None is passed.
      response_deserializer: Optional :term:`deserializer` for deserializing the response
        message. Response goes undeserialized in case None is passed.
      options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
        runtime) to configure the channel.
      channel_credentials: A credential applied to the whole channel, e.g. the
        return value of grpc.ssl_channel_credentials().
      call_credentials: A call credential applied to each call individually,
        e.g. the output of grpc.metadata_call_credentials() or
        grpc.access_token_call_credentials().
      insecure: If True, specifies channel_credentials as
        :term:`grpc.insecure_channel_credentials()`. This option is mutually
        exclusive with the `channel_credentials` option.
      compression: An optional value indicating the compression method to be
        used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
      wait_for_ready: An optional flag indicating whether the RPC should fail
        immediately if the connection is not ready at the time the RPC is
        invoked, or if it should wait until the connection to the server
        becomes ready. When using this option, the user will likely also want
        to set a timeout. Defaults to True.
      timeout: An optional duration of time in seconds to allow for the RPC,
        after which an exception will be raised. If timeout is unspecified,
        defaults to a timeout controlled by the
        GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
        unset, defaults to 60 seconds. Supply a value of None to indicate that
        no timeout should be enforced.
      metadata: Optional metadata to send to the server.

    Returns:
      The response to the RPC.
    """
    channel, method_handle = ChannelCache.get().get_channel(
        target,
        options,
        channel_credentials,
        insecure,
        compression,
        method,
        _registered_method,
    )
    multicallable = channel.stream_unary(
        method, request_serializer, response_deserializer, method_handle
    )
    # Per the docstring, wait_for_ready defaults to True when unspecified.
    wait_for_ready = wait_for_ready if wait_for_ready is not None else True
    return multicallable(
        request_iterator,
        metadata=metadata,
        wait_for_ready=wait_for_ready,
        credentials=call_credentials,
        timeout=timeout,
    )
498
+
499
+
500
@experimental_api
# pylint: disable=too-many-locals
def stream_stream(
    request_iterator: Iterator[RequestType],
    target: str,
    method: str,
    request_serializer: Optional[Callable[[Any], bytes]] = None,
    response_deserializer: Optional[Callable[[bytes], Any]] = None,
    options: Sequence[Tuple[AnyStr, AnyStr]] = (),
    channel_credentials: Optional[grpc.ChannelCredentials] = None,
    insecure: bool = False,
    call_credentials: Optional[grpc.CallCredentials] = None,
    compression: Optional[grpc.Compression] = None,
    wait_for_ready: Optional[bool] = None,
    timeout: Optional[float] = _DEFAULT_TIMEOUT,
    metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
    _registered_method: Optional[bool] = False,
) -> Iterator[ResponseType]:
    """Invokes a stream-stream RPC without an explicitly specified channel.

    THIS IS AN EXPERIMENTAL API.

    This is backed by a per-process cache of channels. Channels are evicted
    from the cache after a fixed period by a background thread. Channels will
    also be evicted if more than a configured maximum accumulate.

    The default eviction period is 10 minutes. One may set the environment
    variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.

    The default maximum number of channels is 256. One may set the
    environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
    this.

    Args:
      request_iterator: An iterator that yields request values for the RPC.
      target: The server address.
      method: The name of the RPC method.
      request_serializer: Optional :term:`serializer` for serializing the request
        message. Request goes unserialized in case None is passed.
      response_deserializer: Optional :term:`deserializer` for deserializing the response
        message. Response goes undeserialized in case None is passed.
      options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
        runtime) to configure the channel.
      channel_credentials: A credential applied to the whole channel, e.g. the
        return value of grpc.ssl_channel_credentials().
      call_credentials: A call credential applied to each call individually,
        e.g. the output of grpc.metadata_call_credentials() or
        grpc.access_token_call_credentials().
      insecure: If True, specifies channel_credentials as
        :term:`grpc.insecure_channel_credentials()`. This option is mutually
        exclusive with the `channel_credentials` option.
      compression: An optional value indicating the compression method to be
        used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
      wait_for_ready: An optional flag indicating whether the RPC should fail
        immediately if the connection is not ready at the time the RPC is
        invoked, or if it should wait until the connection to the server
        becomes ready. When using this option, the user will likely also want
        to set a timeout. Defaults to True.
      timeout: An optional duration of time in seconds to allow for the RPC,
        after which an exception will be raised. If timeout is unspecified,
        defaults to a timeout controlled by the
        GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
        unset, defaults to 60 seconds. Supply a value of None to indicate that
        no timeout should be enforced.
      metadata: Optional metadata to send to the server.

    Returns:
      An iterator of responses.
    """
    channel, method_handle = ChannelCache.get().get_channel(
        target,
        options,
        channel_credentials,
        insecure,
        compression,
        method,
        _registered_method,
    )
    multicallable = channel.stream_stream(
        method, request_serializer, response_deserializer, method_handle
    )
    # Per the docstring, wait_for_ready defaults to True when unspecified.
    wait_for_ready = wait_for_ready if wait_for_ready is not None else True
    return multicallable(
        request_iterator,
        metadata=metadata,
        wait_for_ready=wait_for_ready,
        credentials=call_credentials,
        timeout=timeout,
    )
lib/python3.10/site-packages/grpc/_typing.py ADDED
@@ -0,0 +1,95 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2022 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Common types for gRPC Sync API"""
15
+
16
+ from typing import (
17
+ TYPE_CHECKING,
18
+ Any,
19
+ Callable,
20
+ Iterable,
21
+ Iterator,
22
+ Optional,
23
+ Sequence,
24
+ Tuple,
25
+ TypeVar,
26
+ Union,
27
+ )
28
+
29
+ from grpc._cython import cygrpc
30
+
31
+ if TYPE_CHECKING:
32
+ from grpc import ServicerContext
33
+ from grpc._server import _RPCState
34
+
35
# Generic message types used throughout the sync API.
RequestType = TypeVar("RequestType")
ResponseType = TypeVar("ResponseType")
# Serialization hooks: message object <-> wire bytes.
SerializingFunction = Callable[[Any], bytes]
DeserializingFunction = Callable[[bytes], Any]
# Metadata is a sequence of (key, value) pairs; values may be str or bytes.
MetadataType = Sequence[Tuple[str, Union[str, bytes]]]
# A single gRPC Core channel argument: (name, value).
ChannelArgumentType = Tuple[str, Any]
# Callbacks: with the completed object as the single argument, or none.
DoneCallbackType = Callable[[Any], None]
NullaryCallbackType = Callable[[], None]
RequestIterableType = Iterable[Any]
ResponseIterableType = Iterable[Any]
# A tag invoked with the completion-queue event it was registered for.
UserTag = Callable[[cygrpc.BaseEvent], bool]
# Factory producing a cygrpc.IntegratedCall from the raw call parameters.
IntegratedCallFactory = Callable[
    [
        int,
        bytes,
        None,
        Optional[float],
        Optional[MetadataType],
        Optional[cygrpc.CallCredentials],
        Sequence[Sequence[cygrpc.Operation]],
        UserTag,
        Any,
    ],
    cygrpc.IntegratedCall,
]
# Result of a server tag callback: the RPC state (if any) plus follow-ups.
ServerTagCallbackType = Tuple[
    Optional["_RPCState"], Sequence[NullaryCallbackType]
]
ServerCallbackTag = Callable[[cygrpc.BaseEvent], ServerTagCallbackType]
# Any of the eight service-handler shapes: {unary, streaming} request x
# {unary, streaming} response, with or without a response callback.
ArityAgnosticMethodHandler = Union[
    Callable[
        [RequestType, "ServicerContext", Callable[[ResponseType], None]],
        ResponseType,
    ],
    Callable[
        [RequestType, "ServicerContext", Callable[[ResponseType], None]],
        Iterator[ResponseType],
    ],
    Callable[
        [
            Iterator[RequestType],
            "ServicerContext",
            Callable[[ResponseType], None],
        ],
        ResponseType,
    ],
    Callable[
        [
            Iterator[RequestType],
            "ServicerContext",
            Callable[[ResponseType], None],
        ],
        Iterator[ResponseType],
    ],
    Callable[[RequestType, "ServicerContext"], ResponseType],
    Callable[[RequestType, "ServicerContext"], Iterator[ResponseType]],
    Callable[[Iterator[RequestType], "ServicerContext"], ResponseType],
    Callable[
        [Iterator[RequestType], "ServicerContext"], Iterator[ResponseType]
    ],
]
lib/python3.10/site-packages/grpc/_utilities.py ADDED
@@ -0,0 +1,222 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Internal utilities for gRPC Python."""
15
+
16
+ import collections
17
+ import logging
18
+ import threading
19
+ import time
20
+ from typing import Callable, Dict, Optional, Sequence
21
+
22
+ import grpc # pytype: disable=pyi-error
23
+ from grpc import _common # pytype: disable=pyi-error
24
+ from grpc._typing import DoneCallbackType
25
+
26
+ _LOGGER = logging.getLogger(__name__)
27
+
28
+ _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = (
29
+ 'Exception calling connectivity future "done" callback!'
30
+ )
31
+
32
+
33
class RpcMethodHandler(
    collections.namedtuple(
        "_RpcMethodHandler",
        (
            "request_streaming",
            "response_streaming",
            "request_deserializer",
            "response_serializer",
            "unary_unary",
            "unary_stream",
            "stream_unary",
            "stream_stream",
        ),
    ),
    grpc.RpcMethodHandler,
):
    """An immutable namedtuple-backed implementation of grpc.RpcMethodHandler.

    Exactly one of the four behavior fields (unary_unary, unary_stream,
    stream_unary, stream_stream) is expected to be populated, matching the
    request_streaming/response_streaming flags.
    """

    pass
50
+
51
+
52
class DictionaryGenericHandler(grpc.ServiceRpcHandler):
    """Routes RPCs for one service using a dict of per-method handlers."""

    _name: str
    _method_handlers: Dict[str, grpc.RpcMethodHandler]

    def __init__(
        self, service: str, method_handlers: Dict[str, grpc.RpcMethodHandler]
    ):
        self._name = service
        # Re-key each handler by its fully-qualified method name so that
        # lookups in service() can use handler_call_details.method directly.
        qualified_handlers = {}
        for method_name, handler in method_handlers.items():
            full_name = _common.fully_qualified_method(service, method_name)
            qualified_handlers[full_name] = handler
        self._method_handlers = qualified_handlers

    def service_name(self) -> str:
        """Return the name of the service this handler serves."""
        return self._name

    def service(
        self, handler_call_details: grpc.HandlerCallDetails
    ) -> Optional[grpc.RpcMethodHandler]:
        """Return the handler for the requested method, or None."""
        requested = (
            handler_call_details.method
        )  # pytype: disable=attribute-error
        return self._method_handlers.get(requested)
75
+
76
+
77
class _ChannelReadyFuture(grpc.Future):
    """A Future that matures when its channel reaches READY connectivity.

    Subscribes to channel connectivity updates; matures on READY, or is
    cancelled by the user. Done callbacks are invoked outside the lock.
    """

    _condition: threading.Condition
    _channel: grpc.Channel
    _matured: bool
    _cancelled: bool
    # Set to None once the future is resolved (matured or cancelled).
    _done_callbacks: Sequence[Callable]

    def __init__(self, channel: grpc.Channel):
        self._condition = threading.Condition()
        self._channel = channel

        self._matured = False
        self._cancelled = False
        self._done_callbacks = []

    def _block(self, timeout: Optional[float]) -> None:
        """Wait until resolution; raise on cancellation or timeout."""
        until = None if timeout is None else time.time() + timeout
        with self._condition:
            while True:
                if self._cancelled:
                    raise grpc.FutureCancelledError()
                elif self._matured:
                    return
                else:
                    if until is None:
                        self._condition.wait()
                    else:
                        # Recompute the remaining budget each wakeup to
                        # guard against spurious condition wakeups.
                        remaining = until - time.time()
                        if remaining < 0:
                            raise grpc.FutureTimeoutError()
                        else:
                            self._condition.wait(timeout=remaining)

    def _update(self, connectivity: Optional[grpc.ChannelConnectivity]) -> None:
        """Connectivity-subscription callback; matures the future on READY."""
        with self._condition:
            if (
                not self._cancelled
                and connectivity is grpc.ChannelConnectivity.READY
            ):
                self._matured = True
                self._channel.unsubscribe(self._update)
                self._condition.notify_all()
                done_callbacks = tuple(self._done_callbacks)
                self._done_callbacks = None
            else:
                return

        # Invoke callbacks outside the lock to avoid deadlock if a
        # callback re-enters this future.
        for done_callback in done_callbacks:
            try:
                done_callback(self)
            except Exception:  # pylint: disable=broad-except
                _LOGGER.exception(_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE)

    def cancel(self) -> bool:
        """Cancel the future; returns False if it already matured."""
        with self._condition:
            if not self._matured:
                self._cancelled = True
                self._channel.unsubscribe(self._update)
                self._condition.notify_all()
                done_callbacks = tuple(self._done_callbacks)
                self._done_callbacks = None
            else:
                return False

        for done_callback in done_callbacks:
            try:
                done_callback(self)
            except Exception:  # pylint: disable=broad-except
                _LOGGER.exception(_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE)

        return True

    def cancelled(self) -> bool:
        with self._condition:
            return self._cancelled

    def running(self) -> bool:
        with self._condition:
            return not self._cancelled and not self._matured

    def done(self) -> bool:
        with self._condition:
            return self._cancelled or self._matured

    def result(self, timeout: Optional[float] = None) -> None:
        # Success carries no value; blocking is the entire result.
        self._block(timeout)

    def exception(self, timeout: Optional[float] = None) -> None:
        # This future never carries an exception value; just block.
        self._block(timeout)

    def traceback(self, timeout: Optional[float] = None) -> None:
        # No traceback is ever associated with success; just block.
        self._block(timeout)

    def add_done_callback(self, fn: DoneCallbackType):
        with self._condition:
            if not self._cancelled and not self._matured:
                self._done_callbacks.append(fn)
                return

        # Already resolved: invoke immediately, outside the lock.
        fn(self)

    def start(self):
        """Begin watching connectivity, actively trying to connect."""
        with self._condition:
            self._channel.subscribe(self._update, try_to_connect=True)

    def __del__(self):
        with self._condition:
            if not self._cancelled and not self._matured:
                self._channel.unsubscribe(self._update)
+
187
+
188
def channel_ready_future(channel: grpc.Channel) -> _ChannelReadyFuture:
    """Return a started future that matures when *channel* is READY."""
    future = _ChannelReadyFuture(channel)
    future.start()
    return future
191
+ return ready_future
192
+
193
+
194
def first_version_is_lower(version1: str, version2: str) -> bool:
    """
    Compares two versions in the format '1.60.1' or '1.60.1.dev0'.

    This method will be used in all stubs generated by grpcio-tools to check whether
    the stub version is compatible with the runtime grpcio.

    Args:
      version1: The first version string.
      version2: The second version string.

    Returns:
      True if version1 is lower, False otherwise.
    """
    version1_list = version1.split(".")
    version2_list = version2.split(".")

    try:
        # Compare the first three numeric components (major.minor.patch).
        for i in range(3):
            component1 = int(version1_list[i])
            component2 = int(version2_list[i])
            if component1 < component2:
                return True
            if component1 > component2:
                return False
    except (ValueError, IndexError):
        # Return False when a component is non-numeric (ValueError) or a
        # version has fewer than three components (IndexError) — the
        # original code let IndexError propagate for inputs like "1.60".
        return False

    # The version without dev0 will be considered lower.
    return len(version1_list) < len(version2_list)
lib/python3.10/site-packages/grpc/aio/__init__.py ADDED
@@ -0,0 +1,95 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """gRPC's Asynchronous Python API.
15
+
16
+ gRPC Async API objects may only be used on the thread on which they were
17
+ created. AsyncIO doesn't provide thread safety for most of its APIs.
18
+ """
19
+
20
+ from typing import Any, Optional, Sequence, Tuple
21
+
22
+ import grpc
23
+ from grpc._cython.cygrpc import AbortError
24
+ from grpc._cython.cygrpc import BaseError
25
+ from grpc._cython.cygrpc import EOF
26
+ from grpc._cython.cygrpc import InternalError
27
+ from grpc._cython.cygrpc import UsageError
28
+ from grpc._cython.cygrpc import init_grpc_aio
29
+ from grpc._cython.cygrpc import shutdown_grpc_aio
30
+
31
+ from ._base_call import Call
32
+ from ._base_call import RpcContext
33
+ from ._base_call import StreamStreamCall
34
+ from ._base_call import StreamUnaryCall
35
+ from ._base_call import UnaryStreamCall
36
+ from ._base_call import UnaryUnaryCall
37
+ from ._base_channel import Channel
38
+ from ._base_channel import StreamStreamMultiCallable
39
+ from ._base_channel import StreamUnaryMultiCallable
40
+ from ._base_channel import UnaryStreamMultiCallable
41
+ from ._base_channel import UnaryUnaryMultiCallable
42
+ from ._base_server import Server
43
+ from ._base_server import ServicerContext
44
+ from ._call import AioRpcError
45
+ from ._channel import insecure_channel
46
+ from ._channel import secure_channel
47
+ from ._interceptor import ClientCallDetails
48
+ from ._interceptor import ClientInterceptor
49
+ from ._interceptor import InterceptedUnaryUnaryCall
50
+ from ._interceptor import ServerInterceptor
51
+ from ._interceptor import StreamStreamClientInterceptor
52
+ from ._interceptor import StreamUnaryClientInterceptor
53
+ from ._interceptor import UnaryStreamClientInterceptor
54
+ from ._interceptor import UnaryUnaryClientInterceptor
55
+ from ._metadata import Metadata
56
+ from ._server import server
57
+ from ._typing import ChannelArgumentType
58
+
59
+ ################################### __all__ #################################
60
+
61
+ __all__ = (
62
+ "init_grpc_aio",
63
+ "shutdown_grpc_aio",
64
+ "AioRpcError",
65
+ "RpcContext",
66
+ "Call",
67
+ "UnaryUnaryCall",
68
+ "UnaryStreamCall",
69
+ "StreamUnaryCall",
70
+ "StreamStreamCall",
71
+ "Channel",
72
+ "UnaryUnaryMultiCallable",
73
+ "UnaryStreamMultiCallable",
74
+ "StreamUnaryMultiCallable",
75
+ "StreamStreamMultiCallable",
76
+ "ClientCallDetails",
77
+ "ClientInterceptor",
78
+ "UnaryStreamClientInterceptor",
79
+ "UnaryUnaryClientInterceptor",
80
+ "StreamUnaryClientInterceptor",
81
+ "StreamStreamClientInterceptor",
82
+ "InterceptedUnaryUnaryCall",
83
+ "ServerInterceptor",
84
+ "insecure_channel",
85
+ "server",
86
+ "Server",
87
+ "ServicerContext",
88
+ "EOF",
89
+ "secure_channel",
90
+ "AbortError",
91
+ "BaseError",
92
+ "UsageError",
93
+ "InternalError",
94
+ "Metadata",
95
+ )
lib/python3.10/site-packages/grpc/aio/_base_call.py ADDED
@@ -0,0 +1,257 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 The gRPC Authors
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Abstract base classes for client-side Call objects.
15
+
16
+ Call objects represents the RPC itself, and offer methods to access / modify
17
+ its information. They also offer methods to manipulate the life-cycle of the
18
+ RPC, e.g. cancellation.
19
+ """
20
+
21
+ from abc import ABCMeta
22
+ from abc import abstractmethod
23
+ from typing import Any, AsyncIterator, Generator, Generic, Optional, Union
24
+
25
+ import grpc
26
+
27
+ from ._metadata import Metadata
28
+ from ._typing import DoneCallbackType
29
+ from ._typing import EOFType
30
+ from ._typing import RequestType
31
+ from ._typing import ResponseType
32
+
33
+ __all__ = "RpcContext", "Call", "UnaryUnaryCall", "UnaryStreamCall"
34
+
35
+
36
+ class RpcContext(metaclass=ABCMeta):
37
+ """Provides RPC-related information and action."""
38
+
39
+ @abstractmethod
40
+ def cancelled(self) -> bool:
41
+ """Return True if the RPC is cancelled.
42
+
43
+ The RPC is cancelled when the cancellation was requested with cancel().
44
+
45
+ Returns:
46
+ A bool indicates whether the RPC is cancelled or not.
47
+ """
48
+
49
+ @abstractmethod
50
+ def done(self) -> bool:
51
+ """Return True if the RPC is done.
52
+
53
+ An RPC is done if the RPC is completed, cancelled or aborted.
54
+
55
+ Returns:
56
+ A bool indicates if the RPC is done.
57
+ """
58
+
59
+ @abstractmethod
60
+ def time_remaining(self) -> Optional[float]:
61
+ """Describes the length of allowed time remaining for the RPC.
62
+
63
+ Returns:
64
+ A nonnegative float indicating the length of allowed time in seconds
65
+ remaining for the RPC to complete before it is considered to have
66
+ timed out, or None if no deadline was specified for the RPC.
67
+ """
68
+
69
+ @abstractmethod
70
+ def cancel(self) -> bool:
71
+ """Cancels the RPC.
72
+
73
+ Idempotent and has no effect if the RPC has already terminated.
74
+
75
+ Returns:
76
+ A bool indicates if the cancellation is performed or not.
77
+ """
78
+
79
+ @abstractmethod
80
+ def add_done_callback(self, callback: DoneCallbackType) -> None:
81
+ """Registers a callback to be called on RPC termination.
82
+
83
+ Args:
84
+ callback: A callable object will be called with the call object as
85
+ its only argument.
86
+ """
87
+
88
+
89
+ class Call(RpcContext, metaclass=ABCMeta):
90
+ """The abstract base class of an RPC on the client-side."""
91
+
92
+ @abstractmethod
93
+ async def initial_metadata(self) -> Metadata:
94
+ """Accesses the initial metadata sent by the server.
95
+
96
+ Returns:
97
+ The initial :term:`metadata`.
98
+ """
99
+
100
+ @abstractmethod
101
+ async def trailing_metadata(self) -> Metadata:
102
+ """Accesses the trailing metadata sent by the server.
103
+
104
+ Returns:
105
+ The trailing :term:`metadata`.
106
+ """
107
+
108
+ @abstractmethod
109
+ async def code(self) -> grpc.StatusCode:
110
+ """Accesses the status code sent by the server.
111
+
112
+ Returns:
113
+ The StatusCode value for the RPC.
114
+ """
115
+
116
+ @abstractmethod
117
+ async def details(self) -> str:
118
+ """Accesses the details sent by the server.
119
+
120
+ Returns:
121
+ The details string of the RPC.
122
+ """
123
+
124
+ @abstractmethod
125
+ async def wait_for_connection(self) -> None:
126
+ """Waits until connected to peer and raises aio.AioRpcError if failed.
127
+
128
+ This is an EXPERIMENTAL method.
129
+
130
+ This method ensures the RPC has been successfully connected. Otherwise,
131
+ an AioRpcError will be raised to explain the reason of the connection
132
+ failure.
133
+
134
+ This method is recommended for building retry mechanisms.
135
+ """
136
+
137
+
138
+ class UnaryUnaryCall(
139
+ Generic[RequestType, ResponseType], Call, metaclass=ABCMeta
140
+ ):
141
+ """The abstract base class of a unary-unary RPC on the client-side."""
142
+
143
+ @abstractmethod
144
+ def __await__(self) -> Generator[Any, None, ResponseType]:
145
+ """Await the response message to be ready.
146
+
147
+ Returns:
148
+ The response message of the RPC.
149
+ """
150
+
151
+
152
+ class UnaryStreamCall(
153
+ Generic[RequestType, ResponseType], Call, metaclass=ABCMeta
154
+ ):
155
+ @abstractmethod
156
+ def __aiter__(self) -> AsyncIterator[ResponseType]:
157
+ """Returns the async iterator representation that yields messages.
158
+
159
+ Under the hood, it is calling the "read" method.
160
+
161
+ Returns:
162
+ An async iterator object that yields messages.
163
+ """
164
+
165
+ @abstractmethod
166
+ async def read(self) -> Union[EOFType, ResponseType]:
167
+ """Reads one message from the stream.
168
+
169
+ Read operations must be serialized when called from multiple
170
+ coroutines.
171
+
172
+ Note that the iterator and read/write APIs may not be mixed on
173
+ a single RPC.
174
+
175
+ Returns:
176
+ A response message, or an `grpc.aio.EOF` to indicate the end of the
177
+ stream.
178
+ """
179
+
180
+
181
+ class StreamUnaryCall(
182
+ Generic[RequestType, ResponseType], Call, metaclass=ABCMeta
183
+ ):
184
+ @abstractmethod
185
+ async def write(self, request: RequestType) -> None:
186
+ """Writes one message to the stream.
187
+
188
+ Note that the iterator and read/write APIs may not be mixed on
189
+ a single RPC.
190
+
191
+ Raises:
192
+ An RpcError exception if the write failed.
193
+ """
194
+
195
+ @abstractmethod
196
+ async def done_writing(self) -> None:
197
+ """Notifies server that the client is done sending messages.
198
+
199
+ After done_writing is called, any additional invocation to the write
200
+ function will fail. This function is idempotent.
201
+ """
202
+
203
+ @abstractmethod
204
+ def __await__(self) -> Generator[Any, None, ResponseType]:
205
+ """Await the response message to be ready.
206
+
207
+ Returns:
208
+ The response message of the stream.
209
+ """
210
+
211
+
212
+ class StreamStreamCall(
213
+ Generic[RequestType, ResponseType], Call, metaclass=ABCMeta
214
+ ):
215
+ @abstractmethod
216
+ def __aiter__(self) -> AsyncIterator[ResponseType]:
217
+ """Returns the async iterator representation that yields messages.
218
+
219
+ Under the hood, it is calling the "read" method.
220
+
221
+ Returns:
222
+ An async iterator object that yields messages.
223
+ """
224
+
225
+ @abstractmethod
226
+ async def read(self) -> Union[EOFType, ResponseType]:
227
+ """Reads one message from the stream.
228
+
229
+ Read operations must be serialized when called from multiple
230
+ coroutines.
231
+
232
+ Note that the iterator and read/write APIs may not be mixed on
233
+ a single RPC.
234
+
235
+ Returns:
236
+ A response message, or an `grpc.aio.EOF` to indicate the end of the
237
+ stream.
238
+ """
239
+
240
+ @abstractmethod
241
+ async def write(self, request: RequestType) -> None:
242
+ """Writes one message to the stream.
243
+
244
+ Note that the iterator and read/write APIs may not be mixed on
245
+ a single RPC.
246
+
247
+ Raises:
248
+ An RpcError exception if the write failed.
249
+ """
250
+
251
+ @abstractmethod
252
+ async def done_writing(self) -> None:
253
+ """Notifies server that the client is done sending messages.
254
+
255
+ After done_writing is called, any additional invocation to the write
256
+ function will fail. This function is idempotent.
257
+ """
lib/python3.10/site-packages/grpc/aio/_base_channel.py ADDED
@@ -0,0 +1,364 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 The gRPC Authors
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Abstract base classes for Channel objects and Multicallable objects."""
15
+
16
+ import abc
17
+ from typing import Generic, Optional
18
+
19
+ import grpc
20
+
21
+ from . import _base_call
22
+ from ._typing import DeserializingFunction
23
+ from ._typing import MetadataType
24
+ from ._typing import RequestIterableType
25
+ from ._typing import RequestType
26
+ from ._typing import ResponseType
27
+ from ._typing import SerializingFunction
28
+
29
+
30
+ class UnaryUnaryMultiCallable(Generic[RequestType, ResponseType], abc.ABC):
31
+ """Enables asynchronous invocation of a unary-call RPC."""
32
+
33
+ @abc.abstractmethod
34
+ def __call__(
35
+ self,
36
+ request: RequestType,
37
+ *,
38
+ timeout: Optional[float] = None,
39
+ metadata: Optional[MetadataType] = None,
40
+ credentials: Optional[grpc.CallCredentials] = None,
41
+ wait_for_ready: Optional[bool] = None,
42
+ compression: Optional[grpc.Compression] = None,
43
+ ) -> _base_call.UnaryUnaryCall[RequestType, ResponseType]:
44
+ """Asynchronously invokes the underlying RPC.
45
+
46
+ Args:
47
+ request: The request value for the RPC.
48
+ timeout: An optional duration of time in seconds to allow
49
+ for the RPC.
50
+ metadata: Optional :term:`metadata` to be transmitted to the
51
+ service-side of the RPC.
52
+ credentials: An optional CallCredentials for the RPC. Only valid for
53
+ secure Channel.
54
+ wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
55
+ compression: An element of grpc.compression, e.g.
56
+ grpc.compression.Gzip.
57
+
58
+ Returns:
59
+ A UnaryUnaryCall object.
60
+
61
+ Raises:
62
+ RpcError: Indicates that the RPC terminated with non-OK status. The
63
+ raised RpcError will also be a Call for the RPC affording the RPC's
64
+ metadata, status code, and details.
65
+ """
66
+
67
+
68
+ class UnaryStreamMultiCallable(Generic[RequestType, ResponseType], abc.ABC):
69
+ """Enables asynchronous invocation of a server-streaming RPC."""
70
+
71
+ @abc.abstractmethod
72
+ def __call__(
73
+ self,
74
+ request: RequestType,
75
+ *,
76
+ timeout: Optional[float] = None,
77
+ metadata: Optional[MetadataType] = None,
78
+ credentials: Optional[grpc.CallCredentials] = None,
79
+ wait_for_ready: Optional[bool] = None,
80
+ compression: Optional[grpc.Compression] = None,
81
+ ) -> _base_call.UnaryStreamCall[RequestType, ResponseType]:
82
+ """Asynchronously invokes the underlying RPC.
83
+
84
+ Args:
85
+ request: The request value for the RPC.
86
+ timeout: An optional duration of time in seconds to allow
87
+ for the RPC.
88
+ metadata: Optional :term:`metadata` to be transmitted to the
89
+ service-side of the RPC.
90
+ credentials: An optional CallCredentials for the RPC. Only valid for
91
+ secure Channel.
92
+ wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
93
+ compression: An element of grpc.compression, e.g.
94
+ grpc.compression.Gzip.
95
+
96
+ Returns:
97
+ A UnaryStreamCall object.
98
+
99
+ Raises:
100
+ RpcError: Indicates that the RPC terminated with non-OK status. The
101
+ raised RpcError will also be a Call for the RPC affording the RPC's
102
+ metadata, status code, and details.
103
+ """
104
+
105
+
106
+ class StreamUnaryMultiCallable(abc.ABC):
107
+ """Enables asynchronous invocation of a client-streaming RPC."""
108
+
109
+ @abc.abstractmethod
110
+ def __call__(
111
+ self,
112
+ request_iterator: Optional[RequestIterableType] = None,
113
+ timeout: Optional[float] = None,
114
+ metadata: Optional[MetadataType] = None,
115
+ credentials: Optional[grpc.CallCredentials] = None,
116
+ wait_for_ready: Optional[bool] = None,
117
+ compression: Optional[grpc.Compression] = None,
118
+ ) -> _base_call.StreamUnaryCall:
119
+ """Asynchronously invokes the underlying RPC.
120
+
121
+ Args:
122
+ request_iterator: An optional async iterable or iterable of request
123
+ messages for the RPC.
124
+ timeout: An optional duration of time in seconds to allow
125
+ for the RPC.
126
+ metadata: Optional :term:`metadata` to be transmitted to the
127
+ service-side of the RPC.
128
+ credentials: An optional CallCredentials for the RPC. Only valid for
129
+ secure Channel.
130
+ wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
131
+ compression: An element of grpc.compression, e.g.
132
+ grpc.compression.Gzip.
133
+
134
+ Returns:
135
+ A StreamUnaryCall object.
136
+
137
+ Raises:
138
+ RpcError: Indicates that the RPC terminated with non-OK status. The
139
+ raised RpcError will also be a Call for the RPC affording the RPC's
140
+ metadata, status code, and details.
141
+ """
142
+
143
+
144
+ class StreamStreamMultiCallable(abc.ABC):
145
+ """Enables asynchronous invocation of a bidirectional-streaming RPC."""
146
+
147
+ @abc.abstractmethod
148
+ def __call__(
149
+ self,
150
+ request_iterator: Optional[RequestIterableType] = None,
151
+ timeout: Optional[float] = None,
152
+ metadata: Optional[MetadataType] = None,
153
+ credentials: Optional[grpc.CallCredentials] = None,
154
+ wait_for_ready: Optional[bool] = None,
155
+ compression: Optional[grpc.Compression] = None,
156
+ ) -> _base_call.StreamStreamCall:
157
+ """Asynchronously invokes the underlying RPC.
158
+
159
+ Args:
160
+ request_iterator: An optional async iterable or iterable of request
161
+ messages for the RPC.
162
+ timeout: An optional duration of time in seconds to allow
163
+ for the RPC.
164
+ metadata: Optional :term:`metadata` to be transmitted to the
165
+ service-side of the RPC.
166
+ credentials: An optional CallCredentials for the RPC. Only valid for
167
+ secure Channel.
168
+ wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
169
+ compression: An element of grpc.compression, e.g.
170
+ grpc.compression.Gzip.
171
+
172
+ Returns:
173
+ A StreamStreamCall object.
174
+
175
+ Raises:
176
+ RpcError: Indicates that the RPC terminated with non-OK status. The
177
+ raised RpcError will also be a Call for the RPC affording the RPC's
178
+ metadata, status code, and details.
179
+ """
180
+
181
+
182
+ class Channel(abc.ABC):
183
+ """Enables asynchronous RPC invocation as a client.
184
+
185
+ Channel objects implement the Asynchronous Context Manager (aka. async
186
+ with) type, although they are not supported to be entered and exited
187
+ multiple times.
188
+ """
189
+
190
+ @abc.abstractmethod
191
+ async def __aenter__(self):
192
+ """Starts an asynchronous context manager.
193
+
194
+ Returns:
195
+ Channel the channel that was instantiated.
196
+ """
197
+
198
+ @abc.abstractmethod
199
+ async def __aexit__(self, exc_type, exc_val, exc_tb):
200
+ """Finishes the asynchronous context manager by closing the channel.
201
+
202
+ Still active RPCs will be cancelled.
203
+ """
204
+
205
+ @abc.abstractmethod
206
+ async def close(self, grace: Optional[float] = None):
207
+ """Closes this Channel and releases all resources held by it.
208
+
209
+ This method immediately stops the channel from executing new RPCs in
210
+ all cases.
211
+
212
+ If a grace period is specified, this method waits until all active
213
+ RPCs are finished or until the grace period is reached. RPCs that haven't
214
+ been terminated within the grace period are aborted.
215
+ If a grace period is not specified (by passing None for grace),
216
+ all existing RPCs are cancelled immediately.
217
+
218
+ This method is idempotent.
219
+ """
220
+
221
+ @abc.abstractmethod
222
+ def get_state(
223
+ self, try_to_connect: bool = False
224
+ ) -> grpc.ChannelConnectivity:
225
+ """Checks the connectivity state of a channel.
226
+
227
+ This is an EXPERIMENTAL API.
228
+
229
+ If the channel reaches a stable connectivity state, it is guaranteed
230
+ that the return value of this function will eventually converge to that
231
+ state.
232
+
233
+ Args:
234
+ try_to_connect: a bool indicate whether the Channel should try to
235
+ connect to peer or not.
236
+
237
+ Returns: A ChannelConnectivity object.
238
+ """
239
+
240
+ @abc.abstractmethod
241
+ async def wait_for_state_change(
242
+ self,
243
+ last_observed_state: grpc.ChannelConnectivity,
244
+ ) -> None:
245
+ """Waits for a change in connectivity state.
246
+
247
+ This is an EXPERIMENTAL API.
248
+
249
+ The function blocks until there is a change in the channel connectivity
250
+ state from the "last_observed_state". If the state is already
251
+ different, this function will return immediately.
252
+
253
+ There is an inherent race between the invocation of
254
+ "Channel.wait_for_state_change" and "Channel.get_state". The state can
255
+ change arbitrary many times during the race, so there is no way to
256
+ observe every state transition.
257
+
258
+ If there is a need to put a timeout for this function, please refer to
259
+ "asyncio.wait_for".
260
+
261
+ Args:
262
+ last_observed_state: A grpc.ChannelConnectivity object representing
263
+ the last known state.
264
+ """
265
+
266
+ @abc.abstractmethod
267
+ async def channel_ready(self) -> None:
268
+ """Creates a coroutine that blocks until the Channel is READY."""
269
+
270
+ @abc.abstractmethod
271
+ def unary_unary(
272
+ self,
273
+ method: str,
274
+ request_serializer: Optional[SerializingFunction] = None,
275
+ response_deserializer: Optional[DeserializingFunction] = None,
276
+ _registered_method: Optional[bool] = False,
277
+ ) -> UnaryUnaryMultiCallable:
278
+ """Creates a UnaryUnaryMultiCallable for a unary-unary method.
279
+
280
+ Args:
281
+ method: The name of the RPC method.
282
+ request_serializer: Optional :term:`serializer` for serializing the request
283
+ message. Request goes unserialized in case None is passed.
284
+ response_deserializer: Optional :term:`deserializer` for deserializing the
285
+ response message. Response goes undeserialized in case None
286
+ is passed.
287
+ _registered_method: Implementation Private. Optional: A bool representing
288
+ whether the method is registered.
289
+
290
+ Returns:
291
+ A UnaryUnaryMultiCallable value for the named unary-unary method.
292
+ """
293
+
294
+ @abc.abstractmethod
295
+ def unary_stream(
296
+ self,
297
+ method: str,
298
+ request_serializer: Optional[SerializingFunction] = None,
299
+ response_deserializer: Optional[DeserializingFunction] = None,
300
+ _registered_method: Optional[bool] = False,
301
+ ) -> UnaryStreamMultiCallable:
302
+ """Creates a UnaryStreamMultiCallable for a unary-stream method.
303
+
304
+ Args:
305
+ method: The name of the RPC method.
306
+ request_serializer: Optional :term:`serializer` for serializing the request
307
+ message. Request goes unserialized in case None is passed.
308
+ response_deserializer: Optional :term:`deserializer` for deserializing the
309
+ response message. Response goes undeserialized in case None
310
+ is passed.
311
+ _registered_method: Implementation Private. Optional: A bool representing
312
+ whether the method is registered.
313
+
314
+ Returns:
315
+ A UnaryStreamMultiCallable value for the named unary-stream method.
316
+ """
317
+
318
+ @abc.abstractmethod
319
+ def stream_unary(
320
+ self,
321
+ method: str,
322
+ request_serializer: Optional[SerializingFunction] = None,
323
+ response_deserializer: Optional[DeserializingFunction] = None,
324
+ _registered_method: Optional[bool] = False,
325
+ ) -> StreamUnaryMultiCallable:
326
+ """Creates a StreamUnaryMultiCallable for a stream-unary method.
327
+
328
+ Args:
329
+ method: The name of the RPC method.
330
+ request_serializer: Optional :term:`serializer` for serializing the request
331
+ message. Request goes unserialized in case None is passed.
332
+ response_deserializer: Optional :term:`deserializer` for deserializing the
333
+ response message. Response goes undeserialized in case None
334
+ is passed.
335
+ _registered_method: Implementation Private. Optional: A bool representing
336
+ whether the method is registered.
337
+
338
+ Returns:
339
+ A StreamUnaryMultiCallable value for the named stream-unary method.
340
+ """
341
+
342
+ @abc.abstractmethod
343
+ def stream_stream(
344
+ self,
345
+ method: str,
346
+ request_serializer: Optional[SerializingFunction] = None,
347
+ response_deserializer: Optional[DeserializingFunction] = None,
348
+ _registered_method: Optional[bool] = False,
349
+ ) -> StreamStreamMultiCallable:
350
+ """Creates a StreamStreamMultiCallable for a stream-stream method.
351
+
352
+ Args:
353
+ method: The name of the RPC method.
354
+ request_serializer: Optional :term:`serializer` for serializing the request
355
+ message. Request goes unserialized in case None is passed.
356
+ response_deserializer: Optional :term:`deserializer` for deserializing the
357
+ response message. Response goes undeserialized in case None
358
+ is passed.
359
+ _registered_method: Implementation Private. Optional: A bool representing
360
+ whether the method is registered.
361
+
362
+ Returns:
363
+ A StreamStreamMultiCallable value for the named stream-stream method.
364
+ """
lib/python3.10/site-packages/grpc/aio/_base_server.py ADDED
@@ -0,0 +1,385 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 The gRPC Authors
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Abstract base classes for server-side classes."""
15
+
16
+ import abc
17
+ from typing import Generic, Iterable, Mapping, NoReturn, Optional, Sequence
18
+
19
+ import grpc
20
+
21
+ from ._metadata import Metadata # pylint: disable=unused-import
22
+ from ._typing import DoneCallbackType
23
+ from ._typing import MetadataType
24
+ from ._typing import RequestType
25
+ from ._typing import ResponseType
26
+
27
+
28
+ class Server(abc.ABC):
29
+ """Serves RPCs."""
30
+
31
+ @abc.abstractmethod
32
+ def add_generic_rpc_handlers(
33
+ self, generic_rpc_handlers: Sequence[grpc.GenericRpcHandler]
34
+ ) -> None:
35
+ """Registers GenericRpcHandlers with this Server.
36
+
37
+ This method is only safe to call before the server is started.
38
+
39
+ Args:
40
+ generic_rpc_handlers: A sequence of GenericRpcHandlers that will be
41
+ used to service RPCs.
42
+ """
43
+
44
+ @abc.abstractmethod
45
+ def add_insecure_port(self, address: str) -> int:
46
+ """Opens an insecure port for accepting RPCs.
47
+
48
+ A port is a communication endpoint that used by networking protocols,
49
+ like TCP and UDP. To date, we only support TCP.
50
+
51
+ This method may only be called before starting the server.
52
+
53
+ Args:
54
+ address: The address for which to open a port. If the port is 0,
55
+ or not specified in the address, then the gRPC runtime will choose a port.
56
+
57
+ Returns:
58
+ An integer port on which the server will accept RPC requests.
59
+ """
60
+
61
+ @abc.abstractmethod
62
+ def add_secure_port(
63
+ self, address: str, server_credentials: grpc.ServerCredentials
64
+ ) -> int:
65
+ """Opens a secure port for accepting RPCs.
66
+
67
+ A port is a communication endpoint that used by networking protocols,
68
+ like TCP and UDP. To date, we only support TCP.
69
+
70
+ This method may only be called before starting the server.
71
+
72
+ Args:
73
+ address: The address for which to open a port.
74
+ if the port is 0, or not specified in the address, then the gRPC
75
+ runtime will choose a port.
76
+ server_credentials: A ServerCredentials object.
77
+
78
+ Returns:
79
+ An integer port on which the server will accept RPC requests.
80
+ """
81
+
82
+ @abc.abstractmethod
83
+ async def start(self) -> None:
84
+ """Starts this Server.
85
+
86
+ This method may only be called once. (i.e. it is not idempotent).
87
+ """
88
+
89
+ @abc.abstractmethod
90
+ async def stop(self, grace: Optional[float]) -> None:
91
+ """Stops this Server.
92
+
93
+ This method immediately stops the server from servicing new RPCs in
94
+ all cases.
95
+
96
+ If a grace period is specified, this method waits until all active
97
+ RPCs are finished or until the grace period is reached. RPCs that haven't
98
+ been terminated within the grace period are aborted.
99
+ If a grace period is not specified (by passing None for grace), all
100
+ existing RPCs are aborted immediately and this method blocks until
101
+ the last RPC handler terminates.
102
+
103
+ This method is idempotent and may be called at any time. Passing a
104
+ smaller grace value in a subsequent call will have the effect of
105
+ stopping the Server sooner (passing None will have the effect of
106
+ stopping the server immediately). Passing a larger grace value in a
107
+ subsequent call will not have the effect of stopping the server later
108
+ (i.e. the most restrictive grace value is used).
109
+
110
+ Args:
111
+ grace: A duration of time in seconds or None.
112
+ """
113
+
114
+ @abc.abstractmethod
115
+ async def wait_for_termination(
116
+ self, timeout: Optional[float] = None
117
+ ) -> bool:
118
+ """Continues current coroutine once the server stops.
119
+
120
+ This is an EXPERIMENTAL API.
121
+
122
+ The wait will not consume computational resources during blocking, and
123
+ it will block until one of the two following conditions are met:
124
+
125
+ 1) The server is stopped or terminated;
126
+ 2) A timeout occurs if timeout is not `None`.
127
+
128
+ The timeout argument works in the same way as `threading.Event.wait()`.
129
+ https://docs.python.org/3/library/threading.html#threading.Event.wait
130
+
131
+ Args:
132
+ timeout: A floating point number specifying a timeout for the
133
+ operation in seconds.
134
+
135
+ Returns:
136
+ A bool indicates if the operation times out.
137
+ """
138
+
139
+ def add_registered_method_handlers(self, service_name, method_handlers):
140
+ """Registers GenericRpcHandlers with this Server.
141
+
142
+ This method is only safe to call before the server is started.
143
+
144
+ Args:
145
+ service_name: The service name.
146
+ method_handlers: A dictionary that maps method names to corresponding
147
+ RpcMethodHandler.
148
+ """
149
+
150
+
151
# pylint: disable=too-many-public-methods
class ServicerContext(Generic[RequestType, ResponseType], abc.ABC):
    """A context object passed to method implementations.

    Provides streaming I/O (read/write), metadata access, status control
    and call-lifecycle inspection for a single in-progress RPC.
    """

    @abc.abstractmethod
    async def read(self) -> RequestType:
        """Reads one message from the RPC.

        Only one read operation is allowed simultaneously.

        Returns:
          A request message of the RPC.

        Raises:
          An RpcError exception if the read failed.
        """

    @abc.abstractmethod
    async def write(self, message: ResponseType) -> None:
        """Writes one message to the RPC.

        Only one write operation is allowed simultaneously.

        Raises:
          An RpcError exception if the write failed.
        """

    @abc.abstractmethod
    async def send_initial_metadata(
        self, initial_metadata: MetadataType
    ) -> None:
        """Sends the initial metadata value to the client.

        This method need not be called by implementations if they have no
        metadata to add to what the gRPC runtime will transmit.

        Args:
          initial_metadata: The initial :term:`metadata`.
        """

    @abc.abstractmethod
    async def abort(
        self,
        code: grpc.StatusCode,
        details: str = "",
        trailing_metadata: MetadataType = tuple(),
    ) -> NoReturn:
        """Raises an exception to terminate the RPC with a non-OK status.

        The code and details passed as arguments will supersede any existing
        ones.

        Args:
          code: A StatusCode object to be sent to the client.
            It must not be StatusCode.OK.
          details: A UTF-8-encodable string to be sent to the client upon
            termination of the RPC.
          trailing_metadata: A sequence of tuple represents the trailing
            :term:`metadata`.

        Raises:
          Exception: An exception is always raised to signal the abortion the
            RPC to the gRPC runtime.
        """

    @abc.abstractmethod
    def set_trailing_metadata(self, trailing_metadata: MetadataType) -> None:
        """Sends the trailing metadata for the RPC.

        This method need not be called by implementations if they have no
        metadata to add to what the gRPC runtime will transmit.

        Args:
          trailing_metadata: The trailing :term:`metadata`.
        """

    @abc.abstractmethod
    def invocation_metadata(self) -> Optional[MetadataType]:
        """Accesses the metadata sent by the client.

        Returns:
          The invocation :term:`metadata`.
        """

    @abc.abstractmethod
    def set_code(self, code: grpc.StatusCode) -> None:
        """Sets the value to be used as status code upon RPC completion.

        This method need not be called by method implementations if they wish
        the gRPC runtime to determine the status code of the RPC.

        Args:
          code: A StatusCode object to be sent to the client.
        """

    @abc.abstractmethod
    def set_details(self, details: str) -> None:
        """Sets the value to be used the as detail string upon RPC completion.

        This method need not be called by method implementations if they have
        no details to transmit.

        Args:
          details: A UTF-8-encodable string to be sent to the client upon
            termination of the RPC.
        """

    @abc.abstractmethod
    def set_compression(self, compression: grpc.Compression) -> None:
        """Set the compression algorithm to be used for the entire call.

        Args:
          compression: An element of grpc.compression, e.g.
            grpc.compression.Gzip.
        """

    @abc.abstractmethod
    def disable_next_message_compression(self) -> None:
        """Disables compression for the next response message.

        This method will override any compression configuration set during
        server creation or set on the call.
        """

    @abc.abstractmethod
    def peer(self) -> str:
        """Identifies the peer that invoked the RPC being serviced.

        Returns:
          A string identifying the peer that invoked the RPC being serviced.
          The string format is determined by gRPC runtime.
        """

    @abc.abstractmethod
    def peer_identities(self) -> Optional[Iterable[bytes]]:
        """Gets one or more peer identity(s).

        Equivalent to
        servicer_context.auth_context().get(servicer_context.peer_identity_key())

        Returns:
          An iterable of the identities, or None if the call is not
          authenticated. Each identity is returned as a raw bytes type.
        """

    @abc.abstractmethod
    def peer_identity_key(self) -> Optional[str]:
        """The auth property used to identify the peer.

        For example, "x509_common_name" or "x509_subject_alternative_name" are
        used to identify an SSL peer.

        Returns:
          The auth property (string) that indicates the
          peer identity, or None if the call is not authenticated.
        """

    @abc.abstractmethod
    def auth_context(self) -> Mapping[str, Iterable[bytes]]:
        """Gets the auth context for the call.

        Returns:
          A map of strings to an iterable of bytes for each auth property.
        """

    # NOTE: not abstract — a default (docstring-only, returns None) body is
    # provided, so subclasses are not forced to implement it.
    def time_remaining(self) -> Optional[float]:
        """Describes the length of allowed time remaining for the RPC.

        Returns:
          A nonnegative float indicating the length of allowed time in seconds
          remaining for the RPC to complete before it is considered to have
          timed out, or None if no deadline was specified for the RPC.
        """

    def trailing_metadata(self):
        """Access value to be used as trailing metadata upon RPC completion.

        This is an EXPERIMENTAL API.

        Returns:
          The trailing :term:`metadata` for the RPC.
        """
        raise NotImplementedError()

    def code(self):
        """Accesses the value to be used as status code upon RPC completion.

        This is an EXPERIMENTAL API.

        Returns:
          The StatusCode value for the RPC.
        """
        raise NotImplementedError()

    def details(self):
        """Accesses the value to be used as detail string upon RPC completion.

        This is an EXPERIMENTAL API.

        Returns:
          The details string of the RPC.
        """
        raise NotImplementedError()

    def add_done_callback(self, callback: DoneCallbackType) -> None:
        """Registers a callback to be called on RPC termination.

        This is an EXPERIMENTAL API.

        Note: default implementation is a no-op.

        Args:
          callback: A callable object will be called with the servicer context
            object as its only argument.
        """

    def cancelled(self) -> bool:
        """Return True if the RPC is cancelled.

        The RPC is cancelled when the cancellation was requested with cancel().

        This is an EXPERIMENTAL API.

        Returns:
          A bool indicates whether the RPC is cancelled or not.
        """

    def done(self) -> bool:
        """Return True if the RPC is done.

        An RPC is done if the RPC is completed, cancelled or aborted.

        This is an EXPERIMENTAL API.

        Returns:
          A bool indicates if the RPC is done.
        """
lib/python3.10/site-packages/grpc/aio/_channel.py ADDED
@@ -0,0 +1,627 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Invocation-side implementation of gRPC Asyncio Python."""
15
+
16
+ import asyncio
17
+ import sys
18
+ from typing import Any, Iterable, List, Optional, Sequence
19
+
20
+ import grpc
21
+ from grpc import _common
22
+ from grpc import _compression
23
+ from grpc import _grpcio_metadata
24
+ from grpc._cython import cygrpc
25
+
26
+ from . import _base_call
27
+ from . import _base_channel
28
+ from ._call import StreamStreamCall
29
+ from ._call import StreamUnaryCall
30
+ from ._call import UnaryStreamCall
31
+ from ._call import UnaryUnaryCall
32
+ from ._interceptor import ClientInterceptor
33
+ from ._interceptor import InterceptedStreamStreamCall
34
+ from ._interceptor import InterceptedStreamUnaryCall
35
+ from ._interceptor import InterceptedUnaryStreamCall
36
+ from ._interceptor import InterceptedUnaryUnaryCall
37
+ from ._interceptor import StreamStreamClientInterceptor
38
+ from ._interceptor import StreamUnaryClientInterceptor
39
+ from ._interceptor import UnaryStreamClientInterceptor
40
+ from ._interceptor import UnaryUnaryClientInterceptor
41
+ from ._metadata import Metadata
42
+ from ._typing import ChannelArgumentType
43
+ from ._typing import DeserializingFunction
44
+ from ._typing import MetadataType
45
+ from ._typing import RequestIterableType
46
+ from ._typing import RequestType
47
+ from ._typing import ResponseType
48
+ from ._typing import SerializingFunction
49
+ from ._utils import _timeout_to_deadline
50
+
51
_USER_AGENT = "grpc-python-asyncio/{}".format(_grpcio_metadata.__version__)

# `asyncio.Task.all_tasks()` was deprecated in favor of the module-level
# `asyncio.all_tasks()` in Python 3.7. Compare the full version tuple rather
# than only the minor component (`sys.version_info[1]`), which would select
# the wrong branch for any major version other than 3.
if sys.version_info < (3, 7):

    def _all_tasks() -> Iterable[asyncio.Task]:
        """Returns all tasks of the current event loop (pre-3.7 spelling)."""
        return asyncio.Task.all_tasks()  # pylint: disable=no-member

else:

    def _all_tasks() -> Iterable[asyncio.Task]:
        """Returns all unfinished tasks of the running event loop."""
        return asyncio.all_tasks()
62
+
63
+
64
def _augment_channel_arguments(
    base_options: ChannelArgumentType, compression: Optional[grpc.Compression]
):
    """Appends implementation-supplied channel arguments to user options.

    Adds the (optional) default-compression channel argument and the asyncio
    user-agent string to the caller-provided options tuple.
    """
    extra_arguments = _compression.create_channel_option(compression) + (
        (cygrpc.ChannelArgKey.primary_user_agent_string, _USER_AGENT),
    )
    return tuple(base_options) + extra_arguments
81
+
82
+
83
class _BaseMultiCallable:
    """Base class of all multi callable objects.

    Handles the initialization logic and stores common attributes.
    """

    # NOTE: the original declared `_loop` twice; the duplicate annotation
    # has been removed.
    _loop: asyncio.AbstractEventLoop
    _channel: cygrpc.AioChannel
    _method: bytes
    _request_serializer: SerializingFunction
    _response_deserializer: DeserializingFunction
    _interceptors: Optional[Sequence[ClientInterceptor]]
    _references: List[Any]

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        channel: cygrpc.AioChannel,
        method: bytes,
        request_serializer: SerializingFunction,
        response_deserializer: DeserializingFunction,
        interceptors: Optional[Sequence[ClientInterceptor]],
        references: List[Any],
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        """Stores the per-method call configuration shared by every call kind.

        Args:
          channel: The Cython channel the calls will run on.
          method: The fully-qualified method name, already encoded to bytes.
          request_serializer: Callable serializing request messages.
          response_deserializer: Callable deserializing response messages.
          interceptors: Optional client interceptors applied to each call.
          references: Objects kept alive for the lifetime of this callable
            (e.g. the owning Channel).
          loop: The asyncio event loop driving the calls.
        """
        self._loop = loop
        self._channel = channel
        self._method = method
        self._request_serializer = request_serializer
        self._response_deserializer = response_deserializer
        self._interceptors = interceptors
        self._references = references

    @staticmethod
    def _init_metadata(
        metadata: Optional[MetadataType] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> Metadata:
        """Based on the provided values for <metadata> or <compression> initialise the final
        metadata, as it should be used for the current call.
        """
        metadata = metadata or Metadata()
        # Callers may pass a plain tuple of key/value pairs; normalize it.
        if not isinstance(metadata, Metadata) and isinstance(metadata, tuple):
            metadata = Metadata.from_tuple(metadata)
        if compression:
            # Fold the per-call compression request into the metadata.
            metadata = Metadata(
                *_compression.augment_metadata(metadata, compression)
            )
        return metadata
133
+
134
+
135
class UnaryUnaryMultiCallable(
    _BaseMultiCallable, _base_channel.UnaryUnaryMultiCallable
):
    """Factory for unary-unary calls on a single method."""

    def __call__(
        self,
        request: RequestType,
        *,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _base_call.UnaryUnaryCall[RequestType, ResponseType]:
        """Starts a unary-unary RPC and returns its call object."""
        metadata = self._init_metadata(metadata, compression)
        # Arguments shared by the plain and the intercepted call paths.
        shared_args = (
            credentials,
            wait_for_ready,
            self._channel,
            self._method,
            self._request_serializer,
            self._response_deserializer,
            self._loop,
        )
        if self._interceptors:
            # The intercepted call converts the timeout to a deadline itself.
            return InterceptedUnaryUnaryCall(
                self._interceptors,
                request,
                timeout,
                metadata,
                *shared_args,
            )
        return UnaryUnaryCall(
            request,
            _timeout_to_deadline(timeout),
            metadata,
            *shared_args,
        )
178
+
179
+
180
class UnaryStreamMultiCallable(
    _BaseMultiCallable, _base_channel.UnaryStreamMultiCallable
):
    """Factory for unary-stream calls on a single method."""

    def __call__(
        self,
        request: RequestType,
        *,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _base_call.UnaryStreamCall[RequestType, ResponseType]:
        """Starts a unary-stream RPC and returns its call object."""
        metadata = self._init_metadata(metadata, compression)
        # Arguments shared by the plain and the intercepted call paths.
        shared_args = (
            credentials,
            wait_for_ready,
            self._channel,
            self._method,
            self._request_serializer,
            self._response_deserializer,
            self._loop,
        )
        if self._interceptors:
            # The intercepted call converts the timeout to a deadline itself.
            return InterceptedUnaryStreamCall(
                self._interceptors,
                request,
                timeout,
                metadata,
                *shared_args,
            )
        return UnaryStreamCall(
            request,
            _timeout_to_deadline(timeout),
            metadata,
            *shared_args,
        )
224
+
225
+
226
class StreamUnaryMultiCallable(
    _BaseMultiCallable, _base_channel.StreamUnaryMultiCallable
):
    """Factory for stream-unary calls on a single method."""

    def __call__(
        self,
        request_iterator: Optional[RequestIterableType] = None,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _base_call.StreamUnaryCall:
        """Starts a stream-unary RPC and returns its call object."""
        metadata = self._init_metadata(metadata, compression)
        # Arguments shared by the plain and the intercepted call paths.
        shared_args = (
            credentials,
            wait_for_ready,
            self._channel,
            self._method,
            self._request_serializer,
            self._response_deserializer,
            self._loop,
        )
        if self._interceptors:
            # The intercepted call converts the timeout to a deadline itself.
            return InterceptedStreamUnaryCall(
                self._interceptors,
                request_iterator,
                timeout,
                metadata,
                *shared_args,
            )
        return StreamUnaryCall(
            request_iterator,
            _timeout_to_deadline(timeout),
            metadata,
            *shared_args,
        )
269
+
270
+
271
class StreamStreamMultiCallable(
    _BaseMultiCallable, _base_channel.StreamStreamMultiCallable
):
    """Factory for stream-stream calls on a single method."""

    def __call__(
        self,
        request_iterator: Optional[RequestIterableType] = None,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _base_call.StreamStreamCall:
        """Starts a stream-stream RPC and returns its call object."""
        metadata = self._init_metadata(metadata, compression)
        # Arguments shared by the plain and the intercepted call paths.
        shared_args = (
            credentials,
            wait_for_ready,
            self._channel,
            self._method,
            self._request_serializer,
            self._response_deserializer,
            self._loop,
        )
        if self._interceptors:
            # The intercepted call converts the timeout to a deadline itself.
            return InterceptedStreamStreamCall(
                self._interceptors,
                request_iterator,
                timeout,
                metadata,
                *shared_args,
            )
        return StreamStreamCall(
            request_iterator,
            _timeout_to_deadline(timeout),
            metadata,
            *shared_args,
        )
314
+
315
+
316
class Channel(_base_channel.Channel):
    """Asyncio implementation of a gRPC channel.

    Wraps a cygrpc.AioChannel and sorts client interceptors by RPC kind so
    the per-method multi-callables can pick the matching list.
    """

    _loop: asyncio.AbstractEventLoop
    _channel: cygrpc.AioChannel
    _unary_unary_interceptors: List[UnaryUnaryClientInterceptor]
    _unary_stream_interceptors: List[UnaryStreamClientInterceptor]
    _stream_unary_interceptors: List[StreamUnaryClientInterceptor]
    _stream_stream_interceptors: List[StreamStreamClientInterceptor]

    def __init__(
        self,
        target: str,
        options: ChannelArgumentType,
        credentials: Optional[grpc.ChannelCredentials],
        compression: Optional[grpc.Compression],
        interceptors: Optional[Sequence[ClientInterceptor]],
    ):
        """Constructor.

        Args:
          target: The target to which to connect.
          options: Configuration options for the channel.
          credentials: A cygrpc.ChannelCredentials or None.
          compression: An optional value indicating the compression method to be
            used over the lifetime of the channel.
          interceptors: An optional list of interceptors that would be used for
            intercepting any RPC executed with that channel.

        Raises:
          ValueError: If an interceptor is not one of the four recognized
            client-interceptor kinds.
        """
        self._unary_unary_interceptors = []
        self._unary_stream_interceptors = []
        self._stream_unary_interceptors = []
        self._stream_stream_interceptors = []

        # Bucket each interceptor by the RPC cardinality it handles; an
        # object matching none of the four kinds is a programming error.
        if interceptors is not None:
            for interceptor in interceptors:
                if isinstance(interceptor, UnaryUnaryClientInterceptor):
                    self._unary_unary_interceptors.append(interceptor)
                elif isinstance(interceptor, UnaryStreamClientInterceptor):
                    self._unary_stream_interceptors.append(interceptor)
                elif isinstance(interceptor, StreamUnaryClientInterceptor):
                    self._stream_unary_interceptors.append(interceptor)
                elif isinstance(interceptor, StreamStreamClientInterceptor):
                    self._stream_stream_interceptors.append(interceptor)
                else:
                    raise ValueError(
                        "Interceptor {} must be ".format(interceptor)
                        + "{} or ".format(UnaryUnaryClientInterceptor.__name__)
                        + "{} or ".format(UnaryStreamClientInterceptor.__name__)
                        + "{} or ".format(StreamUnaryClientInterceptor.__name__)
                        + "{}. ".format(StreamStreamClientInterceptor.__name__)
                    )

        self._loop = cygrpc.get_working_loop()
        self._channel = cygrpc.AioChannel(
            _common.encode(target),
            _augment_channel_arguments(options, compression),
            credentials,
            self._loop,
        )

    async def __aenter__(self):
        """Enters the async context manager; the channel itself is the value."""
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Closes the channel on context exit with no grace period."""
        await self._close(None)

    async def _close(self, grace):  # pylint: disable=too-many-branches
        """Closes the channel, optionally waiting `grace` seconds for in-flight
        calls before cancelling whatever is left."""
        if self._channel.closed():
            return

        # No new calls will be accepted by the Cython channel.
        self._channel.closing()

        # Iterate through running tasks
        tasks = _all_tasks()
        calls = []
        call_tasks = []
        for task in tasks:
            try:
                stack = task.get_stack(limit=1)
            except AttributeError as attribute_error:
                # NOTE(lidiz) tl;dr: If the Task is created with a CPython
                # object, it will trigger AttributeError.
                #
                # In the global finalizer, the event loop schedules
                # a CPython PyAsyncGenAThrow object.
                # https://github.com/python/cpython/blob/00e45877e33d32bb61aa13a2033e3bba370bda4d/Lib/asyncio/base_events.py#L484
                #
                # However, the PyAsyncGenAThrow object is written in C and
                # failed to include the normal Python frame objects. Hence,
                # this exception is a false negative, and it is safe to ignore
                # the failure. It is fixed by https://github.com/python/cpython/pull/18669,
                # but not available until 3.9 or 3.8.3. So, we have to keep it
                # for a while.
                # TODO(lidiz) drop this hack after 3.8 deprecation
                if "frame" in str(attribute_error):
                    continue
                else:
                    raise

            # If the Task is created by a C-extension, the stack will be empty.
            if not stack:
                continue

            # Locate ones created by `aio.Call`.
            frame = stack[0]
            candidate = frame.f_locals.get("self")
            # Explicitly check for a non-null candidate instead of the more pythonic 'if candidate:'
            # because doing 'if candidate:' assumes that the coroutine implements '__bool__' which
            # might not always be the case.
            if candidate is not None:
                if isinstance(candidate, _base_call.Call):
                    if hasattr(candidate, "_channel"):
                        # For intercepted Call object
                        if candidate._channel is not self._channel:
                            continue
                    elif hasattr(candidate, "_cython_call"):
                        # For normal Call object
                        if candidate._cython_call._channel is not self._channel:
                            continue
                    else:
                        # Unidentified Call object
                        raise cygrpc.InternalError(
                            f"Unrecognized call object: {candidate}"
                        )

                    calls.append(candidate)
                    call_tasks.append(task)

        # If needed, try to wait for them to finish.
        # Call objects are not always awaitables.
        if grace and call_tasks:
            await asyncio.wait(call_tasks, timeout=grace)

        # Time to cancel existing calls.
        for call in calls:
            call.cancel()

        # Destroy the channel
        self._channel.close()

    async def close(self, grace: Optional[float] = None):
        """Closes this channel, cancelling outstanding calls after `grace`
        seconds (or immediately when grace is None)."""
        await self._close(grace)

    def __del__(self):
        # Best-effort cleanup: `_channel` may be missing if __init__ raised
        # before assigning it.
        if hasattr(self, "_channel"):
            if not self._channel.closed():
                self._channel.close()

    def get_state(
        self, try_to_connect: bool = False
    ) -> grpc.ChannelConnectivity:
        """Returns the current connectivity state, optionally kicking off a
        connection attempt."""
        result = self._channel.check_connectivity_state(try_to_connect)
        return _common.CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[result]

    async def wait_for_state_change(
        self,
        last_observed_state: grpc.ChannelConnectivity,
    ) -> None:
        """Blocks until the connectivity state differs from the observed one.

        The underlying watch is given no deadline (None), so the assert on a
        truthy result documents that the watch is expected to fire, not time
        out.
        """
        assert await self._channel.watch_connectivity_state(
            last_observed_state.value[0], None
        )

    async def channel_ready(self) -> None:
        """Waits until the channel reaches the READY connectivity state."""
        state = self.get_state(try_to_connect=True)
        while state != grpc.ChannelConnectivity.READY:
            await self.wait_for_state_change(state)
            state = self.get_state(try_to_connect=True)

    # TODO(xuanwn): Implement this method after we have
    # observability for Asyncio.
    def _get_registered_call_handle(self, method: str) -> int:
        pass

    # TODO(xuanwn): Implement _registered_method after we have
    # observability for Asyncio.
    # pylint: disable=arguments-differ,unused-argument
    def unary_unary(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> UnaryUnaryMultiCallable:
        """Creates a UnaryUnaryMultiCallable for the named method."""
        return UnaryUnaryMultiCallable(
            self._channel,
            _common.encode(method),
            request_serializer,
            response_deserializer,
            self._unary_unary_interceptors,
            [self],
            self._loop,
        )

    # TODO(xuanwn): Implement _registered_method after we have
    # observability for Asyncio.
    # pylint: disable=arguments-differ,unused-argument
    def unary_stream(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> UnaryStreamMultiCallable:
        """Creates a UnaryStreamMultiCallable for the named method."""
        return UnaryStreamMultiCallable(
            self._channel,
            _common.encode(method),
            request_serializer,
            response_deserializer,
            self._unary_stream_interceptors,
            [self],
            self._loop,
        )

    # TODO(xuanwn): Implement _registered_method after we have
    # observability for Asyncio.
    # pylint: disable=arguments-differ,unused-argument
    def stream_unary(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> StreamUnaryMultiCallable:
        """Creates a StreamUnaryMultiCallable for the named method."""
        return StreamUnaryMultiCallable(
            self._channel,
            _common.encode(method),
            request_serializer,
            response_deserializer,
            self._stream_unary_interceptors,
            [self],
            self._loop,
        )

    # TODO(xuanwn): Implement _registered_method after we have
    # observability for Asyncio.
    # pylint: disable=arguments-differ,unused-argument
    def stream_stream(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> StreamStreamMultiCallable:
        """Creates a StreamStreamMultiCallable for the named method."""
        return StreamStreamMultiCallable(
            self._channel,
            _common.encode(method),
            request_serializer,
            response_deserializer,
            self._stream_stream_interceptors,
            [self],
            self._loop,
        )
568
+
569
+
570
def insecure_channel(
    target: str,
    options: Optional[ChannelArgumentType] = None,
    compression: Optional[grpc.Compression] = None,
    interceptors: Optional[Sequence[ClientInterceptor]] = None,
):
    """Creates an insecure asynchronous Channel to a server.

    Args:
      target: The server address
      options: An optional list of key-value pairs (:term:`channel_arguments`
        in gRPC Core runtime) to configure the channel.
      compression: An optional value indicating the compression method to be
        used over the lifetime of the channel.
      interceptors: An optional sequence of interceptors that will be executed for
        any call executed with this channel.

    Returns:
      A Channel.
    """
    if options is None:
        options = ()
    return Channel(target, options, None, compression, interceptors)
597
+
598
+
599
def secure_channel(
    target: str,
    credentials: grpc.ChannelCredentials,
    options: Optional[ChannelArgumentType] = None,
    compression: Optional[grpc.Compression] = None,
    interceptors: Optional[Sequence[ClientInterceptor]] = None,
):
    """Creates a secure asynchronous Channel to a server.

    Args:
      target: The server address.
      credentials: A ChannelCredentials instance.
      options: An optional list of key-value pairs (:term:`channel_arguments`
        in gRPC Core runtime) to configure the channel.
      compression: An optional value indicating the compression method to be
        used over the lifetime of the channel.
      interceptors: An optional sequence of interceptors that will be executed for
        any call executed with this channel.

    Returns:
      An aio.Channel.
    """
    if options is None:
        options = ()
    # Unwrap the Cython-level credentials held by the public wrapper object.
    return Channel(
        target, options, credentials._credentials, compression, interceptors
    )
lib/python3.10/site-packages/grpc/aio/_interceptor.py ADDED
@@ -0,0 +1,1178 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Interceptors implementation of gRPC Asyncio Python."""
15
+ from abc import ABCMeta
16
+ from abc import abstractmethod
17
+ import asyncio
18
+ import collections
19
+ import functools
20
+ from typing import (
21
+ AsyncIterable,
22
+ Awaitable,
23
+ Callable,
24
+ Iterator,
25
+ List,
26
+ Optional,
27
+ Sequence,
28
+ Union,
29
+ )
30
+
31
+ import grpc
32
+ from grpc._cython import cygrpc
33
+
34
+ from . import _base_call
35
+ from ._call import AioRpcError
36
+ from ._call import StreamStreamCall
37
+ from ._call import StreamUnaryCall
38
+ from ._call import UnaryStreamCall
39
+ from ._call import UnaryUnaryCall
40
+ from ._call import _API_STYLE_ERROR
41
+ from ._call import _RPC_ALREADY_FINISHED_DETAILS
42
+ from ._call import _RPC_HALF_CLOSED_DETAILS
43
+ from ._metadata import Metadata
44
+ from ._typing import DeserializingFunction
45
+ from ._typing import DoneCallbackType
46
+ from ._typing import EOFType
47
+ from ._typing import RequestIterableType
48
+ from ._typing import RequestType
49
+ from ._typing import ResponseIterableType
50
+ from ._typing import ResponseType
51
+ from ._typing import SerializingFunction
52
+ from ._utils import _timeout_to_deadline
53
+
54
# Status details reported when the RPC is cancelled by the local application
# (e.g. via `cancel()` or task cancellation) rather than by the server.
_LOCAL_CANCELLATION_DETAILS = "Locally cancelled by application!"
55
+
56
+
57
class ServerInterceptor(metaclass=ABCMeta):
    """Affords intercepting incoming RPCs on the service-side.

    This is an EXPERIMENTAL API.
    """

    # Abstract coroutine: concrete server interceptors must override this.
    @abstractmethod
    async def intercept_service(
        self,
        continuation: Callable[
            [grpc.HandlerCallDetails], Awaitable[grpc.RpcMethodHandler]
        ],
        handler_call_details: grpc.HandlerCallDetails,
    ) -> grpc.RpcMethodHandler:
        """Intercepts incoming RPCs before handing them over to a handler.

        State can be passed from an interceptor to downstream interceptors
        via contextvars. The first interceptor is called from an empty
        contextvars.Context, and the same Context is used for downstream
        interceptors and for the final handler call. Note that there are no
        guarantees that interceptors and handlers will be called from the
        same thread.

        Args:
          continuation: A function that takes a HandlerCallDetails and
            proceeds to invoke the next interceptor in the chain, if any,
            or the RPC handler lookup logic, with the call details passed
            as an argument, and returns an RpcMethodHandler instance if
            the RPC is considered serviced, or None otherwise.
          handler_call_details: A HandlerCallDetails describing the RPC.

        Returns:
          An RpcMethodHandler with which the RPC may be serviced if the
          interceptor chooses to service this RPC, or None otherwise.
        """
92
+
93
+
94
class ClientCallDetails(
    collections.namedtuple(
        "ClientCallDetails",
        ("method", "timeout", "metadata", "credentials", "wait_for_ready"),
    ),
    grpc.ClientCallDetails,
):
    """Describes an RPC to be invoked.

    This is an EXPERIMENTAL API.

    Args:
      method: The method name of the RPC.
      timeout: An optional duration of time in seconds to allow for the RPC.
      metadata: Optional metadata to be transmitted to the service-side of
        the RPC.
      credentials: An optional CallCredentials for the RPC.
      wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
    """

    # Re-declared field annotations give static type checkers precise types
    # on top of the otherwise untyped namedtuple base.
    method: str
    timeout: Optional[float]
    metadata: Optional[Metadata]
    credentials: Optional[grpc.CallCredentials]
    wait_for_ready: Optional[bool]
119
+
120
+
121
# Marker base class: carries no behavior of its own; the per-arity abstract
# subclasses below define the actual interception hooks.
class ClientInterceptor(metaclass=ABCMeta):
    """Base class used for all Aio Client Interceptor classes"""
123
+
124
+
125
class UnaryUnaryClientInterceptor(ClientInterceptor, metaclass=ABCMeta):
    """Affords intercepting unary-unary invocations."""

    @abstractmethod
    async def intercept_unary_unary(
        self,
        continuation: Callable[
            [ClientCallDetails, RequestType], UnaryUnaryCall
        ],
        client_call_details: ClientCallDetails,
        request: RequestType,
    ) -> Union[UnaryUnaryCall, ResponseType]:
        """Intercepts a unary-unary invocation asynchronously.

        Args:
          continuation: A coroutine that proceeds with the invocation by
            executing the next interceptor in the chain or invoking the
            actual RPC on the underlying Channel. It is the interceptor's
            responsibility to call it if it decides to move the RPC forward.
            The interceptor can use
            `call = await continuation(client_call_details, request)`
            to continue with the RPC. `continuation` returns the call to the
            RPC.
          client_call_details: A ClientCallDetails object describing the
            outgoing RPC.
          request: The request value for the RPC.

        Returns:
          An object with the RPC response.

        Raises:
          AioRpcError: Indicating that the RPC terminated with non-OK status.
          asyncio.CancelledError: Indicating that the RPC was canceled.
        """
159
+
160
+
161
class UnaryStreamClientInterceptor(ClientInterceptor, metaclass=ABCMeta):
    """Affords intercepting unary-stream invocations."""

    @abstractmethod
    async def intercept_unary_stream(
        self,
        continuation: Callable[
            [ClientCallDetails, RequestType], UnaryStreamCall
        ],
        client_call_details: ClientCallDetails,
        request: RequestType,
    ) -> Union[ResponseIterableType, UnaryStreamCall]:
        """Intercepts a unary-stream invocation asynchronously.

        The function could return the call object or an asynchronous
        iterator; in case of being an asynchronous iterator this will
        become the source of the reads done by the caller.

        Args:
          continuation: A coroutine that proceeds with the invocation by
            executing the next interceptor in the chain or invoking the
            actual RPC on the underlying Channel. It is the interceptor's
            responsibility to call it if it decides to move the RPC forward.
            The interceptor can use
            `call = await continuation(client_call_details, request)`
            to continue with the RPC. `continuation` returns the call to the
            RPC.
          client_call_details: A ClientCallDetails object describing the
            outgoing RPC.
          request: The request value for the RPC.

        Returns:
          The RPC Call or an asynchronous iterator.

        Raises:
          AioRpcError: Indicating that the RPC terminated with non-OK status.
          asyncio.CancelledError: Indicating that the RPC was canceled.
        """
199
+
200
+
201
class StreamUnaryClientInterceptor(ClientInterceptor, metaclass=ABCMeta):
    """Affords intercepting stream-unary invocations."""

    @abstractmethod
    async def intercept_stream_unary(
        self,
        continuation: Callable[
            [ClientCallDetails, RequestType], StreamUnaryCall
        ],
        client_call_details: ClientCallDetails,
        request_iterator: RequestIterableType,
    ) -> StreamUnaryCall:
        """Intercepts a stream-unary invocation asynchronously.

        Within the interceptor the usage of the call methods like `write` or
        even awaiting the call should be done carefully, since the caller
        could be expecting an untouched call, for example to start writing
        messages to it.

        Args:
          continuation: A coroutine that proceeds with the invocation by
            executing the next interceptor in the chain or invoking the
            actual RPC on the underlying Channel. It is the interceptor's
            responsibility to call it if it decides to move the RPC forward.
            The interceptor can use
            `call = await continuation(client_call_details, request_iterator)`
            to continue with the RPC. `continuation` returns the call to the
            RPC.
          client_call_details: A ClientCallDetails object describing the
            outgoing RPC.
          request_iterator: The request iterator that will produce requests
            for the RPC.

        Returns:
          The RPC Call.

        Raises:
          AioRpcError: Indicating that the RPC terminated with non-OK status.
          asyncio.CancelledError: Indicating that the RPC was canceled.
        """
241
+
242
+
243
class StreamStreamClientInterceptor(ClientInterceptor, metaclass=ABCMeta):
    """Affords intercepting stream-stream invocations."""

    @abstractmethod
    async def intercept_stream_stream(
        self,
        continuation: Callable[
            [ClientCallDetails, RequestType], StreamStreamCall
        ],
        client_call_details: ClientCallDetails,
        request_iterator: RequestIterableType,
    ) -> Union[ResponseIterableType, StreamStreamCall]:
        """Intercepts a stream-stream invocation asynchronously.

        Within the interceptor the usage of the call methods like `write` or
        even awaiting the call should be done carefully, since the caller
        could be expecting an untouched call, for example to start writing
        messages to it.

        The function could return the call object or an asynchronous
        iterator; in case of being an asynchronous iterator this will
        become the source of the reads done by the caller.

        Args:
          continuation: A coroutine that proceeds with the invocation by
            executing the next interceptor in the chain or invoking the
            actual RPC on the underlying Channel. It is the interceptor's
            responsibility to call it if it decides to move the RPC forward.
            The interceptor can use
            `call = await continuation(client_call_details, request_iterator)`
            to continue with the RPC. `continuation` returns the call to the
            RPC.
          client_call_details: A ClientCallDetails object describing the
            outgoing RPC.
          request_iterator: The request iterator that will produce requests
            for the RPC.

        Returns:
          The RPC Call or an asynchronous iterator.

        Raises:
          AioRpcError: Indicating that the RPC terminated with non-OK status.
          asyncio.CancelledError: Indicating that the RPC was canceled.
        """
287
+
288
+
289
class InterceptedCall:
    """Base implementation for all intercepted call arities.

    Interceptors might have some work to do before the RPC invocation with
    the capacity of changing the invocation parameters, and some work to do
    after the RPC invocation with the capacity for accessing to the wrapped
    `UnaryUnaryCall`.

    It handles also early and later cancellations, when the RPC has not even
    started and the execution is still held by the interceptors or when the
    RPC has finished but again the execution is still held by the interceptors.

    Once the RPC is finally executed, all methods are finally done against the
    intercepted call, being at the same time the same call returned to the
    interceptors.

    As a base class for all of the interceptors implements the logic around
    final status, metadata and cancellation.
    """

    # Task running the interceptor chain; its result (once done) is the
    # underlying call object produced by the last interceptor.
    _interceptors_task: asyncio.Task
    # Done-callbacks registered before the underlying call exists; attached
    # (or fired immediately) once the interceptors task completes.
    # NOTE(review): declared as Sequence but mutated with append() below —
    # effectively a list.
    _pending_add_done_callbacks: Sequence[DoneCallbackType]

    def __init__(self, interceptors_task: asyncio.Task) -> None:
        self._interceptors_task = interceptors_task
        self._pending_add_done_callbacks = []
        self._interceptors_task.add_done_callback(
            self._fire_or_add_pending_done_callbacks
        )

    def __del__(self):
        # Best-effort cancellation when the wrapper is garbage collected.
        self.cancel()

    def _fire_or_add_pending_done_callbacks(
        self, interceptors_task: asyncio.Task
    ) -> None:
        # Runs once the interceptor chain finishes: either fire the queued
        # callbacks now (call already finished / chain failed) or forward
        # them to the real call.
        if not self._pending_add_done_callbacks:
            return

        call_completed = False

        try:
            call = interceptors_task.result()
            if call.done():
                call_completed = True
        except (AioRpcError, asyncio.CancelledError):
            # The chain itself failed or was cancelled: treat as completed.
            call_completed = True

        if call_completed:
            for callback in self._pending_add_done_callbacks:
                callback(self)
        else:
            # Wrap each callback so it receives this wrapper, not the
            # underlying call, when the call eventually completes.
            for callback in self._pending_add_done_callbacks:
                callback = functools.partial(
                    self._wrap_add_done_callback, callback
                )
                call.add_done_callback(callback)

        self._pending_add_done_callbacks = []

    def _wrap_add_done_callback(
        self, callback: DoneCallbackType, unused_call: _base_call.Call
    ) -> None:
        # Adapter: the underlying call passes itself; callers expect `self`.
        callback(self)

    def cancel(self) -> bool:
        if not self._interceptors_task.done():
            # There is no yet the intercepted call available,
            # Trying to cancel it by using the generic Asyncio
            # cancellation method.
            return self._interceptors_task.cancel()

        try:
            call = self._interceptors_task.result()
        except AioRpcError:
            # Already terminated with an error: nothing left to cancel.
            return False
        except asyncio.CancelledError:
            return False

        return call.cancel()

    def cancelled(self) -> bool:
        if not self._interceptors_task.done():
            return False

        try:
            call = self._interceptors_task.result()
        except AioRpcError as err:
            # Chain failed: cancelled only if the failure was CANCELLED.
            return err.code() == grpc.StatusCode.CANCELLED
        except asyncio.CancelledError:
            return True

        return call.cancelled()

    def done(self) -> bool:
        if not self._interceptors_task.done():
            return False

        try:
            call = self._interceptors_task.result()
        except (AioRpcError, asyncio.CancelledError):
            return True

        return call.done()

    def add_done_callback(self, callback: DoneCallbackType) -> None:
        if not self._interceptors_task.done():
            # Underlying call not available yet: queue for later attachment.
            self._pending_add_done_callbacks.append(callback)
            return

        try:
            call = self._interceptors_task.result()
        except (AioRpcError, asyncio.CancelledError):
            callback(self)
            return

        if call.done():
            callback(self)
        else:
            callback = functools.partial(self._wrap_add_done_callback, callback)
            call.add_done_callback(callback)

    def time_remaining(self) -> Optional[float]:
        raise NotImplementedError()

    async def initial_metadata(self) -> Optional[Metadata]:
        try:
            call = await self._interceptors_task
        except AioRpcError as err:
            return err.initial_metadata()
        except asyncio.CancelledError:
            return None

        return await call.initial_metadata()

    async def trailing_metadata(self) -> Optional[Metadata]:
        try:
            call = await self._interceptors_task
        except AioRpcError as err:
            return err.trailing_metadata()
        except asyncio.CancelledError:
            return None

        return await call.trailing_metadata()

    async def code(self) -> grpc.StatusCode:
        try:
            call = await self._interceptors_task
        except AioRpcError as err:
            return err.code()
        except asyncio.CancelledError:
            return grpc.StatusCode.CANCELLED

        return await call.code()

    async def details(self) -> str:
        try:
            call = await self._interceptors_task
        except AioRpcError as err:
            return err.details()
        except asyncio.CancelledError:
            return _LOCAL_CANCELLATION_DETAILS

        return await call.details()

    async def debug_error_string(self) -> Optional[str]:
        try:
            call = await self._interceptors_task
        except AioRpcError as err:
            return err.debug_error_string()
        except asyncio.CancelledError:
            return ""

        return await call.debug_error_string()

    async def wait_for_connection(self) -> None:
        # No error shielding here: failures of the chain propagate.
        call = await self._interceptors_task
        return await call.wait_for_connection()
467
+
468
+
469
+ class _InterceptedUnaryResponseMixin:
470
+ def __await__(self):
471
+ call = yield from self._interceptors_task.__await__()
472
+ response = yield from call.__await__()
473
+ return response
474
+
475
+
476
+ class _InterceptedStreamResponseMixin:
477
+ _response_aiter: Optional[AsyncIterable[ResponseType]]
478
+
479
+ def _init_stream_response_mixin(self) -> None:
480
+ # Is initialized later, otherwise if the iterator is not finally
481
+ # consumed a logging warning is emitted by Asyncio.
482
+ self._response_aiter = None
483
+
484
+ async def _wait_for_interceptor_task_response_iterator(
485
+ self,
486
+ ) -> ResponseType:
487
+ call = await self._interceptors_task
488
+ async for response in call:
489
+ yield response
490
+
491
+ def __aiter__(self) -> AsyncIterable[ResponseType]:
492
+ if self._response_aiter is None:
493
+ self._response_aiter = (
494
+ self._wait_for_interceptor_task_response_iterator()
495
+ )
496
+ return self._response_aiter
497
+
498
+ async def read(self) -> Union[EOFType, ResponseType]:
499
+ if self._response_aiter is None:
500
+ self._response_aiter = (
501
+ self._wait_for_interceptor_task_response_iterator()
502
+ )
503
+ try:
504
+ return await self._response_aiter.asend(None)
505
+ except StopAsyncIteration:
506
+ return cygrpc.EOF
507
+
508
+
509
class _InterceptedStreamRequestMixin:
    """Mixin providing the request-writing surface of an intercepted stream call.

    Supports two mutually exclusive API styles: either the caller supplies a
    request iterator up front, or the caller uses `write()`/`done_writing()`
    and this mixin synthesizes the iterator from an internal queue.
    """

    # Async generator proxying `write()`d requests (writer API style only).
    _write_to_iterator_async_gen: Optional[AsyncIterable[RequestType]]
    # Queue feeding the proxy generator; None means iterator API style.
    _write_to_iterator_queue: Optional[asyncio.Queue]
    # Task resolving the call's final status; used to unblock stuck writes.
    _status_code_task: Optional[asyncio.Task]

    # Sentinel pushed through the queue to terminate the proxy iterator.
    _FINISH_ITERATOR_SENTINEL = object()

    def _init_stream_request_mixin(
        self, request_iterator: Optional[RequestIterableType]
    ) -> RequestIterableType:
        if request_iterator is None:
            # We provide our own request iterator which is a proxy
            # of the futures writes that will be done by the caller.
            self._write_to_iterator_queue = asyncio.Queue(maxsize=1)
            self._write_to_iterator_async_gen = (
                self._proxy_writes_as_request_iterator()
            )
            self._status_code_task = None
            request_iterator = self._write_to_iterator_async_gen
        else:
            self._write_to_iterator_queue = None

        return request_iterator

    async def _proxy_writes_as_request_iterator(self):
        # Hold reads until the interceptor chain has produced the real call.
        await self._interceptors_task

        while True:
            value = await self._write_to_iterator_queue.get()
            if (
                value
                is _InterceptedStreamRequestMixin._FINISH_ITERATOR_SENTINEL
            ):
                break
            yield value

    async def _write_to_iterator_queue_interruptible(
        self, request: RequestType, call: InterceptedCall
    ):
        # Write the specified 'request' to the request iterator queue using the
        # specified 'call' to allow for interruption of the write in the case
        # of abrupt termination of the call.
        if self._status_code_task is None:
            self._status_code_task = self._loop.create_task(call.code())

        # Race the queue put against call termination so a dead call cannot
        # leave the writer blocked forever on a full queue.
        await asyncio.wait(
            (
                self._loop.create_task(
                    self._write_to_iterator_queue.put(request)
                ),
                self._status_code_task,
            ),
            return_when=asyncio.FIRST_COMPLETED,
        )

    async def write(self, request: RequestType) -> None:
        # If no queue was created it means that requests
        # should be expected through an iterators provided
        # by the caller.
        if self._write_to_iterator_queue is None:
            raise cygrpc.UsageError(_API_STYLE_ERROR)

        try:
            call = await self._interceptors_task
        except (asyncio.CancelledError, AioRpcError):
            raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)

        if call.done():
            raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)
        elif call._done_writing_flag:
            raise asyncio.InvalidStateError(_RPC_HALF_CLOSED_DETAILS)

        await self._write_to_iterator_queue_interruptible(request, call)

        # Re-check: the call may have terminated while the write was racing.
        if call.done():
            raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)

    async def done_writing(self) -> None:
        """Signal peer that client is done writing.

        This method is idempotent.
        """
        # If no queue was created it means that requests
        # should be expected through an iterators provided
        # by the caller.
        if self._write_to_iterator_queue is None:
            raise cygrpc.UsageError(_API_STYLE_ERROR)

        try:
            call = await self._interceptors_task
        except asyncio.CancelledError:
            raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)

        # Pushing the sentinel ends the proxy request iterator (half-close).
        await self._write_to_iterator_queue_interruptible(
            _InterceptedStreamRequestMixin._FINISH_ITERATOR_SENTINEL, call
        )
605
+
606
+
607
class InterceptedUnaryUnaryCall(
    _InterceptedUnaryResponseMixin, InterceptedCall, _base_call.UnaryUnaryCall
):
    """Used for running a `UnaryUnaryCall` wrapped by interceptors.

    The `__await__` method is proxied to the intercepted call only when
    the interceptor task is finished.
    """

    _loop: asyncio.AbstractEventLoop
    _channel: cygrpc.AioChannel

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        interceptors: Sequence[UnaryUnaryClientInterceptor],
        request: RequestType,
        timeout: Optional[float],
        metadata: Metadata,
        credentials: Optional[grpc.CallCredentials],
        wait_for_ready: Optional[bool],
        channel: cygrpc.AioChannel,
        method: bytes,
        request_serializer: SerializingFunction,
        response_deserializer: DeserializingFunction,
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        self._loop = loop
        self._channel = channel
        # Start the interceptor chain as a task; InterceptedCall wires the
        # rest of the call surface to this task's eventual result.
        interceptors_task = loop.create_task(
            self._invoke(
                interceptors,
                method,
                timeout,
                metadata,
                credentials,
                wait_for_ready,
                request,
                request_serializer,
                response_deserializer,
            )
        )
        super().__init__(interceptors_task)

    # pylint: disable=too-many-arguments
    async def _invoke(
        self,
        interceptors: Sequence[UnaryUnaryClientInterceptor],
        method: bytes,
        timeout: Optional[float],
        metadata: Optional[Metadata],
        credentials: Optional[grpc.CallCredentials],
        wait_for_ready: Optional[bool],
        request: RequestType,
        request_serializer: SerializingFunction,
        response_deserializer: DeserializingFunction,
    ) -> UnaryUnaryCall:
        """Run the RPC call wrapped in interceptors"""

        async def _run_interceptor(
            interceptors: List[UnaryUnaryClientInterceptor],
            client_call_details: ClientCallDetails,
            request: RequestType,
        ) -> _base_call.UnaryUnaryCall:
            if interceptors:
                # Each interceptor receives a continuation that runs the
                # remainder of the chain.
                continuation = functools.partial(
                    _run_interceptor, interceptors[1:]
                )
                call_or_response = await interceptors[0].intercept_unary_unary(
                    continuation, client_call_details, request
                )

                if isinstance(call_or_response, _base_call.UnaryUnaryCall):
                    return call_or_response
                else:
                    # The interceptor returned a bare response value: wrap it
                    # so it still exposes the Call interface.
                    return UnaryUnaryCallResponse(call_or_response)

            else:
                # Chain exhausted: perform the real RPC.
                return UnaryUnaryCall(
                    request,
                    _timeout_to_deadline(client_call_details.timeout),
                    client_call_details.metadata,
                    client_call_details.credentials,
                    client_call_details.wait_for_ready,
                    self._channel,
                    client_call_details.method,
                    request_serializer,
                    response_deserializer,
                    self._loop,
                )

        client_call_details = ClientCallDetails(
            method, timeout, metadata, credentials, wait_for_ready
        )
        return await _run_interceptor(
            list(interceptors), client_call_details, request
        )

    def time_remaining(self) -> Optional[float]:
        raise NotImplementedError()
707
+
708
+
709
class InterceptedUnaryStreamCall(
    _InterceptedStreamResponseMixin, InterceptedCall, _base_call.UnaryStreamCall
):
    """Used for running a `UnaryStreamCall` wrapped by interceptors."""

    _loop: asyncio.AbstractEventLoop
    _channel: cygrpc.AioChannel
    # Fix: this was previously written as an assignment
    # (`_last_returned_call_from_interceptors = Optional[...]`), which bound
    # the `typing.Optional[...]` object to the class attribute instead of
    # declaring a type annotation (PEP 526). `:` declares the intent; the
    # attribute itself is set to None in __init__.
    _last_returned_call_from_interceptors: Optional[_base_call.UnaryStreamCall]

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        interceptors: Sequence[UnaryStreamClientInterceptor],
        request: RequestType,
        timeout: Optional[float],
        metadata: Metadata,
        credentials: Optional[grpc.CallCredentials],
        wait_for_ready: Optional[bool],
        channel: cygrpc.AioChannel,
        method: bytes,
        request_serializer: SerializingFunction,
        response_deserializer: DeserializingFunction,
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        self._loop = loop
        self._channel = channel
        self._init_stream_response_mixin()
        self._last_returned_call_from_interceptors = None
        # Start the interceptor chain as a task; InterceptedCall wires the
        # rest of the call surface to this task's eventual result.
        interceptors_task = loop.create_task(
            self._invoke(
                interceptors,
                method,
                timeout,
                metadata,
                credentials,
                wait_for_ready,
                request,
                request_serializer,
                response_deserializer,
            )
        )
        super().__init__(interceptors_task)

    # pylint: disable=too-many-arguments
    async def _invoke(
        self,
        interceptors: Sequence[UnaryStreamClientInterceptor],
        method: bytes,
        timeout: Optional[float],
        metadata: Optional[Metadata],
        credentials: Optional[grpc.CallCredentials],
        wait_for_ready: Optional[bool],
        request: RequestType,
        request_serializer: SerializingFunction,
        response_deserializer: DeserializingFunction,
    ) -> UnaryStreamCall:
        """Run the RPC call wrapped in interceptors"""

        async def _run_interceptor(
            interceptors: List[UnaryStreamClientInterceptor],
            client_call_details: ClientCallDetails,
            request: RequestType,
        ) -> _base_call.UnaryStreamCall:
            if interceptors:
                # Each interceptor receives a continuation that runs the
                # remainder of the chain.
                continuation = functools.partial(
                    _run_interceptor, interceptors[1:]
                )

                call_or_response_iterator = await interceptors[
                    0
                ].intercept_unary_stream(
                    continuation, client_call_details, request
                )

                if isinstance(
                    call_or_response_iterator, _base_call.UnaryStreamCall
                ):
                    self._last_returned_call_from_interceptors = (
                        call_or_response_iterator
                    )
                else:
                    # The interceptor returned a response iterator: pair it
                    # with the most recent real call so Call methods still
                    # work while reads come from the iterator.
                    self._last_returned_call_from_interceptors = (
                        UnaryStreamCallResponseIterator(
                            self._last_returned_call_from_interceptors,
                            call_or_response_iterator,
                        )
                    )
                return self._last_returned_call_from_interceptors
            else:
                # Chain exhausted: perform the real RPC.
                self._last_returned_call_from_interceptors = UnaryStreamCall(
                    request,
                    _timeout_to_deadline(client_call_details.timeout),
                    client_call_details.metadata,
                    client_call_details.credentials,
                    client_call_details.wait_for_ready,
                    self._channel,
                    client_call_details.method,
                    request_serializer,
                    response_deserializer,
                    self._loop,
                )

                return self._last_returned_call_from_interceptors

        client_call_details = ClientCallDetails(
            method, timeout, metadata, credentials, wait_for_ready
        )
        return await _run_interceptor(
            list(interceptors), client_call_details, request
        )

    def time_remaining(self) -> Optional[float]:
        raise NotImplementedError()
822
+
823
+
824
class InterceptedStreamUnaryCall(
    _InterceptedUnaryResponseMixin,
    _InterceptedStreamRequestMixin,
    InterceptedCall,
    _base_call.StreamUnaryCall,
):
    """Used for running a `StreamUnaryCall` wrapped by interceptors.

    The `__await__` method is proxied to the intercepted call only when
    the interceptor task is finished.
    """

    _loop: asyncio.AbstractEventLoop
    _channel: cygrpc.AioChannel

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        interceptors: Sequence[StreamUnaryClientInterceptor],
        request_iterator: Optional[RequestIterableType],
        timeout: Optional[float],
        metadata: Metadata,
        credentials: Optional[grpc.CallCredentials],
        wait_for_ready: Optional[bool],
        channel: cygrpc.AioChannel,
        method: bytes,
        request_serializer: SerializingFunction,
        response_deserializer: DeserializingFunction,
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        self._loop = loop
        self._channel = channel
        # May replace a None iterator with the write()-backed proxy iterator.
        request_iterator = self._init_stream_request_mixin(request_iterator)
        # Start the interceptor chain as a task; InterceptedCall wires the
        # rest of the call surface to this task's eventual result.
        interceptors_task = loop.create_task(
            self._invoke(
                interceptors,
                method,
                timeout,
                metadata,
                credentials,
                wait_for_ready,
                request_iterator,
                request_serializer,
                response_deserializer,
            )
        )
        super().__init__(interceptors_task)

    # pylint: disable=too-many-arguments
    async def _invoke(
        self,
        interceptors: Sequence[StreamUnaryClientInterceptor],
        method: bytes,
        timeout: Optional[float],
        metadata: Optional[Metadata],
        credentials: Optional[grpc.CallCredentials],
        wait_for_ready: Optional[bool],
        request_iterator: RequestIterableType,
        request_serializer: SerializingFunction,
        response_deserializer: DeserializingFunction,
    ) -> StreamUnaryCall:
        """Run the RPC call wrapped in interceptors"""

        async def _run_interceptor(
            interceptors: Iterator[StreamUnaryClientInterceptor],
            client_call_details: ClientCallDetails,
            request_iterator: RequestIterableType,
        ) -> _base_call.StreamUnaryCall:
            if interceptors:
                # Each interceptor receives a continuation that runs the
                # remainder of the chain.
                continuation = functools.partial(
                    _run_interceptor, interceptors[1:]
                )

                return await interceptors[0].intercept_stream_unary(
                    continuation, client_call_details, request_iterator
                )
            else:
                # Chain exhausted: perform the real RPC.
                return StreamUnaryCall(
                    request_iterator,
                    _timeout_to_deadline(client_call_details.timeout),
                    client_call_details.metadata,
                    client_call_details.credentials,
                    client_call_details.wait_for_ready,
                    self._channel,
                    client_call_details.method,
                    request_serializer,
                    response_deserializer,
                    self._loop,
                )

        client_call_details = ClientCallDetails(
            method, timeout, metadata, credentials, wait_for_ready
        )
        return await _run_interceptor(
            list(interceptors), client_call_details, request_iterator
        )

    def time_remaining(self) -> Optional[float]:
        raise NotImplementedError()
923
+
924
+
925
class InterceptedStreamStreamCall(
    _InterceptedStreamResponseMixin,
    _InterceptedStreamRequestMixin,
    InterceptedCall,
    _base_call.StreamStreamCall,
):
    """Used for running a `StreamStreamCall` wrapped by interceptors."""

    _loop: asyncio.AbstractEventLoop
    _channel: cygrpc.AioChannel
    # Bug fix: this previously read `= Optional[...]`, which *assigned* the
    # typing object as a class attribute instead of declaring an annotation.
    _last_returned_call_from_interceptors: Optional[
        _base_call.StreamStreamCall
    ]

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        interceptors: Sequence[StreamStreamClientInterceptor],
        request_iterator: Optional[RequestIterableType],
        timeout: Optional[float],
        metadata: Metadata,
        credentials: Optional[grpc.CallCredentials],
        wait_for_ready: Optional[bool],
        channel: cygrpc.AioChannel,
        method: bytes,
        request_serializer: SerializingFunction,
        response_deserializer: DeserializingFunction,
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        self._loop = loop
        self._channel = channel
        self._init_stream_response_mixin()
        request_iterator = self._init_stream_request_mixin(request_iterator)
        self._last_returned_call_from_interceptors = None
        # Run the interceptor chain as a task; InterceptedCall takes
        # ownership of it (cancellation, done callbacks).
        interceptors_task = loop.create_task(
            self._invoke(
                interceptors,
                method,
                timeout,
                metadata,
                credentials,
                wait_for_ready,
                request_iterator,
                request_serializer,
                response_deserializer,
            )
        )
        super().__init__(interceptors_task)

    # pylint: disable=too-many-arguments
    async def _invoke(
        self,
        interceptors: Sequence[StreamStreamClientInterceptor],
        method: bytes,
        timeout: Optional[float],
        metadata: Optional[Metadata],
        credentials: Optional[grpc.CallCredentials],
        wait_for_ready: Optional[bool],
        request_iterator: RequestIterableType,
        request_serializer: SerializingFunction,
        response_deserializer: DeserializingFunction,
    ) -> StreamStreamCall:
        """Run the RPC call wrapped in interceptors."""

        async def _run_interceptor(
            interceptors: List[StreamStreamClientInterceptor],
            client_call_details: ClientCallDetails,
            request_iterator: RequestIterableType,
        ) -> _base_call.StreamStreamCall:
            if interceptors:
                # Each interceptor receives a continuation that invokes the
                # remainder of the chain.
                continuation = functools.partial(
                    _run_interceptor, interceptors[1:]
                )

                call_or_response_iterator = await interceptors[
                    0
                ].intercept_stream_stream(
                    continuation, client_call_details, request_iterator
                )

                if isinstance(
                    call_or_response_iterator, _base_call.StreamStreamCall
                ):
                    self._last_returned_call_from_interceptors = (
                        call_or_response_iterator
                    )
                else:
                    # The interceptor returned a bare response iterator; wrap
                    # it so callers still see a StreamStreamCall, delegating
                    # call state to the last real call produced by the chain.
                    self._last_returned_call_from_interceptors = (
                        StreamStreamCallResponseIterator(
                            self._last_returned_call_from_interceptors,
                            call_or_response_iterator,
                        )
                    )
                return self._last_returned_call_from_interceptors
            else:
                # End of the chain: issue the actual RPC.
                self._last_returned_call_from_interceptors = StreamStreamCall(
                    request_iterator,
                    _timeout_to_deadline(client_call_details.timeout),
                    client_call_details.metadata,
                    client_call_details.credentials,
                    client_call_details.wait_for_ready,
                    self._channel,
                    client_call_details.method,
                    request_serializer,
                    response_deserializer,
                    self._loop,
                )
                return self._last_returned_call_from_interceptors

        client_call_details = ClientCallDetails(
            method, timeout, metadata, credentials, wait_for_ready
        )
        return await _run_interceptor(
            list(interceptors), client_call_details, request_iterator
        )

    def time_remaining(self) -> Optional[float]:
        # Not supported for intercepted calls.
        raise NotImplementedError()
1043
+
1044
+
1045
class UnaryUnaryCallResponse(_base_call.UnaryUnaryCall):
    """Final UnaryUnaryCall class finished with a response.

    A lightweight stand-in for an already-completed RPC: it is always done,
    cannot be cancelled, and awaiting it yields the stored response
    immediately.
    """

    # The response the call completed with.
    _response: ResponseType

    def __init__(self, response: ResponseType) -> None:
        self._response = response

    def cancel(self) -> bool:
        # Already complete: cancellation is a no-op and reports failure.
        return False

    def cancelled(self) -> bool:
        return False

    def done(self) -> bool:
        # Finished by construction.
        return True

    def add_done_callback(self, unused_callback) -> None:
        raise NotImplementedError()

    def time_remaining(self) -> Optional[float]:
        raise NotImplementedError()

    async def initial_metadata(self) -> Optional[Metadata]:
        return None

    async def trailing_metadata(self) -> Optional[Metadata]:
        return None

    async def code(self) -> grpc.StatusCode:
        # A stored response implies the RPC succeeded.
        return grpc.StatusCode.OK

    async def details(self) -> str:
        return ""

    async def debug_error_string(self) -> Optional[str]:
        return None

    def __await__(self):
        if False:  # pylint: disable=using-constant-test
            # This code path is never used, but a yield statement is needed
            # for telling the interpreter that __await__ is a generator.
            yield None
        return self._response

    async def wait_for_connection(self) -> None:
        pass
1092
+
1093
+
1094
class _StreamCallResponseIterator:
    """Proxies call state to a wrapped call while substituting its response
    iterator with one supplied by an interceptor.
    """

    _call: Union[_base_call.UnaryStreamCall, _base_call.StreamStreamCall]
    _response_iterator: AsyncIterable[ResponseType]

    def __init__(
        self,
        call: Union[_base_call.UnaryStreamCall, _base_call.StreamStreamCall],
        response_iterator: AsyncIterable[ResponseType],
    ) -> None:
        self._response_iterator = response_iterator
        self._call = call

    def cancel(self) -> bool:
        return self._call.cancel()

    def cancelled(self) -> bool:
        return self._call.cancelled()

    def done(self) -> bool:
        return self._call.done()

    def add_done_callback(self, callback) -> None:
        self._call.add_done_callback(callback)

    def time_remaining(self) -> Optional[float]:
        return self._call.time_remaining()

    async def initial_metadata(self) -> Optional[Metadata]:
        return await self._call.initial_metadata()

    async def trailing_metadata(self) -> Optional[Metadata]:
        return await self._call.trailing_metadata()

    async def code(self) -> grpc.StatusCode:
        return await self._call.code()

    async def details(self) -> str:
        return await self._call.details()

    async def debug_error_string(self) -> Optional[str]:
        return await self._call.debug_error_string()

    def __aiter__(self):
        # Responses come from the substituted iterator, not from the call.
        return self._response_iterator.__aiter__()

    async def wait_for_connection(self) -> None:
        return await self._call.wait_for_connection()
1141
+
1142
+
1143
class UnaryStreamCallResponseIterator(
    _StreamCallResponseIterator, _base_call.UnaryStreamCall
):
    """UnaryStreamCall class which uses an alternative response iterator.

    Consumers are expected to use async iteration (``__aiter__`` from the
    base class); direct ``read()`` is intentionally unsupported.
    """

    async def read(self) -> Union[EOFType, ResponseType]:
        # Behind the scenes everything goes through the
        # async iterator. So this path should not be reached.
        raise NotImplementedError()
1152
+
1153
+
1154
class StreamStreamCallResponseIterator(
    _StreamCallResponseIterator, _base_call.StreamStreamCall
):
    """StreamStreamCall class which uses an alternative response iterator.

    Reads and writes go through the async iterators managed by the
    InterceptedStreamStreamCall; the direct methods below are unsupported.
    """

    async def read(self) -> Union[EOFType, ResponseType]:
        # Behind the scenes everything goes through the
        # async iterator. So this path should not be reached.
        raise NotImplementedError()

    async def write(self, request: RequestType) -> None:
        # Behind the scenes everything goes through the
        # async iterator provided by the InterceptedStreamStreamCall.
        # So this path should not be reached.
        raise NotImplementedError()

    async def done_writing(self) -> None:
        # Behind the scenes everything goes through the
        # async iterator provided by the InterceptedStreamStreamCall.
        # So this path should not be reached.
        raise NotImplementedError()

    @property
    def _done_writing_flag(self) -> bool:
        # Forwarded to the wrapped call's private flag so the request mixin
        # sees a consistent writing state.
        return self._call._done_writing_flag
lib/python3.10/site-packages/grpc/aio/_metadata.py ADDED
@@ -0,0 +1,137 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Implementation of the metadata abstraction for gRPC Asyncio Python."""
15
+ from collections import OrderedDict
16
+ from collections import abc
17
+ from typing import Any, Iterator, List, Optional, Tuple, Union
18
+
19
+ MetadataKey = str
20
+ MetadataValue = Union[str, bytes]
21
+
22
+
23
class Metadata(abc.Collection):
    """Metadata abstraction for asynchronous gRPC calls and interceptors.

    Conceptually a multimap from ``str`` keys to ordered lists of values:

    * a key may map to several values,
    * per-key value order is preserved,
    * subscript access returns the first value mapped to a key,
    * the data can be mutated in place without rebuilding the object.
    """

    def __init__(self, *args: Tuple[MetadataKey, MetadataValue]) -> None:
        # Key insertion order is preserved; each key maps to the list of
        # values in the order they were added.
        self._metadata = OrderedDict()
        for md_key, md_value in args:
            self.add(md_key, md_value)

    @classmethod
    def from_tuple(cls, raw_metadata: tuple):
        """Build a Metadata instance from a raw tuple of (key, value) pairs."""
        return cls(*raw_metadata) if raw_metadata else cls()

    def add(self, key: MetadataKey, value: MetadataValue) -> None:
        """Append ``value`` to the values mapped to ``key``."""
        self._metadata.setdefault(key, []).append(value)

    def __len__(self) -> int:
        """Total number of entries, counting repeated keys once per value."""
        return sum(len(values) for values in self._metadata.values())

    def __getitem__(self, key: MetadataKey) -> MetadataValue:
        """Return the first of the values mapped to ``key``."""
        try:
            return self._metadata[key][0]
        except (ValueError, IndexError) as exc:
            # Key present but with no values left: surface it as a KeyError.
            raise KeyError("{0!r}".format(key)) from exc

    def __setitem__(self, key: MetadataKey, value: MetadataValue) -> None:
        """``metadata[key] = value`` replaces the first value for ``key``."""
        if key in self._metadata:
            trailing = self.get_all(key)[1:]
            self._metadata[key] = [value, *trailing]
        else:
            self._metadata[key] = [value]

    def __delitem__(self, key: MetadataKey) -> None:
        """``del metadata[key]`` removes only the first value for ``key``."""
        remaining = self.get_all(key)
        if not remaining:
            raise KeyError(repr(key))
        self._metadata[key] = remaining[1:]

    def delete_all(self, key: MetadataKey) -> None:
        """Remove every value mapped to ``key``."""
        del self._metadata[key]

    def __iter__(self) -> Iterator[Tuple[MetadataKey, MetadataValue]]:
        # Flatten to (key, value) pairs, grouped by key in insertion order.
        return (
            (key, value)
            for key, values in self._metadata.items()
            for value in values
        )

    def keys(self) -> abc.KeysView:
        return abc.KeysView(self)

    def values(self) -> abc.ValuesView:
        return abc.ValuesView(self)

    def items(self) -> abc.ItemsView:
        return abc.ItemsView(self)

    def get(
        self, key: MetadataKey, default: MetadataValue = None
    ) -> Optional[MetadataValue]:
        """Return the first value for ``key``, or ``default`` if absent."""
        try:
            return self[key]
        except KeyError:
            return default

    def get_all(self, key: MetadataKey) -> List[MetadataValue]:
        """Return every value mapped to ``key`` (empty list when absent).

        Mirrors the all-values accessor found in other gRPC language
        implementations (e.g. Java).
        """
        return self._metadata.get(key, [])

    def set_all(self, key: MetadataKey, values: List[MetadataValue]) -> None:
        """Replace all values mapped to ``key`` with ``values``."""
        self._metadata[key] = values

    def __contains__(self, key: MetadataKey) -> bool:
        return key in self._metadata

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, self.__class__):
            return self._metadata == other._metadata
        if isinstance(other, tuple):
            return tuple(self) == other
        return NotImplemented  # pytype: disable=bad-return-type

    def __add__(self, other: Any) -> "Metadata":
        if isinstance(other, self.__class__):
            return Metadata(*(tuple(self) + tuple(other)))
        if isinstance(other, tuple):
            return Metadata(*(tuple(self) + other))
        return NotImplemented  # pytype: disable=bad-return-type

    def __repr__(self) -> str:
        return "{0}({1!r})".format(self.__class__.__name__, tuple(self))
lib/python3.10/site-packages/grpc/aio/_server.py ADDED
@@ -0,0 +1,239 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 The gRPC Authors
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Server-side implementation of gRPC Asyncio Python."""
15
+
16
+ from concurrent.futures import Executor
17
+ from typing import Any, Dict, Optional, Sequence
18
+
19
+ import grpc
20
+ from grpc import _common
21
+ from grpc import _compression
22
+ from grpc._cython import cygrpc
23
+
24
+ from . import _base_server
25
+ from ._interceptor import ServerInterceptor
26
+ from ._typing import ChannelArgumentType
27
+
28
+
29
def _augment_channel_arguments(
    base_options: ChannelArgumentType, compression: Optional[grpc.Compression]
):
    """Return the base channel options extended with the compression option."""
    return tuple(base_options) + _compression.create_channel_option(compression)
34
+
35
+
36
class Server(_base_server.Server):
    """Serves RPCs.

    Thin asyncio-facing wrapper: all real work is delegated to an underlying
    ``cygrpc.AioServer`` instance created in ``__init__``.
    """

    def __init__(
        self,
        thread_pool: Optional[Executor],
        generic_handlers: Optional[Sequence[grpc.GenericRpcHandler]],
        interceptors: Optional[Sequence[Any]],
        options: ChannelArgumentType,
        maximum_concurrent_rpcs: Optional[int],
        compression: Optional[grpc.Compression],
    ):
        self._loop = cygrpc.get_working_loop()
        # Validate interceptors eagerly so misconfiguration fails at
        # construction time rather than when the first RPC arrives.
        if interceptors:
            invalid_interceptors = [
                interceptor
                for interceptor in interceptors
                if not isinstance(interceptor, ServerInterceptor)
            ]
            if invalid_interceptors:
                raise ValueError(
                    "Interceptor must be ServerInterceptor, the "
                    f"following are invalid: {invalid_interceptors}"
                )
        self._server = cygrpc.AioServer(
            self._loop,
            thread_pool,
            generic_handlers,
            interceptors,
            _augment_channel_arguments(options, compression),
            maximum_concurrent_rpcs,
        )

    def add_generic_rpc_handlers(
        self, generic_rpc_handlers: Sequence[grpc.GenericRpcHandler]
    ) -> None:
        """Registers GenericRpcHandlers with this Server.

        This method is only safe to call before the server is started.

        Args:
          generic_rpc_handlers: A sequence of GenericRpcHandlers that will be
            used to service RPCs.
        """
        self._server.add_generic_rpc_handlers(generic_rpc_handlers)

    def add_registered_method_handlers(
        self,
        service_name: str,
        method_handlers: Dict[str, grpc.RpcMethodHandler],
    ) -> None:
        """No-op placeholder for per-method handler registration.

        The AsyncIO server currently routes everything through the generic
        handlers instead.
        """
        # TODO(xuanwn): Implement this for AsyncIO.
        pass

    def add_insecure_port(self, address: str) -> int:
        """Opens an insecure port for accepting RPCs.

        This method may only be called before starting the server.

        Args:
          address: The address for which to open a port. If the port is 0,
            or not specified in the address, then the gRPC runtime will choose a port.

        Returns:
          An integer port on which the server will accept RPC requests.
        """
        return _common.validate_port_binding_result(
            address, self._server.add_insecure_port(_common.encode(address))
        )

    def add_secure_port(
        self, address: str, server_credentials: grpc.ServerCredentials
    ) -> int:
        """Opens a secure port for accepting RPCs.

        This method may only be called before starting the server.

        Args:
          address: The address for which to open a port.
            if the port is 0, or not specified in the address, then the gRPC
            runtime will choose a port.
          server_credentials: A ServerCredentials object.

        Returns:
          An integer port on which the server will accept RPC requests.
        """
        return _common.validate_port_binding_result(
            address,
            self._server.add_secure_port(
                _common.encode(address), server_credentials
            ),
        )

    async def start(self) -> None:
        """Starts this Server.

        This method may only be called once. (i.e. it is not idempotent).
        """
        await self._server.start()

    async def stop(self, grace: Optional[float]) -> None:
        """Stops this Server.

        This method immediately stops the server from servicing new RPCs in
        all cases.

        If a grace period is specified, this method waits until all active
        RPCs are finished or until the grace period is reached. RPCs that haven't
        been terminated within the grace period are aborted.
        If a grace period is not specified (by passing None for grace), all
        existing RPCs are aborted immediately and this method blocks until
        the last RPC handler terminates.

        This method is idempotent and may be called at any time. Passing a
        smaller grace value in a subsequent call will have the effect of
        stopping the Server sooner (passing None will have the effect of
        stopping the server immediately). Passing a larger grace value in a
        subsequent call will not have the effect of stopping the server later
        (i.e. the most restrictive grace value is used).

        Args:
          grace: A duration of time in seconds or None.
        """
        await self._server.shutdown(grace)

    async def wait_for_termination(
        self, timeout: Optional[float] = None
    ) -> bool:
        """Block current coroutine until the server stops.

        This is an EXPERIMENTAL API.

        The wait will not consume computational resources during blocking, and
        it will block until one of the two following conditions are met:

        1) The server is stopped or terminated;
        2) A timeout occurs if timeout is not `None`.

        The timeout argument works in the same way as `threading.Event.wait()`.
        https://docs.python.org/3/library/threading.html#threading.Event.wait

        Args:
          timeout: A floating point number specifying a timeout for the
            operation in seconds.

        Returns:
          A bool indicates if the operation times out.
        """
        return await self._server.wait_for_termination(timeout)

    def __del__(self):
        """Schedules a graceful shutdown in current event loop.

        The Cython AioServer doesn't hold a ref-count to this class. It should
        be safe to slightly extend the underlying Cython object's life span.
        """
        # `_server` may be missing if __init__ raised before assigning it.
        if hasattr(self, "_server"):
            if self._server.is_running():
                cygrpc.schedule_coro_threadsafe(
                    self._server.shutdown(None),
                    self._loop,
                )
198
+
199
+
200
def server(
    migration_thread_pool: Optional[Executor] = None,
    handlers: Optional[Sequence[grpc.GenericRpcHandler]] = None,
    interceptors: Optional[Sequence[Any]] = None,
    options: Optional[ChannelArgumentType] = None,
    maximum_concurrent_rpcs: Optional[int] = None,
    compression: Optional[grpc.Compression] = None,
):
    """Creates a Server with which RPCs can be serviced.

    Args:
      migration_thread_pool: A futures.ThreadPoolExecutor to be used by the
        Server to execute non-AsyncIO RPC handlers for migration purpose.
      handlers: An optional list of GenericRpcHandlers used for executing RPCs.
        More handlers may be added by calling add_generic_rpc_handlers any time
        before the server is started.
      interceptors: An optional list of ServerInterceptor objects that observe
        and optionally manipulate the incoming RPCs before handing them over to
        handlers. The interceptors are given control in the order they are
        specified. This is an EXPERIMENTAL API.
      options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC runtime)
        to configure the channel.
      maximum_concurrent_rpcs: The maximum number of concurrent RPCs this server
        will service before returning RESOURCE_EXHAUSTED status, or None to
        indicate no limit.
      compression: An element of grpc.compression, e.g.
        grpc.compression.Gzip. This compression algorithm will be used for the
        lifetime of the server unless overridden by set_compression.

    Returns:
      A Server object.
    """
    # Normalize unspecified sequence arguments to empty tuples.
    return Server(
        migration_thread_pool,
        handlers if handlers is not None else (),
        interceptors if interceptors is not None else (),
        options if options is not None else (),
        maximum_concurrent_rpcs,
        compression,
    )
lib/python3.10/site-packages/grpc/aio/_utils.py ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Internal utilities used by the gRPC Aio module."""
15
+ import time
16
+ from typing import Optional
17
+
18
+
19
+ def _timeout_to_deadline(timeout: Optional[float]) -> Optional[float]:
20
+ if timeout is None:
21
+ return None
22
+ return time.time() + timeout
lib/python3.10/site-packages/grpc/experimental/__init__.py ADDED
@@ -0,0 +1,134 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2018 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """gRPC's experimental APIs.
15
+
16
+ These APIs are subject to be removed during any minor version release.
17
+ """
18
+
19
+ import copy
20
+ import functools
21
+ import sys
22
+ import warnings
23
+
24
+ import grpc
25
+ from grpc._cython import cygrpc as _cygrpc
26
+
27
+ _EXPERIMENTAL_APIS_USED = set()
28
+
29
+
30
class ChannelOptions(object):
    """Indicates a channel option unique to gRPC Python.

    This enumeration is part of an EXPERIMENTAL API.

    Attributes:
      SingleThreadedUnaryStream: Perform unary-stream RPCs on a single thread.
    """

    # The string value is what gets passed as the channel-option key.
    SingleThreadedUnaryStream = "SingleThreadedUnaryStream"
40
+
41
+
42
class UsageError(Exception):
    """Raised by the gRPC library to indicate usage not allowed by the API."""
44
+
45
+
46
# It's important that there be a single insecure credentials object so that its
# hash is deterministic and can be used for indexing in the simple stubs cache.
_insecure_channel_credentials = grpc.ChannelCredentials(
    _cygrpc.channel_credentials_insecure()
)


def insecure_channel_credentials():
    """Creates a ChannelCredentials for use with an insecure channel.

    THIS IS AN EXPERIMENTAL API.

    Returns:
      The process-wide shared insecure ChannelCredentials instance.
    """
    return _insecure_channel_credentials
59
+
60
+
61
class ExperimentalApiWarning(Warning):
    """A warning that an API is experimental."""
63
+
64
+
65
def _warn_experimental(api_name, stack_offset):
    """Emit an ExperimentalApiWarning once per API name per process."""
    if api_name in _EXPERIMENTAL_APIS_USED:
        return
    _EXPERIMENTAL_APIS_USED.add(api_name)
    msg = (
        "'{}' is an experimental API. It is subject to change or ".format(
            api_name
        )
        + "removal between minor releases. Proceed with caution."
    )
    # stack_offset lets wrappers point the warning at their caller.
    warnings.warn(msg, ExperimentalApiWarning, stacklevel=2 + stack_offset)
75
+
76
+
77
def experimental_api(f):
    """Decorator that flags *f* as experimental on first invocation."""

    @functools.wraps(f)
    def _wrapped(*args, **kwargs):
        # Offset of 1 attributes the warning to the decorated function's
        # caller rather than this wrapper.
        _warn_experimental(f.__name__, 1)
        return f(*args, **kwargs)

    return _wrapped
84
+
85
+
86
def wrap_server_method_handler(wrapper, handler):
    """Wraps the server method handler function.

    The server implementation requires all server handlers being wrapped as
    RpcMethodHandler objects. This helper function ease the pain of writing
    server handler wrappers.

    Args:
      wrapper: A wrapper function that takes in a method handler behavior
        (the actual function) and returns a wrapped function.
      handler: A RpcMethodHandler object to be wrapped.

    Returns:
      A newly created RpcMethodHandler, or None when no handler was given.
    """
    if not handler:
        return None

    # NOTE(lidiz) _replace is a public API:
    # https://docs.python.org/dev/library/collections.html
    # Exactly one of the four behavior slots is populated, selected by the
    # streaming flags.
    if handler.request_streaming and handler.response_streaming:
        return handler._replace(stream_stream=wrapper(handler.stream_stream))
    if handler.request_streaming:
        return handler._replace(stream_unary=wrapper(handler.stream_unary))
    if handler.response_streaming:
        return handler._replace(unary_stream=wrapper(handler.unary_stream))
    return handler._replace(unary_unary=wrapper(handler.unary_unary))
118
+
119
+
120
__all__ = (
    "ChannelOptions",
    "ExperimentalApiWarning",
    "UsageError",
    "insecure_channel_credentials",
    "wrap_server_method_handler",
)

if sys.version_info > (3, 6):
    from grpc._simple_stubs import stream_stream
    from grpc._simple_stubs import stream_unary
    from grpc._simple_stubs import unary_stream
    from grpc._simple_stubs import unary_unary

    # Bug fix: __all__ must contain the *names* of the exported objects,
    # not the objects themselves; non-string entries make
    # `from grpc.experimental import *` raise TypeError.
    __all__ = __all__ + (
        "unary_unary",
        "unary_stream",
        "stream_unary",
        "stream_stream",
    )
lib/python3.10/site-packages/grpc/experimental/aio/__init__.py ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 The gRPC Authors
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Alias of grpc.aio to keep backward compatibility."""
15
+
16
+ from grpc.aio import *
lib/python3.10/site-packages/grpc/experimental/gevent.py ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2018 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """gRPC's Python gEvent APIs."""
15
+
16
+ from grpc._cython import cygrpc as _cygrpc
17
+
18
+
19
def init_gevent():
    """Patches gRPC's libraries to be compatible with gevent.

    This must be called AFTER the python standard lib has been patched,
    but BEFORE creating any gRPC objects.

    In order for progress to be made, the application must drive the event loop.
    """
    _cygrpc.init_grpc_gevent()
lib/python3.10/site-packages/grpc/experimental/session_cache.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2018 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """gRPC's APIs for TLS Session Resumption support"""
15
+
16
+ from grpc._cython import cygrpc as _cygrpc
17
+
18
+
19
def ssl_session_cache_lru(capacity):
    """Creates an SSLSessionCache with LRU replacement policy

    Args:
      capacity: Size of the cache

    Returns:
      An SSLSessionCache with LRU replacement policy that can be passed as a value for
      the grpc.ssl_session_cache option to a grpc.Channel. SSL session caches are used
      to store session tickets, which clients can present to resume previous TLS sessions
      with a server.
    """
    # Wraps the Cython-level LRU cache in the Python-facing abstraction.
    return SSLSessionCache(_cygrpc.SSLSessionCacheLRU(capacity))
32
+
33
+
34
class SSLSessionCache(object):
    """An encapsulation of a session cache used for TLS session resumption.

    Instances of this class can be passed to a Channel as values for the
    grpc.ssl_session_cache option.
    """

    def __init__(self, cache):
        # The wrapped cache object (presumably a cygrpc SSLSessionCacheLRU —
        # anything convertible to int works).
        self._cache = cache

    def __int__(self):
        # The C core receives the cache as an integer channel-option value.
        return int(self._cache)
lib/python3.10/site-packages/grpc/framework/__init__.py ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
lib/python3.10/site-packages/grpc/framework/common/__init__.py ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
lib/python3.10/site-packages/grpc/framework/common/cardinality.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Defines an enum for classifying RPC methods by streaming semantics."""
15
+
16
+ import enum
17
+
18
+
19
+ @enum.unique
20
+ class Cardinality(enum.Enum):
21
+ """Describes the streaming semantics of an RPC method."""
22
+
23
+ UNARY_UNARY = "request-unary/response-unary"
24
+ UNARY_STREAM = "request-unary/response-streaming"
25
+ STREAM_UNARY = "request-streaming/response-unary"
26
+ STREAM_STREAM = "request-streaming/response-streaming"
lib/python3.10/site-packages/grpc/framework/common/style.py ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Defines an enum for classifying RPC methods by control flow semantics."""
15
+
16
+ import enum
17
+
18
+
19
+ @enum.unique
20
+ class Service(enum.Enum):
21
+ """Describes the control flow style of RPC method implementation."""
22
+
23
+ INLINE = "inline"
24
+ EVENT = "event"
lib/python3.10/site-packages/grpc/framework/foundation/__init__.py ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
lib/python3.10/site-packages/grpc/framework/foundation/abandonment.py ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Utilities for indicating abandonment of computation."""
15
+
16
+
17
+ class Abandoned(Exception):
18
+ """Indicates that some computation is being abandoned.
19
+
20
+ Abandoning a computation is different than returning a value or raising
21
+ an exception indicating some operational or programming defect.
22
+ """
lib/python3.10/site-packages/grpc/framework/foundation/callable_util.py ADDED
@@ -0,0 +1,98 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Utilities for working with callables."""
15
+
16
+ from abc import ABC
17
+ import collections
18
+ import enum
19
+ import functools
20
+ import logging
21
+
22
+ _LOGGER = logging.getLogger(__name__)
23
+
24
+
25
+ class Outcome(ABC):
26
+ """A sum type describing the outcome of some call.
27
+
28
+ Attributes:
29
+ kind: One of Kind.RETURNED or Kind.RAISED respectively indicating that the
30
+ call returned a value or raised an exception.
31
+ return_value: The value returned by the call. Must be present if kind is
32
+ Kind.RETURNED.
33
+ exception: The exception raised by the call. Must be present if kind is
34
+ Kind.RAISED.
35
+ """
36
+
37
+ @enum.unique
38
+ class Kind(enum.Enum):
39
+ """Identifies the general kind of the outcome of some call."""
40
+
41
+ RETURNED = object()
42
+ RAISED = object()
43
+
44
+
45
+ class _EasyOutcome(
46
+ collections.namedtuple(
47
+ "_EasyOutcome", ["kind", "return_value", "exception"]
48
+ ),
49
+ Outcome,
50
+ ):
51
+ """A trivial implementation of Outcome."""
52
+
53
+
54
+ def _call_logging_exceptions(behavior, message, *args, **kwargs):
55
+ try:
56
+ return _EasyOutcome(
57
+ Outcome.Kind.RETURNED, behavior(*args, **kwargs), None
58
+ )
59
+ except Exception as e: # pylint: disable=broad-except
60
+ _LOGGER.exception(message)
61
+ return _EasyOutcome(Outcome.Kind.RAISED, None, e)
62
+
63
+
64
+ def with_exceptions_logged(behavior, message):
65
+ """Wraps a callable in a try-except that logs any exceptions it raises.
66
+
67
+ Args:
68
+ behavior: Any callable.
69
+ message: A string to log if the behavior raises an exception.
70
+
71
+ Returns:
72
+ A callable that when executed invokes the given behavior. The returned
73
+ callable takes the same arguments as the given behavior but returns a
74
+ future.Outcome describing whether the given behavior returned a value or
75
+ raised an exception.
76
+ """
77
+
78
+ @functools.wraps(behavior)
79
+ def wrapped_behavior(*args, **kwargs):
80
+ return _call_logging_exceptions(behavior, message, *args, **kwargs)
81
+
82
+ return wrapped_behavior
83
+
84
+
85
+ def call_logging_exceptions(behavior, message, *args, **kwargs):
86
+ """Calls a behavior in a try-except that logs any exceptions it raises.
87
+
88
+ Args:
89
+ behavior: Any callable.
90
+ message: A string to log if the behavior raises an exception.
91
+ *args: Positional arguments to pass to the given behavior.
92
+ **kwargs: Keyword arguments to pass to the given behavior.
93
+
94
+ Returns:
95
+ An Outcome describing whether the given behavior returned a value or raised
96
+ an exception.
97
+ """
98
+ return _call_logging_exceptions(behavior, message, *args, **kwargs)
lib/python3.10/site-packages/grpc/framework/foundation/future.py ADDED
@@ -0,0 +1,219 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """A Future interface.
15
+
16
+ Python doesn't have a Future interface in its standard library. In the absence
17
+ of such a standard, three separate, incompatible implementations
18
+ (concurrent.futures.Future, ndb.Future, and asyncio.Future) have appeared. This
19
+ interface attempts to be as compatible as possible with
20
+ concurrent.futures.Future. From ndb.Future it adopts a traceback-object accessor
21
+ method.
22
+
23
+ Unlike the concrete and implemented Future classes listed above, the Future
24
+ class defined in this module is an entirely abstract interface that anyone may
25
+ implement and use.
26
+
27
+ The one known incompatibility between this interface and the interface of
28
+ concurrent.futures.Future is that this interface defines its own CancelledError
29
+ and TimeoutError exceptions rather than raising the implementation-private
30
+ concurrent.futures._base.CancelledError and the
31
+ built-in-but-only-in-3.3-and-later TimeoutError.
32
+ """
33
+
34
+ import abc
35
+
36
+
37
+ class TimeoutError(Exception):
38
+ """Indicates that a particular call timed out."""
39
+
40
+
41
+ class CancelledError(Exception):
42
+ """Indicates that the computation underlying a Future was cancelled."""
43
+
44
+
45
+ class Future(abc.ABC):
46
+ """A representation of a computation in another control flow.
47
+
48
+ Computations represented by a Future may be yet to be begun, may be ongoing,
49
+ or may have already completed.
50
+ """
51
+
52
+ # NOTE(nathaniel): This isn't the return type that I would want to have if it
53
+ # were up to me. Were this interface being written from scratch, the return
54
+ # type of this method would probably be a sum type like:
55
+ #
56
+ # NOT_COMMENCED
57
+ # COMMENCED_AND_NOT_COMPLETED
58
+ # PARTIAL_RESULT<Partial_Result_Type>
59
+ # COMPLETED<Result_Type>
60
+ # UNCANCELLABLE
61
+ # NOT_IMMEDIATELY_DETERMINABLE
62
+ @abc.abstractmethod
63
+ def cancel(self):
64
+ """Attempts to cancel the computation.
65
+
66
+ This method does not block.
67
+
68
+ Returns:
69
+ True if the computation has not yet begun, will not be allowed to take
70
+ place, and determination of both was possible without blocking. False
71
+ under all other circumstances including but not limited to the
72
+ computation's already having begun, the computation's already having
73
+ finished, and the computation's having been scheduled for execution on a
74
+ remote system for which a determination of whether or not it commenced
75
+ before being cancelled cannot be made without blocking.
76
+ """
77
+ raise NotImplementedError()
78
+
79
+ # NOTE(nathaniel): Here too this isn't the return type that I'd want this
80
+ # method to have if it were up to me. I think I'd go with another sum type
81
+ # like:
82
+ #
83
+ # NOT_CANCELLED (this object's cancel method hasn't been called)
84
+ # NOT_COMMENCED
85
+ # COMMENCED_AND_NOT_COMPLETED
86
+ # PARTIAL_RESULT<Partial_Result_Type>
87
+ # COMPLETED<Result_Type>
88
+ # UNCANCELLABLE
89
+ # NOT_IMMEDIATELY_DETERMINABLE
90
+ #
91
+ # Notice how giving the cancel method the right semantics obviates most
92
+ # reasons for this method to exist.
93
+ @abc.abstractmethod
94
+ def cancelled(self):
95
+ """Describes whether the computation was cancelled.
96
+
97
+ This method does not block.
98
+
99
+ Returns:
100
+ True if the computation was cancelled any time before its result became
101
+ immediately available. False under all other circumstances including but
102
+ not limited to this object's cancel method not having been called and
103
+ the computation's result having become immediately available.
104
+ """
105
+ raise NotImplementedError()
106
+
107
+ @abc.abstractmethod
108
+ def running(self):
109
+ """Describes whether the computation is taking place.
110
+
111
+ This method does not block.
112
+
113
+ Returns:
114
+ True if the computation is scheduled to take place in the future or is
115
+ taking place now, or False if the computation took place in the past or
116
+ was cancelled.
117
+ """
118
+ raise NotImplementedError()
119
+
120
+ # NOTE(nathaniel): These aren't quite the semantics I'd like here either. I
121
+ # would rather this only returned True in cases in which the underlying
122
+ # computation completed successfully. A computation's having been cancelled
123
+ # conflicts with considering that computation "done".
124
+ @abc.abstractmethod
125
+ def done(self):
126
+ """Describes whether the computation has taken place.
127
+
128
+ This method does not block.
129
+
130
+ Returns:
131
+ True if the computation is known to have either completed or have been
132
+ unscheduled or interrupted. False if the computation may possibly be
133
+ executing or scheduled to execute later.
134
+ """
135
+ raise NotImplementedError()
136
+
137
+ @abc.abstractmethod
138
+ def result(self, timeout=None):
139
+ """Accesses the outcome of the computation or raises its exception.
140
+
141
+ This method may return immediately or may block.
142
+
143
+ Args:
144
+ timeout: The length of time in seconds to wait for the computation to
145
+ finish or be cancelled, or None if this method should block until the
146
+ computation has finished or is cancelled no matter how long that takes.
147
+
148
+ Returns:
149
+ The return value of the computation.
150
+
151
+ Raises:
152
+ TimeoutError: If a timeout value is passed and the computation does not
153
+ terminate within the allotted time.
154
+ CancelledError: If the computation was cancelled.
155
+ Exception: If the computation raised an exception, this call will raise
156
+ the same exception.
157
+ """
158
+ raise NotImplementedError()
159
+
160
+ @abc.abstractmethod
161
+ def exception(self, timeout=None):
162
+ """Return the exception raised by the computation.
163
+
164
+ This method may return immediately or may block.
165
+
166
+ Args:
167
+ timeout: The length of time in seconds to wait for the computation to
168
+ terminate or be cancelled, or None if this method should block until
169
+ the computation is terminated or is cancelled no matter how long that
170
+ takes.
171
+
172
+ Returns:
173
+ The exception raised by the computation, or None if the computation did
174
+ not raise an exception.
175
+
176
+ Raises:
177
+ TimeoutError: If a timeout value is passed and the computation does not
178
+ terminate within the allotted time.
179
+ CancelledError: If the computation was cancelled.
180
+ """
181
+ raise NotImplementedError()
182
+
183
+ @abc.abstractmethod
184
+ def traceback(self, timeout=None):
185
+ """Access the traceback of the exception raised by the computation.
186
+
187
+ This method may return immediately or may block.
188
+
189
+ Args:
190
+ timeout: The length of time in seconds to wait for the computation to
191
+ terminate or be cancelled, or None if this method should block until
192
+ the computation is terminated or is cancelled no matter how long that
193
+ takes.
194
+
195
+ Returns:
196
+ The traceback of the exception raised by the computation, or None if the
197
+ computation did not raise an exception.
198
+
199
+ Raises:
200
+ TimeoutError: If a timeout value is passed and the computation does not
201
+ terminate within the allotted time.
202
+ CancelledError: If the computation was cancelled.
203
+ """
204
+ raise NotImplementedError()
205
+
206
+ @abc.abstractmethod
207
+ def add_done_callback(self, fn):
208
+ """Adds a function to be called at completion of the computation.
209
+
210
+ The callback will be passed this Future object describing the outcome of
211
+ the computation.
212
+
213
+ If the computation has already completed, the callback will be called
214
+ immediately.
215
+
216
+ Args:
217
+ fn: A callable taking this Future object as its single parameter.
218
+ """
219
+ raise NotImplementedError()
lib/python3.10/site-packages/grpc/framework/foundation/logging_pool.py ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """A thread pool that logs exceptions raised by tasks executed within it."""
15
+
16
+ from concurrent import futures
17
+ import logging
18
+
19
+ _LOGGER = logging.getLogger(__name__)
20
+
21
+
22
+ def _wrap(behavior):
23
+ """Wraps an arbitrary callable behavior in exception-logging."""
24
+
25
+ def _wrapping(*args, **kwargs):
26
+ try:
27
+ return behavior(*args, **kwargs)
28
+ except Exception:
29
+ _LOGGER.exception(
30
+ "Unexpected exception from %s executed in logging pool!",
31
+ behavior,
32
+ )
33
+ raise
34
+
35
+ return _wrapping
36
+
37
+
38
+ class _LoggingPool(object):
39
+ """An exception-logging futures.ThreadPoolExecutor-compatible thread pool."""
40
+
41
+ def __init__(self, backing_pool):
42
+ self._backing_pool = backing_pool
43
+
44
+ def __enter__(self):
45
+ return self
46
+
47
+ def __exit__(self, exc_type, exc_val, exc_tb):
48
+ self._backing_pool.shutdown(wait=True)
49
+
50
+ def submit(self, fn, *args, **kwargs):
51
+ return self._backing_pool.submit(_wrap(fn), *args, **kwargs)
52
+
53
+ def map(self, func, *iterables, **kwargs):
54
+ return self._backing_pool.map(
55
+ _wrap(func), *iterables, timeout=kwargs.get("timeout", None)
56
+ )
57
+
58
+ def shutdown(self, wait=True):
59
+ self._backing_pool.shutdown(wait=wait)
60
+
61
+
62
+ def pool(max_workers):
63
+ """Creates a thread pool that logs exceptions raised by the tasks within it.
64
+
65
+ Args:
66
+ max_workers: The maximum number of worker threads to allow the pool.
67
+
68
+ Returns:
69
+ A futures.ThreadPoolExecutor-compatible thread pool that logs exceptions
70
+ raised by the tasks executed within it.
71
+ """
72
+ return _LoggingPool(futures.ThreadPoolExecutor(max_workers))
lib/python3.10/site-packages/grpc/framework/foundation/stream.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 gRPC authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Interfaces related to streams of values or objects."""
15
+
16
+ import abc
17
+
18
+
19
+ class Consumer(abc.ABC):
20
+ """Interface for consumers of finite streams of values or objects."""
21
+
22
+ @abc.abstractmethod
23
+ def consume(self, value):
24
+ """Accepts a value.
25
+
26
+ Args:
27
+ value: Any value accepted by this Consumer.
28
+ """
29
+ raise NotImplementedError()
30
+
31
+ @abc.abstractmethod
32
+ def terminate(self):
33
+ """Indicates to this Consumer that no more values will be supplied."""
34
+ raise NotImplementedError()
35
+
36
+ @abc.abstractmethod
37
+ def consume_and_terminate(self, value):
38
+ """Supplies a value and signals that no more values will be supplied.
39
+
40
+ Args:
41
+ value: Any value accepted by this Consumer.
42
+ """
43
+ raise NotImplementedError()