Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- lib/python3.10/site-packages/google/auth/_cloud_sdk.py +153 -0
- lib/python3.10/site-packages/google/auth/_default.py +719 -0
- lib/python3.10/site-packages/google/auth/_default_async.py +282 -0
- lib/python3.10/site-packages/google/auth/_helpers.py +273 -0
- lib/python3.10/site-packages/google/auth/_refresh_worker.py +109 -0
- lib/python3.10/site-packages/google/auth/api_key.py +76 -0
- lib/python3.10/site-packages/google/auth/aws.py +861 -0
- lib/python3.10/site-packages/google/auth/credentials.py +522 -0
- lib/python3.10/site-packages/google/auth/environment_vars.py +84 -0
- lib/python3.10/site-packages/google/auth/identity_pool.py +439 -0
- lib/python3.10/site-packages/google/auth/jwt.py +878 -0
- lib/python3.10/site-packages/google/auth/pluggable.py +429 -0
- lib/python3.10/site-packages/google/oauth2/_credentials_async.py +118 -0
- lib/python3.10/site-packages/google/oauth2/_service_account_async.py +132 -0
- lib/python3.10/site-packages/google/oauth2/sts.py +176 -0
- lib/python3.10/site-packages/google/oauth2/webauthn_handler.py +82 -0
- lib/python3.10/site-packages/google/oauth2/webauthn_handler_factory.py +16 -0
- lib/python3.10/site-packages/google/oauth2/webauthn_types.py +156 -0
- lib/python3.10/site-packages/google/protobuf/__init__.py +10 -0
- lib/python3.10/site-packages/google/protobuf/any_pb2.py +27 -0
- lib/python3.10/site-packages/google/protobuf/api_pb2.py +33 -0
- lib/python3.10/site-packages/google/protobuf/compiler/__init__.py +0 -0
- lib/python3.10/site-packages/google/protobuf/compiler/plugin_pb2.py +36 -0
- lib/python3.10/site-packages/google/protobuf/descriptor.py +1282 -0
- lib/python3.10/site-packages/google/protobuf/descriptor_database.py +154 -0
- lib/python3.10/site-packages/google/protobuf/descriptor_pb2.py +0 -0
- lib/python3.10/site-packages/google/protobuf/descriptor_pool.py +1271 -0
- lib/python3.10/site-packages/google/protobuf/duration_pb2.py +27 -0
- lib/python3.10/site-packages/google/protobuf/empty_pb2.py +27 -0
- lib/python3.10/site-packages/google/protobuf/field_mask_pb2.py +27 -0
- lib/python3.10/site-packages/google/protobuf/internal/api_implementation.py +140 -0
- lib/python3.10/site-packages/google/protobuf/internal/encoder.py +806 -0
- lib/python3.10/site-packages/google/protobuf/internal/extension_dict.py +194 -0
- lib/python3.10/site-packages/google/protobuf/internal/message_listener.py +55 -0
- lib/python3.10/site-packages/google/protobuf/internal/python_message.py +1546 -0
- lib/python3.10/site-packages/google/protobuf/internal/well_known_types.py +567 -0
- lib/python3.10/site-packages/google/protobuf/json_format.py +904 -0
- lib/python3.10/site-packages/google/protobuf/message.py +399 -0
- lib/python3.10/site-packages/google/protobuf/message_factory.py +233 -0
- lib/python3.10/site-packages/google/protobuf/proto_builder.py +111 -0
- lib/python3.10/site-packages/google/protobuf/pyext/__init__.py +0 -0
- lib/python3.10/site-packages/google/protobuf/pyext/cpp_message.py +49 -0
- lib/python3.10/site-packages/google/protobuf/reflection.py +72 -0
- lib/python3.10/site-packages/google/protobuf/service.py +205 -0
- lib/python3.10/site-packages/google/protobuf/service_reflection.py +272 -0
- lib/python3.10/site-packages/google/protobuf/source_context_pb2.py +27 -0
- lib/python3.10/site-packages/google/protobuf/struct_pb2.py +37 -0
- lib/python3.10/site-packages/google/protobuf/symbol_database.py +197 -0
- lib/python3.10/site-packages/google/protobuf/testdata/__init__.py +0 -0
- lib/python3.10/site-packages/google/protobuf/text_encoding.py +85 -0
lib/python3.10/site-packages/google/auth/_cloud_sdk.py
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2015 Google Inc.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
"""Helpers for reading the Google Cloud SDK's configuration."""
|
| 16 |
+
|
| 17 |
+
import os
|
| 18 |
+
import subprocess
|
| 19 |
+
|
| 20 |
+
from google.auth import _helpers
|
| 21 |
+
from google.auth import environment_vars
|
| 22 |
+
from google.auth import exceptions
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
# The ~/.config subdirectory containing gcloud credentials.
|
| 26 |
+
_CONFIG_DIRECTORY = "gcloud"
|
| 27 |
+
# Windows systems store config at %APPDATA%\gcloud
|
| 28 |
+
_WINDOWS_CONFIG_ROOT_ENV_VAR = "APPDATA"
|
| 29 |
+
# The name of the file in the Cloud SDK config that contains default
|
| 30 |
+
# credentials.
|
| 31 |
+
_CREDENTIALS_FILENAME = "application_default_credentials.json"
|
| 32 |
+
# The name of the Cloud SDK shell script
|
| 33 |
+
_CLOUD_SDK_POSIX_COMMAND = "gcloud"
|
| 34 |
+
_CLOUD_SDK_WINDOWS_COMMAND = "gcloud.cmd"
|
| 35 |
+
# The command to get the Cloud SDK configuration
|
| 36 |
+
_CLOUD_SDK_CONFIG_GET_PROJECT_COMMAND = ("config", "get", "project")
|
| 37 |
+
# The command to get google user access token
|
| 38 |
+
_CLOUD_SDK_USER_ACCESS_TOKEN_COMMAND = ("auth", "print-access-token")
|
| 39 |
+
# Cloud SDK's application-default client ID
|
| 40 |
+
CLOUD_SDK_CLIENT_ID = (
|
| 41 |
+
"764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com"
|
| 42 |
+
)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def get_config_path():
    """Returns the absolute path to the Cloud SDK's configuration directory.

    Returns:
        str: The Cloud SDK config path.
    """
    # If the path is explicitly set via the environment, return that.
    try:
        return os.environ[environment_vars.CLOUD_SDK_CONFIG_DIR]
    except KeyError:
        pass

    # Non-Windows systems store this at ~/.config/gcloud
    if os.name != "nt":
        return os.path.join(os.path.expanduser("~"), ".config", _CONFIG_DIRECTORY)
    # Windows systems store config at %APPDATA%\gcloud
    else:
        try:
            return os.path.join(
                os.environ[_WINDOWS_CONFIG_ROOT_ENV_VAR], _CONFIG_DIRECTORY
            )
        except KeyError:
            # This should never happen unless someone is really
            # messing with things, but we'll cover the case anyway.
            drive = os.environ.get("SystemDrive", "C:")
            return os.path.join(drive, "\\", _CONFIG_DIRECTORY)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def get_application_default_credentials_path():
    """Gets the path to the application default credentials file.

    The path may or may not exist.

    Returns:
        str: The full path to application default credentials.
    """
    # The ADC file always lives directly inside the SDK config directory.
    return os.path.join(get_config_path(), _CREDENTIALS_FILENAME)
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def _run_subprocess_ignore_stderr(command):
|
| 86 |
+
""" Return subprocess.check_output with the given command and ignores stderr."""
|
| 87 |
+
with open(os.devnull, "w") as devnull:
|
| 88 |
+
output = subprocess.check_output(command, stderr=devnull)
|
| 89 |
+
return output
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
def get_project_id():
    """Gets the project ID from the Cloud SDK.

    Returns:
        Optional[str]: The project ID, or None if it could not be determined.
    """
    executable = (
        _CLOUD_SDK_WINDOWS_COMMAND if os.name == "nt" else _CLOUD_SDK_POSIX_COMMAND
    )

    try:
        # Ignore the stderr coming from gcloud, so it won't be mixed into the output.
        # https://github.com/googleapis/google-auth-library-python/issues/673
        raw_output = _run_subprocess_ignore_stderr(
            (executable,) + _CLOUD_SDK_CONFIG_GET_PROJECT_COMMAND
        )
    except (subprocess.CalledProcessError, OSError, IOError):
        return None

    # Turn bytes into a string and remove "\n"
    project = _helpers.from_bytes(raw_output).strip()
    return project or None
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
def get_auth_access_token(account=None):
    """Load user access token with the ``gcloud auth print-access-token`` command.

    Args:
        account (Optional[str]): Account to get the access token for. If not
            specified, the current active account will be used.

    Returns:
        str: The user access token.

    Raises:
        google.auth.exceptions.UserAccessTokenError: if failed to get access
            token from gcloud.
    """
    executable = (
        _CLOUD_SDK_WINDOWS_COMMAND if os.name == "nt" else _CLOUD_SDK_POSIX_COMMAND
    )

    # Build the full argument tuple up front; appending the account flag only
    # when one was requested.
    full_command = (executable,) + _CLOUD_SDK_USER_ACCESS_TOKEN_COMMAND
    if account:
        full_command += ("--account=" + account,)

    try:
        access_token = subprocess.check_output(full_command, stderr=subprocess.STDOUT)
        # remove the trailing "\n"
        return access_token.decode("utf-8").strip()
    except (subprocess.CalledProcessError, OSError, IOError) as caught_exc:
        new_exc = exceptions.UserAccessTokenError(
            "Failed to obtain access token", caught_exc
        )
        raise new_exc from caught_exc
|
lib/python3.10/site-packages/google/auth/_default.py
ADDED
|
@@ -0,0 +1,719 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2015 Google Inc.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
"""Application default credentials.
|
| 16 |
+
|
| 17 |
+
Implements application default credentials and project ID detection.
|
| 18 |
+
"""
|
| 19 |
+
|
| 20 |
+
import io
|
| 21 |
+
import json
|
| 22 |
+
import logging
|
| 23 |
+
import os
|
| 24 |
+
import warnings
|
| 25 |
+
|
| 26 |
+
from google.auth import environment_vars
|
| 27 |
+
from google.auth import exceptions
|
| 28 |
+
import google.auth.transport._http_client
|
| 29 |
+
|
| 30 |
+
_LOGGER = logging.getLogger(__name__)
|
| 31 |
+
|
| 32 |
+
# Valid types accepted for file-based credentials.
|
| 33 |
+
_AUTHORIZED_USER_TYPE = "authorized_user"
|
| 34 |
+
_SERVICE_ACCOUNT_TYPE = "service_account"
|
| 35 |
+
_EXTERNAL_ACCOUNT_TYPE = "external_account"
|
| 36 |
+
_EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = "external_account_authorized_user"
|
| 37 |
+
_IMPERSONATED_SERVICE_ACCOUNT_TYPE = "impersonated_service_account"
|
| 38 |
+
_GDCH_SERVICE_ACCOUNT_TYPE = "gdch_service_account"
|
| 39 |
+
_VALID_TYPES = (
|
| 40 |
+
_AUTHORIZED_USER_TYPE,
|
| 41 |
+
_SERVICE_ACCOUNT_TYPE,
|
| 42 |
+
_EXTERNAL_ACCOUNT_TYPE,
|
| 43 |
+
_EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE,
|
| 44 |
+
_IMPERSONATED_SERVICE_ACCOUNT_TYPE,
|
| 45 |
+
_GDCH_SERVICE_ACCOUNT_TYPE,
|
| 46 |
+
)
|
| 47 |
+
|
| 48 |
+
# Help message when no credentials can be found.
|
| 49 |
+
_CLOUD_SDK_MISSING_CREDENTIALS = """\
|
| 50 |
+
Your default credentials were not found. To set up Application Default Credentials, \
|
| 51 |
+
see https://cloud.google.com/docs/authentication/external/set-up-adc for more information.\
|
| 52 |
+
"""
|
| 53 |
+
|
| 54 |
+
# Warning when using Cloud SDK user credentials
|
| 55 |
+
_CLOUD_SDK_CREDENTIALS_WARNING = """\
|
| 56 |
+
Your application has authenticated using end user credentials from Google \
|
| 57 |
+
Cloud SDK without a quota project. You might receive a "quota exceeded" \
|
| 58 |
+
or "API not enabled" error. See the following page for troubleshooting: \
|
| 59 |
+
https://cloud.google.com/docs/authentication/adc-troubleshooting/user-creds. \
|
| 60 |
+
"""
|
| 61 |
+
|
| 62 |
+
# The subject token type used for AWS external_account credentials.
|
| 63 |
+
_AWS_SUBJECT_TOKEN_TYPE = "urn:ietf:params:aws:token-type:aws4_request"
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
def _warn_about_problematic_credentials(credentials):
    """Warn when the given credentials are known to be problematic.

    Credentials from the Cloud SDK that are associated with Cloud SDK's project
    are problematic because they may not have APIs enabled and have limited
    quota. If this is the case, warn about it.
    """
    # Imported lazily to avoid a circular import at module load time.
    from google.auth import _cloud_sdk

    is_cloud_sdk_credential = credentials.client_id == _cloud_sdk.CLOUD_SDK_CLIENT_ID
    if is_cloud_sdk_credential:
        warnings.warn(_CLOUD_SDK_CREDENTIALS_WARNING)
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
def load_credentials_from_file(
    filename, scopes=None, default_scopes=None, quota_project_id=None, request=None
):
    """Loads Google credentials from a file.

    The credentials file must be a service account key, stored authorized
    user credentials, external account credentials, or impersonated service
    account credentials.

    .. warning::
        Important: If you accept a credential configuration (credential JSON/File/Stream)
        from an external source for authentication to Google Cloud Platform, you must
        validate it before providing it to any Google API or client library. Providing an
        unvalidated credential configuration to Google APIs or libraries can compromise
        the security of your systems and data. For more information, refer to
        `Validate credential configurations from external sources`_.

    .. _Validate credential configurations from external sources:
        https://cloud.google.com/docs/authentication/external/externally-sourced-credentials

    Args:
        filename (str): The full path to the credentials file.
        scopes (Optional[Sequence[str]]): The list of scopes for the credentials. If
            specified, the credentials will automatically be scoped if
            necessary
        default_scopes (Optional[Sequence[str]]): Default scopes passed by a
            Google client library. Use 'scopes' for user-defined scopes.
        quota_project_id (Optional[str]): The project ID used for
            quota and billing.
        request (Optional[google.auth.transport.Request]): An object used to make
            HTTP requests. This is used to determine the associated project ID
            for a workload identity pool resource (external account credentials).
            If not specified, then it will use a
            google.auth.transport.requests.Request client to make requests.

    Returns:
        Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
            credentials and the project ID. Authorized user credentials do not
            have the project ID information. External account credentials project
            IDs may not always be determined.

    Raises:
        google.auth.exceptions.DefaultCredentialsError: if the file is in the
            wrong format or is missing.
    """
    # Fail fast with a clear error when the path does not exist at all.
    if not os.path.exists(filename):
        raise exceptions.DefaultCredentialsError(
            "File {} was not found.".format(filename)
        )

    # json.load raises ValueError (JSONDecodeError subclasses it) on bad input.
    try:
        with io.open(filename, "r") as file_obj:
            info = json.load(file_obj)
    except ValueError as caught_exc:
        raise exceptions.DefaultCredentialsError(
            "File {} is not a valid json file.".format(filename), caught_exc
        ) from caught_exc

    return _load_credentials_from_info(
        filename, info, scopes, default_scopes, quota_project_id, request
    )
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def load_credentials_from_dict(
    info, scopes=None, default_scopes=None, quota_project_id=None, request=None
):
    """Loads Google credentials from a dict.

    The credentials file must be a service account key, stored authorized
    user credentials, external account credentials, or impersonated service
    account credentials.

    .. warning::
        Important: If you accept a credential configuration (credential JSON/File/Stream)
        from an external source for authentication to Google Cloud Platform, you must
        validate it before providing it to any Google API or client library. Providing an
        unvalidated credential configuration to Google APIs or libraries can compromise
        the security of your systems and data. For more information, refer to
        `Validate credential configurations from external sources`_.

    .. _Validate credential configurations from external sources:
        https://cloud.google.com/docs/authentication/external/externally-sourced-credentials

    Args:
        info (Dict[str, Any]): A dict object containing the credentials
        scopes (Optional[Sequence[str]]): The list of scopes for the credentials. If
            specified, the credentials will automatically be scoped if
            necessary
        default_scopes (Optional[Sequence[str]]): Default scopes passed by a
            Google client library. Use 'scopes' for user-defined scopes.
        quota_project_id (Optional[str]): The project ID used for
            quota and billing.
        request (Optional[google.auth.transport.Request]): An object used to make
            HTTP requests. This is used to determine the associated project ID
            for a workload identity pool resource (external account credentials).
            If not specified, then it will use a
            google.auth.transport.requests.Request client to make requests.

    Returns:
        Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
            credentials and the project ID. Authorized user credentials do not
            have the project ID information. External account credentials project
            IDs may not always be determined.

    Raises:
        google.auth.exceptions.DefaultCredentialsError: if the file is in the
            wrong format or is missing.
    """
    # Reject anything that is not a plain mapping before inspecting its contents.
    if not isinstance(info, dict):
        raise exceptions.DefaultCredentialsError(
            "info object was of type {} but dict type was expected.".format(type(info))
        )

    # There is no backing file, so the "filename" used in error messages is a
    # fixed placeholder.
    return _load_credentials_from_info(
        "dict object", info, scopes, default_scopes, quota_project_id, request
    )
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def _load_credentials_from_info(
    filename, info, scopes, default_scopes, quota_project_id, request
):
    """Dispatch parsed credential info to the loader matching its "type" field.

    Args:
        filename (str): Source description used in error messages.
        info (Mapping[str, str]): The parsed credential information.
        scopes (Optional[Sequence[str]]): User-defined scopes.
        default_scopes (Optional[Sequence[str]]): Client-library default scopes.
        quota_project_id (Optional[str]): The project ID used for quota and billing.
        request (Optional[google.auth.transport.Request]): Transport for project
            ID resolution of external account credentials.

    Returns:
        Tuple[google.auth.credentials.Credentials, Optional[str]]: The loaded
            credentials and the project ID (when determinable).

    Raises:
        google.auth.exceptions.DefaultCredentialsError: If the "type" field is
            missing or not one of the supported credential types.
    """
    from google.auth.credentials import CredentialsWithQuotaProject

    credential_type = info.get("type")

    # Map each supported type to a zero-argument loader closure; each loader
    # takes a different argument shape, so closures keep dispatch uniform.
    loaders = {
        _AUTHORIZED_USER_TYPE: lambda: _get_authorized_user_credentials(
            filename, info, scopes
        ),
        _SERVICE_ACCOUNT_TYPE: lambda: _get_service_account_credentials(
            filename, info, scopes, default_scopes
        ),
        _EXTERNAL_ACCOUNT_TYPE: lambda: _get_external_account_credentials(
            info,
            filename,
            scopes=scopes,
            default_scopes=default_scopes,
            request=request,
        ),
        _EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE: (
            lambda: _get_external_account_authorized_user_credentials(
                filename, info, request
            )
        ),
        _IMPERSONATED_SERVICE_ACCOUNT_TYPE: (
            lambda: _get_impersonated_service_account_credentials(
                filename, info, scopes
            )
        ),
        _GDCH_SERVICE_ACCOUNT_TYPE: lambda: _get_gdch_service_account_credentials(
            filename, info
        ),
    }

    if credential_type not in loaders:
        raise exceptions.DefaultCredentialsError(
            "The file {file} does not have a valid type. "
            "Type is {type}, expected one of {valid_types}.".format(
                file=filename, type=credential_type, valid_types=_VALID_TYPES
            )
        )

    credentials, project_id = loaders[credential_type]()

    if isinstance(credentials, CredentialsWithQuotaProject):
        credentials = _apply_quota_project_id(credentials, quota_project_id)
    return credentials, project_id
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
def _get_gcloud_sdk_credentials(quota_project_id=None):
    """Gets the credentials and project ID from the Cloud SDK."""
    from google.auth import _cloud_sdk

    _LOGGER.debug("Checking Cloud SDK credentials as part of auth process...")

    # Check if application default credentials exist.
    credentials_filename = _cloud_sdk.get_application_default_credentials_path()

    if not os.path.isfile(credentials_filename):
        _LOGGER.debug("Cloud SDK credentials not found on disk; not using them")
        return None, None

    credentials, project_id = load_credentials_from_file(
        credentials_filename, quota_project_id=quota_project_id
    )
    # Record where the credentials came from for diagnostics.
    credentials._cred_file_path = credentials_filename

    # Fall back to asking gcloud for the project when the file lacked one.
    return credentials, project_id or _cloud_sdk.get_project_id()
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
def _get_explicit_environ_credentials(quota_project_id=None):
    """Gets credentials from the GOOGLE_APPLICATION_CREDENTIALS environment
    variable.

    Args:
        quota_project_id (Optional[str]): The project ID used for quota and
            billing.

    Returns:
        Tuple[Optional[google.auth.credentials.Credentials], Optional[str]]:
            The loaded credentials and project ID, or ``(None, None)`` when the
            environment variable is not set.
    """
    from google.auth import _cloud_sdk

    cloud_sdk_adc_path = _cloud_sdk.get_application_default_credentials_path()
    explicit_file = os.environ.get(environment_vars.CREDENTIALS)

    _LOGGER.debug(
        "Checking %s for explicit credentials as part of auth process...", explicit_file
    )

    if explicit_file is None:
        return None, None

    if explicit_file == cloud_sdk_adc_path:
        # Cloud sdk flow calls gcloud to fetch project id, so if the explicit
        # file path is cloud sdk credentials path, then we should fall back
        # to cloud sdk flow, otherwise project id cannot be obtained.
        _LOGGER.debug(
            "Explicit credentials path %s is the same as Cloud SDK credentials path, fall back to Cloud SDK credentials flow...",
            explicit_file,
        )
        return _get_gcloud_sdk_credentials(quota_project_id=quota_project_id)

    # Reuse the value already read above instead of re-reading the environment,
    # which could in principle change between the two lookups.
    credentials, project_id = load_credentials_from_file(
        explicit_file, quota_project_id=quota_project_id
    )
    credentials._cred_file_path = f"{explicit_file} file via the GOOGLE_APPLICATION_CREDENTIALS environment variable"

    return credentials, project_id
|
| 302 |
+
|
| 303 |
+
|
| 304 |
+
def _get_gae_credentials():
    """Gets Google App Engine App Identity credentials and project ID.

    Returns:
        Tuple[Optional[google.auth.credentials.Credentials], Optional[str]]:
            The credentials and project ID, or ``(None, None)`` when not
            running on GAE gen1 or the App Engine APIs are unavailable.
    """
    # If not GAE gen1, prefer the metadata service even if the GAE APIs are
    # available as per https://google.aip.dev/auth/4115.
    if os.environ.get(environment_vars.LEGACY_APPENGINE_RUNTIME) != "python27":
        return None, None

    # While this library is normally bundled with app_engine, there are
    # some cases where it's not available, so we tolerate ImportError.
    try:
        _LOGGER.debug("Checking for App Engine runtime as part of auth process...")
        import google.auth.app_engine as app_engine
    except ImportError:
        _LOGGER.warning("Import of App Engine auth library failed.")
        return None, None

    try:
        credentials = app_engine.Credentials()
        project_id = app_engine.get_project_id()
        return credentials, project_id
    except EnvironmentError:
        # Fixed grammar of the original message ("cannot authentication").
        _LOGGER.debug(
            "No App Engine library was found so cannot authenticate via App Engine Identity Credentials."
        )
        return None, None
|
| 329 |
+
|
| 330 |
+
|
| 331 |
+
def _get_gce_credentials(request=None, quota_project_id=None):
    """Gets credentials and project ID from the GCE Metadata Service."""
    # Ping requires a transport, but we want application default credentials
    # to require no arguments. So, we'll use the _http_client transport which
    # uses http.client. This is only acceptable because the metadata server
    # doesn't do SSL and never requires proxies.

    # While this library is normally bundled with compute_engine, there are
    # some cases where it's not available, so we tolerate ImportError.
    try:
        from google.auth import compute_engine
        from google.auth.compute_engine import _metadata
    except ImportError:
        _LOGGER.warning("Import of Compute Engine auth library failed.")
        return None, None

    if request is None:
        request = google.auth.transport._http_client.Request()

    # Guard clause: bail out early when the metadata server is unreachable.
    if not _metadata.is_on_gce(request=request):
        _LOGGER.warning(
            "Authentication failed using Compute Engine authentication due to unavailable metadata server."
        )
        return None, None

    # Get the project ID.
    try:
        project_id = _metadata.get_project_id(request=request)
    except exceptions.TransportError:
        project_id = None

    cred = _apply_quota_project_id(compute_engine.Credentials(), quota_project_id)
    return cred, project_id
|
| 366 |
+
|
| 367 |
+
|
| 368 |
+
def _get_external_account_credentials(
    info, filename, scopes=None, default_scopes=None, request=None
):
    """Loads external account Credentials from parsed external account info.

    The credentials information must correspond to a supported external
    account credentials type (AWS, pluggable/executable, or identity pool).

    Args:
        info (Mapping[str, str]): The external account info in Google format.
        filename (str): The full path to the credentials file.
        scopes (Optional[Sequence[str]]): The list of scopes for the
            credentials. If specified, the credentials will automatically be
            scoped if necessary.
        default_scopes (Optional[Sequence[str]]): Default scopes passed by a
            Google client library. Use 'scopes' for user-defined scopes.
        request (Optional[google.auth.transport.Request]): Transport used to
            determine the associated project ID for a workload identity pool
            resource. Defaults to a google.auth.transport.requests.Request
            client.

    Returns:
        Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
            credentials and the project ID; the project ID may not always be
            determinable.

    Raises:
        google.auth.exceptions.DefaultCredentialsError: if the info dictionary
            is in the wrong format or is missing required information.
    """
    # There are currently 3 types of external_account credentials.
    credential_source = info.get("credential_source")

    if info.get("subject_token_type") == _AWS_SUBJECT_TOKEN_TYPE:
        # The configuration corresponds to AWS credentials.
        from google.auth import aws

        credentials = aws.Credentials.from_info(
            info, scopes=scopes, default_scopes=default_scopes
        )
    elif credential_source is not None and credential_source.get("executable") is not None:
        # An executable credential source means pluggable credentials.
        from google.auth import pluggable

        credentials = pluggable.Credentials.from_info(
            info, scopes=scopes, default_scopes=default_scopes
        )
    else:
        try:
            # Check if the configuration corresponds to Identity Pool credentials.
            from google.auth import identity_pool

            credentials = identity_pool.Credentials.from_info(
                info, scopes=scopes, default_scopes=default_scopes
            )
        except ValueError:
            # If the configuration is invalid or does not correspond to any
            # supported external_account credentials, raise an error.
            raise exceptions.DefaultCredentialsError(
                "Failed to load external account credentials from {}".format(filename)
            )

    if request is None:
        import google.auth.transport.requests

        request = google.auth.transport.requests.Request()

    return credentials, credentials.get_project_id(request=request)
|
| 436 |
+
|
| 437 |
+
|
| 438 |
+
def _get_external_account_authorized_user_credentials(
    filename, info, scopes=None, default_scopes=None, request=None
):
    """Loads external account authorized user credentials from parsed info.

    ``scopes``, ``default_scopes`` and ``request`` are accepted for signature
    parity with the sibling loaders but are not used here. Returns
    ``(credentials, None)`` since no project ID is carried.
    """
    try:
        from google.auth import external_account_authorized_user

        credentials = external_account_authorized_user.Credentials.from_info(info)
    except ValueError:
        raise exceptions.DefaultCredentialsError(
            "Failed to load external account authorized user credentials from {}".format(
                filename
            )
        )

    return credentials, None
|
| 453 |
+
|
| 454 |
+
|
| 455 |
+
def _get_authorized_user_credentials(filename, info, scopes=None):
    """Loads authorized user credentials from parsed info.

    Returns ``(credentials, None)``; authorized user credentials carry no
    project ID.
    """
    # Alias avoids shadowing the module name with the result.
    from google.oauth2 import credentials as oauth2_credentials

    try:
        loaded = oauth2_credentials.Credentials.from_authorized_user_info(
            info, scopes=scopes
        )
    except ValueError as caught_exc:
        msg = "Failed to load authorized user credentials from {}".format(filename)
        raise exceptions.DefaultCredentialsError(msg, caught_exc) from caught_exc
    return loaded, None
|
| 467 |
+
|
| 468 |
+
|
| 469 |
+
def _get_service_account_credentials(filename, info, scopes=None, default_scopes=None):
    """Loads service account credentials and project ID from parsed info."""
    from google.oauth2 import service_account

    try:
        creds = service_account.Credentials.from_service_account_info(
            info, scopes=scopes, default_scopes=default_scopes
        )
    except ValueError as caught_exc:
        msg = "Failed to load service account credentials from {}".format(filename)
        raise exceptions.DefaultCredentialsError(msg, caught_exc) from caught_exc
    # Older key files may not carry a project_id; .get keeps that tolerant.
    return creds, info.get("project_id")
|
| 481 |
+
|
| 482 |
+
|
| 483 |
+
def _get_impersonated_service_account_credentials(filename, info, scopes):
    """Loads impersonated service account credentials from parsed info.

    Resolves the source credentials (authorized user, service account, or
    external account authorized user), extracts the target principal from
    ``service_account_impersonation_url``, and wraps both in impersonated
    credentials. Returns ``(credentials, None)``.
    """
    from google.auth import impersonated_credentials

    try:
        source_credentials_info = info.get("source_credentials")
        source_credentials_type = source_credentials_info.get("type")
        if source_credentials_type == _AUTHORIZED_USER_TYPE:
            source_credentials, _ = _get_authorized_user_credentials(
                filename, source_credentials_info
            )
        elif source_credentials_type == _SERVICE_ACCOUNT_TYPE:
            source_credentials, _ = _get_service_account_credentials(
                filename, source_credentials_info
            )
        elif source_credentials_type == _EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE:
            source_credentials, _ = _get_external_account_authorized_user_credentials(
                filename, source_credentials_info
            )
        else:
            raise exceptions.InvalidType(
                "source credential of type {} is not supported.".format(
                    source_credentials_type
                )
            )

        # The target principal is the segment between the last "/" and
        # ":generateAccessToken" in the impersonation URL.
        impersonation_url = info.get("service_account_impersonation_url")
        start_index = impersonation_url.rfind("/")
        end_index = impersonation_url.find(":generateAccessToken")
        if start_index == -1 or end_index == -1 or start_index > end_index:
            raise exceptions.InvalidValue(
                "Cannot extract target principal from {}".format(impersonation_url)
            )
        target_principal = impersonation_url[start_index + 1 : end_index]

        credentials = impersonated_credentials.Credentials(
            source_credentials,
            target_principal,
            scopes,
            info.get("delegates"),
            quota_project_id=info.get("quota_project_id"),
        )
    except ValueError as caught_exc:
        msg = "Failed to load impersonated service account credentials from {}".format(
            filename
        )
        raise exceptions.DefaultCredentialsError(msg, caught_exc) from caught_exc
    return credentials, None
|
| 531 |
+
|
| 532 |
+
|
| 533 |
+
def _get_gdch_service_account_credentials(filename, info):
    """Loads GDCH service account credentials and project from parsed info."""
    from google.oauth2 import gdch_credentials

    try:
        creds = gdch_credentials.ServiceAccountCredentials.from_service_account_info(
            info
        )
    except ValueError as caught_exc:
        msg = "Failed to load GDCH service account credentials from {}".format(filename)
        raise exceptions.DefaultCredentialsError(msg, caught_exc) from caught_exc
    return creds, info.get("project")
|
| 545 |
+
|
| 546 |
+
|
| 547 |
+
def get_api_key_credentials(key):
    """Return credentials with the given API key."""
    from google.auth.api_key import Credentials as _ApiKeyCredentials

    return _ApiKeyCredentials(key)
|
| 552 |
+
|
| 553 |
+
|
| 554 |
+
def _apply_quota_project_id(credentials, quota_project_id):
    """Applies a quota project (explicit or from the environment) to creds.

    Warns when the result is end-user (authorized user) credentials without a
    quota project.
    """
    if quota_project_id:
        credentials = credentials.with_quota_project(quota_project_id)
    else:
        credentials = credentials.with_quota_project_from_environment()

    from google.oauth2 import credentials as authorized_user_credentials

    is_user_creds = isinstance(credentials, authorized_user_credentials.Credentials)
    if is_user_creds and not credentials.quota_project_id:
        _warn_about_problematic_credentials(credentials)
    return credentials
|
| 567 |
+
|
| 568 |
+
|
| 569 |
+
def default(scopes=None, request=None, quota_project_id=None, default_scopes=None):
    """Gets the default credentials for the current environment.

    `Application Default Credentials`_ are looked up in this order:

    1. The file named by the ``GOOGLE_APPLICATION_CREDENTIALS`` environment
       variable — a service account key, an external account (workload
       identity federation) configuration, or a GDCH service account file.
    2. Google Cloud SDK application default credentials
       (``gcloud auth application-default login``).
    3. App Engine (first generation) App Identity credentials.
    4. Compute Engine / Cloud Run / GKE Metadata Service credentials.
    5. Otherwise :class:`~google.auth.exceptions.DefaultCredentialsError`
       is raised.

    .. _Application Default Credentials: https://developers.google.com\
            /identity/protocols/application-default-credentials

    Args:
        scopes (Sequence[str]): The list of scopes for the credentials. If
            specified, the credentials will automatically be scoped if
            necessary.
        request (Optional[google.auth.transport.Request]): An object used to
            make HTTP requests — for Compute Engine detection and external
            account project-ID lookup. Defaults to the standard-library http
            client (Compute Engine) or a
            google.auth.transport.requests.Request client (external account).
        quota_project_id (Optional[str]): The project ID used for quota and
            billing.
        default_scopes (Optional[Sequence[str]]): Default scopes passed by a
            Google client library. Use 'scopes' for user-defined scopes.

    Returns:
        Tuple[~google.auth.credentials.Credentials, Optional[str]]:
            the current environment's credentials and project ID. The project
            ID may be None when it could not be ascertained.

    Raises:
        ~google.auth.exceptions.DefaultCredentialsError:
            If no credentials were found, or if the credentials found were
            invalid.
    """
    from google.auth.credentials import with_scopes_if_required
    from google.auth.credentials import CredentialsWithQuotaProject

    explicit_project_id = os.environ.get(
        environment_vars.PROJECT, os.environ.get(environment_vars.LEGACY_PROJECT)
    )

    # Avoid passing scopes here to prevent passing scopes to user credentials.
    # with_scopes_if_required() below will ensure scopes/default scopes are
    # safely set on the returned credentials since requires_scopes will
    # guard against setting scopes on user credentials.
    checkers = (
        lambda: _get_explicit_environ_credentials(quota_project_id=quota_project_id),
        lambda: _get_gcloud_sdk_credentials(quota_project_id=quota_project_id),
        _get_gae_credentials,
        lambda: _get_gce_credentials(request, quota_project_id=quota_project_id),
    )

    for checker in checkers:
        credentials, project_id = checker()
        if credentials is None:
            continue

        credentials = with_scopes_if_required(
            credentials, scopes, default_scopes=default_scopes
        )
        effective_project_id = explicit_project_id or project_id

        # For external account credentials, scopes are required to determine
        # the project ID. Try to get the project ID again if not yet
        # determined.
        if not effective_project_id and callable(
            getattr(credentials, "get_project_id", None)
        ):
            if request is None:
                import google.auth.transport.requests

                request = google.auth.transport.requests.Request()
            effective_project_id = credentials.get_project_id(request=request)

        if quota_project_id and isinstance(credentials, CredentialsWithQuotaProject):
            credentials = credentials.with_quota_project(quota_project_id)

        if not effective_project_id:
            _LOGGER.warning(
                "No project ID could be determined. Consider running "
                "`gcloud config set project` or setting the %s "
                "environment variable",
                environment_vars.PROJECT,
            )
        return credentials, effective_project_id

    raise exceptions.DefaultCredentialsError(_CLOUD_SDK_MISSING_CREDENTIALS)
|
lib/python3.10/site-packages/google/auth/_default_async.py
ADDED
|
@@ -0,0 +1,282 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 Google Inc.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
"""Application default credentials.
|
| 16 |
+
|
| 17 |
+
Implements application default credentials and project ID detection.
|
| 18 |
+
"""
|
| 19 |
+
|
| 20 |
+
import io
|
| 21 |
+
import json
|
| 22 |
+
import os
|
| 23 |
+
|
| 24 |
+
from google.auth import _default
|
| 25 |
+
from google.auth import environment_vars
|
| 26 |
+
from google.auth import exceptions
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def load_credentials_from_file(filename, scopes=None, quota_project_id=None):
    """Loads Google credentials from a file.

    The credentials file must be a service account key or stored authorized
    user credentials.

    Args:
        filename (str): The full path to the credentials file.
        scopes (Optional[Sequence[str]]): The list of scopes for the
            credentials. If specified, the credentials will automatically be
            scoped if necessary.
        quota_project_id (Optional[str]): The project ID used for quota and
            billing.

    Returns:
        Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
            credentials and the project ID. Authorized user credentials do not
            have the project ID information.

    Raises:
        google.auth.exceptions.DefaultCredentialsError: if the file is in the
            wrong format or is missing.
    """
    if not os.path.exists(filename):
        raise exceptions.DefaultCredentialsError(
            "File {} was not found.".format(filename)
        )

    with io.open(filename, "r") as file_obj:
        try:
            info = json.load(file_obj)
        except ValueError as caught_exc:
            new_exc = exceptions.DefaultCredentialsError(
                "File {} is not a valid json file.".format(filename), caught_exc
            )
            raise new_exc from caught_exc

    # The type key indicates whether the file holds service account or
    # authorized user credentials.
    credential_type = info.get("type")

    if credential_type == _default._AUTHORIZED_USER_TYPE:
        from google.oauth2 import _credentials_async as credentials_mod

        try:
            creds = credentials_mod.Credentials.from_authorized_user_info(
                info, scopes=scopes
            )
        except ValueError as caught_exc:
            msg = "Failed to load authorized user credentials from {}".format(filename)
            raise exceptions.DefaultCredentialsError(msg, caught_exc) from caught_exc
        if quota_project_id:
            creds = creds.with_quota_project(quota_project_id)
        if not creds.quota_project_id:
            _default._warn_about_problematic_credentials(creds)
        return creds, None

    if credential_type == _default._SERVICE_ACCOUNT_TYPE:
        from google.oauth2 import _service_account_async as service_account

        try:
            creds = service_account.Credentials.from_service_account_info(
                info, scopes=scopes
            ).with_quota_project(quota_project_id)
        except ValueError as caught_exc:
            msg = "Failed to load service account credentials from {}".format(filename)
            raise exceptions.DefaultCredentialsError(msg, caught_exc) from caught_exc
        return creds, info.get("project_id")

    raise exceptions.DefaultCredentialsError(
        "The file {file} does not have a valid type. "
        "Type is {type}, expected one of {valid_types}.".format(
            file=filename, type=credential_type, valid_types=_default._VALID_TYPES
        )
    )
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def _get_gcloud_sdk_credentials(quota_project_id=None):
    """Gets the credentials and project ID from the Cloud SDK."""
    from google.auth import _cloud_sdk

    # Check if application default credentials exist.
    credentials_filename = _cloud_sdk.get_application_default_credentials_path()
    if not os.path.isfile(credentials_filename):
        return None, None

    credentials, project_id = load_credentials_from_file(
        credentials_filename, quota_project_id=quota_project_id
    )
    # Fall back to asking gcloud for the active project when the file
    # carried none.
    return credentials, project_id or _cloud_sdk.get_project_id()
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
def _get_explicit_environ_credentials(quota_project_id=None):
    """Gets credentials from the GOOGLE_APPLICATION_CREDENTIALS environment
    variable."""
    from google.auth import _cloud_sdk

    cloud_sdk_adc_path = _cloud_sdk.get_application_default_credentials_path()
    explicit_file = os.environ.get(environment_vars.CREDENTIALS)

    if explicit_file is None:
        return None, None

    if explicit_file == cloud_sdk_adc_path:
        # The Cloud SDK flow shells out to gcloud to fetch the project id, so
        # when the explicit file path is the Cloud SDK credentials path, fall
        # back to that flow; otherwise the project id cannot be obtained.
        return _get_gcloud_sdk_credentials(quota_project_id=quota_project_id)

    return load_credentials_from_file(
        os.environ[environment_vars.CREDENTIALS], quota_project_id=quota_project_id
    )
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
def _get_gae_credentials():
    """Gets Google App Engine App Identity credentials and project ID.

    Delegates to the synchronous implementation in ``_default``, which
    tolerates the App Engine library being unavailable.
    """
    return _default._get_gae_credentials()
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
def _get_gce_credentials(request=None):
    """Gets credentials and project ID from the GCE Metadata Service.

    Delegates to the synchronous implementation in ``_default``. When no
    request is supplied, that implementation falls back to the
    standard-library http.client transport, which is acceptable because the
    metadata server does not use SSL and never requires proxies.
    """
    return _default._get_gce_credentials(request)
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
def default_async(scopes=None, request=None, quota_project_id=None):
    """Gets the default credentials for the current environment.

    Async variant of :func:`google.auth.default`. Credentials are looked up,
    in order, from: the ``GOOGLE_APPLICATION_CREDENTIALS`` file, the Google
    Cloud SDK application default credentials, App Engine (first generation)
    App Identity, and the Compute Engine Metadata Service. If none are
    found, :class:`~google.auth.exceptions.DefaultCredentialsError` is
    raised.

    Args:
        scopes (Sequence[str]): The list of scopes for the credentials. If
            specified, the credentials will automatically be scoped if
            necessary.
        request (google.auth.transport.Request): An object used to make HTTP
            requests; used to detect whether the application is running on
            Compute Engine. If not specified, the standard-library http
            client is used.
        quota_project_id (Optional[str]): The project ID used for quota and
            billing.

    Returns:
        Tuple[~google.auth.credentials.Credentials, Optional[str]]:
            the current environment's credentials and project ID. The project
            ID may be None when it could not be ascertained.

    Raises:
        ~google.auth.exceptions.DefaultCredentialsError:
            If no credentials were found, or if the credentials found were
            invalid.
    """
    from google.auth._credentials_async import with_scopes_if_required
    from google.auth.credentials import CredentialsWithQuotaProject

    explicit_project_id = os.environ.get(
        environment_vars.PROJECT, os.environ.get(environment_vars.LEGACY_PROJECT)
    )

    checkers = (
        lambda: _get_explicit_environ_credentials(quota_project_id=quota_project_id),
        lambda: _get_gcloud_sdk_credentials(quota_project_id=quota_project_id),
        _get_gae_credentials,
        lambda: _get_gce_credentials(request),
    )

    for checker in checkers:
        credentials, project_id = checker()
        if credentials is None:
            continue

        credentials = with_scopes_if_required(credentials, scopes)
        if quota_project_id and isinstance(credentials, CredentialsWithQuotaProject):
            credentials = credentials.with_quota_project(quota_project_id)
        effective_project_id = explicit_project_id or project_id
        if not effective_project_id:
            _default._LOGGER.warning(
                "No project ID could be determined. Consider running "
                "`gcloud config set project` or setting the %s "
                "environment variable",
                environment_vars.PROJECT,
            )
        return credentials, effective_project_id

    raise exceptions.DefaultCredentialsError(_default._CLOUD_SDK_MISSING_CREDENTIALS)
|
lib/python3.10/site-packages/google/auth/_helpers.py
ADDED
|
@@ -0,0 +1,273 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2015 Google Inc.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
"""Helper functions for commonly used utilities."""
|
| 16 |
+
|
| 17 |
+
import base64
|
| 18 |
+
import calendar
|
| 19 |
+
import datetime
|
| 20 |
+
from email.message import Message
|
| 21 |
+
import sys
|
| 22 |
+
import urllib
|
| 23 |
+
|
| 24 |
+
from google.auth import exceptions
|
| 25 |
+
|
| 26 |
+
# The smallest MDS cache used by this library stores tokens until 4 minutes from
|
| 27 |
+
# expiry.
|
| 28 |
+
REFRESH_THRESHOLD = datetime.timedelta(minutes=3, seconds=45)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def copy_docstring(source_class):
    """Decorator that copies a method's docstring from another class.

    Args:
        source_class (type): The class that has the documented method.

    Returns:
        Callable: A decorator that copies the docstring of the identically
            named method on ``source_class`` onto the decorated method.
    """

    def decorator(method):
        """Copy the docstring from ``source_class`` onto ``method``.

        Args:
            method (Callable): The method that receives the docstring.

        Returns:
            Callable: ``method`` with its docstring filled in.

        Raises:
            google.auth.exceptions.InvalidOperation: if ``method`` already
                has a docstring of its own.
        """
        # Refuse to silently overwrite documentation that already exists.
        if method.__doc__:
            raise exceptions.InvalidOperation("Method already has a docstring.")

        method.__doc__ = getattr(source_class, method.__name__).__doc__
        return method

    return decorator
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def parse_content_type(header_value):
    """Extract the plain media-type (without parameters) from a 'content-type' header.

    Parsing is delegated to :class:`email.message.Message`, the replacement
    suggested in PEP 594 now that the ``cgi`` module is deprecated and slated
    for removal in Python 3.13 (see https://peps.python.org/pep-0594/#cgi).

    Args:
        header_value (str): The raw value of a 'content-type' header.

    Returns:
        str: The lowercase media-type with any parameters stripped. If the
            provided content-type is not parsable, returns 'text/plain',
            the default value for textual files.
    """
    message = Message()
    message["content-type"] = header_value
    # Despite its name, get_content_type() returns only the media-type.
    return message.get_content_type()
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def utcnow():
    """Return the current UTC time as a naive :class:`datetime.datetime`.

    ``datetime.utcnow()`` is deprecated as of Python 3.12, so the time is
    taken with ``datetime.now(timezone.utc)`` instead. That call produces an
    offset-aware value, while this helper's historical contract is
    offset-naive; mixing the two breaks datetime comparisons, so the
    timezone info is stripped for backward compatibility.

    Returns:
        datetime: The current time in UTC, without tzinfo.
    """
    return datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def datetime_to_secs(value):
    """Convert a datetime object to seconds since the UNIX epoch.

    Args:
        value (datetime): The datetime to convert.

    Returns:
        int: The number of seconds since the UNIX epoch.
    """
    # timegm interprets the struct_time as UTC, matching utctimetuple().
    utc_tuple = value.utctimetuple()
    return calendar.timegm(utc_tuple)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def to_bytes(value, encoding="utf-8"):
    """Convert a string value to bytes, if necessary.

    Args:
        value (Union[str, bytes]): The value to be converted.
        encoding (str): The encoding used when a ``str`` must be encoded.
            Defaults to "utf-8".

    Returns:
        bytes: ``value`` encoded with ``encoding`` if it was unicode,
            otherwise ``value`` unchanged.

    Raises:
        google.auth.exceptions.InvalidValue: If the value could not be converted to bytes.
    """
    if isinstance(value, str):
        return value.encode(encoding)
    if isinstance(value, bytes):
        return value
    raise exceptions.InvalidValue(
        "{0!r} could not be converted to bytes".format(value)
    )
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
def from_bytes(value):
    """Convert bytes to a string value, if necessary.

    Args:
        value (Union[str, bytes]): The value to be converted.

    Returns:
        str: ``value`` decoded as UTF-8 if it was bytes, otherwise
            ``value`` unchanged.

    Raises:
        google.auth.exceptions.InvalidValue: If the value could not be converted to unicode.
    """
    if isinstance(value, bytes):
        return value.decode("utf-8")
    if isinstance(value, str):
        return value
    raise exceptions.InvalidValue(
        "{0!r} could not be converted to unicode".format(value)
    )
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
def update_query(url, params, remove=None):
    """Update a URL's query parameters.

    Values already present in the URL are replaced.

    Args:
        url (str): The URL to update.
        params (Mapping[str, str]): A mapping of query parameter
            keys to values.
        remove (Sequence[str]): Parameters to remove from the query string.

    Returns:
        str: The URL with updated query parameters.

    Examples:

        >>> url = 'http://example.com?a=1'
        >>> update_query(url, {'a': '2'})
        http://example.com?a=2
        >>> update_query(url, {'b': '3'})
        http://example.com?a=1&b=3
        >>> update_query(url, {'b': '3'}, remove=['a'])
        http://example.com?b=3

    """
    removed_keys = frozenset(remove) if remove is not None else frozenset()

    # Break the URL apart and decode its current query string.
    parts = urllib.parse.urlparse(url)
    query_params = urllib.parse.parse_qs(parts.query)
    # Apply the replacements, then drop any keys slated for removal.
    query_params.update(params)
    query_params = {
        key: value for key, value in query_params.items() if key not in removed_keys
    }
    # Re-encode the query string and stitch the URL back together.
    new_query = urllib.parse.urlencode(query_params, doseq=True)
    return urllib.parse.urlunparse(parts._replace(query=new_query))
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
def scopes_to_string(scopes):
    """Convert a scopes sequence to the single-string form expected by
    OAuth 2.0 authorization servers.

    Args:
        scopes (Sequence[str]): The sequence of scopes to convert.

    Returns:
        str: The scopes joined into one space-separated string.
    """
    separator = " "
    return separator.join(scopes)
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
def string_to_scopes(scopes):
    """Converts a stringified scopes value to a list.

    Args:
        scopes (Union[Sequence, str]): Either a string of space-separated
            scopes or an already-separated sequence of scopes.

    Returns:
        Sequence(str): The separated scopes.
    """
    if not scopes:
        return []

    # The documented contract accepts both a string and a sequence, but the
    # previous implementation called .split() unconditionally and crashed on
    # non-string sequences; normalize those to a list instead.
    if isinstance(scopes, str):
        return scopes.split(" ")
    return list(scopes)
|
| 232 |
+
|
| 233 |
+
|
| 234 |
+
def padded_urlsafe_b64decode(value):
    """Decode a base64 string that may be missing its padding characters.

    Google infrastructure tends to omit the base64 padding characters.

    Args:
        value (Union[str, bytes]): The encoded value.

    Returns:
        bytes: The decoded value
    """
    encoded = to_bytes(value)
    # Restore '=' padding so the length is a multiple of four.
    missing = -len(encoded) % 4
    return base64.urlsafe_b64decode(encoded + b"=" * missing)
|
| 248 |
+
|
| 249 |
+
|
| 250 |
+
def unpadded_urlsafe_b64encode(value):
    """Base64url-encode a value, stripping any '=' padding characters.

    `rfc 7515`_ defines Base64url to NOT include any padding
    characters, but the stdlib encoder emits them by default.

    _rfc7515: https://tools.ietf.org/html/rfc7515#page-6

    Args:
        value (Union[str|bytes]): The bytes-like value to encode

    Returns:
        Union[str|bytes]: The encoded value
    """
    encoded = base64.urlsafe_b64encode(value)
    return encoded.rstrip(b"=")
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
def is_python_3():
    """Report whether the running interpreter is Python 3 (vs. Python 2).

    Returns:
        bool: True if the Python interpreter is Python 3 and False otherwise.
    """
    return sys.version_info >= (3,)
|
lib/python3.10/site-packages/google/auth/_refresh_worker.py
ADDED
|
@@ -0,0 +1,109 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2023 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
import copy
|
| 16 |
+
import logging
|
| 17 |
+
import threading
|
| 18 |
+
|
| 19 |
+
import google.auth.exceptions as e
|
| 20 |
+
|
| 21 |
+
_LOGGER = logging.getLogger(__name__)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class RefreshThreadManager:
    """Organizes exactly one background job that refreshes a token."""

    def __init__(self):
        """Initializes the manager."""
        self._worker = None
        # Guards every read/write of self._worker across threads.
        self._lock = threading.Lock()

    def start_refresh(self, cred, request):
        """Starts a refresh thread for the given credentials.

        The credentials are refreshed using the request parameter.
        request and cred MUST not be None

        Returns True if a background refresh was kicked off. False otherwise.

        Args:
            cred: A credentials object.
            request: A request object.
        Returns:
            bool
        """
        if cred is None or request is None:
            raise e.InvalidValue(
                "Unable to start refresh. cred and request must be valid and instantiated objects."
            )

        with self._lock:
            worker = self._worker
            # A previous background refresh failed; refuse to start another
            # one until the stored error has been cleared.
            if worker is not None and worker._error_info is not None:
                return False

            if worker is None or not worker.is_alive():  # pragma: NO COVER
                # deepcopy the request so the background thread owns its copy.
                self._worker = RefreshThread(cred=cred, request=copy.deepcopy(request))
                self._worker.start()
        return True

    def clear_error(self):
        """Removes any errors that were stored from previous background refreshes."""
        with self._lock:
            worker = self._worker
            if worker:
                worker._error_info = None

    def __getstate__(self):
        """Pickle helper that serializes the _lock attribute."""
        # Lock objects are not picklable; drop the lock and rebuild on load.
        state = dict(self.__dict__)
        state["_lock"] = None
        return state

    def __setstate__(self, state):
        """Pickle helper that deserializes the _lock attribute."""
        state["_lock"] = threading.Lock()
        self.__dict__.update(state)
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
class RefreshThread(threading.Thread):
    """Thread that refreshes credentials."""

    def __init__(self, cred, request, **kwargs):
        """Initializes the thread.

        Args:
            cred: A Credential object to refresh.
            request: A Request object used to perform a credential refresh.
            **kwargs: Additional keyword arguments forwarded to threading.Thread.
        """
        super().__init__(**kwargs)
        self._cred = cred
        self._request = request
        # Holds the exception raised by the last failed refresh, if any;
        # RefreshThreadManager inspects this to decide whether to restart.
        self._error_info = None

    def run(self):
        """Perform the credential refresh, recording any failure."""
        try:
            self._cred.refresh(self._request)
        except Exception as err:  # pragma: NO COVER
            _LOGGER.error(f"Background refresh failed due to: {err}")
            self._error_info = err
|
lib/python3.10/site-packages/google/auth/api_key.py
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2022 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
"""Google API key support.
|
| 16 |
+
This module provides authentication using the `API key`_.
|
| 17 |
+
.. _API key:
|
| 18 |
+
https://cloud.google.com/docs/authentication/api-keys/
|
| 19 |
+
"""
|
| 20 |
+
|
| 21 |
+
from google.auth import _helpers
|
| 22 |
+
from google.auth import credentials
|
| 23 |
+
from google.auth import exceptions
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class Credentials(credentials.Credentials):
    """API key credentials.

    These credentials use an API key to provide authorization to applications.
    """

    def __init__(self, token):
        """
        Args:
            token (str): API key string
        Raises:
            ValueError: If the provided API key is not a non-empty string.
        """
        super(Credentials, self).__init__()
        if not token:
            raise exceptions.InvalidValue("Token must be a non-empty API key string")
        self.token = token

    @property
    def expired(self):
        # An API key never expires.
        return False

    @property
    def valid(self):
        # An API key is always considered usable.
        return True

    @_helpers.copy_docstring(credentials.Credentials)
    def refresh(self, request):
        # API keys cannot be refreshed; nothing to do.
        return

    def apply(self, headers, token=None):
        """Apply the API key token to the x-goog-api-key header.

        Args:
            headers (Mapping): The HTTP request headers.
            token (Optional[str]): If specified, overrides the current access
                token.
        """
        key = token if token else self.token
        headers["x-goog-api-key"] = key

    def before_request(self, request, method, url, headers):
        """Performs credential-specific before request logic.

        Refreshes the credentials if necessary, then calls :meth:`apply` to
        apply the token to the x-goog-api-key header.

        Args:
            request (google.auth.transport.Request): The object used to make
                HTTP requests.
            method (str): The request's HTTP method or the RPC method being
                invoked.
            url (str): The request's URI or the RPC service's URI.
            headers (Mapping): The request's headers.
        """
        self.apply(headers)
|
lib/python3.10/site-packages/google/auth/aws.py
ADDED
|
@@ -0,0 +1,861 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
"""AWS Credentials and AWS Signature V4 Request Signer.
|
| 16 |
+
|
| 17 |
+
This module provides credentials to access Google Cloud resources from Amazon
|
| 18 |
+
Web Services (AWS) workloads. These credentials are recommended over the
|
| 19 |
+
use of service account credentials in AWS as they do not involve the management
|
| 20 |
+
of long-live service account private keys.
|
| 21 |
+
|
| 22 |
+
AWS Credentials are initialized using external_account arguments which are
|
| 23 |
+
typically loaded from the external credentials JSON file.
|
| 24 |
+
|
| 25 |
+
This module also provides a definition for an abstract AWS security credentials supplier.
|
| 26 |
+
This supplier can be implemented to return valid AWS security credentials and an AWS region
|
| 27 |
+
and used to create AWS credentials. The credentials will then call the
|
| 28 |
+
supplier instead of using pre-defined methods such as calling the EC2 metadata endpoints.
|
| 29 |
+
|
| 30 |
+
This module also provides a basic implementation of the
|
| 31 |
+
`AWS Signature Version 4`_ request signing algorithm.
|
| 32 |
+
|
| 33 |
+
AWS Credentials use serialized signed requests to the
|
| 34 |
+
`AWS STS GetCallerIdentity`_ API that can be exchanged for Google access tokens
|
| 35 |
+
via the GCP STS endpoint.
|
| 36 |
+
|
| 37 |
+
.. _AWS Signature Version 4: https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html
|
| 38 |
+
.. _AWS STS GetCallerIdentity: https://docs.aws.amazon.com/STS/latest/APIReference/API_GetCallerIdentity.html
|
| 39 |
+
"""
|
| 40 |
+
|
| 41 |
+
import abc
|
| 42 |
+
from dataclasses import dataclass
|
| 43 |
+
import hashlib
|
| 44 |
+
import hmac
|
| 45 |
+
import http.client as http_client
|
| 46 |
+
import json
|
| 47 |
+
import os
|
| 48 |
+
import posixpath
|
| 49 |
+
import re
|
| 50 |
+
from typing import Optional
|
| 51 |
+
import urllib
|
| 52 |
+
from urllib.parse import urljoin
|
| 53 |
+
|
| 54 |
+
from google.auth import _helpers
|
| 55 |
+
from google.auth import environment_vars
|
| 56 |
+
from google.auth import exceptions
|
| 57 |
+
from google.auth import external_account
|
| 58 |
+
|
| 59 |
+
# AWS Signature Version 4 signing algorithm identifier.
|
| 60 |
+
_AWS_ALGORITHM = "AWS4-HMAC-SHA256"
|
| 61 |
+
# The termination string for the AWS credential scope value as defined in
|
| 62 |
+
# https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html
|
| 63 |
+
_AWS_REQUEST_TYPE = "aws4_request"
|
| 64 |
+
# The AWS authorization header name for the security session token if available.
|
| 65 |
+
_AWS_SECURITY_TOKEN_HEADER = "x-amz-security-token"
|
| 66 |
+
# The AWS authorization header name for the auto-generated date.
|
| 67 |
+
_AWS_DATE_HEADER = "x-amz-date"
|
| 68 |
+
# The default AWS regional credential verification URL.
|
| 69 |
+
_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL = (
|
| 70 |
+
"https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15"
|
| 71 |
+
)
|
| 72 |
+
# IMDSV2 session token lifetime. This is set to a low value because the session token is used immediately.
|
| 73 |
+
_IMDSV2_SESSION_TOKEN_TTL_SECONDS = "300"
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class RequestSigner(object):
    """Implements an AWS request signer based on the AWS Signature Version 4 signing
    process.

    https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html
    """

    def __init__(self, region_name):
        """Instantiates an AWS request signer used to compute authenticated signed
        requests to AWS APIs based on the AWS Signature Version 4 signing process.

        Args:
            region_name (str): The AWS region to use.
        """

        self._region_name = region_name

    def get_request_options(
        self,
        aws_security_credentials,
        url,
        method,
        request_payload="",
        additional_headers=None,
    ):
        """Generates the signed request for the provided HTTP request for calling
        an AWS API. This follows the steps described at:
        https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html

        Args:
            aws_security_credentials (AWSSecurityCredentials): The AWS security credentials.
            url (str): The AWS service URL containing the canonical URI and
                query string.
            method (str): The HTTP method used to call this API.
            request_payload (Optional[str]): The optional request payload if
                available.
            additional_headers (Optional[Mapping[str, str]]): The optional
                additional headers needed for the requested AWS API.

        Returns:
            Mapping[str, str]: The AWS signed request dictionary object.

        Raises:
            google.auth.exceptions.InvalidResource: If the provided URL is
                missing a hostname or is not an https URL.
        """
        # FIX: the default was previously a shared mutable dict ({}), a classic
        # Python pitfall. Using None preserves behavior because the original
        # already normalized falsy values with `or {}`.
        additional_headers = additional_headers or {}

        uri = urllib.parse.urlparse(url)
        # Normalize the URL path. This is needed for the canonical_uri.
        # os.path.normpath can't be used since it normalizes "/" paths
        # to "\\" in Windows OS.
        normalized_uri = urllib.parse.urlparse(
            urljoin(url, posixpath.normpath(uri.path))
        )
        # Validate provided URL.
        if not uri.hostname or uri.scheme != "https":
            raise exceptions.InvalidResource("Invalid AWS service URL")

        header_map = _generate_authentication_header_map(
            host=uri.hostname,
            canonical_uri=normalized_uri.path or "/",
            canonical_querystring=_get_canonical_querystring(uri.query),
            method=method,
            region=self._region_name,
            aws_security_credentials=aws_security_credentials,
            request_payload=request_payload,
            additional_headers=additional_headers,
        )
        headers = {
            "Authorization": header_map.get("authorization_header"),
            "host": uri.hostname,
        }
        # Add x-amz-date if available.
        if "amz_date" in header_map:
            headers[_AWS_DATE_HEADER] = header_map.get("amz_date")
        # Append additional optional headers, eg. X-Amz-Target, Content-Type, etc.
        for key in additional_headers:
            headers[key] = additional_headers[key]

        # Add session token if available.
        if aws_security_credentials.session_token is not None:
            headers[_AWS_SECURITY_TOKEN_HEADER] = aws_security_credentials.session_token

        signed_request = {"url": url, "method": method, "headers": headers}
        if request_payload:
            signed_request["data"] = request_payload
        return signed_request
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
def _get_canonical_querystring(query):
|
| 163 |
+
"""Generates the canonical query string given a raw query string.
|
| 164 |
+
Logic is based on
|
| 165 |
+
https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
|
| 166 |
+
|
| 167 |
+
Args:
|
| 168 |
+
query (str): The raw query string.
|
| 169 |
+
|
| 170 |
+
Returns:
|
| 171 |
+
str: The canonical query string.
|
| 172 |
+
"""
|
| 173 |
+
# Parse raw query string.
|
| 174 |
+
querystring = urllib.parse.parse_qs(query)
|
| 175 |
+
querystring_encoded_map = {}
|
| 176 |
+
for key in querystring:
|
| 177 |
+
quote_key = urllib.parse.quote(key, safe="-_.~")
|
| 178 |
+
# URI encode key.
|
| 179 |
+
querystring_encoded_map[quote_key] = []
|
| 180 |
+
for item in querystring[key]:
|
| 181 |
+
# For each key, URI encode all values for that key.
|
| 182 |
+
querystring_encoded_map[quote_key].append(
|
| 183 |
+
urllib.parse.quote(item, safe="-_.~")
|
| 184 |
+
)
|
| 185 |
+
# Sort values for each key.
|
| 186 |
+
querystring_encoded_map[quote_key].sort()
|
| 187 |
+
# Sort keys.
|
| 188 |
+
sorted_keys = list(querystring_encoded_map.keys())
|
| 189 |
+
sorted_keys.sort()
|
| 190 |
+
# Reconstruct the query string. Preserve keys with multiple values.
|
| 191 |
+
querystring_encoded_pairs = []
|
| 192 |
+
for key in sorted_keys:
|
| 193 |
+
for item in querystring_encoded_map[key]:
|
| 194 |
+
querystring_encoded_pairs.append("{}={}".format(key, item))
|
| 195 |
+
return "&".join(querystring_encoded_pairs)
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
def _sign(key, msg):
|
| 199 |
+
"""Creates the HMAC-SHA256 hash of the provided message using the provided
|
| 200 |
+
key.
|
| 201 |
+
|
| 202 |
+
Args:
|
| 203 |
+
key (str): The HMAC-SHA256 key to use.
|
| 204 |
+
msg (str): The message to hash.
|
| 205 |
+
|
| 206 |
+
Returns:
|
| 207 |
+
str: The computed hash bytes.
|
| 208 |
+
"""
|
| 209 |
+
return hmac.new(key, msg.encode("utf-8"), hashlib.sha256).digest()
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
def _get_signing_key(key, date_stamp, region_name, service_name):
    """Calculates the signing key used to calculate the signature for
    AWS Signature Version 4 based on:
    https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html

    The key derivation chain is: kSecret -> kDate -> kRegion -> kService -> kSigning.

    Args:
        key (str): The AWS secret access key.
        date_stamp (str): The request date in '%Y%m%d' format.
        region_name (str): The AWS region.
        service_name (str): The AWS service name, eg. sts.

    Returns:
        bytes: The signing key bytes.
    """
    k_date = _sign(("AWS4" + key).encode("utf-8"), date_stamp)
    k_region = _sign(k_date, region_name)
    k_service = _sign(k_region, service_name)
    # FIX: use the module-level constant instead of repeating the
    # "aws4_request" literal, keeping the termination string consistent
    # with the credential scope built in _generate_authentication_header_map.
    k_signing = _sign(k_service, _AWS_REQUEST_TYPE)
    return k_signing
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
def _generate_authentication_header_map(
    host,
    canonical_uri,
    canonical_querystring,
    method,
    region,
    aws_security_credentials,
    request_payload="",
    additional_headers=None,
):
    """Generates the authentication header map needed for generating the AWS
    Signature Version 4 signed request.

    Args:
        host (str): The AWS service URL hostname.
        canonical_uri (str): The AWS service URL path name.
        canonical_querystring (str): The AWS service URL query string.
        method (str): The HTTP method used to call this API.
        region (str): The AWS region.
        aws_security_credentials (AWSSecurityCredentials): The AWS security credentials.
        request_payload (Optional[str]): The optional request payload if
            available.
        additional_headers (Optional[Mapping[str, str]]): The optional
            additional headers needed for the requested AWS API.

    Returns:
        Mapping[str, str]: The AWS authentication header dictionary object.
            This contains the x-amz-date and authorization header information.
    """
    # FIX: the default was previously a shared mutable dict ({}); None avoids
    # the mutable-default-argument pitfall without changing behavior.
    additional_headers = additional_headers or {}

    # iam.amazonaws.com host => iam service.
    # sts.us-east-2.amazonaws.com host => sts service.
    service_name = host.split(".")[0]

    current_time = _helpers.utcnow()
    amz_date = current_time.strftime("%Y%m%dT%H%M%SZ")
    date_stamp = current_time.strftime("%Y%m%d")

    # Change all additional headers to be lower case.
    full_headers = {}
    for key in additional_headers:
        full_headers[key.lower()] = additional_headers[key]
    # Add AWS session token if available.
    if aws_security_credentials.session_token is not None:
        full_headers[
            _AWS_SECURITY_TOKEN_HEADER
        ] = aws_security_credentials.session_token

    # Required headers
    full_headers["host"] = host
    # Do not use generated x-amz-date if the date header is provided.
    # Previously the date was not fixed with x-amz- and could be provided
    # manually.
    # https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req
    if "date" not in full_headers:
        full_headers[_AWS_DATE_HEADER] = amz_date

    # Header keys need to be sorted alphabetically.
    header_keys = sorted(full_headers)
    canonical_headers = ""
    for key in header_keys:
        canonical_headers = "{}{}:{}\n".format(
            canonical_headers, key, full_headers[key]
        )
    signed_headers = ";".join(header_keys)

    payload_hash = hashlib.sha256((request_payload or "").encode("utf-8")).hexdigest()

    # https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
    canonical_request = "{}\n{}\n{}\n{}\n{}\n{}".format(
        method,
        canonical_uri,
        canonical_querystring,
        canonical_headers,
        signed_headers,
        payload_hash,
    )

    credential_scope = "{}/{}/{}/{}".format(
        date_stamp, region, service_name, _AWS_REQUEST_TYPE
    )

    # https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html
    string_to_sign = "{}\n{}\n{}\n{}".format(
        _AWS_ALGORITHM,
        amz_date,
        credential_scope,
        hashlib.sha256(canonical_request.encode("utf-8")).hexdigest(),
    )

    # https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html
    signing_key = _get_signing_key(
        aws_security_credentials.secret_access_key, date_stamp, region, service_name
    )
    signature = hmac.new(
        signing_key, string_to_sign.encode("utf-8"), hashlib.sha256
    ).hexdigest()

    # https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html
    authorization_header = "{} Credential={}/{}, SignedHeaders={}, Signature={}".format(
        _AWS_ALGORITHM,
        aws_security_credentials.access_key_id,
        credential_scope,
        signed_headers,
        signature,
    )

    authentication_header = {"authorization_header": authorization_header}
    # Do not use generated x-amz-date if the date header is provided.
    if "date" not in full_headers:
        authentication_header["amz_date"] = amz_date
    return authentication_header
|
| 345 |
+
|
| 346 |
+
|
| 347 |
+
@dataclass
class AwsSecurityCredentials:
    """A class that models AWS security credentials with an optional session token.

    Attributes:
        access_key_id (str): The AWS security credentials access key id.
        secret_access_key (str): The AWS security credentials secret access key.
        session_token (Optional[str]): The optional AWS security credentials session token. This should be set when using temporary credentials.
    """

    # Permanent credential pair; always required.
    access_key_id: str
    secret_access_key: str
    # Only present for temporary credentials (e.g. from STS or the EC2
    # metadata server); None for long-lived credentials.
    session_token: Optional[str] = None
|
| 360 |
+
|
| 361 |
+
|
| 362 |
+
class AwsSecurityCredentialsSupplier(metaclass=abc.ABCMeta):
    """Base class for AWS security credential suppliers. This can be implemented with custom logic to retrieve
    AWS security credentials to exchange for a Google Cloud access token. The AWS external account credential does
    not cache the AWS security credentials, so caching logic should be added in the implementation.
    """

    @abc.abstractmethod
    def get_aws_security_credentials(self, context, request):
        """Returns the AWS security credentials for the requested context.

        .. warning: This is not cached by the calling Google credential, so caching logic should be implemented in the supplier.

        Args:
            context (google.auth.externalaccount.SupplierContext): The context object
                containing information about the requested audience and subject token type.
            request (google.auth.transport.Request): The object used to make
                HTTP requests.

        Raises:
            google.auth.exceptions.RefreshError: If an error is encountered during
                security credential retrieval logic.

        Returns:
            AwsSecurityCredentials: The requested AWS security credentials.
        """
        # Abstract: concrete suppliers must override this method.
        raise NotImplementedError("")

    @abc.abstractmethod
    def get_aws_region(self, context, request):
        """Returns the AWS region for the requested context.

        Args:
            context (google.auth.externalaccount.SupplierContext): The context object
                containing information about the requested audience and subject token type.
            request (google.auth.transport.Request): The object used to make
                HTTP requests.

        Raises:
            google.auth.exceptions.RefreshError: If an error is encountered during
                region retrieval logic.

        Returns:
            str: The AWS region.
        """
        # Abstract: concrete suppliers must override this method.
        raise NotImplementedError("")
|
| 407 |
+
|
| 408 |
+
|
| 409 |
+
class _DefaultAwsSecurityCredentialsSupplier(AwsSecurityCredentialsSupplier):
    """Default implementation of AWS security credentials supplier. Supports retrieving
    credentials and region via EC2 metadata endpoints and environment variables.
    """

    def __init__(self, credential_source):
        """Initializes the supplier from an AWS credential source mapping.

        Args:
            credential_source (Mapping): The AWS credential source dictionary.
                Recognized keys are "region_url", "url" (the security
                credentials endpoint) and "imdsv2_session_token_url"; all are
                optional here and validated at use time.
        """
        self._region_url = credential_source.get("region_url")
        self._security_credentials_url = credential_source.get("url")
        self._imdsv2_session_token_url = credential_source.get(
            "imdsv2_session_token_url"
        )

    @staticmethod
    def _decode_response_body(response):
        """Returns response.data as text, supporting both str and bytes payloads.

        FIX: this str/bytes normalization was duplicated three times across the
        class; extracted into one helper to keep the handling consistent.
        """
        if hasattr(response.data, "decode"):
            return response.data.decode("utf-8")
        return response.data

    @_helpers.copy_docstring(AwsSecurityCredentialsSupplier)
    def get_aws_security_credentials(self, context, request):

        # Check environment variables for permanent credentials first.
        # https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html
        env_aws_access_key_id = os.environ.get(environment_vars.AWS_ACCESS_KEY_ID)
        env_aws_secret_access_key = os.environ.get(
            environment_vars.AWS_SECRET_ACCESS_KEY
        )
        # This is normally not available for permanent credentials.
        env_aws_session_token = os.environ.get(environment_vars.AWS_SESSION_TOKEN)
        if env_aws_access_key_id and env_aws_secret_access_key:
            return AwsSecurityCredentials(
                env_aws_access_key_id, env_aws_secret_access_key, env_aws_session_token
            )

        imdsv2_session_token = self._get_imdsv2_session_token(request)
        role_name = self._get_metadata_role_name(request, imdsv2_session_token)

        # Get security credentials.
        credentials = self._get_metadata_security_credentials(
            request, role_name, imdsv2_session_token
        )

        return AwsSecurityCredentials(
            credentials.get("AccessKeyId"),
            credentials.get("SecretAccessKey"),
            credentials.get("Token"),
        )

    @_helpers.copy_docstring(AwsSecurityCredentialsSupplier)
    def get_aws_region(self, context, request):
        # The AWS metadata server is not available in some AWS environments
        # such as AWS lambda. Instead, it is available via environment
        # variable.
        env_aws_region = os.environ.get(environment_vars.AWS_REGION)
        if env_aws_region is not None:
            return env_aws_region

        env_aws_region = os.environ.get(environment_vars.AWS_DEFAULT_REGION)
        if env_aws_region is not None:
            return env_aws_region

        if not self._region_url:
            raise exceptions.RefreshError("Unable to determine AWS region")

        headers = None
        imdsv2_session_token = self._get_imdsv2_session_token(request)
        if imdsv2_session_token is not None:
            headers = {"X-aws-ec2-metadata-token": imdsv2_session_token}

        response = request(url=self._region_url, method="GET", headers=headers)

        # Support both string and bytes type response.data.
        response_body = self._decode_response_body(response)

        if response.status != http_client.OK:
            raise exceptions.RefreshError(
                "Unable to retrieve AWS region: {}".format(response_body)
            )

        # This endpoint will return the region in format: us-east-2b.
        # Only the us-east-2 part should be used.
        return response_body[:-1]

    def _get_imdsv2_session_token(self, request):
        """Requests an IMDSv2 session token, or returns None when not configured.

        Args:
            request (google.auth.transport.Request): A callable used to make
                HTTP requests, or None.

        Returns:
            Optional[str]: The session token, or None when either the request
                object or the imdsv2_session_token_url is unavailable.

        Raises:
            google.auth.exceptions.RefreshError: If the token endpoint returns
                a non-OK status.
        """
        if request is not None and self._imdsv2_session_token_url is not None:
            headers = {
                "X-aws-ec2-metadata-token-ttl-seconds": _IMDSV2_SESSION_TOKEN_TTL_SECONDS
            }

            imdsv2_session_token_response = request(
                url=self._imdsv2_session_token_url, method="PUT", headers=headers
            )

            if imdsv2_session_token_response.status != http_client.OK:
                raise exceptions.RefreshError(
                    "Unable to retrieve AWS Session Token: {}".format(
                        imdsv2_session_token_response.data
                    )
                )

            return imdsv2_session_token_response.data
        else:
            return None

    def _get_metadata_security_credentials(
        self, request, role_name, imdsv2_session_token
    ):
        """Retrieves the AWS security credentials required for signing AWS
        requests from the AWS metadata server.

        Args:
            request (google.auth.transport.Request): A callable used to make
                HTTP requests.
            role_name (str): The AWS role name required by the AWS metadata
                server security_credentials endpoint in order to return the
                credentials.
            imdsv2_session_token (str): The AWS IMDSv2 session token to be added as a
                header in the requests to AWS metadata endpoint.

        Returns:
            Mapping[str, str]: The AWS metadata server security credentials
                response.

        Raises:
            google.auth.exceptions.RefreshError: If an error occurs while
                retrieving the AWS security credentials.
        """
        headers = {"Content-Type": "application/json"}
        if imdsv2_session_token is not None:
            headers["X-aws-ec2-metadata-token"] = imdsv2_session_token

        response = request(
            url="{}/{}".format(self._security_credentials_url, role_name),
            method="GET",
            headers=headers,
        )

        # Support both string and bytes type response.data.
        response_body = self._decode_response_body(response)

        if response.status != http_client.OK:
            raise exceptions.RefreshError(
                "Unable to retrieve AWS security credentials: {}".format(response_body)
            )

        credentials_response = json.loads(response_body)

        return credentials_response

    def _get_metadata_role_name(self, request, imdsv2_session_token):
        """Retrieves the AWS role currently attached to the current AWS
        workload by querying the AWS metadata server. This is needed for the
        AWS metadata server security credentials endpoint in order to retrieve
        the AWS security credentials needed to sign requests to AWS APIs.

        Args:
            request (google.auth.transport.Request): A callable used to make
                HTTP requests.
            imdsv2_session_token (str): The AWS IMDSv2 session token to be added as a
                header in the requests to AWS metadata endpoint.

        Returns:
            str: The AWS role name.

        Raises:
            google.auth.exceptions.RefreshError: If an error occurs while
                retrieving the AWS role name.
        """
        if self._security_credentials_url is None:
            raise exceptions.RefreshError(
                "Unable to determine the AWS metadata server security credentials endpoint"
            )

        headers = None
        if imdsv2_session_token is not None:
            headers = {"X-aws-ec2-metadata-token": imdsv2_session_token}

        response = request(
            url=self._security_credentials_url, method="GET", headers=headers
        )

        # Support both string and bytes type response.data.
        response_body = self._decode_response_body(response)

        if response.status != http_client.OK:
            raise exceptions.RefreshError(
                "Unable to retrieve AWS role name {}".format(response_body)
            )

        return response_body
|
| 605 |
+
|
| 606 |
+
|
| 607 |
+
class Credentials(external_account.Credentials):
|
| 608 |
+
"""AWS external account credentials.
|
| 609 |
+
This is used to exchange serialized AWS signature v4 signed requests to
|
| 610 |
+
AWS STS GetCallerIdentity service for Google access tokens.
|
| 611 |
+
"""
|
| 612 |
+
|
| 613 |
+
    def __init__(
        self,
        audience,
        subject_token_type,
        token_url=external_account._DEFAULT_TOKEN_URL,
        credential_source=None,
        aws_security_credentials_supplier=None,
        *args,
        **kwargs
    ):
        """Instantiates an AWS workload external account credentials object.

        Args:
            audience (str): The STS audience field.
            subject_token_type (str): The subject token type based on the Oauth2.0 token exchange spec.
                Expected values include::

                    "urn:ietf:params:aws:token-type:aws4_request"

            token_url (Optional [str]): The STS endpoint URL. If not provided, will default to "https://sts.googleapis.com/v1/token".
            credential_source (Optional [Mapping]): The credential source dictionary used
                to provide instructions on how to retrieve external credential to be exchanged for Google access tokens.
                Either a credential source or an AWS security credentials supplier must be provided.

                Example credential_source for AWS credential::

                    {
                        "environment_id": "aws1",
                        "regional_cred_verification_url": "https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15",
                        "region_url": "http://169.254.169.254/latest/meta-data/placement/availability-zone",
                        "url": "http://169.254.169.254/latest/meta-data/iam/security-credentials",
                        "imdsv2_session_token_url": "http://169.254.169.254/latest/api/token"
                    }

            aws_security_credentials_supplier (Optional [AwsSecurityCredentialsSupplier]): Optional AWS security credentials supplier.
                This will be called to supply valid AWS security credentials which will then
                be exchanged for Google access tokens. Either an AWS security credentials supplier
                or a credential source must be provided.
            args (List): Optional positional arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
            kwargs (Mapping): Optional keyword arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.

        Raises:
            google.auth.exceptions.RefreshError: If an error is encountered during
                access token retrieval logic.
            ValueError: For invalid parameters.

        .. note:: Typically one of the helper constructors
            :meth:`from_file` or
            :meth:`from_info` are used instead of calling the constructor directly.
        """
        super(Credentials, self).__init__(
            audience=audience,
            subject_token_type=subject_token_type,
            token_url=token_url,
            credential_source=credential_source,
            *args,
            **kwargs
        )
        # Exactly one of credential_source / aws_security_credentials_supplier
        # must be provided: neither is an error, and both is an error.
        if credential_source is None and aws_security_credentials_supplier is None:
            raise exceptions.InvalidValue(
                "A valid credential source or AWS security credentials supplier must be provided."
            )
        if (
            credential_source is not None
            and aws_security_credentials_supplier is not None
        ):
            raise exceptions.InvalidValue(
                "AWS credential cannot have both a credential source and an AWS security credentials supplier."
            )

        if aws_security_credentials_supplier:
            self._aws_security_credentials_supplier = aws_security_credentials_supplier
            # The regional cred verification URL would normally be provided through the credential source. So set it to the default one here.
            self._cred_verification_url = (
                _DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL
            )
        else:
            environment_id = credential_source.get("environment_id") or ""
            self._aws_security_credentials_supplier = _DefaultAwsSecurityCredentialsSupplier(
                credential_source
            )
            self._cred_verification_url = credential_source.get(
                "regional_cred_verification_url"
            )

            # Get the environment ID, i.e. "aws1". Currently, only one version supported (1).
            matches = re.match(r"^(aws)([\d]+)$", environment_id)
            if matches:
                env_id, env_version = matches.groups()
            else:
                env_id, env_version = (None, None)

            # A credential_source must identify itself as "aws<version>" and
            # carry a regional verification URL to be considered valid.
            if env_id != "aws" or self._cred_verification_url is None:
                raise exceptions.InvalidResource(
                    "No valid AWS 'credential_source' provided"
                )
            elif env_version is None or int(env_version) != 1:
                raise exceptions.InvalidValue(
                    "aws version '{}' is not supported in the current build.".format(
                        env_version
                    )
                )

        self._target_resource = audience
        # The RequestSigner is created lazily on first use, once the AWS
        # region has been determined (see retrieve_subject_token).
        self._request_signer = None
|
| 718 |
+
|
| 719 |
+
def retrieve_subject_token(self, request):
|
| 720 |
+
"""Retrieves the subject token using the credential_source object.
|
| 721 |
+
The subject token is a serialized `AWS GetCallerIdentity signed request`_.
|
| 722 |
+
|
| 723 |
+
The logic is summarized as:
|
| 724 |
+
|
| 725 |
+
Retrieve the AWS region from the AWS_REGION or AWS_DEFAULT_REGION
|
| 726 |
+
environment variable or from the AWS metadata server availability-zone
|
| 727 |
+
if not found in the environment variable.
|
| 728 |
+
|
| 729 |
+
Check AWS credentials in environment variables. If not found, retrieve
|
| 730 |
+
from the AWS metadata server security-credentials endpoint.
|
| 731 |
+
|
| 732 |
+
When retrieving AWS credentials from the metadata server
|
| 733 |
+
security-credentials endpoint, the AWS role needs to be determined by
|
| 734 |
+
calling the security-credentials endpoint without any argument. Then the
|
| 735 |
+
credentials can be retrieved via: security-credentials/role_name
|
| 736 |
+
|
| 737 |
+
Generate the signed request to AWS STS GetCallerIdentity action.
|
| 738 |
+
|
| 739 |
+
Inject x-goog-cloud-target-resource into header and serialize the
|
| 740 |
+
signed request. This will be the subject-token to pass to GCP STS.
|
| 741 |
+
|
| 742 |
+
.. _AWS GetCallerIdentity signed request:
|
| 743 |
+
https://cloud.google.com/iam/docs/access-resources-aws#exchange-token
|
| 744 |
+
|
| 745 |
+
Args:
|
| 746 |
+
request (google.auth.transport.Request): A callable used to make
|
| 747 |
+
HTTP requests.
|
| 748 |
+
Returns:
|
| 749 |
+
str: The retrieved subject token.
|
| 750 |
+
"""
|
| 751 |
+
|
| 752 |
+
# Initialize the request signer if not yet initialized after determining
|
| 753 |
+
# the current AWS region.
|
| 754 |
+
if self._request_signer is None:
|
| 755 |
+
self._region = self._aws_security_credentials_supplier.get_aws_region(
|
| 756 |
+
self._supplier_context, request
|
| 757 |
+
)
|
| 758 |
+
self._request_signer = RequestSigner(self._region)
|
| 759 |
+
|
| 760 |
+
# Retrieve the AWS security credentials needed to generate the signed
|
| 761 |
+
# request.
|
| 762 |
+
aws_security_credentials = self._aws_security_credentials_supplier.get_aws_security_credentials(
|
| 763 |
+
self._supplier_context, request
|
| 764 |
+
)
|
| 765 |
+
# Generate the signed request to AWS STS GetCallerIdentity API.
|
| 766 |
+
# Use the required regional endpoint. Otherwise, the request will fail.
|
| 767 |
+
request_options = self._request_signer.get_request_options(
|
| 768 |
+
aws_security_credentials,
|
| 769 |
+
self._cred_verification_url.replace("{region}", self._region),
|
| 770 |
+
"POST",
|
| 771 |
+
)
|
| 772 |
+
# The GCP STS endpoint expects the headers to be formatted as:
|
| 773 |
+
# [
|
| 774 |
+
# {key: 'x-amz-date', value: '...'},
|
| 775 |
+
# {key: 'Authorization', value: '...'},
|
| 776 |
+
# ...
|
| 777 |
+
# ]
|
| 778 |
+
# And then serialized as:
|
| 779 |
+
# quote(json.dumps({
|
| 780 |
+
# url: '...',
|
| 781 |
+
# method: 'POST',
|
| 782 |
+
# headers: [{key: 'x-amz-date', value: '...'}, ...]
|
| 783 |
+
# }))
|
| 784 |
+
request_headers = request_options.get("headers")
|
| 785 |
+
# The full, canonical resource name of the workload identity pool
|
| 786 |
+
# provider, with or without the HTTPS prefix.
|
| 787 |
+
# Including this header as part of the signature is recommended to
|
| 788 |
+
# ensure data integrity.
|
| 789 |
+
request_headers["x-goog-cloud-target-resource"] = self._target_resource
|
| 790 |
+
|
| 791 |
+
# Serialize AWS signed request.
|
| 792 |
+
aws_signed_req = {}
|
| 793 |
+
aws_signed_req["url"] = request_options.get("url")
|
| 794 |
+
aws_signed_req["method"] = request_options.get("method")
|
| 795 |
+
aws_signed_req["headers"] = []
|
| 796 |
+
# Reformat header to GCP STS expected format.
|
| 797 |
+
for key in request_headers.keys():
|
| 798 |
+
aws_signed_req["headers"].append(
|
| 799 |
+
{"key": key, "value": request_headers[key]}
|
| 800 |
+
)
|
| 801 |
+
|
| 802 |
+
return urllib.parse.quote(
|
| 803 |
+
json.dumps(aws_signed_req, separators=(",", ":"), sort_keys=True)
|
| 804 |
+
)
|
| 805 |
+
|
| 806 |
+
def _create_default_metrics_options(self):
|
| 807 |
+
metrics_options = super(Credentials, self)._create_default_metrics_options()
|
| 808 |
+
metrics_options["source"] = "aws"
|
| 809 |
+
if self._has_custom_supplier():
|
| 810 |
+
metrics_options["source"] = "programmatic"
|
| 811 |
+
return metrics_options
|
| 812 |
+
|
| 813 |
+
def _has_custom_supplier(self):
|
| 814 |
+
return self._credential_source is None
|
| 815 |
+
|
| 816 |
+
def _constructor_args(self):
|
| 817 |
+
args = super(Credentials, self)._constructor_args()
|
| 818 |
+
# If a custom supplier was used, append it to the args dict.
|
| 819 |
+
if self._has_custom_supplier():
|
| 820 |
+
args.update(
|
| 821 |
+
{
|
| 822 |
+
"aws_security_credentials_supplier": self._aws_security_credentials_supplier
|
| 823 |
+
}
|
| 824 |
+
)
|
| 825 |
+
return args
|
| 826 |
+
|
| 827 |
+
@classmethod
|
| 828 |
+
def from_info(cls, info, **kwargs):
|
| 829 |
+
"""Creates an AWS Credentials instance from parsed external account info.
|
| 830 |
+
|
| 831 |
+
Args:
|
| 832 |
+
info (Mapping[str, str]): The AWS external account info in Google
|
| 833 |
+
format.
|
| 834 |
+
kwargs: Additional arguments to pass to the constructor.
|
| 835 |
+
|
| 836 |
+
Returns:
|
| 837 |
+
google.auth.aws.Credentials: The constructed credentials.
|
| 838 |
+
|
| 839 |
+
Raises:
|
| 840 |
+
ValueError: For invalid parameters.
|
| 841 |
+
"""
|
| 842 |
+
aws_security_credentials_supplier = info.get(
|
| 843 |
+
"aws_security_credentials_supplier"
|
| 844 |
+
)
|
| 845 |
+
kwargs.update(
|
| 846 |
+
{"aws_security_credentials_supplier": aws_security_credentials_supplier}
|
| 847 |
+
)
|
| 848 |
+
return super(Credentials, cls).from_info(info, **kwargs)
|
| 849 |
+
|
| 850 |
+
@classmethod
|
| 851 |
+
def from_file(cls, filename, **kwargs):
|
| 852 |
+
"""Creates an AWS Credentials instance from an external account json file.
|
| 853 |
+
|
| 854 |
+
Args:
|
| 855 |
+
filename (str): The path to the AWS external account json file.
|
| 856 |
+
kwargs: Additional arguments to pass to the constructor.
|
| 857 |
+
|
| 858 |
+
Returns:
|
| 859 |
+
google.auth.aws.Credentials: The constructed credentials.
|
| 860 |
+
"""
|
| 861 |
+
return super(Credentials, cls).from_file(filename, **kwargs)
|
lib/python3.10/site-packages/google/auth/credentials.py
ADDED
|
@@ -0,0 +1,522 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2016 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
"""Interfaces for credentials."""
|
| 17 |
+
|
| 18 |
+
import abc
|
| 19 |
+
from enum import Enum
|
| 20 |
+
import os
|
| 21 |
+
|
| 22 |
+
from google.auth import _helpers, environment_vars
|
| 23 |
+
from google.auth import exceptions
|
| 24 |
+
from google.auth import metrics
|
| 25 |
+
from google.auth._credentials_base import _BaseCredentials
|
| 26 |
+
from google.auth._refresh_worker import RefreshThreadManager
|
| 27 |
+
|
| 28 |
+
DEFAULT_UNIVERSE_DOMAIN = "googleapis.com"
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class Credentials(_BaseCredentials):
|
| 32 |
+
"""Base class for all credentials.
|
| 33 |
+
|
| 34 |
+
All credentials have a :attr:`token` that is used for authentication and
|
| 35 |
+
may also optionally set an :attr:`expiry` to indicate when the token will
|
| 36 |
+
no longer be valid.
|
| 37 |
+
|
| 38 |
+
Most credentials will be :attr:`invalid` until :meth:`refresh` is called.
|
| 39 |
+
Credentials can do this automatically before the first HTTP request in
|
| 40 |
+
:meth:`before_request`.
|
| 41 |
+
|
| 42 |
+
Although the token and expiration will change as the credentials are
|
| 43 |
+
:meth:`refreshed <refresh>` and used, credentials should be considered
|
| 44 |
+
immutable. Various credentials will accept configuration such as private
|
| 45 |
+
keys, scopes, and other options. These options are not changeable after
|
| 46 |
+
construction. Some classes will provide mechanisms to copy the credentials
|
| 47 |
+
with modifications such as :meth:`ScopedCredentials.with_scopes`.
|
| 48 |
+
"""
|
| 49 |
+
|
| 50 |
+
def __init__(self):
|
| 51 |
+
super(Credentials, self).__init__()
|
| 52 |
+
|
| 53 |
+
self.expiry = None
|
| 54 |
+
"""Optional[datetime]: When the token expires and is no longer valid.
|
| 55 |
+
If this is None, the token is assumed to never expire."""
|
| 56 |
+
self._quota_project_id = None
|
| 57 |
+
"""Optional[str]: Project to use for quota and billing purposes."""
|
| 58 |
+
self._trust_boundary = None
|
| 59 |
+
"""Optional[dict]: Cache of a trust boundary response which has a list
|
| 60 |
+
of allowed regions and an encoded string representation of credentials
|
| 61 |
+
trust boundary."""
|
| 62 |
+
self._universe_domain = DEFAULT_UNIVERSE_DOMAIN
|
| 63 |
+
"""Optional[str]: The universe domain value, default is googleapis.com
|
| 64 |
+
"""
|
| 65 |
+
|
| 66 |
+
self._use_non_blocking_refresh = False
|
| 67 |
+
self._refresh_worker = RefreshThreadManager()
|
| 68 |
+
|
| 69 |
+
@property
|
| 70 |
+
def expired(self):
|
| 71 |
+
"""Checks if the credentials are expired.
|
| 72 |
+
|
| 73 |
+
Note that credentials can be invalid but not expired because
|
| 74 |
+
Credentials with :attr:`expiry` set to None is considered to never
|
| 75 |
+
expire.
|
| 76 |
+
|
| 77 |
+
.. deprecated:: v2.24.0
|
| 78 |
+
Prefer checking :attr:`token_state` instead.
|
| 79 |
+
"""
|
| 80 |
+
if not self.expiry:
|
| 81 |
+
return False
|
| 82 |
+
# Remove some threshold from expiry to err on the side of reporting
|
| 83 |
+
# expiration early so that we avoid the 401-refresh-retry loop.
|
| 84 |
+
skewed_expiry = self.expiry - _helpers.REFRESH_THRESHOLD
|
| 85 |
+
return _helpers.utcnow() >= skewed_expiry
|
| 86 |
+
|
| 87 |
+
@property
|
| 88 |
+
def valid(self):
|
| 89 |
+
"""Checks the validity of the credentials.
|
| 90 |
+
|
| 91 |
+
This is True if the credentials have a :attr:`token` and the token
|
| 92 |
+
is not :attr:`expired`.
|
| 93 |
+
|
| 94 |
+
.. deprecated:: v2.24.0
|
| 95 |
+
Prefer checking :attr:`token_state` instead.
|
| 96 |
+
"""
|
| 97 |
+
return self.token is not None and not self.expired
|
| 98 |
+
|
| 99 |
+
@property
|
| 100 |
+
def token_state(self):
|
| 101 |
+
"""
|
| 102 |
+
See `:obj:`TokenState`
|
| 103 |
+
"""
|
| 104 |
+
if self.token is None:
|
| 105 |
+
return TokenState.INVALID
|
| 106 |
+
|
| 107 |
+
# Credentials that can't expire are always treated as fresh.
|
| 108 |
+
if self.expiry is None:
|
| 109 |
+
return TokenState.FRESH
|
| 110 |
+
|
| 111 |
+
expired = _helpers.utcnow() >= self.expiry
|
| 112 |
+
if expired:
|
| 113 |
+
return TokenState.INVALID
|
| 114 |
+
|
| 115 |
+
is_stale = _helpers.utcnow() >= (self.expiry - _helpers.REFRESH_THRESHOLD)
|
| 116 |
+
if is_stale:
|
| 117 |
+
return TokenState.STALE
|
| 118 |
+
|
| 119 |
+
return TokenState.FRESH
|
| 120 |
+
|
| 121 |
+
@property
|
| 122 |
+
def quota_project_id(self):
|
| 123 |
+
"""Project to use for quota and billing purposes."""
|
| 124 |
+
return self._quota_project_id
|
| 125 |
+
|
| 126 |
+
@property
|
| 127 |
+
def universe_domain(self):
|
| 128 |
+
"""The universe domain value."""
|
| 129 |
+
return self._universe_domain
|
| 130 |
+
|
| 131 |
+
def get_cred_info(self):
|
| 132 |
+
"""The credential information JSON.
|
| 133 |
+
|
| 134 |
+
The credential information will be added to auth related error messages
|
| 135 |
+
by client library.
|
| 136 |
+
|
| 137 |
+
Returns:
|
| 138 |
+
Mapping[str, str]: The credential information JSON.
|
| 139 |
+
"""
|
| 140 |
+
return None
|
| 141 |
+
|
| 142 |
+
@abc.abstractmethod
|
| 143 |
+
def refresh(self, request):
|
| 144 |
+
"""Refreshes the access token.
|
| 145 |
+
|
| 146 |
+
Args:
|
| 147 |
+
request (google.auth.transport.Request): The object used to make
|
| 148 |
+
HTTP requests.
|
| 149 |
+
|
| 150 |
+
Raises:
|
| 151 |
+
google.auth.exceptions.RefreshError: If the credentials could
|
| 152 |
+
not be refreshed.
|
| 153 |
+
"""
|
| 154 |
+
# pylint: disable=missing-raises-doc
|
| 155 |
+
# (pylint doesn't recognize that this is abstract)
|
| 156 |
+
raise NotImplementedError("Refresh must be implemented")
|
| 157 |
+
|
| 158 |
+
def _metric_header_for_usage(self):
|
| 159 |
+
"""The x-goog-api-client header for token usage metric.
|
| 160 |
+
|
| 161 |
+
This header will be added to the API service requests in before_request
|
| 162 |
+
method. For example, "cred-type/sa-jwt" means service account self
|
| 163 |
+
signed jwt access token is used in the API service request
|
| 164 |
+
authorization header. Children credentials classes need to override
|
| 165 |
+
this method to provide the header value, if the token usage metric is
|
| 166 |
+
needed.
|
| 167 |
+
|
| 168 |
+
Returns:
|
| 169 |
+
str: The x-goog-api-client header value.
|
| 170 |
+
"""
|
| 171 |
+
return None
|
| 172 |
+
|
| 173 |
+
def apply(self, headers, token=None):
|
| 174 |
+
"""Apply the token to the authentication header.
|
| 175 |
+
|
| 176 |
+
Args:
|
| 177 |
+
headers (Mapping): The HTTP request headers.
|
| 178 |
+
token (Optional[str]): If specified, overrides the current access
|
| 179 |
+
token.
|
| 180 |
+
"""
|
| 181 |
+
self._apply(headers, token=token)
|
| 182 |
+
"""Trust boundary value will be a cached value from global lookup.
|
| 183 |
+
|
| 184 |
+
The response of trust boundary will be a list of regions and a hex
|
| 185 |
+
encoded representation.
|
| 186 |
+
|
| 187 |
+
An example of global lookup response:
|
| 188 |
+
{
|
| 189 |
+
"locations": [
|
| 190 |
+
"us-central1", "us-east1", "europe-west1", "asia-east1"
|
| 191 |
+
]
|
| 192 |
+
"encoded_locations": "0xA30"
|
| 193 |
+
}
|
| 194 |
+
"""
|
| 195 |
+
if self._trust_boundary is not None:
|
| 196 |
+
headers["x-allowed-locations"] = self._trust_boundary["encoded_locations"]
|
| 197 |
+
if self.quota_project_id:
|
| 198 |
+
headers["x-goog-user-project"] = self.quota_project_id
|
| 199 |
+
|
| 200 |
+
def _blocking_refresh(self, request):
|
| 201 |
+
if not self.valid:
|
| 202 |
+
self.refresh(request)
|
| 203 |
+
|
| 204 |
+
def _non_blocking_refresh(self, request):
|
| 205 |
+
use_blocking_refresh_fallback = False
|
| 206 |
+
|
| 207 |
+
if self.token_state == TokenState.STALE:
|
| 208 |
+
use_blocking_refresh_fallback = not self._refresh_worker.start_refresh(
|
| 209 |
+
self, request
|
| 210 |
+
)
|
| 211 |
+
|
| 212 |
+
if self.token_state == TokenState.INVALID or use_blocking_refresh_fallback:
|
| 213 |
+
self.refresh(request)
|
| 214 |
+
# If the blocking refresh succeeds then we can clear the error info
|
| 215 |
+
# on the background refresh worker, and perform refreshes in a
|
| 216 |
+
# background thread.
|
| 217 |
+
self._refresh_worker.clear_error()
|
| 218 |
+
|
| 219 |
+
def before_request(self, request, method, url, headers):
|
| 220 |
+
"""Performs credential-specific before request logic.
|
| 221 |
+
|
| 222 |
+
Refreshes the credentials if necessary, then calls :meth:`apply` to
|
| 223 |
+
apply the token to the authentication header.
|
| 224 |
+
|
| 225 |
+
Args:
|
| 226 |
+
request (google.auth.transport.Request): The object used to make
|
| 227 |
+
HTTP requests.
|
| 228 |
+
method (str): The request's HTTP method or the RPC method being
|
| 229 |
+
invoked.
|
| 230 |
+
url (str): The request's URI or the RPC service's URI.
|
| 231 |
+
headers (Mapping): The request's headers.
|
| 232 |
+
"""
|
| 233 |
+
# pylint: disable=unused-argument
|
| 234 |
+
# (Subclasses may use these arguments to ascertain information about
|
| 235 |
+
# the http request.)
|
| 236 |
+
if self._use_non_blocking_refresh:
|
| 237 |
+
self._non_blocking_refresh(request)
|
| 238 |
+
else:
|
| 239 |
+
self._blocking_refresh(request)
|
| 240 |
+
|
| 241 |
+
metrics.add_metric_header(headers, self._metric_header_for_usage())
|
| 242 |
+
self.apply(headers)
|
| 243 |
+
|
| 244 |
+
def with_non_blocking_refresh(self):
|
| 245 |
+
self._use_non_blocking_refresh = True
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
class CredentialsWithQuotaProject(Credentials):
|
| 249 |
+
"""Abstract base for credentials supporting ``with_quota_project`` factory"""
|
| 250 |
+
|
| 251 |
+
def with_quota_project(self, quota_project_id):
|
| 252 |
+
"""Returns a copy of these credentials with a modified quota project.
|
| 253 |
+
|
| 254 |
+
Args:
|
| 255 |
+
quota_project_id (str): The project to use for quota and
|
| 256 |
+
billing purposes
|
| 257 |
+
|
| 258 |
+
Returns:
|
| 259 |
+
google.auth.credentials.Credentials: A new credentials instance.
|
| 260 |
+
"""
|
| 261 |
+
raise NotImplementedError("This credential does not support quota project.")
|
| 262 |
+
|
| 263 |
+
def with_quota_project_from_environment(self):
|
| 264 |
+
quota_from_env = os.environ.get(environment_vars.GOOGLE_CLOUD_QUOTA_PROJECT)
|
| 265 |
+
if quota_from_env:
|
| 266 |
+
return self.with_quota_project(quota_from_env)
|
| 267 |
+
return self
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
class CredentialsWithTokenUri(Credentials):
|
| 271 |
+
"""Abstract base for credentials supporting ``with_token_uri`` factory"""
|
| 272 |
+
|
| 273 |
+
def with_token_uri(self, token_uri):
|
| 274 |
+
"""Returns a copy of these credentials with a modified token uri.
|
| 275 |
+
|
| 276 |
+
Args:
|
| 277 |
+
token_uri (str): The uri to use for fetching/exchanging tokens
|
| 278 |
+
|
| 279 |
+
Returns:
|
| 280 |
+
google.auth.credentials.Credentials: A new credentials instance.
|
| 281 |
+
"""
|
| 282 |
+
raise NotImplementedError("This credential does not use token uri.")
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
class CredentialsWithUniverseDomain(Credentials):
|
| 286 |
+
"""Abstract base for credentials supporting ``with_universe_domain`` factory"""
|
| 287 |
+
|
| 288 |
+
def with_universe_domain(self, universe_domain):
|
| 289 |
+
"""Returns a copy of these credentials with a modified universe domain.
|
| 290 |
+
|
| 291 |
+
Args:
|
| 292 |
+
universe_domain (str): The universe domain to use
|
| 293 |
+
|
| 294 |
+
Returns:
|
| 295 |
+
google.auth.credentials.Credentials: A new credentials instance.
|
| 296 |
+
"""
|
| 297 |
+
raise NotImplementedError(
|
| 298 |
+
"This credential does not support with_universe_domain."
|
| 299 |
+
)
|
| 300 |
+
|
| 301 |
+
|
| 302 |
+
class AnonymousCredentials(Credentials):
|
| 303 |
+
"""Credentials that do not provide any authentication information.
|
| 304 |
+
|
| 305 |
+
These are useful in the case of services that support anonymous access or
|
| 306 |
+
local service emulators that do not use credentials.
|
| 307 |
+
"""
|
| 308 |
+
|
| 309 |
+
@property
|
| 310 |
+
def expired(self):
|
| 311 |
+
"""Returns `False`, anonymous credentials never expire."""
|
| 312 |
+
return False
|
| 313 |
+
|
| 314 |
+
@property
|
| 315 |
+
def valid(self):
|
| 316 |
+
"""Returns `True`, anonymous credentials are always valid."""
|
| 317 |
+
return True
|
| 318 |
+
|
| 319 |
+
def refresh(self, request):
|
| 320 |
+
"""Raises :class:``InvalidOperation``, anonymous credentials cannot be
|
| 321 |
+
refreshed."""
|
| 322 |
+
raise exceptions.InvalidOperation("Anonymous credentials cannot be refreshed.")
|
| 323 |
+
|
| 324 |
+
def apply(self, headers, token=None):
|
| 325 |
+
"""Anonymous credentials do nothing to the request.
|
| 326 |
+
|
| 327 |
+
The optional ``token`` argument is not supported.
|
| 328 |
+
|
| 329 |
+
Raises:
|
| 330 |
+
google.auth.exceptions.InvalidValue: If a token was specified.
|
| 331 |
+
"""
|
| 332 |
+
if token is not None:
|
| 333 |
+
raise exceptions.InvalidValue("Anonymous credentials don't support tokens.")
|
| 334 |
+
|
| 335 |
+
def before_request(self, request, method, url, headers):
|
| 336 |
+
"""Anonymous credentials do nothing to the request."""
|
| 337 |
+
|
| 338 |
+
|
| 339 |
+
class ReadOnlyScoped(metaclass=abc.ABCMeta):
|
| 340 |
+
"""Interface for credentials whose scopes can be queried.
|
| 341 |
+
|
| 342 |
+
OAuth 2.0-based credentials allow limiting access using scopes as described
|
| 343 |
+
in `RFC6749 Section 3.3`_.
|
| 344 |
+
If a credential class implements this interface then the credentials either
|
| 345 |
+
use scopes in their implementation.
|
| 346 |
+
|
| 347 |
+
Some credentials require scopes in order to obtain a token. You can check
|
| 348 |
+
if scoping is necessary with :attr:`requires_scopes`::
|
| 349 |
+
|
| 350 |
+
if credentials.requires_scopes:
|
| 351 |
+
# Scoping is required.
|
| 352 |
+
credentials = credentials.with_scopes(scopes=['one', 'two'])
|
| 353 |
+
|
| 354 |
+
Credentials that require scopes must either be constructed with scopes::
|
| 355 |
+
|
| 356 |
+
credentials = SomeScopedCredentials(scopes=['one', 'two'])
|
| 357 |
+
|
| 358 |
+
Or must copy an existing instance using :meth:`with_scopes`::
|
| 359 |
+
|
| 360 |
+
scoped_credentials = credentials.with_scopes(scopes=['one', 'two'])
|
| 361 |
+
|
| 362 |
+
Some credentials have scopes but do not allow or require scopes to be set,
|
| 363 |
+
these credentials can be used as-is.
|
| 364 |
+
|
| 365 |
+
.. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
|
| 366 |
+
"""
|
| 367 |
+
|
| 368 |
+
def __init__(self):
|
| 369 |
+
super(ReadOnlyScoped, self).__init__()
|
| 370 |
+
self._scopes = None
|
| 371 |
+
self._default_scopes = None
|
| 372 |
+
|
| 373 |
+
@property
|
| 374 |
+
def scopes(self):
|
| 375 |
+
"""Sequence[str]: the credentials' current set of scopes."""
|
| 376 |
+
return self._scopes
|
| 377 |
+
|
| 378 |
+
@property
|
| 379 |
+
def default_scopes(self):
|
| 380 |
+
"""Sequence[str]: the credentials' current set of default scopes."""
|
| 381 |
+
return self._default_scopes
|
| 382 |
+
|
| 383 |
+
@abc.abstractproperty
|
| 384 |
+
def requires_scopes(self):
|
| 385 |
+
"""True if these credentials require scopes to obtain an access token.
|
| 386 |
+
"""
|
| 387 |
+
return False
|
| 388 |
+
|
| 389 |
+
def has_scopes(self, scopes):
|
| 390 |
+
"""Checks if the credentials have the given scopes.
|
| 391 |
+
|
| 392 |
+
.. warning: This method is not guaranteed to be accurate if the
|
| 393 |
+
credentials are :attr:`~Credentials.invalid`.
|
| 394 |
+
|
| 395 |
+
Args:
|
| 396 |
+
scopes (Sequence[str]): The list of scopes to check.
|
| 397 |
+
|
| 398 |
+
Returns:
|
| 399 |
+
bool: True if the credentials have the given scopes.
|
| 400 |
+
"""
|
| 401 |
+
credential_scopes = (
|
| 402 |
+
self._scopes if self._scopes is not None else self._default_scopes
|
| 403 |
+
)
|
| 404 |
+
return set(scopes).issubset(set(credential_scopes or []))
|
| 405 |
+
|
| 406 |
+
|
| 407 |
+
class Scoped(ReadOnlyScoped):
|
| 408 |
+
"""Interface for credentials whose scopes can be replaced while copying.
|
| 409 |
+
|
| 410 |
+
OAuth 2.0-based credentials allow limiting access using scopes as described
|
| 411 |
+
in `RFC6749 Section 3.3`_.
|
| 412 |
+
If a credential class implements this interface then the credentials either
|
| 413 |
+
use scopes in their implementation.
|
| 414 |
+
|
| 415 |
+
Some credentials require scopes in order to obtain a token. You can check
|
| 416 |
+
if scoping is necessary with :attr:`requires_scopes`::
|
| 417 |
+
|
| 418 |
+
if credentials.requires_scopes:
|
| 419 |
+
# Scoping is required.
|
| 420 |
+
credentials = credentials.create_scoped(['one', 'two'])
|
| 421 |
+
|
| 422 |
+
Credentials that require scopes must either be constructed with scopes::
|
| 423 |
+
|
| 424 |
+
credentials = SomeScopedCredentials(scopes=['one', 'two'])
|
| 425 |
+
|
| 426 |
+
Or must copy an existing instance using :meth:`with_scopes`::
|
| 427 |
+
|
| 428 |
+
scoped_credentials = credentials.with_scopes(scopes=['one', 'two'])
|
| 429 |
+
|
| 430 |
+
Some credentials have scopes but do not allow or require scopes to be set,
|
| 431 |
+
these credentials can be used as-is.
|
| 432 |
+
|
| 433 |
+
.. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
|
| 434 |
+
"""
|
| 435 |
+
|
| 436 |
+
@abc.abstractmethod
|
| 437 |
+
def with_scopes(self, scopes, default_scopes=None):
|
| 438 |
+
"""Create a copy of these credentials with the specified scopes.
|
| 439 |
+
|
| 440 |
+
Args:
|
| 441 |
+
scopes (Sequence[str]): The list of scopes to attach to the
|
| 442 |
+
current credentials.
|
| 443 |
+
|
| 444 |
+
Raises:
|
| 445 |
+
NotImplementedError: If the credentials' scopes can not be changed.
|
| 446 |
+
This can be avoided by checking :attr:`requires_scopes` before
|
| 447 |
+
calling this method.
|
| 448 |
+
"""
|
| 449 |
+
raise NotImplementedError("This class does not require scoping.")
|
| 450 |
+
|
| 451 |
+
|
| 452 |
+
def with_scopes_if_required(credentials, scopes, default_scopes=None):
|
| 453 |
+
"""Creates a copy of the credentials with scopes if scoping is required.
|
| 454 |
+
|
| 455 |
+
This helper function is useful when you do not know (or care to know) the
|
| 456 |
+
specific type of credentials you are using (such as when you use
|
| 457 |
+
:func:`google.auth.default`). This function will call
|
| 458 |
+
:meth:`Scoped.with_scopes` if the credentials are scoped credentials and if
|
| 459 |
+
the credentials require scoping. Otherwise, it will return the credentials
|
| 460 |
+
as-is.
|
| 461 |
+
|
| 462 |
+
Args:
|
| 463 |
+
credentials (google.auth.credentials.Credentials): The credentials to
|
| 464 |
+
scope if necessary.
|
| 465 |
+
scopes (Sequence[str]): The list of scopes to use.
|
| 466 |
+
default_scopes (Sequence[str]): Default scopes passed by a
|
| 467 |
+
Google client library. Use 'scopes' for user-defined scopes.
|
| 468 |
+
|
| 469 |
+
Returns:
|
| 470 |
+
google.auth.credentials.Credentials: Either a new set of scoped
|
| 471 |
+
credentials, or the passed in credentials instance if no scoping
|
| 472 |
+
was required.
|
| 473 |
+
"""
|
| 474 |
+
if isinstance(credentials, Scoped) and credentials.requires_scopes:
|
| 475 |
+
return credentials.with_scopes(scopes, default_scopes=default_scopes)
|
| 476 |
+
else:
|
| 477 |
+
return credentials
|
| 478 |
+
|
| 479 |
+
|
| 480 |
+
class Signing(metaclass=abc.ABCMeta):
|
| 481 |
+
"""Interface for credentials that can cryptographically sign messages."""
|
| 482 |
+
|
| 483 |
+
@abc.abstractmethod
|
| 484 |
+
def sign_bytes(self, message):
|
| 485 |
+
"""Signs the given message.
|
| 486 |
+
|
| 487 |
+
Args:
|
| 488 |
+
message (bytes): The message to sign.
|
| 489 |
+
|
| 490 |
+
Returns:
|
| 491 |
+
bytes: The message's cryptographic signature.
|
| 492 |
+
"""
|
| 493 |
+
# pylint: disable=missing-raises-doc,redundant-returns-doc
|
| 494 |
+
# (pylint doesn't recognize that this is abstract)
|
| 495 |
+
raise NotImplementedError("Sign bytes must be implemented.")
|
| 496 |
+
|
| 497 |
+
@abc.abstractproperty
|
| 498 |
+
def signer_email(self):
|
| 499 |
+
"""Optional[str]: An email address that identifies the signer."""
|
| 500 |
+
# pylint: disable=missing-raises-doc
|
| 501 |
+
# (pylint doesn't recognize that this is abstract)
|
| 502 |
+
raise NotImplementedError("Signer email must be implemented.")
|
| 503 |
+
|
| 504 |
+
@abc.abstractproperty
|
| 505 |
+
def signer(self):
|
| 506 |
+
"""google.auth.crypt.Signer: The signer used to sign bytes."""
|
| 507 |
+
# pylint: disable=missing-raises-doc
|
| 508 |
+
# (pylint doesn't recognize that this is abstract)
|
| 509 |
+
raise NotImplementedError("Signer must be implemented.")
|
| 510 |
+
|
| 511 |
+
|
| 512 |
+
class TokenState(Enum):
|
| 513 |
+
"""
|
| 514 |
+
Tracks the state of a token.
|
| 515 |
+
FRESH: The token is valid. It is not expired or close to expired, or the token has no expiry.
|
| 516 |
+
STALE: The token is close to expired, and should be refreshed. The token can be used normally.
|
| 517 |
+
INVALID: The token is expired or invalid. The token cannot be used for a normal operation.
|
| 518 |
+
"""
|
| 519 |
+
|
| 520 |
+
FRESH = 1
|
| 521 |
+
STALE = 2
|
| 522 |
+
INVALID = 3
|
lib/python3.10/site-packages/google/auth/environment_vars.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2016 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
"""Environment variables used by :mod:`google.auth`."""
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
PROJECT = "GOOGLE_CLOUD_PROJECT"
|
| 19 |
+
"""Environment variable defining default project.
|
| 20 |
+
|
| 21 |
+
This used by :func:`google.auth.default` to explicitly set a project ID. This
|
| 22 |
+
environment variable is also used by the Google Cloud Python Library.
|
| 23 |
+
"""
|
| 24 |
+
|
| 25 |
+
LEGACY_PROJECT = "GCLOUD_PROJECT"
|
| 26 |
+
"""Previously used environment variable defining the default project.
|
| 27 |
+
|
| 28 |
+
This environment variable is used instead of the current one in some
|
| 29 |
+
situations (such as Google App Engine).
|
| 30 |
+
"""
|
| 31 |
+
|
| 32 |
+
GOOGLE_CLOUD_QUOTA_PROJECT = "GOOGLE_CLOUD_QUOTA_PROJECT"
|
| 33 |
+
"""Environment variable defining the project to be used for
|
| 34 |
+
quota and billing."""
|
| 35 |
+
|
| 36 |
+
CREDENTIALS = "GOOGLE_APPLICATION_CREDENTIALS"
|
| 37 |
+
"""Environment variable defining the location of Google application default
|
| 38 |
+
credentials."""
|
| 39 |
+
|
| 40 |
+
# The environment variable name which can replace ~/.config if set.
|
| 41 |
+
CLOUD_SDK_CONFIG_DIR = "CLOUDSDK_CONFIG"
|
| 42 |
+
"""Environment variable defines the location of Google Cloud SDK's config
|
| 43 |
+
files."""
|
| 44 |
+
|
| 45 |
+
# These two variables allow for customization of the addresses used when
|
| 46 |
+
# contacting the GCE metadata service.
|
| 47 |
+
GCE_METADATA_HOST = "GCE_METADATA_HOST"
|
| 48 |
+
"""Environment variable providing an alternate hostname or host:port to be
|
| 49 |
+
used for GCE metadata requests.
|
| 50 |
+
|
| 51 |
+
This environment variable was originally named GCE_METADATA_ROOT. The system will
|
| 52 |
+
check this environemnt variable first; should there be no value present,
|
| 53 |
+
the system will fall back to the old variable.
|
| 54 |
+
"""
|
| 55 |
+
|
| 56 |
+
GCE_METADATA_ROOT = "GCE_METADATA_ROOT"
|
| 57 |
+
"""Old environment variable for GCE_METADATA_HOST."""
|
| 58 |
+
|
| 59 |
+
GCE_METADATA_IP = "GCE_METADATA_IP"
|
| 60 |
+
"""Environment variable providing an alternate ip:port to be used for ip-only
|
| 61 |
+
GCE metadata requests."""
|
| 62 |
+
|
| 63 |
+
GOOGLE_API_USE_CLIENT_CERTIFICATE = "GOOGLE_API_USE_CLIENT_CERTIFICATE"
|
| 64 |
+
"""Environment variable controlling whether to use client certificate or not.
|
| 65 |
+
|
| 66 |
+
The default value is false. Users have to explicitly set this value to true
|
| 67 |
+
in order to use client certificate to establish a mutual TLS channel."""
|
| 68 |
+
|
| 69 |
+
LEGACY_APPENGINE_RUNTIME = "APPENGINE_RUNTIME"
|
| 70 |
+
"""Gen1 environment variable defining the App Engine Runtime.
|
| 71 |
+
|
| 72 |
+
Used to distinguish between GAE gen1 and GAE gen2+.
|
| 73 |
+
"""
|
| 74 |
+
|
| 75 |
+
# AWS environment variables used with AWS workload identity pools to retrieve
|
| 76 |
+
# AWS security credentials and the AWS region needed to create a serialized
|
| 77 |
+
# signed requests to the AWS STS GetCalledIdentity API that can be exchanged
|
| 78 |
+
# for a Google access tokens via the GCP STS endpoint.
|
| 79 |
+
# When not available the AWS metadata server is used to retrieve these values.
|
| 80 |
+
AWS_ACCESS_KEY_ID = "AWS_ACCESS_KEY_ID"
|
| 81 |
+
AWS_SECRET_ACCESS_KEY = "AWS_SECRET_ACCESS_KEY"
|
| 82 |
+
AWS_SESSION_TOKEN = "AWS_SESSION_TOKEN"
|
| 83 |
+
AWS_REGION = "AWS_REGION"
|
| 84 |
+
AWS_DEFAULT_REGION = "AWS_DEFAULT_REGION"
|
lib/python3.10/site-packages/google/auth/identity_pool.py
ADDED
|
@@ -0,0 +1,439 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
"""Identity Pool Credentials.
|
| 16 |
+
|
| 17 |
+
This module provides credentials to access Google Cloud resources from on-prem
|
| 18 |
+
or non-Google Cloud platforms which support external credentials (e.g. OIDC ID
|
| 19 |
+
tokens) retrieved from local file locations or local servers. This includes
|
| 20 |
+
Microsoft Azure and OIDC identity providers (e.g. K8s workloads registered with
|
| 21 |
+
Hub with Hub workload identity enabled).
|
| 22 |
+
|
| 23 |
+
These credentials are recommended over the use of service account credentials
|
| 24 |
+
in on-prem/non-Google Cloud platforms as they do not involve the management of
|
| 25 |
+
long-lived service account private keys.
|
| 26 |
+
|
| 27 |
+
Identity Pool Credentials are initialized using external_account
|
| 28 |
+
arguments which are typically loaded from an external credentials file or
|
| 29 |
+
an external credentials URL.
|
| 30 |
+
|
| 31 |
+
This module also provides a definition for an abstract subject token supplier.
|
| 32 |
+
This supplier can be implemented to return a valid OIDC or SAML2.0 subject token
|
| 33 |
+
and used to create Identity Pool credentials. The credentials will then call the
|
| 34 |
+
supplier instead of using pre-defined methods such as reading a local file or
|
| 35 |
+
calling a URL.
|
| 36 |
+
"""
|
| 37 |
+
|
| 38 |
+
try:
|
| 39 |
+
from collections.abc import Mapping
|
| 40 |
+
# Python 2.7 compatibility
|
| 41 |
+
except ImportError: # pragma: NO COVER
|
| 42 |
+
from collections import Mapping # type: ignore
|
| 43 |
+
import abc
|
| 44 |
+
import json
|
| 45 |
+
import os
|
| 46 |
+
from typing import NamedTuple
|
| 47 |
+
|
| 48 |
+
from google.auth import _helpers
|
| 49 |
+
from google.auth import exceptions
|
| 50 |
+
from google.auth import external_account
|
| 51 |
+
from google.auth.transport import _mtls_helper
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class SubjectTokenSupplier(metaclass=abc.ABCMeta):
|
| 55 |
+
"""Base class for subject token suppliers. This can be implemented with custom logic to retrieve
|
| 56 |
+
a subject token to exchange for a Google Cloud access token when using Workload or
|
| 57 |
+
Workforce Identity Federation. The identity pool credential does not cache the subject token,
|
| 58 |
+
so caching logic should be added in the implementation.
|
| 59 |
+
"""
|
| 60 |
+
|
| 61 |
+
@abc.abstractmethod
|
| 62 |
+
def get_subject_token(self, context, request):
|
| 63 |
+
"""Returns the requested subject token. The subject token must be valid.
|
| 64 |
+
|
| 65 |
+
.. warning: This is not cached by the calling Google credential, so caching logic should be implemented in the supplier.
|
| 66 |
+
|
| 67 |
+
Args:
|
| 68 |
+
context (google.auth.external_account.SupplierContext): The context object
|
| 69 |
+
containing information about the requested audience and subject token type.
|
| 70 |
+
request (google.auth.transport.Request): The object used to make
|
| 71 |
+
HTTP requests.
|
| 72 |
+
|
| 73 |
+
Raises:
|
| 74 |
+
google.auth.exceptions.RefreshError: If an error is encountered during
|
| 75 |
+
subject token retrieval logic.
|
| 76 |
+
|
| 77 |
+
Returns:
|
| 78 |
+
str: The requested subject token string.
|
| 79 |
+
"""
|
| 80 |
+
raise NotImplementedError("")
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
class _TokenContent(NamedTuple):
|
| 84 |
+
"""Models the token content response from file and url internal suppliers.
|
| 85 |
+
Attributes:
|
| 86 |
+
content (str): The string content of the file or URL response.
|
| 87 |
+
location (str): The location the content was retrieved from. This will either be a file location or a URL.
|
| 88 |
+
"""
|
| 89 |
+
|
| 90 |
+
content: str
|
| 91 |
+
location: str
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
class _FileSupplier(SubjectTokenSupplier):
|
| 95 |
+
""" Internal implementation of subject token supplier which supports reading a subject token from a file."""
|
| 96 |
+
|
| 97 |
+
def __init__(self, path, format_type, subject_token_field_name):
|
| 98 |
+
self._path = path
|
| 99 |
+
self._format_type = format_type
|
| 100 |
+
self._subject_token_field_name = subject_token_field_name
|
| 101 |
+
|
| 102 |
+
@_helpers.copy_docstring(SubjectTokenSupplier)
|
| 103 |
+
def get_subject_token(self, context, request):
|
| 104 |
+
if not os.path.exists(self._path):
|
| 105 |
+
raise exceptions.RefreshError("File '{}' was not found.".format(self._path))
|
| 106 |
+
|
| 107 |
+
with open(self._path, "r", encoding="utf-8") as file_obj:
|
| 108 |
+
token_content = _TokenContent(file_obj.read(), self._path)
|
| 109 |
+
|
| 110 |
+
return _parse_token_data(
|
| 111 |
+
token_content, self._format_type, self._subject_token_field_name
|
| 112 |
+
)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class _UrlSupplier(SubjectTokenSupplier):
|
| 116 |
+
""" Internal implementation of subject token supplier which supports retrieving a subject token by calling a URL endpoint."""
|
| 117 |
+
|
| 118 |
+
def __init__(self, url, format_type, subject_token_field_name, headers):
|
| 119 |
+
self._url = url
|
| 120 |
+
self._format_type = format_type
|
| 121 |
+
self._subject_token_field_name = subject_token_field_name
|
| 122 |
+
self._headers = headers
|
| 123 |
+
|
| 124 |
+
@_helpers.copy_docstring(SubjectTokenSupplier)
|
| 125 |
+
def get_subject_token(self, context, request):
|
| 126 |
+
response = request(url=self._url, method="GET", headers=self._headers)
|
| 127 |
+
|
| 128 |
+
# support both string and bytes type response.data
|
| 129 |
+
response_body = (
|
| 130 |
+
response.data.decode("utf-8")
|
| 131 |
+
if hasattr(response.data, "decode")
|
| 132 |
+
else response.data
|
| 133 |
+
)
|
| 134 |
+
|
| 135 |
+
if response.status != 200:
|
| 136 |
+
raise exceptions.RefreshError(
|
| 137 |
+
"Unable to retrieve Identity Pool subject token", response_body
|
| 138 |
+
)
|
| 139 |
+
token_content = _TokenContent(response_body, self._url)
|
| 140 |
+
return _parse_token_data(
|
| 141 |
+
token_content, self._format_type, self._subject_token_field_name
|
| 142 |
+
)
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
class _X509Supplier(SubjectTokenSupplier):
|
| 146 |
+
"""Internal supplier for X509 workload credentials. This class is used internally and always returns an empty string as the subject token."""
|
| 147 |
+
|
| 148 |
+
@_helpers.copy_docstring(SubjectTokenSupplier)
|
| 149 |
+
def get_subject_token(self, context, request):
|
| 150 |
+
return ""
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
def _parse_token_data(token_content, format_type="text", subject_token_field_name=None):
|
| 154 |
+
if format_type == "text":
|
| 155 |
+
token = token_content.content
|
| 156 |
+
else:
|
| 157 |
+
try:
|
| 158 |
+
# Parse file content as JSON.
|
| 159 |
+
response_data = json.loads(token_content.content)
|
| 160 |
+
# Get the subject_token.
|
| 161 |
+
token = response_data[subject_token_field_name]
|
| 162 |
+
except (KeyError, ValueError):
|
| 163 |
+
raise exceptions.RefreshError(
|
| 164 |
+
"Unable to parse subject_token from JSON file '{}' using key '{}'".format(
|
| 165 |
+
token_content.location, subject_token_field_name
|
| 166 |
+
)
|
| 167 |
+
)
|
| 168 |
+
if not token:
|
| 169 |
+
raise exceptions.RefreshError(
|
| 170 |
+
"Missing subject_token in the credential_source file"
|
| 171 |
+
)
|
| 172 |
+
return token
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
class Credentials(external_account.Credentials):
|
| 176 |
+
"""External account credentials sourced from files and URLs."""
|
| 177 |
+
|
| 178 |
+
def __init__(
|
| 179 |
+
self,
|
| 180 |
+
audience,
|
| 181 |
+
subject_token_type,
|
| 182 |
+
token_url=external_account._DEFAULT_TOKEN_URL,
|
| 183 |
+
credential_source=None,
|
| 184 |
+
subject_token_supplier=None,
|
| 185 |
+
*args,
|
| 186 |
+
**kwargs
|
| 187 |
+
):
|
| 188 |
+
"""Instantiates an external account credentials object from a file/URL.
|
| 189 |
+
|
| 190 |
+
Args:
|
| 191 |
+
audience (str): The STS audience field.
|
| 192 |
+
subject_token_type (str): The subject token type based on the Oauth2.0 token exchange spec.
|
| 193 |
+
Expected values include::
|
| 194 |
+
|
| 195 |
+
“urn:ietf:params:oauth:token-type:jwt”
|
| 196 |
+
“urn:ietf:params:oauth:token-type:id-token”
|
| 197 |
+
“urn:ietf:params:oauth:token-type:saml2”
|
| 198 |
+
|
| 199 |
+
token_url (Optional [str]): The STS endpoint URL. If not provided, will default to "https://sts.googleapis.com/v1/token".
|
| 200 |
+
credential_source (Optional [Mapping]): The credential source dictionary used to
|
| 201 |
+
provide instructions on how to retrieve external credential to be
|
| 202 |
+
exchanged for Google access tokens. Either a credential source or
|
| 203 |
+
a subject token supplier must be provided.
|
| 204 |
+
|
| 205 |
+
Example credential_source for url-sourced credential::
|
| 206 |
+
|
| 207 |
+
{
|
| 208 |
+
"url": "http://www.example.com",
|
| 209 |
+
"format": {
|
| 210 |
+
"type": "json",
|
| 211 |
+
"subject_token_field_name": "access_token",
|
| 212 |
+
},
|
| 213 |
+
"headers": {"foo": "bar"},
|
| 214 |
+
}
|
| 215 |
+
|
| 216 |
+
Example credential_source for file-sourced credential::
|
| 217 |
+
|
| 218 |
+
{
|
| 219 |
+
"file": "/path/to/token/file.txt"
|
| 220 |
+
}
|
| 221 |
+
subject_token_supplier (Optional [SubjectTokenSupplier]): Optional subject token supplier.
|
| 222 |
+
This will be called to supply a valid subject token which will then
|
| 223 |
+
be exchanged for Google access tokens. Either a subject token supplier
|
| 224 |
+
or a credential source must be provided.
|
| 225 |
+
args (List): Optional positional arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
|
| 226 |
+
kwargs (Mapping): Optional keyword arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
|
| 227 |
+
|
| 228 |
+
Raises:
|
| 229 |
+
google.auth.exceptions.RefreshError: If an error is encountered during
|
| 230 |
+
access token retrieval logic.
|
| 231 |
+
ValueError: For invalid parameters.
|
| 232 |
+
|
| 233 |
+
.. note:: Typically one of the helper constructors
|
| 234 |
+
:meth:`from_file` or
|
| 235 |
+
:meth:`from_info` are used instead of calling the constructor directly.
|
| 236 |
+
"""
|
| 237 |
+
|
| 238 |
+
super(Credentials, self).__init__(
|
| 239 |
+
audience=audience,
|
| 240 |
+
subject_token_type=subject_token_type,
|
| 241 |
+
token_url=token_url,
|
| 242 |
+
credential_source=credential_source,
|
| 243 |
+
*args,
|
| 244 |
+
**kwargs
|
| 245 |
+
)
|
| 246 |
+
if credential_source is None and subject_token_supplier is None:
|
| 247 |
+
raise exceptions.InvalidValue(
|
| 248 |
+
"A valid credential source or a subject token supplier must be provided."
|
| 249 |
+
)
|
| 250 |
+
if credential_source is not None and subject_token_supplier is not None:
|
| 251 |
+
raise exceptions.InvalidValue(
|
| 252 |
+
"Identity pool credential cannot have both a credential source and a subject token supplier."
|
| 253 |
+
)
|
| 254 |
+
|
| 255 |
+
if subject_token_supplier is not None:
|
| 256 |
+
self._subject_token_supplier = subject_token_supplier
|
| 257 |
+
self._credential_source_file = None
|
| 258 |
+
self._credential_source_url = None
|
| 259 |
+
self._credential_source_certificate = None
|
| 260 |
+
else:
|
| 261 |
+
if not isinstance(credential_source, Mapping):
|
| 262 |
+
self._credential_source_executable = None
|
| 263 |
+
raise exceptions.MalformedError(
|
| 264 |
+
"Invalid credential_source. The credential_source is not a dict."
|
| 265 |
+
)
|
| 266 |
+
self._credential_source_file = credential_source.get("file")
|
| 267 |
+
self._credential_source_url = credential_source.get("url")
|
| 268 |
+
self._credential_source_certificate = credential_source.get("certificate")
|
| 269 |
+
|
| 270 |
+
# environment_id is only supported in AWS or dedicated future external
|
| 271 |
+
# account credentials.
|
| 272 |
+
if "environment_id" in credential_source:
|
| 273 |
+
raise exceptions.MalformedError(
|
| 274 |
+
"Invalid Identity Pool credential_source field 'environment_id'"
|
| 275 |
+
)
|
| 276 |
+
|
| 277 |
+
# check that only one of file, url, or certificate are provided.
|
| 278 |
+
self._validate_single_source()
|
| 279 |
+
|
| 280 |
+
if self._credential_source_certificate:
|
| 281 |
+
self._validate_certificate_config()
|
| 282 |
+
else:
|
| 283 |
+
self._validate_file_or_url_config(credential_source)
|
| 284 |
+
|
| 285 |
+
if self._credential_source_file:
|
| 286 |
+
self._subject_token_supplier = _FileSupplier(
|
| 287 |
+
self._credential_source_file,
|
| 288 |
+
self._credential_source_format_type,
|
| 289 |
+
self._credential_source_field_name,
|
| 290 |
+
)
|
| 291 |
+
elif self._credential_source_url:
|
| 292 |
+
self._subject_token_supplier = _UrlSupplier(
|
| 293 |
+
self._credential_source_url,
|
| 294 |
+
self._credential_source_format_type,
|
| 295 |
+
self._credential_source_field_name,
|
| 296 |
+
self._credential_source_headers,
|
| 297 |
+
)
|
| 298 |
+
else: # self._credential_source_certificate
|
| 299 |
+
self._subject_token_supplier = _X509Supplier()
|
| 300 |
+
|
| 301 |
+
@_helpers.copy_docstring(external_account.Credentials)
|
| 302 |
+
def retrieve_subject_token(self, request):
|
| 303 |
+
return self._subject_token_supplier.get_subject_token(
|
| 304 |
+
self._supplier_context, request
|
| 305 |
+
)
|
| 306 |
+
|
| 307 |
+
def _get_mtls_cert_and_key_paths(self):
|
| 308 |
+
if self._credential_source_certificate is None:
|
| 309 |
+
raise exceptions.RefreshError(
|
| 310 |
+
'The credential is not configured to use mtls requests. The credential should include a "certificate" section in the credential source.'
|
| 311 |
+
)
|
| 312 |
+
else:
|
| 313 |
+
return _mtls_helper._get_workload_cert_and_key_paths(
|
| 314 |
+
self._certificate_config_location
|
| 315 |
+
)
|
| 316 |
+
|
| 317 |
+
def _mtls_required(self):
|
| 318 |
+
return self._credential_source_certificate is not None
|
| 319 |
+
|
| 320 |
+
def _create_default_metrics_options(self):
|
| 321 |
+
metrics_options = super(Credentials, self)._create_default_metrics_options()
|
| 322 |
+
# Check that credential source is a dict before checking for credential type. This check needs to be done
|
| 323 |
+
# here because the external_account credential constructor needs to pass the metrics options to the
|
| 324 |
+
# impersonated credential object before the identity_pool credentials are validated.
|
| 325 |
+
if isinstance(self._credential_source, Mapping):
|
| 326 |
+
if self._credential_source.get("file"):
|
| 327 |
+
metrics_options["source"] = "file"
|
| 328 |
+
elif self._credential_source.get("url"):
|
| 329 |
+
metrics_options["source"] = "url"
|
| 330 |
+
else:
|
| 331 |
+
metrics_options["source"] = "x509"
|
| 332 |
+
else:
|
| 333 |
+
metrics_options["source"] = "programmatic"
|
| 334 |
+
return metrics_options
|
| 335 |
+
|
| 336 |
+
def _has_custom_supplier(self):
|
| 337 |
+
return self._credential_source is None
|
| 338 |
+
|
| 339 |
+
def _constructor_args(self):
|
| 340 |
+
args = super(Credentials, self)._constructor_args()
|
| 341 |
+
# If a custom supplier was used, append it to the args dict.
|
| 342 |
+
if self._has_custom_supplier():
|
| 343 |
+
args.update({"subject_token_supplier": self._subject_token_supplier})
|
| 344 |
+
return args
|
| 345 |
+
|
| 346 |
+
def _validate_certificate_config(self):
|
| 347 |
+
self._certificate_config_location = self._credential_source_certificate.get(
|
| 348 |
+
"certificate_config_location"
|
| 349 |
+
)
|
| 350 |
+
use_default = self._credential_source_certificate.get(
|
| 351 |
+
"use_default_certificate_config"
|
| 352 |
+
)
|
| 353 |
+
if self._certificate_config_location and use_default:
|
| 354 |
+
raise exceptions.MalformedError(
|
| 355 |
+
"Invalid certificate configuration, certificate_config_location cannot be specified when use_default_certificate_config = true."
|
| 356 |
+
)
|
| 357 |
+
if not self._certificate_config_location and not use_default:
|
| 358 |
+
raise exceptions.MalformedError(
|
| 359 |
+
"Invalid certificate configuration, use_default_certificate_config should be true if no certificate_config_location is provided."
|
| 360 |
+
)
|
| 361 |
+
|
| 362 |
+
def _validate_file_or_url_config(self, credential_source):
|
| 363 |
+
self._credential_source_headers = credential_source.get("headers")
|
| 364 |
+
credential_source_format = credential_source.get("format", {})
|
| 365 |
+
# Get credential_source format type. When not provided, this
|
| 366 |
+
# defaults to text.
|
| 367 |
+
self._credential_source_format_type = (
|
| 368 |
+
credential_source_format.get("type") or "text"
|
| 369 |
+
)
|
| 370 |
+
if self._credential_source_format_type not in ["text", "json"]:
|
| 371 |
+
raise exceptions.MalformedError(
|
| 372 |
+
"Invalid credential_source format '{}'".format(
|
| 373 |
+
self._credential_source_format_type
|
| 374 |
+
)
|
| 375 |
+
)
|
| 376 |
+
# For JSON types, get the required subject_token field name.
|
| 377 |
+
if self._credential_source_format_type == "json":
|
| 378 |
+
self._credential_source_field_name = credential_source_format.get(
|
| 379 |
+
"subject_token_field_name"
|
| 380 |
+
)
|
| 381 |
+
if self._credential_source_field_name is None:
|
| 382 |
+
raise exceptions.MalformedError(
|
| 383 |
+
"Missing subject_token_field_name for JSON credential_source format"
|
| 384 |
+
)
|
| 385 |
+
else:
|
| 386 |
+
self._credential_source_field_name = None
|
| 387 |
+
|
| 388 |
+
def _validate_single_source(self):
|
| 389 |
+
credential_sources = [
|
| 390 |
+
self._credential_source_file,
|
| 391 |
+
self._credential_source_url,
|
| 392 |
+
self._credential_source_certificate,
|
| 393 |
+
]
|
| 394 |
+
valid_credential_sources = list(
|
| 395 |
+
filter(lambda source: source is not None, credential_sources)
|
| 396 |
+
)
|
| 397 |
+
|
| 398 |
+
if len(valid_credential_sources) > 1:
|
| 399 |
+
raise exceptions.MalformedError(
|
| 400 |
+
"Ambiguous credential_source. 'file', 'url', and 'certificate' are mutually exclusive.."
|
| 401 |
+
)
|
| 402 |
+
if len(valid_credential_sources) != 1:
|
| 403 |
+
raise exceptions.MalformedError(
|
| 404 |
+
"Missing credential_source. A 'file', 'url', or 'certificate' must be provided."
|
| 405 |
+
)
|
| 406 |
+
|
| 407 |
+
@classmethod
|
| 408 |
+
def from_info(cls, info, **kwargs):
|
| 409 |
+
"""Creates an Identity Pool Credentials instance from parsed external account info.
|
| 410 |
+
|
| 411 |
+
Args:
|
| 412 |
+
info (Mapping[str, str]): The Identity Pool external account info in Google
|
| 413 |
+
format.
|
| 414 |
+
kwargs: Additional arguments to pass to the constructor.
|
| 415 |
+
|
| 416 |
+
Returns:
|
| 417 |
+
google.auth.identity_pool.Credentials: The constructed
|
| 418 |
+
credentials.
|
| 419 |
+
|
| 420 |
+
Raises:
|
| 421 |
+
ValueError: For invalid parameters.
|
| 422 |
+
"""
|
| 423 |
+
subject_token_supplier = info.get("subject_token_supplier")
|
| 424 |
+
kwargs.update({"subject_token_supplier": subject_token_supplier})
|
| 425 |
+
return super(Credentials, cls).from_info(info, **kwargs)
|
| 426 |
+
|
| 427 |
+
@classmethod
|
| 428 |
+
def from_file(cls, filename, **kwargs):
|
| 429 |
+
"""Creates an IdentityPool Credentials instance from an external account json file.
|
| 430 |
+
|
| 431 |
+
Args:
|
| 432 |
+
filename (str): The path to the IdentityPool external account json file.
|
| 433 |
+
kwargs: Additional arguments to pass to the constructor.
|
| 434 |
+
|
| 435 |
+
Returns:
|
| 436 |
+
google.auth.identity_pool.Credentials: The constructed
|
| 437 |
+
credentials.
|
| 438 |
+
"""
|
| 439 |
+
return super(Credentials, cls).from_file(filename, **kwargs)
|
lib/python3.10/site-packages/google/auth/jwt.py
ADDED
|
@@ -0,0 +1,878 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2016 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
"""JSON Web Tokens
|
| 16 |
+
|
| 17 |
+
Provides support for creating (encoding) and verifying (decoding) JWTs,
|
| 18 |
+
especially JWTs generated and consumed by Google infrastructure.
|
| 19 |
+
|
| 20 |
+
See `rfc7519`_ for more details on JWTs.
|
| 21 |
+
|
| 22 |
+
To encode a JWT use :func:`encode`::
|
| 23 |
+
|
| 24 |
+
from google.auth import crypt
|
| 25 |
+
from google.auth import jwt
|
| 26 |
+
|
| 27 |
+
signer = crypt.Signer(private_key)
|
| 28 |
+
payload = {'some': 'payload'}
|
| 29 |
+
encoded = jwt.encode(signer, payload)
|
| 30 |
+
|
| 31 |
+
To decode a JWT and verify claims use :func:`decode`::
|
| 32 |
+
|
| 33 |
+
claims = jwt.decode(encoded, certs=public_certs)
|
| 34 |
+
|
| 35 |
+
You can also skip verification::
|
| 36 |
+
|
| 37 |
+
claims = jwt.decode(encoded, verify=False)
|
| 38 |
+
|
| 39 |
+
.. _rfc7519: https://tools.ietf.org/html/rfc7519
|
| 40 |
+
|
| 41 |
+
"""
|
| 42 |
+
|
| 43 |
+
try:
|
| 44 |
+
from collections.abc import Mapping
|
| 45 |
+
# Python 2.7 compatibility
|
| 46 |
+
except ImportError: # pragma: NO COVER
|
| 47 |
+
from collections import Mapping # type: ignore
|
| 48 |
+
import copy
|
| 49 |
+
import datetime
|
| 50 |
+
import json
|
| 51 |
+
import urllib
|
| 52 |
+
|
| 53 |
+
import cachetools
|
| 54 |
+
|
| 55 |
+
from google.auth import _helpers
|
| 56 |
+
from google.auth import _service_account_info
|
| 57 |
+
from google.auth import crypt
|
| 58 |
+
from google.auth import exceptions
|
| 59 |
+
import google.auth.credentials
|
| 60 |
+
|
| 61 |
+
try:
|
| 62 |
+
from google.auth.crypt import es256
|
| 63 |
+
except ImportError: # pragma: NO COVER
|
| 64 |
+
es256 = None # type: ignore
|
| 65 |
+
|
| 66 |
+
_DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
|
| 67 |
+
_DEFAULT_MAX_CACHE_SIZE = 10
|
| 68 |
+
_ALGORITHM_TO_VERIFIER_CLASS = {"RS256": crypt.RSAVerifier}
|
| 69 |
+
_CRYPTOGRAPHY_BASED_ALGORITHMS = frozenset(["ES256"])
|
| 70 |
+
|
| 71 |
+
if es256 is not None: # pragma: NO COVER
|
| 72 |
+
_ALGORITHM_TO_VERIFIER_CLASS["ES256"] = es256.ES256Verifier # type: ignore
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def encode(signer, payload, header=None, key_id=None):
|
| 76 |
+
"""Make a signed JWT.
|
| 77 |
+
|
| 78 |
+
Args:
|
| 79 |
+
signer (google.auth.crypt.Signer): The signer used to sign the JWT.
|
| 80 |
+
payload (Mapping[str, str]): The JWT payload.
|
| 81 |
+
header (Mapping[str, str]): Additional JWT header payload.
|
| 82 |
+
key_id (str): The key id to add to the JWT header. If the
|
| 83 |
+
signer has a key id it will be used as the default. If this is
|
| 84 |
+
specified it will override the signer's key id.
|
| 85 |
+
|
| 86 |
+
Returns:
|
| 87 |
+
bytes: The encoded JWT.
|
| 88 |
+
"""
|
| 89 |
+
if header is None:
|
| 90 |
+
header = {}
|
| 91 |
+
|
| 92 |
+
if key_id is None:
|
| 93 |
+
key_id = signer.key_id
|
| 94 |
+
|
| 95 |
+
header.update({"typ": "JWT"})
|
| 96 |
+
|
| 97 |
+
if "alg" not in header:
|
| 98 |
+
if es256 is not None and isinstance(signer, es256.ES256Signer):
|
| 99 |
+
header.update({"alg": "ES256"})
|
| 100 |
+
else:
|
| 101 |
+
header.update({"alg": "RS256"})
|
| 102 |
+
|
| 103 |
+
if key_id is not None:
|
| 104 |
+
header["kid"] = key_id
|
| 105 |
+
|
| 106 |
+
segments = [
|
| 107 |
+
_helpers.unpadded_urlsafe_b64encode(json.dumps(header).encode("utf-8")),
|
| 108 |
+
_helpers.unpadded_urlsafe_b64encode(json.dumps(payload).encode("utf-8")),
|
| 109 |
+
]
|
| 110 |
+
|
| 111 |
+
signing_input = b".".join(segments)
|
| 112 |
+
signature = signer.sign(signing_input)
|
| 113 |
+
segments.append(_helpers.unpadded_urlsafe_b64encode(signature))
|
| 114 |
+
|
| 115 |
+
return b".".join(segments)
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def _decode_jwt_segment(encoded_section):
    """Decodes a single JWT segment."""
    raw_bytes = _helpers.padded_urlsafe_b64decode(encoded_section)
    try:
        return json.loads(raw_bytes.decode("utf-8"))
    except ValueError as caught_exc:
        # Chain the JSON/decoding failure onto a library-specific error.
        raise exceptions.MalformedError(
            "Can't parse segment: {0}".format(raw_bytes)
        ) from caught_exc
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def _unverified_decode(token):
    """Decodes a token and does no verification.

    Args:
        token (Union[str, bytes]): The encoded JWT.

    Returns:
        Tuple[Mapping, Mapping, str, str]: header, payload, signed_section, and
            signature.

    Raises:
        google.auth.exceptions.MalformedError: if there are an incorrect amount of segments in the token or segments of the wrong type.
    """
    token = _helpers.to_bytes(token)

    # A compact JWT is exactly three dot-separated base64url segments.
    pieces = token.split(b".")
    if len(pieces) != 3:
        raise exceptions.MalformedError(
            "Wrong number of segments in token: {0}".format(token)
        )

    encoded_header, encoded_payload, signature = pieces
    # The signed portion is everything before the final dot.
    signed_section = encoded_header + b"." + encoded_payload
    signature = _helpers.padded_urlsafe_b64decode(signature)

    # Decode the JSON segments without verifying anything.
    header = _decode_jwt_segment(encoded_header)
    payload = _decode_jwt_segment(encoded_payload)

    if not isinstance(header, Mapping):
        raise exceptions.MalformedError(
            "Header segment should be a JSON object: {0}".format(encoded_header)
        )

    if not isinstance(payload, Mapping):
        raise exceptions.MalformedError(
            "Payload segment should be a JSON object: {0}".format(encoded_payload)
        )

    return header, payload, signed_section, signature
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
def decode_header(token):
    """Return the decoded header of a token.

    No verification is done. This is useful to extract the key id from
    the header in order to acquire the appropriate certificate to verify
    the token.

    Args:
        token (Union[str, bytes]): the encoded JWT.

    Returns:
        Mapping: The decoded JWT header.
    """
    # Only the first element (the header) of the unverified decode is needed.
    return _unverified_decode(token)[0]
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
def _verify_iat_and_exp(payload, clock_skew_in_seconds=0):
    """Verifies the ``iat`` (Issued At) and ``exp`` (Expires) claims in a token
    payload.

    Args:
        payload (Mapping[str, str]): The JWT payload.
        clock_skew_in_seconds (int): The clock skew used for `iat` and `exp`
            validation.

    Raises:
        google.auth.exceptions.InvalidValue: if value validation failed.
        google.auth.exceptions.MalformedError: if schema validation failed.
    """
    now = _helpers.datetime_to_secs(_helpers.utcnow())

    # Both claims must be present before any value checks run.
    for claim in ("iat", "exp"):
        if claim not in payload:
            raise exceptions.MalformedError(
                "Token does not contain required claim {}".format(claim)
            )

    # Reject tokens issued in the future, allowing a skew window so a
    # slightly-fast issuer clock does not cause spurious failures.
    iat = payload["iat"]
    earliest = iat - clock_skew_in_seconds
    if now < earliest:
        raise exceptions.InvalidValue(
            "Token used too early, {} < {}. Check that your computer's clock is set correctly.".format(
                now, iat
            )
        )

    # Reject expired tokens, again allowing the skew window so a
    # slightly-slow verifier clock does not reject a fresh token.
    exp = payload["exp"]
    latest = exp + clock_skew_in_seconds
    if latest < now:
        raise exceptions.InvalidValue("Token expired, {} < {}".format(latest, now))
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
def decode(token, certs=None, verify=True, audience=None, clock_skew_in_seconds=0):
    """Decode and verify a JWT.

    Args:
        token (str): The encoded JWT.
        certs (Union[str, bytes, Mapping[str, Union[str, bytes]]]): The
            certificate used to validate the JWT signature. If bytes or string,
            it must the the public key certificate in PEM format. If a mapping,
            it must be a mapping of key IDs to public key certificates in PEM
            format. The mapping must contain the same key ID that's specified
            in the token's header.
        verify (bool): Whether to perform signature and claim validation.
            Verification is done by default.
        audience (str or list): The audience claim, 'aud', that this JWT should
            contain. Or a list of audience claims. If None then the JWT's 'aud'
            parameter is not verified.
        clock_skew_in_seconds (int): The clock skew used for `iat` and `exp`
            validation.

    Returns:
        Mapping[str, str]: The deserialized JSON payload in the JWT.

    Raises:
        google.auth.exceptions.InvalidValue: if value validation failed.
        google.auth.exceptions.MalformedError: if schema validation failed.
    """
    header, payload, signed_section, signature = _unverified_decode(token)

    # With verification disabled, return the payload without any signature,
    # time, or audience checks.
    if not verify:
        return payload

    # Pluck the key id and algorithm from the header and make sure we have
    # a verifier that can support it.
    key_alg = header.get("alg")
    key_id = header.get("kid")

    try:
        verifier_cls = _ALGORITHM_TO_VERIFIER_CLASS[key_alg]
    except KeyError as exc:
        # Distinguish "algorithm exists but needs the optional cryptography
        # package" from "algorithm is not supported at all".
        if key_alg in _CRYPTOGRAPHY_BASED_ALGORITHMS:
            raise exceptions.InvalidValue(
                "The key algorithm {} requires the cryptography package to be installed.".format(
                    key_alg
                )
            ) from exc
        else:
            raise exceptions.InvalidValue(
                "Unsupported signature algorithm {}".format(key_alg)
            ) from exc
    # If certs is specified as a dictionary of key IDs to certificates, then
    # use the certificate identified by the key ID in the token header.
    if isinstance(certs, Mapping):
        if key_id:
            if key_id not in certs:
                raise exceptions.MalformedError(
                    "Certificate for key id {} not found.".format(key_id)
                )
            certs_to_check = [certs[key_id]]
        # If there's no key id in the header, check against all of the certs.
        else:
            certs_to_check = certs.values()
    else:
        certs_to_check = certs

    # Verify that the signature matches the message.
    if not crypt.verify_signature(
        signed_section, signature, certs_to_check, verifier_cls
    ):
        raise exceptions.MalformedError("Could not verify token signature.")

    # Verify the issued at and created times in the payload.
    _verify_iat_and_exp(payload, clock_skew_in_seconds)

    # Check audience. A string audience is normalized to a one-element list
    # so membership testing covers both call styles.
    if audience is not None:
        claim_audience = payload.get("aud")
        if isinstance(audience, str):
            audience = [audience]
        if claim_audience not in audience:
            raise exceptions.InvalidValue(
                "Token has wrong audience {}, expected one of {}".format(
                    claim_audience, audience
                )
            )

    return payload
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
class Credentials(
    google.auth.credentials.Signing, google.auth.credentials.CredentialsWithQuotaProject
):
    """Credentials that use a JWT as the bearer token.

    These credentials require an "audience" claim. This claim identifies the
    intended recipient of the bearer token.

    The constructor arguments determine the claims for the JWT that is
    sent with requests. Usually, you'll construct these credentials with
    one of the helper constructors as shown in the next section.

    To create JWT credentials using a Google service account private key
    JSON file::

        audience = 'https://pubsub.googleapis.com/google.pubsub.v1.Publisher'
        credentials = jwt.Credentials.from_service_account_file(
            'service-account.json',
            audience=audience)

    If you already have the service account file loaded and parsed::

        service_account_info = json.load(open('service_account.json'))
        credentials = jwt.Credentials.from_service_account_info(
            service_account_info,
            audience=audience)

    Both helper methods pass on arguments to the constructor, so you can
    specify the JWT claims::

        credentials = jwt.Credentials.from_service_account_file(
            'service-account.json',
            audience=audience,
            additional_claims={'meta': 'data'})

    You can also construct the credentials directly if you have a
    :class:`~google.auth.crypt.Signer` instance::

        credentials = jwt.Credentials(
            signer,
            issuer='your-issuer',
            subject='your-subject',
            audience=audience)

    The claims are considered immutable. If you want to modify the claims,
    you can easily create another instance using :meth:`with_claims`::

        new_audience = (
            'https://pubsub.googleapis.com/google.pubsub.v1.Subscriber')
        new_credentials = credentials.with_claims(audience=new_audience)
    """

    def __init__(
        self,
        signer,
        issuer,
        subject,
        audience,
        additional_claims=None,
        token_lifetime=_DEFAULT_TOKEN_LIFETIME_SECS,
        quota_project_id=None,
    ):
        """
        Args:
            signer (google.auth.crypt.Signer): The signer used to sign JWTs.
            issuer (str): The `iss` claim.
            subject (str): The `sub` claim.
            audience (str): the `aud` claim. The intended audience for the
                credentials.
            additional_claims (Mapping[str, str]): Any additional claims for
                the JWT payload.
            token_lifetime (int): The amount of time in seconds for
                which the token is valid. Defaults to 1 hour.
            quota_project_id (Optional[str]): The project ID used for quota
                and billing.
        """
        super(Credentials, self).__init__()
        self._signer = signer
        self._issuer = issuer
        self._subject = subject
        self._audience = audience
        self._token_lifetime = token_lifetime
        self._quota_project_id = quota_project_id

        # NOTE: the mapping is stored by reference; claims are documented as
        # immutable, so callers must not mutate it after construction.
        if additional_claims is None:
            additional_claims = {}

        self._additional_claims = additional_claims

    @classmethod
    def _from_signer_and_info(cls, signer, info, **kwargs):
        """Creates a Credentials instance from a signer and service account
        info.

        Args:
            signer (google.auth.crypt.Signer): The signer used to sign JWTs.
            info (Mapping[str, str]): The service account info.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.jwt.Credentials: The constructed credentials.

        Raises:
            google.auth.exceptions.MalformedError: If the info is not in the expected format.
        """
        # The service account email doubles as both issuer and subject
        # unless the caller overrides them.
        kwargs.setdefault("subject", info["client_email"])
        kwargs.setdefault("issuer", info["client_email"])
        return cls(signer, **kwargs)

    @classmethod
    def from_service_account_info(cls, info, **kwargs):
        """Creates an Credentials instance from a dictionary.

        Args:
            info (Mapping[str, str]): The service account info in Google
                format.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.jwt.Credentials: The constructed credentials.

        Raises:
            google.auth.exceptions.MalformedError: If the info is not in the expected format.
        """
        signer = _service_account_info.from_dict(info, require=["client_email"])
        return cls._from_signer_and_info(signer, info, **kwargs)

    @classmethod
    def from_service_account_file(cls, filename, **kwargs):
        """Creates a Credentials instance from a service account .json file
        in Google format.

        Args:
            filename (str): The path to the service account .json file.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.jwt.Credentials: The constructed credentials.
        """
        info, signer = _service_account_info.from_filename(
            filename, require=["client_email"]
        )
        return cls._from_signer_and_info(signer, info, **kwargs)

    @classmethod
    def from_signing_credentials(cls, credentials, audience, **kwargs):
        """Creates a new :class:`google.auth.jwt.Credentials` instance from an
        existing :class:`google.auth.credentials.Signing` instance.

        The new instance will use the same signer as the existing instance and
        will use the existing instance's signer email as the issuer and
        subject by default.

        Example::

            svc_creds = service_account.Credentials.from_service_account_file(
                'service_account.json')
            audience = (
                'https://pubsub.googleapis.com/google.pubsub.v1.Publisher')
            jwt_creds = jwt.Credentials.from_signing_credentials(
                svc_creds, audience=audience)

        Args:
            credentials (google.auth.credentials.Signing): The credentials to
                use to construct the new credentials.
            audience (str): the `aud` claim. The intended audience for the
                credentials.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.jwt.Credentials: A new Credentials instance.
        """
        kwargs.setdefault("issuer", credentials.signer_email)
        kwargs.setdefault("subject", credentials.signer_email)
        return cls(credentials.signer, audience=audience, **kwargs)

    def with_claims(
        self, issuer=None, subject=None, audience=None, additional_claims=None
    ):
        """Returns a copy of these credentials with modified claims.

        Args:
            issuer (str): The `iss` claim. If unspecified the current issuer
                claim will be used.
            subject (str): The `sub` claim. If unspecified the current subject
                claim will be used.
            audience (str): the `aud` claim. If unspecified the current
                audience claim will be used.
            additional_claims (Mapping[str, str]): Any additional claims for
                the JWT payload. This will be merged with the current
                additional claims.

        Returns:
            google.auth.jwt.Credentials: A new credentials instance.
        """
        # Deep-copy so the new instance's claims can't alias the old one's.
        new_additional_claims = copy.deepcopy(self._additional_claims)
        new_additional_claims.update(additional_claims or {})

        return self.__class__(
            self._signer,
            issuer=issuer if issuer is not None else self._issuer,
            subject=subject if subject is not None else self._subject,
            audience=audience if audience is not None else self._audience,
            additional_claims=new_additional_claims,
            quota_project_id=self._quota_project_id,
        )

    @_helpers.copy_docstring(google.auth.credentials.CredentialsWithQuotaProject)
    def with_quota_project(self, quota_project_id):
        return self.__class__(
            self._signer,
            issuer=self._issuer,
            subject=self._subject,
            audience=self._audience,
            additional_claims=self._additional_claims,
            quota_project_id=quota_project_id,
        )

    def _make_jwt(self):
        """Make a signed JWT.

        Returns:
            Tuple[bytes, datetime]: The encoded JWT and the expiration.
        """
        now = _helpers.utcnow()
        lifetime = datetime.timedelta(seconds=self._token_lifetime)
        expiry = now + lifetime

        payload = {
            "iss": self._issuer,
            "sub": self._subject,
            "iat": _helpers.datetime_to_secs(now),
            "exp": _helpers.datetime_to_secs(expiry),
        }
        if self._audience:
            payload["aud"] = self._audience

        # Additional claims may override the standard claims above.
        payload.update(self._additional_claims)

        jwt = encode(self._signer, payload)

        return jwt, expiry

    def refresh(self, request):
        """Refreshes the access token.

        Args:
            request (Any): Unused.
        """
        # pylint: disable=unused-argument
        # (pylint doesn't correctly recognize overridden methods.)
        self.token, self.expiry = self._make_jwt()

    @_helpers.copy_docstring(google.auth.credentials.Signing)
    def sign_bytes(self, message):
        return self._signer.sign(message)

    @property  # type: ignore
    @_helpers.copy_docstring(google.auth.credentials.Signing)
    def signer_email(self):
        return self._issuer

    @property  # type: ignore
    @_helpers.copy_docstring(google.auth.credentials.Signing)
    def signer(self):
        return self._signer

    @property  # type: ignore
    def additional_claims(self):
        """ Additional claims the JWT object was created with."""
        return self._additional_claims
|
| 590 |
+
|
| 591 |
+
|
| 592 |
+
class OnDemandCredentials(
    google.auth.credentials.Signing, google.auth.credentials.CredentialsWithQuotaProject
):
    """On-demand JWT credentials.

    Like :class:`Credentials`, this class uses a JWT as the bearer token for
    authentication. However, this class does not require the audience at
    construction time. Instead, it will generate a new token on-demand for
    each request using the request URI as the audience. It caches tokens
    so that multiple requests to the same URI do not incur the overhead
    of generating a new token every time.

    This behavior is especially useful for `gRPC`_ clients. A gRPC service may
    have multiple audience and gRPC clients may not know all of the audiences
    required for accessing a particular service. With these credentials,
    no knowledge of the audiences is required ahead of time.

    .. _grpc: http://www.grpc.io/
    """

    def __init__(
        self,
        signer,
        issuer,
        subject,
        additional_claims=None,
        token_lifetime=_DEFAULT_TOKEN_LIFETIME_SECS,
        max_cache_size=_DEFAULT_MAX_CACHE_SIZE,
        quota_project_id=None,
    ):
        """
        Args:
            signer (google.auth.crypt.Signer): The signer used to sign JWTs.
            issuer (str): The `iss` claim.
            subject (str): The `sub` claim.
            additional_claims (Mapping[str, str]): Any additional claims for
                the JWT payload.
            token_lifetime (int): The amount of time in seconds for
                which the token is valid. Defaults to 1 hour.
            max_cache_size (int): The maximum number of JWT tokens to keep in
                cache. Tokens are cached using :class:`cachetools.LRUCache`.
            quota_project_id (Optional[str]): The project ID used for quota
                and billing.

        """
        super(OnDemandCredentials, self).__init__()
        self._signer = signer
        self._issuer = issuer
        self._subject = subject
        self._token_lifetime = token_lifetime
        self._quota_project_id = quota_project_id

        if additional_claims is None:
            additional_claims = {}

        self._additional_claims = additional_claims
        # LRU cache mapping audience (str) -> (encoded JWT, expiry datetime).
        self._cache = cachetools.LRUCache(maxsize=max_cache_size)

    @classmethod
    def _from_signer_and_info(cls, signer, info, **kwargs):
        """Creates an OnDemandCredentials instance from a signer and service
        account info.

        Args:
            signer (google.auth.crypt.Signer): The signer used to sign JWTs.
            info (Mapping[str, str]): The service account info.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.jwt.OnDemandCredentials: The constructed credentials.

        Raises:
            google.auth.exceptions.MalformedError: If the info is not in the expected format.
        """
        # The service account email doubles as both issuer and subject
        # unless the caller overrides them.
        kwargs.setdefault("subject", info["client_email"])
        kwargs.setdefault("issuer", info["client_email"])
        return cls(signer, **kwargs)

    @classmethod
    def from_service_account_info(cls, info, **kwargs):
        """Creates an OnDemandCredentials instance from a dictionary.

        Args:
            info (Mapping[str, str]): The service account info in Google
                format.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.jwt.OnDemandCredentials: The constructed credentials.

        Raises:
            google.auth.exceptions.MalformedError: If the info is not in the expected format.
        """
        signer = _service_account_info.from_dict(info, require=["client_email"])
        return cls._from_signer_and_info(signer, info, **kwargs)

    @classmethod
    def from_service_account_file(cls, filename, **kwargs):
        """Creates an OnDemandCredentials instance from a service account .json
        file in Google format.

        Args:
            filename (str): The path to the service account .json file.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.jwt.OnDemandCredentials: The constructed credentials.
        """
        info, signer = _service_account_info.from_filename(
            filename, require=["client_email"]
        )
        return cls._from_signer_and_info(signer, info, **kwargs)

    @classmethod
    def from_signing_credentials(cls, credentials, **kwargs):
        """Creates a new :class:`google.auth.jwt.OnDemandCredentials` instance
        from an existing :class:`google.auth.credentials.Signing` instance.

        The new instance will use the same signer as the existing instance and
        will use the existing instance's signer email as the issuer and
        subject by default.

        Example::

            svc_creds = service_account.Credentials.from_service_account_file(
                'service_account.json')
            jwt_creds = jwt.OnDemandCredentials.from_signing_credentials(
                svc_creds)

        Args:
            credentials (google.auth.credentials.Signing): The credentials to
                use to construct the new credentials.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.jwt.Credentials: A new Credentials instance.
        """
        kwargs.setdefault("issuer", credentials.signer_email)
        kwargs.setdefault("subject", credentials.signer_email)
        return cls(credentials.signer, **kwargs)

    def with_claims(self, issuer=None, subject=None, additional_claims=None):
        """Returns a copy of these credentials with modified claims.

        Args:
            issuer (str): The `iss` claim. If unspecified the current issuer
                claim will be used.
            subject (str): The `sub` claim. If unspecified the current subject
                claim will be used.
            additional_claims (Mapping[str, str]): Any additional claims for
                the JWT payload. This will be merged with the current
                additional claims.

        Returns:
            google.auth.jwt.OnDemandCredentials: A new credentials instance.
        """
        # Deep-copy so the new instance's claims can't alias the old one's.
        new_additional_claims = copy.deepcopy(self._additional_claims)
        new_additional_claims.update(additional_claims or {})

        return self.__class__(
            self._signer,
            issuer=issuer if issuer is not None else self._issuer,
            subject=subject if subject is not None else self._subject,
            additional_claims=new_additional_claims,
            max_cache_size=self._cache.maxsize,
            quota_project_id=self._quota_project_id,
        )

    @_helpers.copy_docstring(google.auth.credentials.CredentialsWithQuotaProject)
    def with_quota_project(self, quota_project_id):

        return self.__class__(
            self._signer,
            issuer=self._issuer,
            subject=self._subject,
            additional_claims=self._additional_claims,
            max_cache_size=self._cache.maxsize,
            quota_project_id=quota_project_id,
        )

    @property
    def valid(self):
        """Checks the validity of the credentials.

        These credentials are always valid because it generates tokens on
        demand.
        """
        return True

    def _make_jwt_for_audience(self, audience):
        """Make a new JWT for the given audience.

        Args:
            audience (str): The intended audience.

        Returns:
            Tuple[bytes, datetime]: The encoded JWT and the expiration.
        """
        now = _helpers.utcnow()
        lifetime = datetime.timedelta(seconds=self._token_lifetime)
        expiry = now + lifetime

        payload = {
            "iss": self._issuer,
            "sub": self._subject,
            "iat": _helpers.datetime_to_secs(now),
            "exp": _helpers.datetime_to_secs(expiry),
            "aud": audience,
        }

        # Additional claims may override the standard claims above.
        payload.update(self._additional_claims)

        jwt = encode(self._signer, payload)

        return jwt, expiry

    def _get_jwt_for_audience(self, audience):
        """Get a JWT For a given audience.

        If there is already an existing, non-expired token in the cache for
        the audience, that token is used. Otherwise, a new token will be
        created.

        Args:
            audience (str): The intended audience.

        Returns:
            bytes: The encoded JWT.
        """
        token, expiry = self._cache.get(audience, (None, None))

        # Mint and cache a fresh token on a cache miss or expired entry.
        if token is None or expiry < _helpers.utcnow():
            token, expiry = self._make_jwt_for_audience(audience)
            self._cache[audience] = token, expiry

        return token

    def refresh(self, request):
        """Raises an exception, these credentials can not be directly
        refreshed.

        Args:
            request (Any): Unused.

        Raises:
            google.auth.RefreshError
        """
        # pylint: disable=unused-argument
        # (pylint doesn't correctly recognize overridden methods.)
        raise exceptions.RefreshError(
            "OnDemandCredentials can not be directly refreshed."
        )

    def before_request(self, request, method, url, headers):
        """Performs credential-specific before request logic.

        Args:
            request (Any): Unused. JWT credentials do not need to make an
                HTTP request to refresh.
            method (str): The request's HTTP method.
            url (str): The request's URI. This is used as the audience claim
                when generating the JWT.
            headers (Mapping): The request's headers.
        """
        # pylint: disable=unused-argument
        # (pylint doesn't correctly recognize overridden methods.)
        parts = urllib.parse.urlsplit(url)
        # Strip query string and fragment
        audience = urllib.parse.urlunsplit(
            (parts.scheme, parts.netloc, parts.path, "", "")
        )
        token = self._get_jwt_for_audience(audience)
        self.apply(headers, token=token)

    @_helpers.copy_docstring(google.auth.credentials.Signing)
    def sign_bytes(self, message):
        return self._signer.sign(message)

    @property  # type: ignore
    @_helpers.copy_docstring(google.auth.credentials.Signing)
    def signer_email(self):
        return self._issuer

    @property  # type: ignore
    @_helpers.copy_docstring(google.auth.credentials.Signing)
    def signer(self):
        return self._signer
|
lib/python3.10/site-packages/google/auth/pluggable.py
ADDED
|
@@ -0,0 +1,429 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2022 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
"""Pluggable Credentials.
|
| 16 |
+
Pluggable Credentials are initialized using external_account arguments which
|
| 17 |
+
are typically loaded from third-party executables. Unlike other
|
| 18 |
+
credentials that can be initialized with a list of explicit arguments, secrets
|
| 19 |
+
or credentials, external account clients use the environment and hints/guidelines
|
| 20 |
+
provided by the external_account JSON file to retrieve credentials and exchange
|
| 21 |
+
them for Google access tokens.
|
| 22 |
+
|
| 23 |
+
Example credential_source for pluggable credential:
|
| 24 |
+
{
|
| 25 |
+
"executable": {
|
| 26 |
+
"command": "/path/to/get/credentials.sh --arg1=value1 --arg2=value2",
|
| 27 |
+
"timeout_millis": 5000,
|
| 28 |
+
"output_file": "/path/to/generated/cached/credentials"
|
| 29 |
+
}
|
| 30 |
+
}
|
| 31 |
+
"""
|
| 32 |
+
|
| 33 |
+
try:
|
| 34 |
+
from collections.abc import Mapping
|
| 35 |
+
# Python 2.7 compatibility
|
| 36 |
+
except ImportError: # pragma: NO COVER
|
| 37 |
+
from collections import Mapping # type: ignore
|
| 38 |
+
import json
|
| 39 |
+
import os
|
| 40 |
+
import subprocess
|
| 41 |
+
import sys
|
| 42 |
+
import time
|
| 43 |
+
|
| 44 |
+
from google.auth import _helpers
|
| 45 |
+
from google.auth import exceptions
|
| 46 |
+
from google.auth import external_account
|
| 47 |
+
|
| 48 |
+
# The max supported executable spec version.
|
| 49 |
+
EXECUTABLE_SUPPORTED_MAX_VERSION = 1
|
| 50 |
+
|
| 51 |
+
EXECUTABLE_TIMEOUT_MILLIS_DEFAULT = 30 * 1000 # 30 seconds
|
| 52 |
+
EXECUTABLE_TIMEOUT_MILLIS_LOWER_BOUND = 5 * 1000 # 5 seconds
|
| 53 |
+
EXECUTABLE_TIMEOUT_MILLIS_UPPER_BOUND = 120 * 1000 # 2 minutes
|
| 54 |
+
|
| 55 |
+
EXECUTABLE_INTERACTIVE_TIMEOUT_MILLIS_LOWER_BOUND = 30 * 1000 # 30 seconds
|
| 56 |
+
EXECUTABLE_INTERACTIVE_TIMEOUT_MILLIS_UPPER_BOUND = 30 * 60 * 1000 # 30 minutes
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class Credentials(external_account.Credentials):
    """External account credentials sourced from executables."""

    def __init__(
        self,
        audience,
        subject_token_type,
        token_url,
        credential_source,
        *args,
        **kwargs
    ):
        """Instantiates an external account credentials object from a executables.

        Args:
            audience (str): The STS audience field.
            subject_token_type (str): The subject token type.
            token_url (str): The STS endpoint URL.
            credential_source (Mapping): The credential source dictionary used to
                provide instructions on how to retrieve external credential to be
                exchanged for Google access tokens.

                Example credential_source for pluggable credential:

                {
                    "executable": {
                        "command": "/path/to/get/credentials.sh --arg1=value1 --arg2=value2",
                        "timeout_millis": 5000,
                        "output_file": "/path/to/generated/cached/credentials"
                    }
                }
            args (List): Optional positional arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
            kwargs (Mapping): Optional keyword arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.

        Raises:
            google.auth.exceptions.RefreshError: If an error is encountered during
                access token retrieval logic.
            google.auth.exceptions.InvalidValue: For invalid parameters.
            google.auth.exceptions.MalformedError: For invalid parameters.

        .. note:: Typically one of the helper constructors
            :meth:`from_file` or
            :meth:`from_info` are used instead of calling the constructor directly.
        """

        # "interactive" is not part of the base-class signature, so pop it
        # before delegating the remaining arguments upward.
        self.interactive = kwargs.pop("interactive", False)
        super(Credentials, self).__init__(
            audience=audience,
            subject_token_type=subject_token_type,
            token_url=token_url,
            credential_source=credential_source,
            *args,
            **kwargs
        )
        if not isinstance(credential_source, Mapping):
            self._credential_source_executable = None
            raise exceptions.MalformedError(
                "Missing credential_source. The credential_source is not a dict."
            )
        self._credential_source_executable = credential_source.get("executable")
        if not self._credential_source_executable:
            raise exceptions.MalformedError(
                "Missing credential_source. An 'executable' must be provided."
            )
        self._credential_source_executable_command = self._credential_source_executable.get(
            "command"
        )
        self._credential_source_executable_timeout_millis = self._credential_source_executable.get(
            "timeout_millis"
        )
        self._credential_source_executable_interactive_timeout_millis = self._credential_source_executable.get(
            "interactive_timeout_millis"
        )
        self._credential_source_executable_output_file = self._credential_source_executable.get(
            "output_file"
        )

        # Dummy value. This variable is only used via injection, not exposed to ctor
        self._tokeninfo_username = ""

        if not self._credential_source_executable_command:
            raise exceptions.MalformedError(
                "Missing command field. Executable command must be provided."
            )
        if not self._credential_source_executable_timeout_millis:
            self._credential_source_executable_timeout_millis = (
                EXECUTABLE_TIMEOUT_MILLIS_DEFAULT
            )
        elif (
            self._credential_source_executable_timeout_millis
            < EXECUTABLE_TIMEOUT_MILLIS_LOWER_BOUND
            or self._credential_source_executable_timeout_millis
            > EXECUTABLE_TIMEOUT_MILLIS_UPPER_BOUND
        ):
            raise exceptions.InvalidValue("Timeout must be between 5 and 120 seconds.")

        if self._credential_source_executable_interactive_timeout_millis:
            if (
                self._credential_source_executable_interactive_timeout_millis
                < EXECUTABLE_INTERACTIVE_TIMEOUT_MILLIS_LOWER_BOUND
                or self._credential_source_executable_interactive_timeout_millis
                > EXECUTABLE_INTERACTIVE_TIMEOUT_MILLIS_UPPER_BOUND
            ):
                raise exceptions.InvalidValue(
                    "Interactive timeout must be between 30 seconds and 30 minutes."
                )

    @_helpers.copy_docstring(external_account.Credentials)
    def retrieve_subject_token(self, request):
        self._validate_running_mode()

        # Check output file. A previously cached, still-valid response lets
        # us skip spawning the executable entirely.
        if self._credential_source_executable_output_file is not None:
            try:
                with open(
                    self._credential_source_executable_output_file, encoding="utf-8"
                ) as output_file:
                    response = json.load(output_file)
            except Exception:
                # Missing/unreadable/invalid cache is best-effort: fall
                # through to running the executable.
                pass
            else:
                try:
                    # If the cached response is expired, _parse_subject_token will raise an error which will be ignored and we will call the executable again.
                    subject_token = self._parse_subject_token(response)
                    if (
                        "expiration_time" not in response
                    ):  # Always treat missing expiration_time as expired and proceed to executable run.
                        raise exceptions.RefreshError
                except (exceptions.MalformedError, exceptions.InvalidValue):
                    raise
                except exceptions.RefreshError:
                    pass
                else:
                    return subject_token

        if not _helpers.is_python_3():
            raise exceptions.RefreshError(
                "Pluggable auth is only supported for python 3.7+"
            )

        # Inject env vars.
        env = os.environ.copy()
        self._inject_env_variables(env)
        env["GOOGLE_EXTERNAL_ACCOUNT_REVOKE"] = "0"

        # Run executable. Interactive mode wires the child process to the
        # caller's terminal; non-interactive mode captures output instead.
        exe_timeout = (
            self._credential_source_executable_interactive_timeout_millis / 1000
            if self.interactive
            else self._credential_source_executable_timeout_millis / 1000
        )
        exe_stdin = sys.stdin if self.interactive else None
        exe_stdout = sys.stdout if self.interactive else subprocess.PIPE
        exe_stderr = sys.stdout if self.interactive else subprocess.STDOUT

        result = subprocess.run(
            self._credential_source_executable_command.split(),
            timeout=exe_timeout,
            stdin=exe_stdin,
            stdout=exe_stdout,
            stderr=exe_stderr,
            env=env,
        )
        if result.returncode != 0:
            raise exceptions.RefreshError(
                "Executable exited with non-zero return code {}. Error: {}".format(
                    result.returncode, result.stdout
                )
            )

        # Handle executable output. In interactive mode stdout was not
        # captured, so the response must come from the output file.
        response = json.loads(result.stdout.decode("utf-8")) if result.stdout else None
        if not response and self._credential_source_executable_output_file is not None:
            # Fix: read via a context manager instead of json.load(open(...)),
            # which leaked the file handle until garbage collection.
            with open(
                self._credential_source_executable_output_file, encoding="utf-8"
            ) as output_file:
                response = json.load(output_file)

        subject_token = self._parse_subject_token(response)
        return subject_token

    def revoke(self, request):
        """Revokes the subject token using the credential_source object.

        Args:
            request (google.auth.transport.Request): A callable used to make
                HTTP requests.
        Raises:
            google.auth.exceptions.RefreshError: If the executable revocation
                not properly executed.

        """
        if not self.interactive:
            raise exceptions.InvalidValue(
                "Revoke is only enabled under interactive mode."
            )
        self._validate_running_mode()

        if not _helpers.is_python_3():
            raise exceptions.RefreshError(
                "Pluggable auth is only supported for python 3.7+"
            )

        # Inject variables
        env = os.environ.copy()
        self._inject_env_variables(env)
        env["GOOGLE_EXTERNAL_ACCOUNT_REVOKE"] = "1"

        # Run executable
        result = subprocess.run(
            self._credential_source_executable_command.split(),
            timeout=self._credential_source_executable_interactive_timeout_millis
            / 1000,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            env=env,
        )

        if result.returncode != 0:
            raise exceptions.RefreshError(
                "Auth revoke failed on executable. Exit with non-zero return code {}. Error: {}".format(
                    result.returncode, result.stdout
                )
            )

        response = json.loads(result.stdout.decode("utf-8"))
        self._validate_revoke_response(response)

    @property
    def external_account_id(self):
        """Returns the external account identifier.

        When service account impersonation is used the identifier is the service
        account email.

        Without service account impersonation, this returns None, unless it is
        being used by the Google Cloud CLI which populates this field.
        """

        return self.service_account_email or self._tokeninfo_username

    @classmethod
    def from_info(cls, info, **kwargs):
        """Creates a Pluggable Credentials instance from parsed external account info.

        Args:
            info (Mapping[str, str]): The Pluggable external account info in Google
                format.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.pluggable.Credentials: The constructed
                credentials.

        Raises:
            google.auth.exceptions.InvalidValue: For invalid parameters.
            google.auth.exceptions.MalformedError: For invalid parameters.
        """
        return super(Credentials, cls).from_info(info, **kwargs)

    @classmethod
    def from_file(cls, filename, **kwargs):
        """Creates an Pluggable Credentials instance from an external account json file.

        Args:
            filename (str): The path to the Pluggable external account json file.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.pluggable.Credentials: The constructed
                credentials.
        """
        return super(Credentials, cls).from_file(filename, **kwargs)

    def _inject_env_variables(self, env):
        # Populate the environment contract the external executable reads.
        env["GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE"] = self._audience
        env["GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE"] = self._subject_token_type
        env["GOOGLE_EXTERNAL_ACCOUNT_ID"] = self.external_account_id
        env["GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE"] = "1" if self.interactive else "0"

        if self._service_account_impersonation_url is not None:
            env[
                "GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL"
            ] = self.service_account_email
        if self._credential_source_executable_output_file is not None:
            env[
                "GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE"
            ] = self._credential_source_executable_output_file

    def _parse_subject_token(self, response):
        # Validate schema first, then success flag, expiry, and token type.
        self._validate_response_schema(response)
        if not response["success"]:
            if "code" not in response or "message" not in response:
                raise exceptions.MalformedError(
                    "Error code and message fields are required in the response."
                )
            raise exceptions.RefreshError(
                "Executable returned unsuccessful response: code: {}, message: {}.".format(
                    response["code"], response["message"]
                )
            )
        if "expiration_time" in response and response["expiration_time"] < time.time():
            raise exceptions.RefreshError(
                "The token returned by the executable is expired."
            )
        if "token_type" not in response:
            raise exceptions.MalformedError(
                "The executable response is missing the token_type field."
            )
        if (
            response["token_type"] == "urn:ietf:params:oauth:token-type:jwt"
            or response["token_type"] == "urn:ietf:params:oauth:token-type:id_token"
        ):  # OIDC
            return response["id_token"]
        elif response["token_type"] == "urn:ietf:params:oauth:token-type:saml2":  # SAML
            return response["saml_response"]
        else:
            raise exceptions.RefreshError("Executable returned unsupported token type.")

    def _validate_revoke_response(self, response):
        self._validate_response_schema(response)
        if not response["success"]:
            raise exceptions.RefreshError("Revoke failed with unsuccessful response.")

    def _validate_response_schema(self, response):
        # Every executable response must carry a supported version and an
        # explicit success flag.
        if "version" not in response:
            raise exceptions.MalformedError(
                "The executable response is missing the version field."
            )
        if response["version"] > EXECUTABLE_SUPPORTED_MAX_VERSION:
            raise exceptions.RefreshError(
                "Executable returned unsupported version {}.".format(
                    response["version"]
                )
            )

        if "success" not in response:
            raise exceptions.MalformedError(
                "The executable response is missing the success field."
            )

    def _validate_running_mode(self):
        # Spawning arbitrary executables is opt-in via environment variable.
        env_allow_executables = os.environ.get(
            "GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES"
        )
        if env_allow_executables != "1":
            raise exceptions.MalformedError(
                "Executables need to be explicitly allowed (set GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES to '1') to run."
            )

        if self.interactive and not self._credential_source_executable_output_file:
            raise exceptions.MalformedError(
                "An output_file must be specified in the credential configuration for interactive mode."
            )

        if (
            self.interactive
            and not self._credential_source_executable_interactive_timeout_millis
        ):
            raise exceptions.InvalidOperation(
                "Interactive mode cannot run without an interactive timeout."
            )

        if self.interactive and not self.is_workforce_pool:
            raise exceptions.InvalidValue(
                "Interactive mode is only enabled for workforce pool."
            )

    def _create_default_metrics_options(self):
        # Tag metrics emitted by the base class with this credential source.
        metrics_options = super(Credentials, self)._create_default_metrics_options()
        metrics_options["source"] = "executable"
        return metrics_options
|
lib/python3.10/site-packages/google/oauth2/_credentials_async.py
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
"""OAuth 2.0 Async Credentials.
|
| 16 |
+
|
| 17 |
+
This module provides credentials based on OAuth 2.0 access and refresh tokens.
|
| 18 |
+
These credentials usually access resources on behalf of a user (resource
|
| 19 |
+
owner).
|
| 20 |
+
|
| 21 |
+
Specifically, this is intended to use access tokens acquired using the
|
| 22 |
+
`Authorization Code grant`_ and can refresh those tokens using a
|
| 23 |
+
optional `refresh token`_.
|
| 24 |
+
|
| 25 |
+
Obtaining the initial access and refresh token is outside of the scope of this
|
| 26 |
+
module. Consult `rfc6749 section 4.1`_ for complete details on the
|
| 27 |
+
Authorization Code grant flow.
|
| 28 |
+
|
| 29 |
+
.. _Authorization Code grant: https://tools.ietf.org/html/rfc6749#section-1.3.1
|
| 30 |
+
.. _refresh token: https://tools.ietf.org/html/rfc6749#section-6
|
| 31 |
+
.. _rfc6749 section 4.1: https://tools.ietf.org/html/rfc6749#section-4.1
|
| 32 |
+
"""
|
| 33 |
+
|
| 34 |
+
from google.auth import _credentials_async as credentials
|
| 35 |
+
from google.auth import _helpers
|
| 36 |
+
from google.auth import exceptions
|
| 37 |
+
from google.oauth2 import _reauth_async as reauth
|
| 38 |
+
from google.oauth2 import credentials as oauth2_credentials
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class Credentials(oauth2_credentials.Credentials):
    """Credentials using OAuth 2.0 access and refresh tokens.

    The credentials are considered immutable. If you want to modify the
    quota project, use :meth:`with_quota_project` or ::

        credentials = credentials.with_quota_project('myproject-123')
    """

    @_helpers.copy_docstring(credentials.Credentials)
    async def refresh(self, request):
        # All four fields are required to perform a refresh-token grant;
        # fail fast with a descriptive error when any is missing.
        if (
            self._refresh_token is None
            or self._token_uri is None
            or self._client_id is None
            or self._client_secret is None
        ):
            raise exceptions.RefreshError(
                "The credentials do not contain the necessary fields need to "
                "refresh the access token. You must specify refresh_token, "
                "token_uri, client_id, and client_secret."
            )

        # Perform the (possibly reauth-enabled) refresh grant against the
        # token endpoint.
        (
            access_token,
            refresh_token,
            expiry,
            grant_response,
            rapt_token,
        ) = await reauth.refresh_grant(
            request,
            self._token_uri,
            self._refresh_token,
            self._client_id,
            self._client_secret,
            scopes=self._scopes,
            rapt_token=self._rapt_token,
            enable_reauth_refresh=self._enable_reauth_refresh,
        )

        # Update credential state from the grant response; the server may
        # rotate the refresh token.
        self.token = access_token
        self.expiry = expiry
        self._refresh_token = refresh_token
        self._id_token = grant_response.get("id_token")
        self._rapt_token = rapt_token

        # If the server reported granted scopes, verify all requested
        # scopes were actually granted.
        if self._scopes and "scope" in grant_response:
            requested_scopes = frozenset(self._scopes)
            granted_scopes = frozenset(grant_response["scope"].split())
            scopes_requested_but_not_granted = requested_scopes - granted_scopes
            if scopes_requested_but_not_granted:
                raise exceptions.RefreshError(
                    "Not all requested scopes were granted by the "
                    "authorization server, missing scopes {}.".format(
                        ", ".join(scopes_requested_but_not_granted)
                    )
                )

    @_helpers.copy_docstring(credentials.Credentials)
    async def before_request(self, request, method, url, headers):
        # Refresh lazily, then attach the bearer token to the headers.
        if not self.valid:
            await self.refresh(request)
        self.apply(headers)
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
# Async variant; all behavior is inherited from the synchronous
# oauth2_credentials.UserAccessTokenCredentials base class.
class UserAccessTokenCredentials(oauth2_credentials.UserAccessTokenCredentials):
    """Access token credentials for user account.

    Obtain the access token for a given user account or the current active
    user account with the ``gcloud auth print-access-token`` command.

    Args:
        account (Optional[str]): Account to get the access token for. If not
            specified, the current active account will be used.
        quota_project_id (Optional[str]): The project ID used for quota
            and billing.

    """
|
lib/python3.10/site-packages/google/oauth2/_service_account_async.py
ADDED
|
@@ -0,0 +1,132 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
"""Service Accounts: JSON Web Token (JWT) Profile for OAuth 2.0
|
| 16 |
+
|
| 17 |
+
NOTE: This file adds asynchronous refresh methods to both credentials
|
| 18 |
+
classes, and therefore async/await syntax is required when calling this
|
| 19 |
+
method when using service account credentials with asynchronous functionality.
|
| 20 |
+
Otherwise, all other methods are inherited from the regular service account
|
| 21 |
+
credentials file google.oauth2.service_account
|
| 22 |
+
|
| 23 |
+
"""
|
| 24 |
+
|
| 25 |
+
from google.auth import _credentials_async as credentials_async
|
| 26 |
+
from google.auth import _helpers
|
| 27 |
+
from google.oauth2 import _client_async
|
| 28 |
+
from google.oauth2 import service_account
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class Credentials(
    service_account.Credentials, credentials_async.Scoped, credentials_async.Credentials
):
    """Service account credentials

    Usually, you'll create these credentials with one of the helper
    constructors. To create credentials using a Google service account
    private key JSON file::

        credentials = _service_account_async.Credentials.from_service_account_file(
            'service-account.json')

    Or if you already have the service account file loaded::

        service_account_info = json.load(open('service_account.json'))
        credentials = _service_account_async.Credentials.from_service_account_info(
            service_account_info)

    Both helper methods pass on arguments to the constructor, so you can
    specify additional scopes and a subject if necessary::

        credentials = _service_account_async.Credentials.from_service_account_file(
            'service-account.json',
            scopes=['email'],
            subject='user@example.com')

    The credentials are considered immutable. If you want to modify the scopes
    or the subject used for delegation, use :meth:`with_scopes` or
    :meth:`with_subject`::

        scoped_credentials = credentials.with_scopes(['email'])
        delegated_credentials = credentials.with_subject(subject)

    To add a quota project, use :meth:`with_quota_project`::

        credentials = credentials.with_quota_project('myproject-123')
    """

    @_helpers.copy_docstring(credentials_async.Credentials)
    async def refresh(self, request):
        # Build the signed JWT assertion (inherited, synchronous) and
        # exchange it asynchronously for an access token.
        assertion = self._make_authorization_grant_assertion()
        access_token, expiry, _ = await _client_async.jwt_grant(
            request, self._token_uri, assertion
        )
        self.token = access_token
        self.expiry = expiry
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
class IDTokenCredentials(
    service_account.IDTokenCredentials,
    credentials_async.Signing,
    credentials_async.Credentials,
):
    """Open ID Connect ID Token-based service account credentials.

    These credentials are largely similar to :class:`.Credentials`, but instead
    of using an OAuth 2.0 Access Token as the bearer token, they use an Open
    ID Connect ID Token as the bearer token. These credentials are useful when
    communicating to services that require ID Tokens and can not accept access
    tokens.

    Usually, you'll create these credentials with one of the helper
    constructors. To create credentials using a Google service account
    private key JSON file::

        credentials = (
            _service_account_async.IDTokenCredentials.from_service_account_file(
                'service-account.json'))

    Or if you already have the service account file loaded::

        service_account_info = json.load(open('service_account.json'))
        credentials = (
            _service_account_async.IDTokenCredentials.from_service_account_info(
                service_account_info))

    Both helper methods pass on arguments to the constructor, so you can
    specify additional scopes and a subject if necessary::

        credentials = (
            _service_account_async.IDTokenCredentials.from_service_account_file(
                'service-account.json',
                scopes=['email'],
                subject='user@example.com'))

    The credentials are considered immutable. If you want to modify the scopes
    or the subject used for delegation, use :meth:`with_scopes` or
    :meth:`with_subject`::

        scoped_credentials = credentials.with_scopes(['email'])
        delegated_credentials = credentials.with_subject(subject)

    """

    @_helpers.copy_docstring(credentials_async.Credentials)
    async def refresh(self, request):
        # Build the signed JWT assertion (inherited, synchronous) and
        # exchange it asynchronously for an Open ID Connect ID token.
        assertion = self._make_authorization_grant_assertion()
        access_token, expiry, _ = await _client_async.id_token_jwt_grant(
            request, self._token_uri, assertion
        )
        self.token = access_token
        self.expiry = expiry
|
lib/python3.10/site-packages/google/oauth2/sts.py
ADDED
|
@@ -0,0 +1,176 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
"""OAuth 2.0 Token Exchange Spec.
|
| 16 |
+
|
| 17 |
+
This module defines a token exchange utility based on the `OAuth 2.0 Token
|
| 18 |
+
Exchange`_ spec. This will be mainly used to exchange external credentials
|
| 19 |
+
for GCP access tokens in workload identity pools to access Google APIs.
|
| 20 |
+
|
| 21 |
+
The implementation will support various types of client authentication as
|
| 22 |
+
allowed in the spec.
|
| 23 |
+
|
| 24 |
+
A deviation on the spec will be for additional Google specific options that
|
| 25 |
+
cannot be easily mapped to parameters defined in the RFC.
|
| 26 |
+
|
| 27 |
+
The returned dictionary response will be based on the `rfc8693 section 2.2.1`_
|
| 28 |
+
spec JSON response.
|
| 29 |
+
|
| 30 |
+
.. _OAuth 2.0 Token Exchange: https://tools.ietf.org/html/rfc8693
|
| 31 |
+
.. _rfc8693 section 2.2.1: https://tools.ietf.org/html/rfc8693#section-2.2.1
|
| 32 |
+
"""
|
| 33 |
+
|
| 34 |
+
import http.client as http_client
|
| 35 |
+
import json
|
| 36 |
+
import urllib
|
| 37 |
+
|
| 38 |
+
from google.oauth2 import utils
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
# Default Content-Type for the form-encoded token exchange POST body.
_URLENCODED_HEADERS = {"Content-Type": "application/x-www-form-urlencoded"}
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class Client(utils.OAuthClientAuthHandler):
    """Implements the OAuth 2.0 token exchange spec based on
    https://tools.ietf.org/html/rfc8693.
    """

    def __init__(self, token_exchange_endpoint, client_authentication=None):
        """Initializes an STS client instance.

        Args:
            token_exchange_endpoint (str): The token exchange endpoint.
            client_authentication (Optional(google.oauth2.oauth2_utils.ClientAuthentication)):
                The optional OAuth client authentication credentials if available.
        """
        super(Client, self).__init__(client_authentication)
        self._token_exchange_endpoint = token_exchange_endpoint

    def _make_request(self, request, headers, request_body):
        """POSTs the form-encoded request body to the token exchange endpoint.

        Args:
            request (google.auth.transport.Request): A callable used to make
                HTTP requests.
            headers (Optional[Mapping[str, str]]): Additional headers to merge
                into the request headers.
            request_body (Mapping[str, str]): The form fields to send.

        Returns:
            Mapping[str, str]: The JSON-decoded response data.

        Raises:
            google.auth.exceptions.OAuthError: If the endpoint returned a
                non-200 response.
        """
        # Start from the form-encoded Content-Type and merge in any
        # caller-supplied headers.
        request_headers = _URLENCODED_HEADERS.copy()
        if headers:
            request_headers.update(headers)

        # Apply OAuth client authentication (may mutate headers and/or body).
        self.apply_client_authentication_options(request_headers, request_body)

        # Execute request.
        response = request(
            url=self._token_exchange_endpoint,
            method="POST",
            headers=request_headers,
            body=urllib.parse.urlencode(request_body).encode("utf-8"),
        )

        # Transport implementations may return either bytes or str.
        response_body = (
            response.data.decode("utf-8")
            if hasattr(response.data, "decode")
            else response.data
        )

        # If non-200 response received, translate to OAuthError exception.
        if response.status != http_client.OK:
            utils.handle_error_response(response_body)

        # Return successful JSON-decoded response.
        return json.loads(response_body)

    def exchange_token(
        self,
        request,
        grant_type,
        subject_token,
        subject_token_type,
        resource=None,
        audience=None,
        scopes=None,
        requested_token_type=None,
        actor_token=None,
        actor_token_type=None,
        additional_options=None,
        additional_headers=None,
    ):
        """Exchanges the provided token for another type of token based on the
        rfc8693 spec.

        Args:
            request (google.auth.transport.Request): A callable used to make
                HTTP requests.
            grant_type (str): The OAuth 2.0 token exchange grant type.
            subject_token (str): The OAuth 2.0 token exchange subject token.
            subject_token_type (str): The OAuth 2.0 token exchange subject token type.
            resource (Optional[str]): The optional OAuth 2.0 token exchange resource field.
            audience (Optional[str]): The optional OAuth 2.0 token exchange audience field.
            scopes (Optional[Sequence[str]]): The optional list of scopes to use.
            requested_token_type (Optional[str]): The optional OAuth 2.0 token exchange requested
                token type.
            actor_token (Optional[str]): The optional OAuth 2.0 token exchange actor token.
            actor_token_type (Optional[str]): The optional OAuth 2.0 token exchange actor token type.
            additional_options (Optional[Mapping[str, str]]): The optional additional
                non-standard Google specific options.
            additional_headers (Optional[Mapping[str, str]]): The optional additional
                headers to pass to the token exchange endpoint.

        Returns:
            Mapping[str, str]: The token exchange JSON-decoded response data containing
                the requested token and its expiration time.

        Raises:
            google.auth.exceptions.OAuthError: If the token endpoint returned
                an error.
        """
        # Initialize request body.
        request_body = {
            "grant_type": grant_type,
            "resource": resource,
            "audience": audience,
            "scope": " ".join(scopes or []),
            "requested_token_type": requested_token_type,
            "subject_token": subject_token,
            "subject_token_type": subject_token_type,
            "actor_token": actor_token,
            "actor_token_type": actor_token_type,
            "options": None,
        }
        # Add additional non-standard options (URL-quoted JSON, a Google
        # specific deviation from the RFC).
        if additional_options:
            request_body["options"] = urllib.parse.quote(json.dumps(additional_options))
        # Drop unset/empty fields so they are not sent to the endpoint.
        request_body = {
            k: v for k, v in request_body.items() if v is not None and v != ""
        }

        return self._make_request(request, additional_headers, request_body)

    def refresh_token(self, request, refresh_token):
        """Exchanges a refresh token for an access token based on the
        RFC6749 spec.

        Args:
            request (google.auth.transport.Request): A callable used to make
                HTTP requests.
            refresh_token (str): The OAuth 2.0 refresh token.
        """

        return self._make_request(
            request,
            None,
            {"grant_type": "refresh_token", "refresh_token": refresh_token},
        )
|
lib/python3.10/site-packages/google/oauth2/webauthn_handler.py
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import os
|
| 3 |
+
import struct
|
| 4 |
+
import subprocess
|
| 5 |
+
|
| 6 |
+
from google.auth import exceptions
|
| 7 |
+
from google.oauth2.webauthn_types import GetRequest, GetResponse
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class WebAuthnHandler(abc.ABC):
    # Abstract interface for performing a WebAuthn operation. Only the
    # "get" (assertion) flow is modeled; concrete subclasses decide how the
    # operation is actually carried out (e.g. via an external plugin).

    @abc.abstractmethod
    def is_available(self) -> bool:
        """Check whether this WebAuthn handler is available"""
        raise NotImplementedError("is_available method must be implemented")

    @abc.abstractmethod
    def get(self, get_request: GetRequest) -> GetResponse:
        """WebAuthn get (assertion)"""
        raise NotImplementedError("get method must be implemented")
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class PluginHandler(WebAuthnHandler):
    """Offloads WebAuthn get request to a pluggable command-line tool.

    Offloads WebAuthn get to a plugin which takes the form of a
    command-line tool. The command-line tool is configurable via the
    PluginHandler._ENV_VAR environment variable.

    The WebAuthn plugin should implement the following interface:

    Communication occurs over stdin/stdout, and messages are both sent and
    received in the form:

    [4 bytes - payload size (little-endian)][variable bytes - json payload]
    """

    # Environment variable naming the plugin executable.
    _ENV_VAR = "GOOGLE_AUTH_WEBAUTHN_PLUGIN"

    def is_available(self) -> bool:
        """Returns True if a plugin executable is configured, else False."""
        try:
            self._find_plugin()
        except Exception:
            return False
        else:
            return True

    def get(self, get_request: GetRequest) -> GetResponse:
        """Runs the WebAuthn get (assertion) flow through the plugin."""
        request_json = get_request.to_json()
        cmd = self._find_plugin()
        response_json = self._call_plugin(cmd, request_json)
        return GetResponse.from_json(response_json)

    def _call_plugin(self, cmd: str, input_json: str) -> str:
        """Sends a framed JSON payload to the plugin and returns its reply.

        Args:
            cmd: Path/name of the plugin executable.
            input_json: JSON request to frame and send on the plugin's stdin.

        Returns:
            The plugin's JSON reply (payload after the length prefix).

        Raises:
            google.auth.exceptions.MalformedError: If the plugin reply is
                truncated or its length prefix disagrees with the payload.
        """
        # Frame the request: 4-byte little-endian length prefix + payload.
        input_length = len(input_json)
        length_bytes_le = struct.pack("<I", input_length)
        request = length_bytes_le + input_json.encode()

        # Call plugin
        process_result = subprocess.run(
            [cmd], input=request, capture_output=True, check=True
        )

        # Validate the framed response. A reply shorter than the 4-byte
        # length prefix would otherwise surface as an opaque struct.error.
        if len(process_result.stdout) < 4:
            raise exceptions.MalformedError(
                "Plugin response is too short to contain a length prefix"
            )
        response_len_le = process_result.stdout[:4]
        response_len = struct.unpack("<I", response_len_le)[0]
        response = process_result.stdout[4:]
        if response_len != len(response):
            raise exceptions.MalformedError(
                "Plugin response length {} does not match data {}".format(
                    response_len, len(response)
                )
            )
        return response.decode()

    def _find_plugin(self) -> str:
        """Returns the plugin command configured in the environment.

        Raises:
            google.auth.exceptions.InvalidResource: If the environment
                variable is not set.
        """
        plugin_cmd = os.environ.get(PluginHandler._ENV_VAR)
        if plugin_cmd is None:
            raise exceptions.InvalidResource(
                "{} env var is not set".format(PluginHandler._ENV_VAR)
            )
        return plugin_cmd
|
lib/python3.10/site-packages/google/oauth2/webauthn_handler_factory.py
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import List, Optional
|
| 2 |
+
|
| 3 |
+
from google.oauth2.webauthn_handler import PluginHandler, WebAuthnHandler
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class WebauthnHandlerFactory:
    """Selects the first available WebAuthn handler from the known set."""

    handlers: List[WebAuthnHandler]

    def __init__(self):
        # Plugin-based handling is currently the only supported mechanism.
        self.handlers = [PluginHandler()]

    def get_handler(self) -> Optional[WebAuthnHandler]:
        """Returns the first handler reporting availability, or None."""
        return next(
            (candidate for candidate in self.handlers if candidate.is_available()),
            None,
        )
|
lib/python3.10/site-packages/google/oauth2/webauthn_types.py
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from dataclasses import dataclass
|
| 2 |
+
import json
|
| 3 |
+
from typing import Any, Dict, List, Optional
|
| 4 |
+
|
| 5 |
+
from google.auth import exceptions
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
@dataclass(frozen=True)
class PublicKeyCredentialDescriptor:
    """Descriptor for a security key based credential.

    https://www.w3.org/TR/webauthn-3/#dictionary-credential-descriptor

    Args:
        id: <url-safe base64-encoded> credential id (key handle).
        transports: <'usb'|'nfc'|'ble'|'internal'> List of supported transports.
    """

    id: str
    transports: Optional[List[str]] = None

    def to_dict(self):
        """Serializes this descriptor into its WebAuthn dictionary form."""
        serialized = {"type": "public-key", "id": self.id}
        # An empty/absent transport list is omitted entirely.
        if self.transports:
            serialized["transports"] = self.transports
        return serialized
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
@dataclass
class AuthenticationExtensionsClientInputs:
    """Client extensions inputs for WebAuthn extensions.

    Args:
        appid: app id that can be asserted with in addition to rpid.
            https://www.w3.org/TR/webauthn-3/#sctn-appid-extension
    """

    appid: Optional[str] = None

    def to_dict(self):
        """Returns only the extensions that were actually provided."""
        return {"appid": self.appid} if self.appid else {}
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
@dataclass
class GetRequest:
    """WebAuthn get request

    Args:
        origin: Origin where the WebAuthn get assertion takes place.
        rpid: Relying Party ID.
        challenge: <url-safe base64-encoded> raw challenge.
        timeout_ms: Timeout number in millisecond.
        allow_credentials: List of allowed credentials.
        user_verification: <'required'|'preferred'|'discouraged'> User verification requirement.
        extensions: WebAuthn authentication extensions inputs.
    """

    origin: str
    rpid: str
    challenge: str
    timeout_ms: Optional[int] = None
    allow_credentials: Optional[List[PublicKeyCredentialDescriptor]] = None
    user_verification: Optional[str] = None
    extensions: Optional[AuthenticationExtensionsClientInputs] = None

    def to_json(self) -> str:
        """Serializes this request into the plugin's JSON wire format."""
        # Mandatory fields first; optional fields are added only when set.
        options: Dict[str, Any] = {"rpid": self.rpid, "challenge": self.challenge}
        if self.timeout_ms:
            options["timeout"] = self.timeout_ms
        if self.allow_credentials:
            options["allowCredentials"] = [
                cred.to_dict() for cred in self.allow_credentials
            ]
        if self.user_verification:
            options["userVerification"] = self.user_verification
        if self.extensions:
            options["extensions"] = self.extensions.to_dict()
        envelope = {"type": "get", "origin": self.origin, "requestData": options}
        return json.dumps(envelope)
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
@dataclass(frozen=True)
class AuthenticatorAssertionResponse:
    """Authenticator response to a WebAuthn get (assertion) request.

    https://www.w3.org/TR/webauthn-3/#authenticatorassertionresponse

    Args:
        client_data_json: <url-safe base64-encoded> client data JSON.
        authenticator_data: <url-safe base64-encoded> authenticator data.
        signature: <url-safe base64-encoded> signature.
        user_handle: <url-safe base64-encoded> user handle.
    """

    # All fields carry url-safe base64-encoded payloads as produced by the
    # plugin; user_handle may be None when the authenticator omits it.
    client_data_json: str
    authenticator_data: str
    signature: str
    user_handle: Optional[str]
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
@dataclass(frozen=True)
class GetResponse:
    """WebAuthn get (assertion) response.

    Args:
        id: <url-safe base64-encoded> credential id (key handle).
        response: The authenticator assertion response.
        authenticator_attachment: <'cross-platform'|'platform'> The attachment status of the authenticator.
        client_extension_results: WebAuthn authentication extensions output results in a dictionary.
    """

    id: str
    response: AuthenticatorAssertionResponse
    authenticator_attachment: Optional[str]
    client_extension_results: Optional[Dict]

    @staticmethod
    def from_json(json_str: str):
        """Verify and construct GetResponse from a JSON string."""
        try:
            resp_json = json.loads(json_str)
        except ValueError:
            raise exceptions.MalformedError("Invalid Get JSON response")

        # The plugin must label its reply as a "getResponse" envelope.
        if resp_json.get("type") != "getResponse":
            raise exceptions.MalformedError(
                "Invalid Get response type: {}".format(resp_json.get("type"))
            )

        pk_cred = resp_json.get("responseData")
        if pk_cred is None:
            # Distinguish an explicit plugin-reported error from an
            # empty reply.
            if resp_json.get("error"):
                raise exceptions.ReauthFailError(
                    "WebAuthn.get failure: {}".format(resp_json["error"])
                )
            raise exceptions.MalformedError("Get response is empty")

        if pk_cred.get("type") != "public-key":
            raise exceptions.MalformedError(
                "Invalid credential type: {}".format(pk_cred.get("type"))
            )

        assertion_json = pk_cred["response"]
        assertion_resp = AuthenticatorAssertionResponse(
            client_data_json=assertion_json["clientDataJSON"],
            authenticator_data=assertion_json["authenticatorData"],
            signature=assertion_json["signature"],
            user_handle=assertion_json.get("userHandle"),
        )
        return GetResponse(
            id=pk_cred["id"],
            response=assertion_resp,
            authenticator_attachment=pk_cred.get("authenticatorAttachment"),
            client_extension_results=pk_cred.get("clientExtensionResults"),
        )
|
lib/python3.10/site-packages/google/protobuf/__init__.py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd

# Copyright 2007 Google Inc. All Rights Reserved.

# Version of the installed protobuf Python runtime.
__version__ = '4.25.6'
|
lib/python3.10/site-packages/google/protobuf/any_pb2.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/any.proto
# Protobuf Python Version: 4.25.6
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




# Register the serialized FileDescriptorProto for google/protobuf/any.proto
# with the default descriptor pool; message classes are built from it below.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"6\n\x03\x41ny\x12\x19\n\x08type_url\x18\x01 \x01(\tR\x07typeUrl\x12\x14\n\x05value\x18\x02 \x01(\x0cR\x05valueBv\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypes b\x06proto3'.replace(b' b\x06proto3', b'b\x06proto3'))

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
  # Pure-Python descriptors: attach serialized options and byte offsets.
  _globals['DESCRIPTOR']._options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _globals['_ANY']._serialized_start=46
  _globals['_ANY']._serialized_end=100
# @@protoc_insertion_point(module_scope)
|
lib/python3.10/site-packages/google/protobuf/api_pb2.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/api.proto
# Protobuf Python Version: 4.25.6
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2


# Register the serialized FileDescriptorProto for google/protobuf/api.proto
# with the default descriptor pool; message classes are built from it below.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\xc1\x02\n\x03\x41pi\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x31\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.MethodR\x07methods\x12\x31\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.OptionR\x07options\x12\x18\n\x07version\x18\x04 \x01(\tR\x07version\x12\x45\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContextR\rsourceContext\x12.\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.MixinR\x06mixins\x12/\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.SyntaxR\x06syntax\"\xb2\x02\n\x06Method\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12(\n\x10request_type_url\x18\x02 \x01(\tR\x0erequestTypeUrl\x12+\n\x11request_streaming\x18\x03 \x01(\x08R\x10requestStreaming\x12*\n\x11response_type_url\x18\x04 \x01(\tR\x0fresponseTypeUrl\x12-\n\x12response_streaming\x18\x05 \x01(\x08R\x11responseStreaming\x12\x31\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.OptionR\x07options\x12/\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.SyntaxR\x06syntax\"/\n\x05Mixin\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x12\n\x04root\x18\x02 \x01(\tR\x04rootBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
  # Pure-Python descriptors: attach serialized options and byte offsets.
  _globals['DESCRIPTOR']._options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _globals['_API']._serialized_start=113
  _globals['_API']._serialized_end=434
  _globals['_METHOD']._serialized_start=437
  _globals['_METHOD']._serialized_end=743
  _globals['_MIXIN']._serialized_start=745
  _globals['_MIXIN']._serialized_end=792
# @@protoc_insertion_point(module_scope)
# @@protoc_insertion_point(module_scope)
|
lib/python3.10/site-packages/google/protobuf/compiler/__init__.py
ADDED
|
File without changes
|
lib/python3.10/site-packages/google/protobuf/compiler/plugin_pb2.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/compiler/plugin.proto
# Protobuf Python Version: 4.25.6
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2


# Register the serialized FileDescriptorProto for
# google/protobuf/compiler/plugin.proto with the default descriptor pool;
# message classes are built from it below.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"c\n\x07Version\x12\x14\n\x05major\x18\x01 \x01(\x05R\x05major\x12\x14\n\x05minor\x18\x02 \x01(\x05R\x05minor\x12\x14\n\x05patch\x18\x03 \x01(\x05R\x05patch\x12\x16\n\x06suffix\x18\x04 \x01(\tR\x06suffix\"\xcf\x02\n\x14\x43odeGeneratorRequest\x12(\n\x10\x66ile_to_generate\x18\x01 \x03(\tR\x0e\x66ileToGenerate\x12\x1c\n\tparameter\x18\x02 \x01(\tR\tparameter\x12\x43\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProtoR\tprotoFile\x12\\\n\x17source_file_descriptors\x18\x11 \x03(\x0b\x32$.google.protobuf.FileDescriptorProtoR\x15sourceFileDescriptors\x12L\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.VersionR\x0f\x63ompilerVersion\"\xb3\x03\n\x15\x43odeGeneratorResponse\x12\x14\n\x05\x65rror\x18\x01 \x01(\tR\x05\x65rror\x12-\n\x12supported_features\x18\x02 \x01(\x04R\x11supportedFeatures\x12H\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.FileR\x04\x66ile\x1a\xb1\x01\n\x04\x46ile\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\'\n\x0finsertion_point\x18\x02 \x01(\tR\x0einsertionPoint\x12\x18\n\x07\x63ontent\x18\x0f \x01(\tR\x07\x63ontent\x12R\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfoR\x11generatedCodeInfo\"W\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x12\x1d\n\x19\x46\x45\x41TURE_SUPPORTS_EDITIONS\x10\x02\x42r\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb\xaa\x02\x18Google.Protobuf.Compiler')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
  # Pure-Python descriptors: attach serialized options and byte offsets.
  _globals['DESCRIPTOR']._options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb\252\002\030Google.Protobuf.Compiler'
  _globals['_VERSION']._serialized_start=101
  _globals['_VERSION']._serialized_end=200
  _globals['_CODEGENERATORREQUEST']._serialized_start=203
  _globals['_CODEGENERATORREQUEST']._serialized_end=538
  _globals['_CODEGENERATORRESPONSE']._serialized_start=541
  _globals['_CODEGENERATORRESPONSE']._serialized_end=976
  _globals['_CODEGENERATORRESPONSE_FILE']._serialized_start=710
  _globals['_CODEGENERATORRESPONSE_FILE']._serialized_end=887
  _globals['_CODEGENERATORRESPONSE_FEATURE']._serialized_start=889
  _globals['_CODEGENERATORRESPONSE_FEATURE']._serialized_end=976
# @@protoc_insertion_point(module_scope)
|
lib/python3.10/site-packages/google/protobuf/descriptor.py
ADDED
|
@@ -0,0 +1,1282 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
"""Descriptors essentially contain exactly the information found in a .proto
|
| 9 |
+
file, in types that make this information accessible in Python.
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
__author__ = 'robinson@google.com (Will Robinson)'
|
| 13 |
+
|
| 14 |
+
import threading
|
| 15 |
+
import warnings
|
| 16 |
+
|
| 17 |
+
from google.protobuf.internal import api_implementation
|
| 18 |
+
|
| 19 |
+
_USE_C_DESCRIPTORS = False
|
| 20 |
+
if api_implementation.Type() != 'python':
|
| 21 |
+
# Used by MakeDescriptor in cpp mode
|
| 22 |
+
import binascii
|
| 23 |
+
import os
|
| 24 |
+
# pylint: disable=protected-access
|
| 25 |
+
_message = api_implementation._c_module
|
| 26 |
+
# TODO: Remove this import after fix api_implementation
|
| 27 |
+
if _message is None:
|
| 28 |
+
from google.protobuf.pyext import _message
|
| 29 |
+
_USE_C_DESCRIPTORS = True
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class Error(Exception):
|
| 33 |
+
"""Base error for this module."""
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class TypeTransformationError(Error):
|
| 37 |
+
"""Error transforming between python proto type and corresponding C++ type."""
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
if _USE_C_DESCRIPTORS:
|
| 41 |
+
# This metaclass allows to override the behavior of code like
|
| 42 |
+
# isinstance(my_descriptor, FieldDescriptor)
|
| 43 |
+
# and make it return True when the descriptor is an instance of the extension
|
| 44 |
+
# type written in C++.
|
| 45 |
+
class DescriptorMetaclass(type):
|
| 46 |
+
|
| 47 |
+
def __instancecheck__(cls, obj):
|
| 48 |
+
if super(DescriptorMetaclass, cls).__instancecheck__(obj):
|
| 49 |
+
return True
|
| 50 |
+
if isinstance(obj, cls._C_DESCRIPTOR_CLASS):
|
| 51 |
+
return True
|
| 52 |
+
return False
|
| 53 |
+
else:
|
| 54 |
+
# The standard metaclass; nothing changes.
|
| 55 |
+
DescriptorMetaclass = type
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
class _Lock(object):
|
| 59 |
+
"""Wrapper class of threading.Lock(), which is allowed by 'with'."""
|
| 60 |
+
|
| 61 |
+
def __new__(cls):
|
| 62 |
+
self = object.__new__(cls)
|
| 63 |
+
self._lock = threading.Lock() # pylint: disable=protected-access
|
| 64 |
+
return self
|
| 65 |
+
|
| 66 |
+
def __enter__(self):
|
| 67 |
+
self._lock.acquire()
|
| 68 |
+
|
| 69 |
+
def __exit__(self, exc_type, exc_value, exc_tb):
|
| 70 |
+
self._lock.release()
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
_lock = threading.Lock()
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def _Deprecated(name):
|
| 77 |
+
if _Deprecated.count > 0:
|
| 78 |
+
_Deprecated.count -= 1
|
| 79 |
+
warnings.warn(
|
| 80 |
+
'Call to deprecated create function %s(). Note: Create unlinked '
|
| 81 |
+
'descriptors is going to go away. Please use get/find descriptors from '
|
| 82 |
+
'generated code or query the descriptor_pool.'
|
| 83 |
+
% name,
|
| 84 |
+
category=DeprecationWarning, stacklevel=3)
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
# Deprecated warnings will print 100 times at most which should be enough for
|
| 88 |
+
# users to notice and do not cause timeout.
|
| 89 |
+
_Deprecated.count = 100
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
_internal_create_key = object()
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
class DescriptorBase(metaclass=DescriptorMetaclass):
|
| 96 |
+
|
| 97 |
+
"""Descriptors base class.
|
| 98 |
+
|
| 99 |
+
This class is the base of all descriptor classes. It provides common options
|
| 100 |
+
related functionality.
|
| 101 |
+
|
| 102 |
+
Attributes:
|
| 103 |
+
has_options: True if the descriptor has non-default options. Usually it is
|
| 104 |
+
not necessary to read this -- just call GetOptions() which will happily
|
| 105 |
+
return the default instance. However, it's sometimes useful for
|
| 106 |
+
efficiency, and also useful inside the protobuf implementation to avoid
|
| 107 |
+
some bootstrapping issues.
|
| 108 |
+
file (FileDescriptor): Reference to file info.
|
| 109 |
+
"""
|
| 110 |
+
|
| 111 |
+
if _USE_C_DESCRIPTORS:
|
| 112 |
+
# The class, or tuple of classes, that are considered as "virtual
|
| 113 |
+
# subclasses" of this descriptor class.
|
| 114 |
+
_C_DESCRIPTOR_CLASS = ()
|
| 115 |
+
|
| 116 |
+
def __init__(self, file, options, serialized_options, options_class_name):
|
| 117 |
+
"""Initialize the descriptor given its options message and the name of the
|
| 118 |
+
class of the options message. The name of the class is required in case
|
| 119 |
+
the options message is None and has to be created.
|
| 120 |
+
"""
|
| 121 |
+
self.file = file
|
| 122 |
+
self._options = options
|
| 123 |
+
self._options_class_name = options_class_name
|
| 124 |
+
self._serialized_options = serialized_options
|
| 125 |
+
|
| 126 |
+
# Does this descriptor have non-default options?
|
| 127 |
+
self.has_options = (self._options is not None) or (
|
| 128 |
+
self._serialized_options is not None
|
| 129 |
+
)
|
| 130 |
+
|
| 131 |
+
def _SetOptions(self, options, options_class_name):
|
| 132 |
+
"""Sets the descriptor's options
|
| 133 |
+
|
| 134 |
+
This function is used in generated proto2 files to update descriptor
|
| 135 |
+
options. It must not be used outside proto2.
|
| 136 |
+
"""
|
| 137 |
+
self._options = options
|
| 138 |
+
self._options_class_name = options_class_name
|
| 139 |
+
|
| 140 |
+
# Does this descriptor have non-default options?
|
| 141 |
+
self.has_options = options is not None
|
| 142 |
+
|
| 143 |
+
def GetOptions(self):
|
| 144 |
+
"""Retrieves descriptor options.
|
| 145 |
+
|
| 146 |
+
This method returns the options set or creates the default options for the
|
| 147 |
+
descriptor.
|
| 148 |
+
"""
|
| 149 |
+
if self._options:
|
| 150 |
+
return self._options
|
| 151 |
+
|
| 152 |
+
from google.protobuf import descriptor_pb2
|
| 153 |
+
try:
|
| 154 |
+
options_class = getattr(descriptor_pb2,
|
| 155 |
+
self._options_class_name)
|
| 156 |
+
except AttributeError:
|
| 157 |
+
raise RuntimeError('Unknown options class name %s!' %
|
| 158 |
+
(self._options_class_name))
|
| 159 |
+
|
| 160 |
+
if self._serialized_options is None:
|
| 161 |
+
with _lock:
|
| 162 |
+
self._options = options_class()
|
| 163 |
+
else:
|
| 164 |
+
options = _ParseOptions(options_class(), self._serialized_options)
|
| 165 |
+
with _lock:
|
| 166 |
+
self._options = options
|
| 167 |
+
|
| 168 |
+
return self._options
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
class _NestedDescriptorBase(DescriptorBase):
|
| 172 |
+
"""Common class for descriptors that can be nested."""
|
| 173 |
+
|
| 174 |
+
def __init__(self, options, options_class_name, name, full_name,
|
| 175 |
+
file, containing_type, serialized_start=None,
|
| 176 |
+
serialized_end=None, serialized_options=None):
|
| 177 |
+
"""Constructor.
|
| 178 |
+
|
| 179 |
+
Args:
|
| 180 |
+
options: Protocol message options or None to use default message options.
|
| 181 |
+
options_class_name (str): The class name of the above options.
|
| 182 |
+
name (str): Name of this protocol message type.
|
| 183 |
+
full_name (str): Fully-qualified name of this protocol message type, which
|
| 184 |
+
will include protocol "package" name and the name of any enclosing
|
| 185 |
+
types.
|
| 186 |
+
containing_type: if provided, this is a nested descriptor, with this
|
| 187 |
+
descriptor as parent, otherwise None.
|
| 188 |
+
serialized_start: The start index (inclusive) in block in the
|
| 189 |
+
file.serialized_pb that describes this descriptor.
|
| 190 |
+
serialized_end: The end index (exclusive) in block in the
|
| 191 |
+
file.serialized_pb that describes this descriptor.
|
| 192 |
+
serialized_options: Protocol message serialized options or None.
|
| 193 |
+
"""
|
| 194 |
+
super(_NestedDescriptorBase, self).__init__(
|
| 195 |
+
file, options, serialized_options, options_class_name
|
| 196 |
+
)
|
| 197 |
+
|
| 198 |
+
self.name = name
|
| 199 |
+
# TODO: Add function to calculate full_name instead of having it in
|
| 200 |
+
# memory?
|
| 201 |
+
self.full_name = full_name
|
| 202 |
+
self.containing_type = containing_type
|
| 203 |
+
|
| 204 |
+
self._serialized_start = serialized_start
|
| 205 |
+
self._serialized_end = serialized_end
|
| 206 |
+
|
| 207 |
+
def CopyToProto(self, proto):
|
| 208 |
+
"""Copies this to the matching proto in descriptor_pb2.
|
| 209 |
+
|
| 210 |
+
Args:
|
| 211 |
+
proto: An empty proto instance from descriptor_pb2.
|
| 212 |
+
|
| 213 |
+
Raises:
|
| 214 |
+
Error: If self couldn't be serialized, due to to few constructor
|
| 215 |
+
arguments.
|
| 216 |
+
"""
|
| 217 |
+
if (self.file is not None and
|
| 218 |
+
self._serialized_start is not None and
|
| 219 |
+
self._serialized_end is not None):
|
| 220 |
+
proto.ParseFromString(self.file.serialized_pb[
|
| 221 |
+
self._serialized_start:self._serialized_end])
|
| 222 |
+
else:
|
| 223 |
+
raise Error('Descriptor does not contain serialization.')
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
class Descriptor(_NestedDescriptorBase):
|
| 227 |
+
|
| 228 |
+
"""Descriptor for a protocol message type.
|
| 229 |
+
|
| 230 |
+
Attributes:
|
| 231 |
+
name (str): Name of this protocol message type.
|
| 232 |
+
full_name (str): Fully-qualified name of this protocol message type,
|
| 233 |
+
which will include protocol "package" name and the name of any
|
| 234 |
+
enclosing types.
|
| 235 |
+
containing_type (Descriptor): Reference to the descriptor of the type
|
| 236 |
+
containing us, or None if this is top-level.
|
| 237 |
+
fields (list[FieldDescriptor]): Field descriptors for all fields in
|
| 238 |
+
this type.
|
| 239 |
+
fields_by_number (dict(int, FieldDescriptor)): Same
|
| 240 |
+
:class:`FieldDescriptor` objects as in :attr:`fields`, but indexed
|
| 241 |
+
by "number" attribute in each FieldDescriptor.
|
| 242 |
+
fields_by_name (dict(str, FieldDescriptor)): Same
|
| 243 |
+
:class:`FieldDescriptor` objects as in :attr:`fields`, but indexed by
|
| 244 |
+
"name" attribute in each :class:`FieldDescriptor`.
|
| 245 |
+
nested_types (list[Descriptor]): Descriptor references
|
| 246 |
+
for all protocol message types nested within this one.
|
| 247 |
+
nested_types_by_name (dict(str, Descriptor)): Same Descriptor
|
| 248 |
+
objects as in :attr:`nested_types`, but indexed by "name" attribute
|
| 249 |
+
in each Descriptor.
|
| 250 |
+
enum_types (list[EnumDescriptor]): :class:`EnumDescriptor` references
|
| 251 |
+
for all enums contained within this type.
|
| 252 |
+
enum_types_by_name (dict(str, EnumDescriptor)): Same
|
| 253 |
+
:class:`EnumDescriptor` objects as in :attr:`enum_types`, but
|
| 254 |
+
indexed by "name" attribute in each EnumDescriptor.
|
| 255 |
+
enum_values_by_name (dict(str, EnumValueDescriptor)): Dict mapping
|
| 256 |
+
from enum value name to :class:`EnumValueDescriptor` for that value.
|
| 257 |
+
extensions (list[FieldDescriptor]): All extensions defined directly
|
| 258 |
+
within this message type (NOT within a nested type).
|
| 259 |
+
extensions_by_name (dict(str, FieldDescriptor)): Same FieldDescriptor
|
| 260 |
+
objects as :attr:`extensions`, but indexed by "name" attribute of each
|
| 261 |
+
FieldDescriptor.
|
| 262 |
+
is_extendable (bool): Does this type define any extension ranges?
|
| 263 |
+
oneofs (list[OneofDescriptor]): The list of descriptors for oneof fields
|
| 264 |
+
in this message.
|
| 265 |
+
oneofs_by_name (dict(str, OneofDescriptor)): Same objects as in
|
| 266 |
+
:attr:`oneofs`, but indexed by "name" attribute.
|
| 267 |
+
file (FileDescriptor): Reference to file descriptor.
|
| 268 |
+
is_map_entry: If the message type is a map entry.
|
| 269 |
+
|
| 270 |
+
"""
|
| 271 |
+
|
| 272 |
+
if _USE_C_DESCRIPTORS:
|
| 273 |
+
_C_DESCRIPTOR_CLASS = _message.Descriptor
|
| 274 |
+
|
| 275 |
+
def __new__(
|
| 276 |
+
cls,
|
| 277 |
+
name=None,
|
| 278 |
+
full_name=None,
|
| 279 |
+
filename=None,
|
| 280 |
+
containing_type=None,
|
| 281 |
+
fields=None,
|
| 282 |
+
nested_types=None,
|
| 283 |
+
enum_types=None,
|
| 284 |
+
extensions=None,
|
| 285 |
+
options=None,
|
| 286 |
+
serialized_options=None,
|
| 287 |
+
is_extendable=True,
|
| 288 |
+
extension_ranges=None,
|
| 289 |
+
oneofs=None,
|
| 290 |
+
file=None, # pylint: disable=redefined-builtin
|
| 291 |
+
serialized_start=None,
|
| 292 |
+
serialized_end=None,
|
| 293 |
+
syntax=None,
|
| 294 |
+
is_map_entry=False,
|
| 295 |
+
create_key=None):
|
| 296 |
+
_message.Message._CheckCalledFromGeneratedFile()
|
| 297 |
+
return _message.default_pool.FindMessageTypeByName(full_name)
|
| 298 |
+
|
| 299 |
+
# NOTE: The file argument redefining a builtin is nothing we can
|
| 300 |
+
# fix right now since we don't know how many clients already rely on the
|
| 301 |
+
# name of the argument.
|
| 302 |
+
def __init__(self, name, full_name, filename, containing_type, fields,
|
| 303 |
+
nested_types, enum_types, extensions, options=None,
|
| 304 |
+
serialized_options=None,
|
| 305 |
+
is_extendable=True, extension_ranges=None, oneofs=None,
|
| 306 |
+
file=None, serialized_start=None, serialized_end=None, # pylint: disable=redefined-builtin
|
| 307 |
+
syntax=None, is_map_entry=False, create_key=None):
|
| 308 |
+
"""Arguments to __init__() are as described in the description
|
| 309 |
+
of Descriptor fields above.
|
| 310 |
+
|
| 311 |
+
Note that filename is an obsolete argument, that is not used anymore.
|
| 312 |
+
Please use file.name to access this as an attribute.
|
| 313 |
+
"""
|
| 314 |
+
if create_key is not _internal_create_key:
|
| 315 |
+
_Deprecated('Descriptor')
|
| 316 |
+
|
| 317 |
+
super(Descriptor, self).__init__(
|
| 318 |
+
options, 'MessageOptions', name, full_name, file,
|
| 319 |
+
containing_type, serialized_start=serialized_start,
|
| 320 |
+
serialized_end=serialized_end, serialized_options=serialized_options)
|
| 321 |
+
|
| 322 |
+
# We have fields in addition to fields_by_name and fields_by_number,
|
| 323 |
+
# so that:
|
| 324 |
+
# 1. Clients can index fields by "order in which they're listed."
|
| 325 |
+
# 2. Clients can easily iterate over all fields with the terse
|
| 326 |
+
# syntax: for f in descriptor.fields: ...
|
| 327 |
+
self.fields = fields
|
| 328 |
+
for field in self.fields:
|
| 329 |
+
field.containing_type = self
|
| 330 |
+
self.fields_by_number = dict((f.number, f) for f in fields)
|
| 331 |
+
self.fields_by_name = dict((f.name, f) for f in fields)
|
| 332 |
+
self._fields_by_camelcase_name = None
|
| 333 |
+
|
| 334 |
+
self.nested_types = nested_types
|
| 335 |
+
for nested_type in nested_types:
|
| 336 |
+
nested_type.containing_type = self
|
| 337 |
+
self.nested_types_by_name = dict((t.name, t) for t in nested_types)
|
| 338 |
+
|
| 339 |
+
self.enum_types = enum_types
|
| 340 |
+
for enum_type in self.enum_types:
|
| 341 |
+
enum_type.containing_type = self
|
| 342 |
+
self.enum_types_by_name = dict((t.name, t) for t in enum_types)
|
| 343 |
+
self.enum_values_by_name = dict(
|
| 344 |
+
(v.name, v) for t in enum_types for v in t.values)
|
| 345 |
+
|
| 346 |
+
self.extensions = extensions
|
| 347 |
+
for extension in self.extensions:
|
| 348 |
+
extension.extension_scope = self
|
| 349 |
+
self.extensions_by_name = dict((f.name, f) for f in extensions)
|
| 350 |
+
self.is_extendable = is_extendable
|
| 351 |
+
self.extension_ranges = extension_ranges
|
| 352 |
+
self.oneofs = oneofs if oneofs is not None else []
|
| 353 |
+
self.oneofs_by_name = dict((o.name, o) for o in self.oneofs)
|
| 354 |
+
for oneof in self.oneofs:
|
| 355 |
+
oneof.containing_type = self
|
| 356 |
+
self._deprecated_syntax = syntax or "proto2"
|
| 357 |
+
self._is_map_entry = is_map_entry
|
| 358 |
+
|
| 359 |
+
@property
|
| 360 |
+
def syntax(self):
|
| 361 |
+
warnings.warn(
|
| 362 |
+
'descriptor.syntax is deprecated. It will be removed'
|
| 363 |
+
' soon. Most usages are checking field descriptors. Consider to use'
|
| 364 |
+
' has_presence, is_packed on field descriptors.'
|
| 365 |
+
)
|
| 366 |
+
return self._deprecated_syntax
|
| 367 |
+
|
| 368 |
+
@property
|
| 369 |
+
def fields_by_camelcase_name(self):
|
| 370 |
+
"""Same FieldDescriptor objects as in :attr:`fields`, but indexed by
|
| 371 |
+
:attr:`FieldDescriptor.camelcase_name`.
|
| 372 |
+
"""
|
| 373 |
+
if self._fields_by_camelcase_name is None:
|
| 374 |
+
self._fields_by_camelcase_name = dict(
|
| 375 |
+
(f.camelcase_name, f) for f in self.fields)
|
| 376 |
+
return self._fields_by_camelcase_name
|
| 377 |
+
|
| 378 |
+
def EnumValueName(self, enum, value):
|
| 379 |
+
"""Returns the string name of an enum value.
|
| 380 |
+
|
| 381 |
+
This is just a small helper method to simplify a common operation.
|
| 382 |
+
|
| 383 |
+
Args:
|
| 384 |
+
enum: string name of the Enum.
|
| 385 |
+
value: int, value of the enum.
|
| 386 |
+
|
| 387 |
+
Returns:
|
| 388 |
+
string name of the enum value.
|
| 389 |
+
|
| 390 |
+
Raises:
|
| 391 |
+
KeyError if either the Enum doesn't exist or the value is not a valid
|
| 392 |
+
value for the enum.
|
| 393 |
+
"""
|
| 394 |
+
return self.enum_types_by_name[enum].values_by_number[value].name
|
| 395 |
+
|
| 396 |
+
def CopyToProto(self, proto):
|
| 397 |
+
"""Copies this to a descriptor_pb2.DescriptorProto.
|
| 398 |
+
|
| 399 |
+
Args:
|
| 400 |
+
proto: An empty descriptor_pb2.DescriptorProto.
|
| 401 |
+
"""
|
| 402 |
+
# This function is overridden to give a better doc comment.
|
| 403 |
+
super(Descriptor, self).CopyToProto(proto)
|
| 404 |
+
|
| 405 |
+
|
| 406 |
+
# TODO: We should have aggressive checking here,
|
| 407 |
+
# for example:
|
| 408 |
+
# * If you specify a repeated field, you should not be allowed
|
| 409 |
+
# to specify a default value.
|
| 410 |
+
# * [Other examples here as needed].
|
| 411 |
+
#
|
| 412 |
+
# TODO: for this and other *Descriptor classes, we
|
| 413 |
+
# might also want to lock things down aggressively (e.g.,
|
| 414 |
+
# prevent clients from setting the attributes). Having
|
| 415 |
+
# stronger invariants here in general will reduce the number
|
| 416 |
+
# of runtime checks we must do in reflection.py...
|
| 417 |
+
class FieldDescriptor(DescriptorBase):
|
| 418 |
+
|
| 419 |
+
"""Descriptor for a single field in a .proto file.
|
| 420 |
+
|
| 421 |
+
Attributes:
|
| 422 |
+
name (str): Name of this field, exactly as it appears in .proto.
|
| 423 |
+
full_name (str): Name of this field, including containing scope. This is
|
| 424 |
+
particularly relevant for extensions.
|
| 425 |
+
index (int): Dense, 0-indexed index giving the order that this
|
| 426 |
+
field textually appears within its message in the .proto file.
|
| 427 |
+
number (int): Tag number declared for this field in the .proto file.
|
| 428 |
+
|
| 429 |
+
type (int): (One of the TYPE_* constants below) Declared type.
|
| 430 |
+
cpp_type (int): (One of the CPPTYPE_* constants below) C++ type used to
|
| 431 |
+
represent this field.
|
| 432 |
+
|
| 433 |
+
label (int): (One of the LABEL_* constants below) Tells whether this
|
| 434 |
+
field is optional, required, or repeated.
|
| 435 |
+
has_default_value (bool): True if this field has a default value defined,
|
| 436 |
+
otherwise false.
|
| 437 |
+
default_value (Varies): Default value of this field. Only
|
| 438 |
+
meaningful for non-repeated scalar fields. Repeated fields
|
| 439 |
+
should always set this to [], and non-repeated composite
|
| 440 |
+
fields should always set this to None.
|
| 441 |
+
|
| 442 |
+
containing_type (Descriptor): Descriptor of the protocol message
|
| 443 |
+
type that contains this field. Set by the Descriptor constructor
|
| 444 |
+
if we're passed into one.
|
| 445 |
+
Somewhat confusingly, for extension fields, this is the
|
| 446 |
+
descriptor of the EXTENDED message, not the descriptor
|
| 447 |
+
of the message containing this field. (See is_extension and
|
| 448 |
+
extension_scope below).
|
| 449 |
+
message_type (Descriptor): If a composite field, a descriptor
|
| 450 |
+
of the message type contained in this field. Otherwise, this is None.
|
| 451 |
+
enum_type (EnumDescriptor): If this field contains an enum, a
|
| 452 |
+
descriptor of that enum. Otherwise, this is None.
|
| 453 |
+
|
| 454 |
+
is_extension: True iff this describes an extension field.
|
| 455 |
+
extension_scope (Descriptor): Only meaningful if is_extension is True.
|
| 456 |
+
Gives the message that immediately contains this extension field.
|
| 457 |
+
Will be None iff we're a top-level (file-level) extension field.
|
| 458 |
+
|
| 459 |
+
options (descriptor_pb2.FieldOptions): Protocol message field options or
|
| 460 |
+
None to use default field options.
|
| 461 |
+
|
| 462 |
+
containing_oneof (OneofDescriptor): If the field is a member of a oneof
|
| 463 |
+
union, contains its descriptor. Otherwise, None.
|
| 464 |
+
|
| 465 |
+
file (FileDescriptor): Reference to file descriptor.
|
| 466 |
+
"""
|
| 467 |
+
|
| 468 |
+
# Must be consistent with C++ FieldDescriptor::Type enum in
|
| 469 |
+
# descriptor.h.
|
| 470 |
+
#
|
| 471 |
+
# TODO: Find a way to eliminate this repetition.
|
| 472 |
+
TYPE_DOUBLE = 1
|
| 473 |
+
TYPE_FLOAT = 2
|
| 474 |
+
TYPE_INT64 = 3
|
| 475 |
+
TYPE_UINT64 = 4
|
| 476 |
+
TYPE_INT32 = 5
|
| 477 |
+
TYPE_FIXED64 = 6
|
| 478 |
+
TYPE_FIXED32 = 7
|
| 479 |
+
TYPE_BOOL = 8
|
| 480 |
+
TYPE_STRING = 9
|
| 481 |
+
TYPE_GROUP = 10
|
| 482 |
+
TYPE_MESSAGE = 11
|
| 483 |
+
TYPE_BYTES = 12
|
| 484 |
+
TYPE_UINT32 = 13
|
| 485 |
+
TYPE_ENUM = 14
|
| 486 |
+
TYPE_SFIXED32 = 15
|
| 487 |
+
TYPE_SFIXED64 = 16
|
| 488 |
+
TYPE_SINT32 = 17
|
| 489 |
+
TYPE_SINT64 = 18
|
| 490 |
+
MAX_TYPE = 18
|
| 491 |
+
|
| 492 |
+
# Must be consistent with C++ FieldDescriptor::CppType enum in
|
| 493 |
+
# descriptor.h.
|
| 494 |
+
#
|
| 495 |
+
# TODO: Find a way to eliminate this repetition.
|
| 496 |
+
CPPTYPE_INT32 = 1
|
| 497 |
+
CPPTYPE_INT64 = 2
|
| 498 |
+
CPPTYPE_UINT32 = 3
|
| 499 |
+
CPPTYPE_UINT64 = 4
|
| 500 |
+
CPPTYPE_DOUBLE = 5
|
| 501 |
+
CPPTYPE_FLOAT = 6
|
| 502 |
+
CPPTYPE_BOOL = 7
|
| 503 |
+
CPPTYPE_ENUM = 8
|
| 504 |
+
CPPTYPE_STRING = 9
|
| 505 |
+
CPPTYPE_MESSAGE = 10
|
| 506 |
+
MAX_CPPTYPE = 10
|
| 507 |
+
|
| 508 |
+
_PYTHON_TO_CPP_PROTO_TYPE_MAP = {
|
| 509 |
+
TYPE_DOUBLE: CPPTYPE_DOUBLE,
|
| 510 |
+
TYPE_FLOAT: CPPTYPE_FLOAT,
|
| 511 |
+
TYPE_ENUM: CPPTYPE_ENUM,
|
| 512 |
+
TYPE_INT64: CPPTYPE_INT64,
|
| 513 |
+
TYPE_SINT64: CPPTYPE_INT64,
|
| 514 |
+
TYPE_SFIXED64: CPPTYPE_INT64,
|
| 515 |
+
TYPE_UINT64: CPPTYPE_UINT64,
|
| 516 |
+
TYPE_FIXED64: CPPTYPE_UINT64,
|
| 517 |
+
TYPE_INT32: CPPTYPE_INT32,
|
| 518 |
+
TYPE_SFIXED32: CPPTYPE_INT32,
|
| 519 |
+
TYPE_SINT32: CPPTYPE_INT32,
|
| 520 |
+
TYPE_UINT32: CPPTYPE_UINT32,
|
| 521 |
+
TYPE_FIXED32: CPPTYPE_UINT32,
|
| 522 |
+
TYPE_BYTES: CPPTYPE_STRING,
|
| 523 |
+
TYPE_STRING: CPPTYPE_STRING,
|
| 524 |
+
TYPE_BOOL: CPPTYPE_BOOL,
|
| 525 |
+
TYPE_MESSAGE: CPPTYPE_MESSAGE,
|
| 526 |
+
TYPE_GROUP: CPPTYPE_MESSAGE
|
| 527 |
+
}
|
| 528 |
+
|
| 529 |
+
# Must be consistent with C++ FieldDescriptor::Label enum in
|
| 530 |
+
# descriptor.h.
|
| 531 |
+
#
|
| 532 |
+
# TODO: Find a way to eliminate this repetition.
|
| 533 |
+
LABEL_OPTIONAL = 1
|
| 534 |
+
LABEL_REQUIRED = 2
|
| 535 |
+
LABEL_REPEATED = 3
|
| 536 |
+
MAX_LABEL = 3
|
| 537 |
+
|
| 538 |
+
# Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber,
|
| 539 |
+
# and kLastReservedNumber in descriptor.h
|
| 540 |
+
MAX_FIELD_NUMBER = (1 << 29) - 1
|
| 541 |
+
FIRST_RESERVED_FIELD_NUMBER = 19000
|
| 542 |
+
LAST_RESERVED_FIELD_NUMBER = 19999
|
| 543 |
+
|
| 544 |
+
if _USE_C_DESCRIPTORS:
|
| 545 |
+
_C_DESCRIPTOR_CLASS = _message.FieldDescriptor
|
| 546 |
+
|
| 547 |
+
def __new__(cls, name, full_name, index, number, type, cpp_type, label,
|
| 548 |
+
default_value, message_type, enum_type, containing_type,
|
| 549 |
+
is_extension, extension_scope, options=None,
|
| 550 |
+
serialized_options=None,
|
| 551 |
+
has_default_value=True, containing_oneof=None, json_name=None,
|
| 552 |
+
file=None, create_key=None): # pylint: disable=redefined-builtin
|
| 553 |
+
_message.Message._CheckCalledFromGeneratedFile()
|
| 554 |
+
if is_extension:
|
| 555 |
+
return _message.default_pool.FindExtensionByName(full_name)
|
| 556 |
+
else:
|
| 557 |
+
return _message.default_pool.FindFieldByName(full_name)
|
| 558 |
+
|
| 559 |
+
def __init__(self, name, full_name, index, number, type, cpp_type, label,
|
| 560 |
+
default_value, message_type, enum_type, containing_type,
|
| 561 |
+
is_extension, extension_scope, options=None,
|
| 562 |
+
serialized_options=None,
|
| 563 |
+
has_default_value=True, containing_oneof=None, json_name=None,
|
| 564 |
+
file=None, create_key=None): # pylint: disable=redefined-builtin
|
| 565 |
+
"""The arguments are as described in the description of FieldDescriptor
|
| 566 |
+
attributes above.
|
| 567 |
+
|
| 568 |
+
Note that containing_type may be None, and may be set later if necessary
|
| 569 |
+
(to deal with circular references between message types, for example).
|
| 570 |
+
Likewise for extension_scope.
|
| 571 |
+
"""
|
| 572 |
+
if create_key is not _internal_create_key:
|
| 573 |
+
_Deprecated('FieldDescriptor')
|
| 574 |
+
|
| 575 |
+
super(FieldDescriptor, self).__init__(
|
| 576 |
+
file, options, serialized_options, 'FieldOptions'
|
| 577 |
+
)
|
| 578 |
+
self.name = name
|
| 579 |
+
self.full_name = full_name
|
| 580 |
+
self._camelcase_name = None
|
| 581 |
+
if json_name is None:
|
| 582 |
+
self.json_name = _ToJsonName(name)
|
| 583 |
+
else:
|
| 584 |
+
self.json_name = json_name
|
| 585 |
+
self.index = index
|
| 586 |
+
self.number = number
|
| 587 |
+
self.type = type
|
| 588 |
+
self.cpp_type = cpp_type
|
| 589 |
+
self.label = label
|
| 590 |
+
self.has_default_value = has_default_value
|
| 591 |
+
self.default_value = default_value
|
| 592 |
+
self.containing_type = containing_type
|
| 593 |
+
self.message_type = message_type
|
| 594 |
+
self.enum_type = enum_type
|
| 595 |
+
self.is_extension = is_extension
|
| 596 |
+
self.extension_scope = extension_scope
|
| 597 |
+
self.containing_oneof = containing_oneof
|
| 598 |
+
if api_implementation.Type() == 'python':
|
| 599 |
+
self._cdescriptor = None
|
| 600 |
+
else:
|
| 601 |
+
if is_extension:
|
| 602 |
+
self._cdescriptor = _message.default_pool.FindExtensionByName(full_name)
|
| 603 |
+
else:
|
| 604 |
+
self._cdescriptor = _message.default_pool.FindFieldByName(full_name)
|
| 605 |
+
|
| 606 |
+
@property
|
| 607 |
+
def camelcase_name(self):
|
| 608 |
+
"""Camelcase name of this field.
|
| 609 |
+
|
| 610 |
+
Returns:
|
| 611 |
+
str: the name in CamelCase.
|
| 612 |
+
"""
|
| 613 |
+
if self._camelcase_name is None:
|
| 614 |
+
self._camelcase_name = _ToCamelCase(self.name)
|
| 615 |
+
return self._camelcase_name
|
| 616 |
+
|
| 617 |
+
@property
|
| 618 |
+
def has_presence(self):
|
| 619 |
+
"""Whether the field distinguishes between unpopulated and default values.
|
| 620 |
+
|
| 621 |
+
Raises:
|
| 622 |
+
RuntimeError: singular field that is not linked with message nor file.
|
| 623 |
+
"""
|
| 624 |
+
if self.label == FieldDescriptor.LABEL_REPEATED:
|
| 625 |
+
return False
|
| 626 |
+
if (self.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE or
|
| 627 |
+
self.containing_oneof):
|
| 628 |
+
return True
|
| 629 |
+
# self.containing_type is used here instead of self.file for legacy
|
| 630 |
+
# compatibility. FieldDescriptor.file was added in cl/153110619
|
| 631 |
+
# Some old/generated code didn't link file to FieldDescriptor.
|
| 632 |
+
# TODO: remove syntax usage b/240619313
|
| 633 |
+
return self.containing_type._deprecated_syntax == 'proto2'
|
| 634 |
+
|
| 635 |
+
@property
|
| 636 |
+
def is_packed(self):
|
| 637 |
+
"""Returns if the field is packed."""
|
| 638 |
+
if self.label != FieldDescriptor.LABEL_REPEATED:
|
| 639 |
+
return False
|
| 640 |
+
field_type = self.type
|
| 641 |
+
if (field_type == FieldDescriptor.TYPE_STRING or
|
| 642 |
+
field_type == FieldDescriptor.TYPE_GROUP or
|
| 643 |
+
field_type == FieldDescriptor.TYPE_MESSAGE or
|
| 644 |
+
field_type == FieldDescriptor.TYPE_BYTES):
|
| 645 |
+
return False
|
| 646 |
+
if self.containing_type._deprecated_syntax == 'proto2':
|
| 647 |
+
return self.has_options and self.GetOptions().packed
|
| 648 |
+
else:
|
| 649 |
+
return (not self.has_options or
|
| 650 |
+
not self.GetOptions().HasField('packed') or
|
| 651 |
+
self.GetOptions().packed)
|
| 652 |
+
|
| 653 |
+
@staticmethod
|
| 654 |
+
def ProtoTypeToCppProtoType(proto_type):
|
| 655 |
+
"""Converts from a Python proto type to a C++ Proto Type.
|
| 656 |
+
|
| 657 |
+
The Python ProtocolBuffer classes specify both the 'Python' datatype and the
|
| 658 |
+
'C++' datatype - and they're not the same. This helper method should
|
| 659 |
+
translate from one to another.
|
| 660 |
+
|
| 661 |
+
Args:
|
| 662 |
+
proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*)
|
| 663 |
+
Returns:
|
| 664 |
+
int: descriptor.FieldDescriptor.CPPTYPE_*, the C++ type.
|
| 665 |
+
Raises:
|
| 666 |
+
TypeTransformationError: when the Python proto type isn't known.
|
| 667 |
+
"""
|
| 668 |
+
try:
|
| 669 |
+
return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type]
|
| 670 |
+
except KeyError:
|
| 671 |
+
raise TypeTransformationError('Unknown proto_type: %s' % proto_type)
|
| 672 |
+
|
| 673 |
+
|
| 674 |
+
class EnumDescriptor(_NestedDescriptorBase):
|
| 675 |
+
|
| 676 |
+
"""Descriptor for an enum defined in a .proto file.
|
| 677 |
+
|
| 678 |
+
Attributes:
|
| 679 |
+
name (str): Name of the enum type.
|
| 680 |
+
full_name (str): Full name of the type, including package name
|
| 681 |
+
and any enclosing type(s).
|
| 682 |
+
|
| 683 |
+
values (list[EnumValueDescriptor]): List of the values
|
| 684 |
+
in this enum.
|
| 685 |
+
values_by_name (dict(str, EnumValueDescriptor)): Same as :attr:`values`,
|
| 686 |
+
but indexed by the "name" field of each EnumValueDescriptor.
|
| 687 |
+
values_by_number (dict(int, EnumValueDescriptor)): Same as :attr:`values`,
|
| 688 |
+
but indexed by the "number" field of each EnumValueDescriptor.
|
| 689 |
+
containing_type (Descriptor): Descriptor of the immediate containing
|
| 690 |
+
type of this enum, or None if this is an enum defined at the
|
| 691 |
+
top level in a .proto file. Set by Descriptor's constructor
|
| 692 |
+
if we're passed into one.
|
| 693 |
+
file (FileDescriptor): Reference to file descriptor.
|
| 694 |
+
options (descriptor_pb2.EnumOptions): Enum options message or
|
| 695 |
+
None to use default enum options.
|
| 696 |
+
"""
|
| 697 |
+
|
| 698 |
+
if _USE_C_DESCRIPTORS:
|
| 699 |
+
_C_DESCRIPTOR_CLASS = _message.EnumDescriptor
|
| 700 |
+
|
| 701 |
+
def __new__(cls, name, full_name, filename, values,
|
| 702 |
+
containing_type=None, options=None,
|
| 703 |
+
serialized_options=None, file=None, # pylint: disable=redefined-builtin
|
| 704 |
+
serialized_start=None, serialized_end=None, create_key=None):
|
| 705 |
+
_message.Message._CheckCalledFromGeneratedFile()
|
| 706 |
+
return _message.default_pool.FindEnumTypeByName(full_name)
|
| 707 |
+
|
| 708 |
+
def __init__(self, name, full_name, filename, values,
|
| 709 |
+
containing_type=None, options=None,
|
| 710 |
+
serialized_options=None, file=None, # pylint: disable=redefined-builtin
|
| 711 |
+
serialized_start=None, serialized_end=None, create_key=None):
|
| 712 |
+
"""Arguments are as described in the attribute description above.
|
| 713 |
+
|
| 714 |
+
Note that filename is an obsolete argument, that is not used anymore.
|
| 715 |
+
Please use file.name to access this as an attribute.
|
| 716 |
+
"""
|
| 717 |
+
if create_key is not _internal_create_key:
|
| 718 |
+
_Deprecated('EnumDescriptor')
|
| 719 |
+
|
| 720 |
+
super(EnumDescriptor, self).__init__(
|
| 721 |
+
options, 'EnumOptions', name, full_name, file,
|
| 722 |
+
containing_type, serialized_start=serialized_start,
|
| 723 |
+
serialized_end=serialized_end, serialized_options=serialized_options)
|
| 724 |
+
|
| 725 |
+
self.values = values
|
| 726 |
+
for value in self.values:
|
| 727 |
+
value.file = file
|
| 728 |
+
value.type = self
|
| 729 |
+
self.values_by_name = dict((v.name, v) for v in values)
|
| 730 |
+
# Values are reversed to ensure that the first alias is retained.
|
| 731 |
+
self.values_by_number = dict((v.number, v) for v in reversed(values))
|
| 732 |
+
|
| 733 |
+
@property
|
| 734 |
+
def is_closed(self):
|
| 735 |
+
"""Returns true whether this is a "closed" enum.
|
| 736 |
+
|
| 737 |
+
This means that it:
|
| 738 |
+
- Has a fixed set of values, rather than being equivalent to an int32.
|
| 739 |
+
- Encountering values not in this set causes them to be treated as unknown
|
| 740 |
+
fields.
|
| 741 |
+
- The first value (i.e., the default) may be nonzero.
|
| 742 |
+
|
| 743 |
+
WARNING: Some runtimes currently have a quirk where non-closed enums are
|
| 744 |
+
treated as closed when used as the type of fields defined in a
|
| 745 |
+
`syntax = proto2;` file. This quirk is not present in all runtimes; as of
|
| 746 |
+
writing, we know that:
|
| 747 |
+
|
| 748 |
+
- C++, Java, and C++-based Python share this quirk.
|
| 749 |
+
- UPB and UPB-based Python do not.
|
| 750 |
+
- PHP and Ruby treat all enums as open regardless of declaration.
|
| 751 |
+
|
| 752 |
+
Care should be taken when using this function to respect the target
|
| 753 |
+
runtime's enum handling quirks.
|
| 754 |
+
"""
|
| 755 |
+
return self.file._deprecated_syntax == 'proto2'
|
| 756 |
+
|
| 757 |
+
def CopyToProto(self, proto):
|
| 758 |
+
"""Copies this to a descriptor_pb2.EnumDescriptorProto.
|
| 759 |
+
|
| 760 |
+
Args:
|
| 761 |
+
proto (descriptor_pb2.EnumDescriptorProto): An empty descriptor proto.
|
| 762 |
+
"""
|
| 763 |
+
# This function is overridden to give a better doc comment.
|
| 764 |
+
super(EnumDescriptor, self).CopyToProto(proto)
|
| 765 |
+
|
| 766 |
+
|
| 767 |
+
class EnumValueDescriptor(DescriptorBase):
|
| 768 |
+
|
| 769 |
+
"""Descriptor for a single value within an enum.
|
| 770 |
+
|
| 771 |
+
Attributes:
|
| 772 |
+
name (str): Name of this value.
|
| 773 |
+
index (int): Dense, 0-indexed index giving the order that this
|
| 774 |
+
value appears textually within its enum in the .proto file.
|
| 775 |
+
number (int): Actual number assigned to this enum value.
|
| 776 |
+
type (EnumDescriptor): :class:`EnumDescriptor` to which this value
|
| 777 |
+
belongs. Set by :class:`EnumDescriptor`'s constructor if we're
|
| 778 |
+
passed into one.
|
| 779 |
+
options (descriptor_pb2.EnumValueOptions): Enum value options message or
|
| 780 |
+
None to use default enum value options options.
|
| 781 |
+
"""
|
| 782 |
+
|
| 783 |
+
if _USE_C_DESCRIPTORS:
|
| 784 |
+
_C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor
|
| 785 |
+
|
| 786 |
+
def __new__(cls, name, index, number,
|
| 787 |
+
type=None, # pylint: disable=redefined-builtin
|
| 788 |
+
options=None, serialized_options=None, create_key=None):
|
| 789 |
+
_message.Message._CheckCalledFromGeneratedFile()
|
| 790 |
+
# There is no way we can build a complete EnumValueDescriptor with the
|
| 791 |
+
# given parameters (the name of the Enum is not known, for example).
|
| 792 |
+
# Fortunately generated files just pass it to the EnumDescriptor()
|
| 793 |
+
# constructor, which will ignore it, so returning None is good enough.
|
| 794 |
+
return None
|
| 795 |
+
|
| 796 |
+
def __init__(self, name, index, number,
|
| 797 |
+
type=None, # pylint: disable=redefined-builtin
|
| 798 |
+
options=None, serialized_options=None, create_key=None):
|
| 799 |
+
"""Arguments are as described in the attribute description above."""
|
| 800 |
+
if create_key is not _internal_create_key:
|
| 801 |
+
_Deprecated('EnumValueDescriptor')
|
| 802 |
+
|
| 803 |
+
super(EnumValueDescriptor, self).__init__(
|
| 804 |
+
type.file if type else None,
|
| 805 |
+
options,
|
| 806 |
+
serialized_options,
|
| 807 |
+
'EnumValueOptions',
|
| 808 |
+
)
|
| 809 |
+
self.name = name
|
| 810 |
+
self.index = index
|
| 811 |
+
self.number = number
|
| 812 |
+
self.type = type
|
| 813 |
+
|
| 814 |
+
|
| 815 |
+
class OneofDescriptor(DescriptorBase):
|
| 816 |
+
"""Descriptor for a oneof field.
|
| 817 |
+
|
| 818 |
+
Attributes:
|
| 819 |
+
name (str): Name of the oneof field.
|
| 820 |
+
full_name (str): Full name of the oneof field, including package name.
|
| 821 |
+
index (int): 0-based index giving the order of the oneof field inside
|
| 822 |
+
its containing type.
|
| 823 |
+
containing_type (Descriptor): :class:`Descriptor` of the protocol message
|
| 824 |
+
type that contains this field. Set by the :class:`Descriptor` constructor
|
| 825 |
+
if we're passed into one.
|
| 826 |
+
fields (list[FieldDescriptor]): The list of field descriptors this
|
| 827 |
+
oneof can contain.
|
| 828 |
+
"""
|
| 829 |
+
|
| 830 |
+
if _USE_C_DESCRIPTORS:
|
| 831 |
+
_C_DESCRIPTOR_CLASS = _message.OneofDescriptor
|
| 832 |
+
|
| 833 |
+
def __new__(
|
| 834 |
+
cls, name, full_name, index, containing_type, fields, options=None,
|
| 835 |
+
serialized_options=None, create_key=None):
|
| 836 |
+
_message.Message._CheckCalledFromGeneratedFile()
|
| 837 |
+
return _message.default_pool.FindOneofByName(full_name)
|
| 838 |
+
|
| 839 |
+
def __init__(
|
| 840 |
+
self, name, full_name, index, containing_type, fields, options=None,
|
| 841 |
+
serialized_options=None, create_key=None):
|
| 842 |
+
"""Arguments are as described in the attribute description above."""
|
| 843 |
+
if create_key is not _internal_create_key:
|
| 844 |
+
_Deprecated('OneofDescriptor')
|
| 845 |
+
|
| 846 |
+
super(OneofDescriptor, self).__init__(
|
| 847 |
+
containing_type.file if containing_type else None,
|
| 848 |
+
options,
|
| 849 |
+
serialized_options,
|
| 850 |
+
'OneofOptions',
|
| 851 |
+
)
|
| 852 |
+
self.name = name
|
| 853 |
+
self.full_name = full_name
|
| 854 |
+
self.index = index
|
| 855 |
+
self.containing_type = containing_type
|
| 856 |
+
self.fields = fields
|
| 857 |
+
|
| 858 |
+
|
| 859 |
+
class ServiceDescriptor(_NestedDescriptorBase):
|
| 860 |
+
|
| 861 |
+
"""Descriptor for a service.
|
| 862 |
+
|
| 863 |
+
Attributes:
|
| 864 |
+
name (str): Name of the service.
|
| 865 |
+
full_name (str): Full name of the service, including package name.
|
| 866 |
+
index (int): 0-indexed index giving the order that this services
|
| 867 |
+
definition appears within the .proto file.
|
| 868 |
+
methods (list[MethodDescriptor]): List of methods provided by this
|
| 869 |
+
service.
|
| 870 |
+
methods_by_name (dict(str, MethodDescriptor)): Same
|
| 871 |
+
:class:`MethodDescriptor` objects as in :attr:`methods_by_name`, but
|
| 872 |
+
indexed by "name" attribute in each :class:`MethodDescriptor`.
|
| 873 |
+
options (descriptor_pb2.ServiceOptions): Service options message or
|
| 874 |
+
None to use default service options.
|
| 875 |
+
file (FileDescriptor): Reference to file info.
|
| 876 |
+
"""
|
| 877 |
+
|
| 878 |
+
if _USE_C_DESCRIPTORS:
|
| 879 |
+
_C_DESCRIPTOR_CLASS = _message.ServiceDescriptor
|
| 880 |
+
|
| 881 |
+
def __new__(
|
| 882 |
+
cls,
|
| 883 |
+
name=None,
|
| 884 |
+
full_name=None,
|
| 885 |
+
index=None,
|
| 886 |
+
methods=None,
|
| 887 |
+
options=None,
|
| 888 |
+
serialized_options=None,
|
| 889 |
+
file=None, # pylint: disable=redefined-builtin
|
| 890 |
+
serialized_start=None,
|
| 891 |
+
serialized_end=None,
|
| 892 |
+
create_key=None):
|
| 893 |
+
_message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access
|
| 894 |
+
return _message.default_pool.FindServiceByName(full_name)
|
| 895 |
+
|
| 896 |
+
def __init__(self, name, full_name, index, methods, options=None,
|
| 897 |
+
serialized_options=None, file=None, # pylint: disable=redefined-builtin
|
| 898 |
+
serialized_start=None, serialized_end=None, create_key=None):
|
| 899 |
+
if create_key is not _internal_create_key:
|
| 900 |
+
_Deprecated('ServiceDescriptor')
|
| 901 |
+
|
| 902 |
+
super(ServiceDescriptor, self).__init__(
|
| 903 |
+
options, 'ServiceOptions', name, full_name, file,
|
| 904 |
+
None, serialized_start=serialized_start,
|
| 905 |
+
serialized_end=serialized_end, serialized_options=serialized_options)
|
| 906 |
+
self.index = index
|
| 907 |
+
self.methods = methods
|
| 908 |
+
self.methods_by_name = dict((m.name, m) for m in methods)
|
| 909 |
+
# Set the containing service for each method in this service.
|
| 910 |
+
for method in self.methods:
|
| 911 |
+
method.file = self.file
|
| 912 |
+
method.containing_service = self
|
| 913 |
+
|
| 914 |
+
def FindMethodByName(self, name):
|
| 915 |
+
"""Searches for the specified method, and returns its descriptor.
|
| 916 |
+
|
| 917 |
+
Args:
|
| 918 |
+
name (str): Name of the method.
|
| 919 |
+
|
| 920 |
+
Returns:
|
| 921 |
+
MethodDescriptor: The descriptor for the requested method.
|
| 922 |
+
|
| 923 |
+
Raises:
|
| 924 |
+
KeyError: if the method cannot be found in the service.
|
| 925 |
+
"""
|
| 926 |
+
return self.methods_by_name[name]
|
| 927 |
+
|
| 928 |
+
def CopyToProto(self, proto):
|
| 929 |
+
"""Copies this to a descriptor_pb2.ServiceDescriptorProto.
|
| 930 |
+
|
| 931 |
+
Args:
|
| 932 |
+
proto (descriptor_pb2.ServiceDescriptorProto): An empty descriptor proto.
|
| 933 |
+
"""
|
| 934 |
+
# This function is overridden to give a better doc comment.
|
| 935 |
+
super(ServiceDescriptor, self).CopyToProto(proto)
|
| 936 |
+
|
| 937 |
+
|
| 938 |
+
class MethodDescriptor(DescriptorBase):
|
| 939 |
+
|
| 940 |
+
"""Descriptor for a method in a service.
|
| 941 |
+
|
| 942 |
+
Attributes:
|
| 943 |
+
name (str): Name of the method within the service.
|
| 944 |
+
full_name (str): Full name of method.
|
| 945 |
+
index (int): 0-indexed index of the method inside the service.
|
| 946 |
+
containing_service (ServiceDescriptor): The service that contains this
|
| 947 |
+
method.
|
| 948 |
+
input_type (Descriptor): The descriptor of the message that this method
|
| 949 |
+
accepts.
|
| 950 |
+
output_type (Descriptor): The descriptor of the message that this method
|
| 951 |
+
returns.
|
| 952 |
+
client_streaming (bool): Whether this method uses client streaming.
|
| 953 |
+
server_streaming (bool): Whether this method uses server streaming.
|
| 954 |
+
options (descriptor_pb2.MethodOptions or None): Method options message, or
|
| 955 |
+
None to use default method options.
|
| 956 |
+
"""
|
| 957 |
+
|
| 958 |
+
if _USE_C_DESCRIPTORS:
|
| 959 |
+
_C_DESCRIPTOR_CLASS = _message.MethodDescriptor
|
| 960 |
+
|
| 961 |
+
def __new__(cls,
|
| 962 |
+
name,
|
| 963 |
+
full_name,
|
| 964 |
+
index,
|
| 965 |
+
containing_service,
|
| 966 |
+
input_type,
|
| 967 |
+
output_type,
|
| 968 |
+
client_streaming=False,
|
| 969 |
+
server_streaming=False,
|
| 970 |
+
options=None,
|
| 971 |
+
serialized_options=None,
|
| 972 |
+
create_key=None):
|
| 973 |
+
_message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access
|
| 974 |
+
return _message.default_pool.FindMethodByName(full_name)
|
| 975 |
+
|
| 976 |
+
def __init__(self,
|
| 977 |
+
name,
|
| 978 |
+
full_name,
|
| 979 |
+
index,
|
| 980 |
+
containing_service,
|
| 981 |
+
input_type,
|
| 982 |
+
output_type,
|
| 983 |
+
client_streaming=False,
|
| 984 |
+
server_streaming=False,
|
| 985 |
+
options=None,
|
| 986 |
+
serialized_options=None,
|
| 987 |
+
create_key=None):
|
| 988 |
+
"""The arguments are as described in the description of MethodDescriptor
|
| 989 |
+
attributes above.
|
| 990 |
+
|
| 991 |
+
Note that containing_service may be None, and may be set later if necessary.
|
| 992 |
+
"""
|
| 993 |
+
if create_key is not _internal_create_key:
|
| 994 |
+
_Deprecated('MethodDescriptor')
|
| 995 |
+
|
| 996 |
+
super(MethodDescriptor, self).__init__(
|
| 997 |
+
containing_service.file if containing_service else None,
|
| 998 |
+
options,
|
| 999 |
+
serialized_options,
|
| 1000 |
+
'MethodOptions',
|
| 1001 |
+
)
|
| 1002 |
+
self.name = name
|
| 1003 |
+
self.full_name = full_name
|
| 1004 |
+
self.index = index
|
| 1005 |
+
self.containing_service = containing_service
|
| 1006 |
+
self.input_type = input_type
|
| 1007 |
+
self.output_type = output_type
|
| 1008 |
+
self.client_streaming = client_streaming
|
| 1009 |
+
self.server_streaming = server_streaming
|
| 1010 |
+
|
| 1011 |
+
def CopyToProto(self, proto):
|
| 1012 |
+
"""Copies this to a descriptor_pb2.MethodDescriptorProto.
|
| 1013 |
+
|
| 1014 |
+
Args:
|
| 1015 |
+
proto (descriptor_pb2.MethodDescriptorProto): An empty descriptor proto.
|
| 1016 |
+
|
| 1017 |
+
Raises:
|
| 1018 |
+
Error: If self couldn't be serialized, due to too few constructor
|
| 1019 |
+
arguments.
|
| 1020 |
+
"""
|
| 1021 |
+
if self.containing_service is not None:
|
| 1022 |
+
from google.protobuf import descriptor_pb2
|
| 1023 |
+
service_proto = descriptor_pb2.ServiceDescriptorProto()
|
| 1024 |
+
self.containing_service.CopyToProto(service_proto)
|
| 1025 |
+
proto.CopyFrom(service_proto.method[self.index])
|
| 1026 |
+
else:
|
| 1027 |
+
raise Error('Descriptor does not contain a service.')
|
| 1028 |
+
|
| 1029 |
+
|
| 1030 |
+
class FileDescriptor(DescriptorBase):
|
| 1031 |
+
"""Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto.
|
| 1032 |
+
|
| 1033 |
+
Note that :attr:`enum_types_by_name`, :attr:`extensions_by_name`, and
|
| 1034 |
+
:attr:`dependencies` fields are only set by the
|
| 1035 |
+
:py:mod:`google.protobuf.message_factory` module, and not by the generated
|
| 1036 |
+
proto code.
|
| 1037 |
+
|
| 1038 |
+
Attributes:
|
| 1039 |
+
name (str): Name of file, relative to root of source tree.
|
| 1040 |
+
package (str): Name of the package
|
| 1041 |
+
syntax (str): string indicating syntax of the file (can be "proto2" or
|
| 1042 |
+
"proto3")
|
| 1043 |
+
serialized_pb (bytes): Byte string of serialized
|
| 1044 |
+
:class:`descriptor_pb2.FileDescriptorProto`.
|
| 1045 |
+
dependencies (list[FileDescriptor]): List of other :class:`FileDescriptor`
|
| 1046 |
+
objects this :class:`FileDescriptor` depends on.
|
| 1047 |
+
public_dependencies (list[FileDescriptor]): A subset of
|
| 1048 |
+
:attr:`dependencies`, which were declared as "public".
|
| 1049 |
+
message_types_by_name (dict(str, Descriptor)): Mapping from message names
|
| 1050 |
+
to their :class:`Descriptor`.
|
| 1051 |
+
enum_types_by_name (dict(str, EnumDescriptor)): Mapping from enum names to
|
| 1052 |
+
their :class:`EnumDescriptor`.
|
| 1053 |
+
extensions_by_name (dict(str, FieldDescriptor)): Mapping from extension
|
| 1054 |
+
names declared at file scope to their :class:`FieldDescriptor`.
|
| 1055 |
+
services_by_name (dict(str, ServiceDescriptor)): Mapping from services'
|
| 1056 |
+
names to their :class:`ServiceDescriptor`.
|
| 1057 |
+
pool (DescriptorPool): The pool this descriptor belongs to. When not
|
| 1058 |
+
passed to the constructor, the global default pool is used.
|
| 1059 |
+
"""
|
| 1060 |
+
|
| 1061 |
+
if _USE_C_DESCRIPTORS:
|
| 1062 |
+
_C_DESCRIPTOR_CLASS = _message.FileDescriptor
|
| 1063 |
+
|
| 1064 |
+
def __new__(cls, name, package, options=None,
|
| 1065 |
+
serialized_options=None, serialized_pb=None,
|
| 1066 |
+
dependencies=None, public_dependencies=None,
|
| 1067 |
+
syntax=None, pool=None, create_key=None):
|
| 1068 |
+
# FileDescriptor() is called from various places, not only from generated
|
| 1069 |
+
# files, to register dynamic proto files and messages.
|
| 1070 |
+
# pylint: disable=g-explicit-bool-comparison
|
| 1071 |
+
if serialized_pb:
|
| 1072 |
+
return _message.default_pool.AddSerializedFile(serialized_pb)
|
| 1073 |
+
else:
|
| 1074 |
+
return super(FileDescriptor, cls).__new__(cls)
|
| 1075 |
+
|
| 1076 |
+
def __init__(self, name, package, options=None,
|
| 1077 |
+
serialized_options=None, serialized_pb=None,
|
| 1078 |
+
dependencies=None, public_dependencies=None,
|
| 1079 |
+
syntax=None, pool=None, create_key=None):
|
| 1080 |
+
"""Constructor."""
|
| 1081 |
+
if create_key is not _internal_create_key:
|
| 1082 |
+
_Deprecated('FileDescriptor')
|
| 1083 |
+
|
| 1084 |
+
super(FileDescriptor, self).__init__(
|
| 1085 |
+
None, options, serialized_options, 'FileOptions'
|
| 1086 |
+
)
|
| 1087 |
+
|
| 1088 |
+
if pool is None:
|
| 1089 |
+
from google.protobuf import descriptor_pool
|
| 1090 |
+
pool = descriptor_pool.Default()
|
| 1091 |
+
self.pool = pool
|
| 1092 |
+
self.message_types_by_name = {}
|
| 1093 |
+
self.name = name
|
| 1094 |
+
self.package = package
|
| 1095 |
+
self._deprecated_syntax = syntax or "proto2"
|
| 1096 |
+
self.serialized_pb = serialized_pb
|
| 1097 |
+
|
| 1098 |
+
self.enum_types_by_name = {}
|
| 1099 |
+
self.extensions_by_name = {}
|
| 1100 |
+
self.services_by_name = {}
|
| 1101 |
+
self.dependencies = (dependencies or [])
|
| 1102 |
+
self.public_dependencies = (public_dependencies or [])
|
| 1103 |
+
|
| 1104 |
+
@property
|
| 1105 |
+
def syntax(self):
|
| 1106 |
+
warnings.warn(
|
| 1107 |
+
'descriptor.syntax is deprecated. It will be removed'
|
| 1108 |
+
' soon. Most usages are checking field descriptors. Consider to use'
|
| 1109 |
+
' has_presence, is_packed on field descriptors.'
|
| 1110 |
+
)
|
| 1111 |
+
return self._deprecated_syntax
|
| 1112 |
+
|
| 1113 |
+
def CopyToProto(self, proto):
|
| 1114 |
+
"""Copies this to a descriptor_pb2.FileDescriptorProto.
|
| 1115 |
+
|
| 1116 |
+
Args:
|
| 1117 |
+
proto: An empty descriptor_pb2.FileDescriptorProto.
|
| 1118 |
+
"""
|
| 1119 |
+
proto.ParseFromString(self.serialized_pb)
|
| 1120 |
+
|
| 1121 |
+
|
| 1122 |
+
def _ParseOptions(message, string):
|
| 1123 |
+
"""Parses serialized options.
|
| 1124 |
+
|
| 1125 |
+
This helper function is used to parse serialized options in generated
|
| 1126 |
+
proto2 files. It must not be used outside proto2.
|
| 1127 |
+
"""
|
| 1128 |
+
message.ParseFromString(string)
|
| 1129 |
+
return message
|
| 1130 |
+
|
| 1131 |
+
|
| 1132 |
+
def _ToCamelCase(name):
|
| 1133 |
+
"""Converts name to camel-case and returns it."""
|
| 1134 |
+
capitalize_next = False
|
| 1135 |
+
result = []
|
| 1136 |
+
|
| 1137 |
+
for c in name:
|
| 1138 |
+
if c == '_':
|
| 1139 |
+
if result:
|
| 1140 |
+
capitalize_next = True
|
| 1141 |
+
elif capitalize_next:
|
| 1142 |
+
result.append(c.upper())
|
| 1143 |
+
capitalize_next = False
|
| 1144 |
+
else:
|
| 1145 |
+
result += c
|
| 1146 |
+
|
| 1147 |
+
# Lower-case the first letter.
|
| 1148 |
+
if result and result[0].isupper():
|
| 1149 |
+
result[0] = result[0].lower()
|
| 1150 |
+
return ''.join(result)
|
| 1151 |
+
|
| 1152 |
+
|
| 1153 |
+
def _OptionsOrNone(descriptor_proto):
|
| 1154 |
+
"""Returns the value of the field `options`, or None if it is not set."""
|
| 1155 |
+
if descriptor_proto.HasField('options'):
|
| 1156 |
+
return descriptor_proto.options
|
| 1157 |
+
else:
|
| 1158 |
+
return None
|
| 1159 |
+
|
| 1160 |
+
|
| 1161 |
+
def _ToJsonName(name):
|
| 1162 |
+
"""Converts name to Json name and returns it."""
|
| 1163 |
+
capitalize_next = False
|
| 1164 |
+
result = []
|
| 1165 |
+
|
| 1166 |
+
for c in name:
|
| 1167 |
+
if c == '_':
|
| 1168 |
+
capitalize_next = True
|
| 1169 |
+
elif capitalize_next:
|
| 1170 |
+
result.append(c.upper())
|
| 1171 |
+
capitalize_next = False
|
| 1172 |
+
else:
|
| 1173 |
+
result += c
|
| 1174 |
+
|
| 1175 |
+
return ''.join(result)
|
| 1176 |
+
|
| 1177 |
+
|
| 1178 |
+
def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
|
| 1179 |
+
syntax=None):
|
| 1180 |
+
"""Make a protobuf Descriptor given a DescriptorProto protobuf.
|
| 1181 |
+
|
| 1182 |
+
Handles nested descriptors. Note that this is limited to the scope of defining
|
| 1183 |
+
a message inside of another message. Composite fields can currently only be
|
| 1184 |
+
resolved if the message is defined in the same scope as the field.
|
| 1185 |
+
|
| 1186 |
+
Args:
|
| 1187 |
+
desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
|
| 1188 |
+
package: Optional package name for the new message Descriptor (string).
|
| 1189 |
+
build_file_if_cpp: Update the C++ descriptor pool if api matches.
|
| 1190 |
+
Set to False on recursion, so no duplicates are created.
|
| 1191 |
+
syntax: The syntax/semantics that should be used. Set to "proto3" to get
|
| 1192 |
+
proto3 field presence semantics.
|
| 1193 |
+
Returns:
|
| 1194 |
+
A Descriptor for protobuf messages.
|
| 1195 |
+
"""
|
| 1196 |
+
if api_implementation.Type() != 'python' and build_file_if_cpp:
|
| 1197 |
+
# The C++ implementation requires all descriptors to be backed by the same
|
| 1198 |
+
# definition in the C++ descriptor pool. To do this, we build a
|
| 1199 |
+
# FileDescriptorProto with the same definition as this descriptor and build
|
| 1200 |
+
# it into the pool.
|
| 1201 |
+
from google.protobuf import descriptor_pb2
|
| 1202 |
+
file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
|
| 1203 |
+
file_descriptor_proto.message_type.add().MergeFrom(desc_proto)
|
| 1204 |
+
|
| 1205 |
+
# Generate a random name for this proto file to prevent conflicts with any
|
| 1206 |
+
# imported ones. We need to specify a file name so the descriptor pool
|
| 1207 |
+
# accepts our FileDescriptorProto, but it is not important what that file
|
| 1208 |
+
# name is actually set to.
|
| 1209 |
+
proto_name = binascii.hexlify(os.urandom(16)).decode('ascii')
|
| 1210 |
+
|
| 1211 |
+
if package:
|
| 1212 |
+
file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
|
| 1213 |
+
proto_name + '.proto')
|
| 1214 |
+
file_descriptor_proto.package = package
|
| 1215 |
+
else:
|
| 1216 |
+
file_descriptor_proto.name = proto_name + '.proto'
|
| 1217 |
+
|
| 1218 |
+
_message.default_pool.Add(file_descriptor_proto)
|
| 1219 |
+
result = _message.default_pool.FindFileByName(file_descriptor_proto.name)
|
| 1220 |
+
|
| 1221 |
+
if _USE_C_DESCRIPTORS:
|
| 1222 |
+
return result.message_types_by_name[desc_proto.name]
|
| 1223 |
+
|
| 1224 |
+
full_message_name = [desc_proto.name]
|
| 1225 |
+
if package: full_message_name.insert(0, package)
|
| 1226 |
+
|
| 1227 |
+
# Create Descriptors for enum types
|
| 1228 |
+
enum_types = {}
|
| 1229 |
+
for enum_proto in desc_proto.enum_type:
|
| 1230 |
+
full_name = '.'.join(full_message_name + [enum_proto.name])
|
| 1231 |
+
enum_desc = EnumDescriptor(
|
| 1232 |
+
enum_proto.name, full_name, None, [
|
| 1233 |
+
EnumValueDescriptor(enum_val.name, ii, enum_val.number,
|
| 1234 |
+
create_key=_internal_create_key)
|
| 1235 |
+
for ii, enum_val in enumerate(enum_proto.value)],
|
| 1236 |
+
create_key=_internal_create_key)
|
| 1237 |
+
enum_types[full_name] = enum_desc
|
| 1238 |
+
|
| 1239 |
+
# Create Descriptors for nested types
|
| 1240 |
+
nested_types = {}
|
| 1241 |
+
for nested_proto in desc_proto.nested_type:
|
| 1242 |
+
full_name = '.'.join(full_message_name + [nested_proto.name])
|
| 1243 |
+
# Nested types are just those defined inside of the message, not all types
|
| 1244 |
+
# used by fields in the message, so no loops are possible here.
|
| 1245 |
+
nested_desc = MakeDescriptor(nested_proto,
|
| 1246 |
+
package='.'.join(full_message_name),
|
| 1247 |
+
build_file_if_cpp=False,
|
| 1248 |
+
syntax=syntax)
|
| 1249 |
+
nested_types[full_name] = nested_desc
|
| 1250 |
+
|
| 1251 |
+
fields = []
|
| 1252 |
+
for field_proto in desc_proto.field:
|
| 1253 |
+
full_name = '.'.join(full_message_name + [field_proto.name])
|
| 1254 |
+
enum_desc = None
|
| 1255 |
+
nested_desc = None
|
| 1256 |
+
if field_proto.json_name:
|
| 1257 |
+
json_name = field_proto.json_name
|
| 1258 |
+
else:
|
| 1259 |
+
json_name = None
|
| 1260 |
+
if field_proto.HasField('type_name'):
|
| 1261 |
+
type_name = field_proto.type_name
|
| 1262 |
+
full_type_name = '.'.join(full_message_name +
|
| 1263 |
+
[type_name[type_name.rfind('.')+1:]])
|
| 1264 |
+
if full_type_name in nested_types:
|
| 1265 |
+
nested_desc = nested_types[full_type_name]
|
| 1266 |
+
elif full_type_name in enum_types:
|
| 1267 |
+
enum_desc = enum_types[full_type_name]
|
| 1268 |
+
# Else type_name references a non-local type, which isn't implemented
|
| 1269 |
+
field = FieldDescriptor(
|
| 1270 |
+
field_proto.name, full_name, field_proto.number - 1,
|
| 1271 |
+
field_proto.number, field_proto.type,
|
| 1272 |
+
FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
|
| 1273 |
+
field_proto.label, None, nested_desc, enum_desc, None, False, None,
|
| 1274 |
+
options=_OptionsOrNone(field_proto), has_default_value=False,
|
| 1275 |
+
json_name=json_name, create_key=_internal_create_key)
|
| 1276 |
+
fields.append(field)
|
| 1277 |
+
|
| 1278 |
+
desc_name = '.'.join(full_message_name)
|
| 1279 |
+
return Descriptor(desc_proto.name, desc_name, None, None, fields,
|
| 1280 |
+
list(nested_types.values()), list(enum_types.values()), [],
|
| 1281 |
+
options=_OptionsOrNone(desc_proto),
|
| 1282 |
+
create_key=_internal_create_key)
|
lib/python3.10/site-packages/google/protobuf/descriptor_database.py
ADDED
|
@@ -0,0 +1,154 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
"""Provides a container for DescriptorProtos."""
|
| 9 |
+
|
| 10 |
+
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
| 11 |
+
|
| 12 |
+
import warnings
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class Error(Exception):
  # Base class for all exceptions raised by this module; catching it covers
  # every error type defined below.
  pass
| 18 |
+
|
| 19 |
+
class DescriptorDatabaseConflictingDefinitionError(Error):
  """Raised when a proto is added with the same name & different descriptor.

  Raised by DescriptorDatabase.Add when the incoming FileDescriptorProto
  compares unequal to the proto already stored under the same file name.
  """
| 22 |
+
|
| 23 |
+
class DescriptorDatabase(object):
  """A container accepting FileDescriptorProtos and maps DescriptorProtos."""

  def __init__(self):
    # File name -> FileDescriptorProto.
    self._file_desc_protos_by_file = {}
    # Fully qualified symbol name -> owning FileDescriptorProto.
    self._file_desc_protos_by_symbol = {}

  def Add(self, file_desc_proto):
    """Adds the FileDescriptorProto and its types to this database.

    Args:
      file_desc_proto: The FileDescriptorProto to add.
    Raises:
      DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
        add a proto with the same name but different definition than an
        existing proto in the database.
    """
    proto_name = file_desc_proto.name
    previously_added = self._file_desc_protos_by_file.get(proto_name)
    if previously_added is None:
      self._file_desc_protos_by_file[proto_name] = file_desc_proto
    elif previously_added != file_desc_proto:
      raise DescriptorDatabaseConflictingDefinitionError(
          '%s already added, but with different descriptor.' % proto_name)
    else:
      # Identical proto already present; nothing more to index.
      return

    # Index every top-level symbol declared by this file.
    package = file_desc_proto.package
    for message in file_desc_proto.message_type:
      for symbol in _ExtractSymbols(message, package):
        self._AddSymbol(symbol, file_desc_proto)
    for enum in file_desc_proto.enum_type:
      self._AddSymbol('.'.join((package, enum.name)), file_desc_proto)
      for enum_value in enum.value:
        # Enum values are registered directly, bypassing the conflict
        # warning performed by _AddSymbol.
        self._file_desc_protos_by_symbol[
            '.'.join((package, enum_value.name))] = file_desc_proto
    for extension in file_desc_proto.extension:
      self._AddSymbol('.'.join((package, extension.name)), file_desc_proto)
    for service in file_desc_proto.service:
      self._AddSymbol('.'.join((package, service.name)), file_desc_proto)

  def FindFileByName(self, name):
    """Finds the file descriptor proto by file name.

    Typically the file name is a relative path ending to a .proto file. The
    proto with the given name will have to have been added to this database
    using the Add method or else an error will be raised.

    Args:
      name: The file name to find.

    Returns:
      The file descriptor proto matching the name.

    Raises:
      KeyError if no file by the given name was added.
    """
    return self._file_desc_protos_by_file[name]

  def FindFileContainingSymbol(self, symbol):
    """Finds the file descriptor proto containing the specified symbol.

    The symbol should be a fully qualified name including the file
    descriptor's package and any containing messages. Some examples:

      'some.package.name.Message'
      'some.package.name.Message.NestedEnum'
      'some.package.name.Message.some_field'

    The file descriptor proto containing the specified symbol must be added
    to this database using the Add method or else an error will be raised.

    Args:
      symbol: The fully qualified symbol name.

    Returns:
      The file descriptor proto containing the symbol.

    Raises:
      KeyError if no file contains the specified symbol.
    """
    direct_hit = self._file_desc_protos_by_symbol.get(symbol)
    if direct_hit is not None:
      return direct_hit
    # Fields, enum values, and nested extensions are not indexed directly.
    # Fall back to the enclosing top-level descriptor; a non-existent nested
    # symbol under a valid top-level descriptor is also found. The behavior
    # is the same with protobuf C++.
    enclosing, _, _ = symbol.rpartition('.')
    fallback_hit = self._file_desc_protos_by_symbol.get(enclosing)
    if fallback_hit is None:
      # Raise the original symbol as a KeyError for better diagnostics.
      raise KeyError(symbol)
    return fallback_hit

  def FindFileContainingExtension(self, extendee_name, extension_number):
    # TODO: implement this API.
    return None

  def FindAllExtensionNumbers(self, extendee_name):
    # TODO: implement this API.
    return []

  def _AddSymbol(self, name, file_desc_proto):
    # Warn (but still overwrite) when two files define the same symbol.
    previous_file = self._file_desc_protos_by_symbol.get(name)
    if previous_file is not None:
      warn_msg = ('Conflict register for file "' + file_desc_proto.name +
                  '": ' + name +
                  ' is already defined in file "' +
                  previous_file.name + '"')
      warnings.warn(warn_msg, RuntimeWarning)
    self._file_desc_protos_by_symbol[name] = file_desc_proto
|
| 138 |
+
def _ExtractSymbols(desc_proto, package):
|
| 139 |
+
"""Pulls out all the symbols from a descriptor proto.
|
| 140 |
+
|
| 141 |
+
Args:
|
| 142 |
+
desc_proto: The proto to extract symbols from.
|
| 143 |
+
package: The package containing the descriptor type.
|
| 144 |
+
|
| 145 |
+
Yields:
|
| 146 |
+
The fully qualified name found in the descriptor.
|
| 147 |
+
"""
|
| 148 |
+
message_name = package + '.' + desc_proto.name if package else desc_proto.name
|
| 149 |
+
yield message_name
|
| 150 |
+
for nested_type in desc_proto.nested_type:
|
| 151 |
+
for symbol in _ExtractSymbols(nested_type, message_name):
|
| 152 |
+
yield symbol
|
| 153 |
+
for enum_type in desc_proto.enum_type:
|
| 154 |
+
yield '.'.join((message_name, enum_type.name))
|
lib/python3.10/site-packages/google/protobuf/descriptor_pb2.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
lib/python3.10/site-packages/google/protobuf/descriptor_pool.py
ADDED
|
@@ -0,0 +1,1271 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
"""Provides DescriptorPool to use as a container for proto2 descriptors.
|
| 9 |
+
|
| 10 |
+
The DescriptorPool is used in conjection with a DescriptorDatabase to maintain
|
| 11 |
+
a collection of protocol buffer descriptors for use when dynamically creating
|
| 12 |
+
message types at runtime.
|
| 13 |
+
|
| 14 |
+
For most applications protocol buffers should be used via modules generated by
|
| 15 |
+
the protocol buffer compiler tool. This should only be used when the type of
|
| 16 |
+
protocol buffers used in an application or library cannot be predetermined.
|
| 17 |
+
|
| 18 |
+
Below is a straightforward example on how to use this class::
|
| 19 |
+
|
| 20 |
+
pool = DescriptorPool()
|
| 21 |
+
file_descriptor_protos = [ ... ]
|
| 22 |
+
for file_descriptor_proto in file_descriptor_protos:
|
| 23 |
+
pool.Add(file_descriptor_proto)
|
| 24 |
+
my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType')
|
| 25 |
+
|
| 26 |
+
The message descriptor can be used in conjunction with the message_factory
|
| 27 |
+
module in order to create a protocol buffer class that can be encoded and
|
| 28 |
+
decoded.
|
| 29 |
+
|
| 30 |
+
If you want to get a Python class for the specified proto, use the
|
| 31 |
+
helper functions inside google.protobuf.message_factory
|
| 32 |
+
directly instead of this class.
|
| 33 |
+
"""
|
| 34 |
+
|
| 35 |
+
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
| 36 |
+
|
| 37 |
+
import collections
|
| 38 |
+
import warnings
|
| 39 |
+
|
| 40 |
+
from google.protobuf import descriptor
|
| 41 |
+
from google.protobuf import descriptor_database
|
| 42 |
+
from google.protobuf import text_encoding
|
| 43 |
+
from google.protobuf.internal import python_message
|
| 44 |
+
|
| 45 |
+
_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def _Deprecated(func):
|
| 49 |
+
"""Mark functions as deprecated."""
|
| 50 |
+
|
| 51 |
+
def NewFunc(*args, **kwargs):
|
| 52 |
+
warnings.warn(
|
| 53 |
+
'Call to deprecated function %s(). Note: Do add unlinked descriptors '
|
| 54 |
+
'to descriptor_pool is wrong. Please use Add() or AddSerializedFile() '
|
| 55 |
+
'instead. This function will be removed soon.' % func.__name__,
|
| 56 |
+
category=DeprecationWarning)
|
| 57 |
+
return func(*args, **kwargs)
|
| 58 |
+
NewFunc.__name__ = func.__name__
|
| 59 |
+
NewFunc.__doc__ = func.__doc__
|
| 60 |
+
NewFunc.__dict__.update(func.__dict__)
|
| 61 |
+
return NewFunc
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def _NormalizeFullyQualifiedName(name):
|
| 65 |
+
"""Remove leading period from fully-qualified type name.
|
| 66 |
+
|
| 67 |
+
Due to b/13860351 in descriptor_database.py, types in the root namespace are
|
| 68 |
+
generated with a leading period. This function removes that prefix.
|
| 69 |
+
|
| 70 |
+
Args:
|
| 71 |
+
name (str): The fully-qualified symbol name.
|
| 72 |
+
|
| 73 |
+
Returns:
|
| 74 |
+
str: The normalized fully-qualified symbol name.
|
| 75 |
+
"""
|
| 76 |
+
return name.lstrip('.')
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
def _OptionsOrNone(descriptor_proto):
|
| 80 |
+
"""Returns the value of the field `options`, or None if it is not set."""
|
| 81 |
+
if descriptor_proto.HasField('options'):
|
| 82 |
+
return descriptor_proto.options
|
| 83 |
+
else:
|
| 84 |
+
return None
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def _IsMessageSetExtension(field):
  """Returns True iff `field` is an extension implementing MessageSet entry.

  An extension qualifies when it extends a message whose options enable
  message_set_wire_format, and it is itself an optional message field.
  """
  if not field.is_extension:
    return False
  container = field.containing_type
  if not (container.has_options and
          container.GetOptions().message_set_wire_format):
    return False
  return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
          field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL)
| 94 |
+
|
| 95 |
+
class DescriptorPool(object):
|
| 96 |
+
"""A collection of protobufs dynamically constructed by descriptor protos."""
|
| 97 |
+
|
| 98 |
+
if _USE_C_DESCRIPTORS:
|
| 99 |
+
|
| 100 |
+
def __new__(cls, descriptor_db=None):
|
| 101 |
+
# pylint: disable=protected-access
|
| 102 |
+
return descriptor._message.DescriptorPool(descriptor_db)
|
| 103 |
+
|
| 104 |
+
  def __init__(
      self, descriptor_db=None, use_deprecated_legacy_json_field_conflicts=False
  ):
    """Initializes a Pool of proto buffs.

    The descriptor_db argument to the constructor is provided to allow
    specialized file descriptor proto lookup code to be triggered on demand. An
    example would be an implementation which will read and compile a file
    specified in a call to FindFileByName() and not require the call to Add()
    at all. Results from this database will be cached internally here as well.

    Args:
      descriptor_db: A secondary source of file descriptors.
      use_deprecated_legacy_json_field_conflicts: Unused, for compatibility with
        C++.
    """

    # Raw FileDescriptorProtos added via Add() live in the internal database
    # until they are converted to descriptor objects on lookup.
    self._internal_db = descriptor_database.DescriptorDatabase()
    self._descriptor_db = descriptor_db
    # Caches of built descriptors, keyed by fully qualified name
    # (by file name for _file_descriptors).
    self._descriptors = {}
    self._enum_descriptors = {}
    self._service_descriptors = {}
    self._file_descriptors = {}
    self._toplevel_extensions = {}
    # Top-level enum values are indexed here as siblings of their enum type.
    self._top_enum_values = {}
    # We store extensions in two two-level mappings: The first key is the
    # descriptor of the message being extended, the second key is the extension
    # full name or its tag number.
    self._extensions_by_name = collections.defaultdict(dict)
    self._extensions_by_number = collections.defaultdict(dict)
+
  def _CheckConflictRegister(self, desc, desc_name, file_name):
    """Check if the descriptor name conflicts with another of the same name.

    A name may be re-registered only by the same file and for the same kind
    of descriptor; anything else raises.

    Args:
      desc: Descriptor of a message, enum, service, extension or enum value.
      desc_name (str): the full name of desc.
      file_name (str): The file name of descriptor.

    Raises:
      TypeError: if desc_name is already registered under a different
        descriptor kind or by a different file.
    """
    for register, descriptor_type in [
        (self._descriptors, descriptor.Descriptor),
        (self._enum_descriptors, descriptor.EnumDescriptor),
        (self._service_descriptors, descriptor.ServiceDescriptor),
        (self._toplevel_extensions, descriptor.FieldDescriptor),
        (self._top_enum_values, descriptor.EnumValueDescriptor)]:
      if desc_name in register:
        old_desc = register[desc_name]
        if isinstance(old_desc, descriptor.EnumValueDescriptor):
          # Enum values are reached through their enum type's file.
          old_file = old_desc.type.file.name
        else:
          old_file = old_desc.file.name

        if not isinstance(desc, descriptor_type) or (
            old_file != file_name):
          error_msg = ('Conflict register for file "' + file_name +
                       '": ' + desc_name +
                       ' is already defined in file "' +
                       old_file + '". Please fix the conflict by adding '
                       'package name on the proto file, or use different '
                       'name for the duplication.')
          if isinstance(desc, descriptor.EnumValueDescriptor):
            error_msg += ('\nNote: enum values appear as '
                          'siblings of the enum type instead of '
                          'children of it.')

          raise TypeError(error_msg)

    return
| 173 |
+
  def Add(self, file_desc_proto):
    """Adds the FileDescriptorProto and its types to this pool.

    The proto is only stored in the internal database here; descriptor
    objects are built on first lookup (see FindFileByName).

    Args:
      file_desc_proto (FileDescriptorProto): The file descriptor to add.
    """

    self._internal_db.Add(file_desc_proto)
| 182 |
+
  def AddSerializedFile(self, serialized_file_desc_proto):
    """Adds the FileDescriptorProto and its types to this pool.

    Args:
      serialized_file_desc_proto (bytes): A bytes string, serialization of the
        :class:`FileDescriptorProto` to add.

    Returns:
      FileDescriptor: Descriptor for the added file.
    """

    # pylint: disable=g-import-not-at-top
    from google.protobuf import descriptor_pb2
    file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
        serialized_file_desc_proto)
    file_desc = self._ConvertFileProtoToFileDescriptor(file_desc_proto)
    # Preserve the original serialized form on the built descriptor.
    file_desc.serialized_pb = serialized_file_desc_proto
    return file_desc
|
| 201 |
+
  # Add Descriptor to descriptor pool is deprecated. Please use Add()
  # or AddSerializedFile() to add a FileDescriptorProto instead.
  @_Deprecated
  def AddDescriptor(self, desc):
    # Deprecated public entry point; delegates to the internal helper.
    self._AddDescriptor(desc)

  # Never call this method. It is for internal usage only.
  def _AddDescriptor(self, desc):
    """Adds a Descriptor to the pool, non-recursively.

    If the Descriptor contains nested messages or enums, the caller must
    explicitly register them. This method also registers the FileDescriptor
    associated with the message.

    Args:
      desc: A Descriptor.

    Raises:
      TypeError: if desc is not a descriptor.Descriptor, or (via
        _CheckConflictRegister) on a name conflict.
    """
    if not isinstance(desc, descriptor.Descriptor):
      raise TypeError('Expected instance of descriptor.Descriptor.')

    self._CheckConflictRegister(desc, desc.full_name, desc.file.name)

    self._descriptors[desc.full_name] = desc
    self._AddFileDescriptor(desc.file)
| 226 |
+
  # Never call this method. It is for internal usage only.
  def _AddEnumDescriptor(self, enum_desc):
    """Adds an EnumDescriptor to the pool.

    This method also registers the FileDescriptor associated with the enum.

    Args:
      enum_desc: An EnumDescriptor.

    Raises:
      TypeError: if enum_desc is not a descriptor.EnumDescriptor, or (via
        _CheckConflictRegister) on a name conflict.
    """

    if not isinstance(enum_desc, descriptor.EnumDescriptor):
      raise TypeError('Expected instance of descriptor.EnumDescriptor.')

    file_name = enum_desc.file.name
    self._CheckConflictRegister(enum_desc, enum_desc.full_name, file_name)
    self._enum_descriptors[enum_desc.full_name] = enum_desc

    # Top enum values need to be indexed.
    # Count the number of dots to see whether the enum is toplevel or nested
    # in a message. We cannot use enum_desc.containing_type at this stage.
    if enum_desc.file.package:
      top_level = (enum_desc.full_name.count('.')
                   - enum_desc.file.package.count('.') == 1)
    else:
      top_level = enum_desc.full_name.count('.') == 0
    if top_level:
      file_name = enum_desc.file.name
      package = enum_desc.file.package
      for enum_value in enum_desc.values:
        # Values of a top-level enum live in the enclosing package scope,
        # so they are indexed as siblings of the enum type.
        full_name = _NormalizeFullyQualifiedName(
            '.'.join((package, enum_value.name)))
        self._CheckConflictRegister(enum_value, full_name, file_name)
        self._top_enum_values[full_name] = enum_value
    self._AddFileDescriptor(enum_desc.file)
|
| 261 |
+
  # Add ServiceDescriptor to descriptor pool is deprecated. Please use Add()
  # or AddSerializedFile() to add a FileDescriptorProto instead.
  @_Deprecated
  def AddServiceDescriptor(self, service_desc):
    # Deprecated public entry point; delegates to the internal helper.
    self._AddServiceDescriptor(service_desc)

  # Never call this method. It is for internal usage only.
  def _AddServiceDescriptor(self, service_desc):
    """Adds a ServiceDescriptor to the pool.

    Args:
      service_desc: A ServiceDescriptor.

    Raises:
      TypeError: if service_desc is not a descriptor.ServiceDescriptor, or
        (via _CheckConflictRegister) on a name conflict.
    """

    if not isinstance(service_desc, descriptor.ServiceDescriptor):
      raise TypeError('Expected instance of descriptor.ServiceDescriptor.')

    self._CheckConflictRegister(service_desc, service_desc.full_name,
                                service_desc.file.name)
    self._service_descriptors[service_desc.full_name] = service_desc
|
| 282 |
+
  # Add ExtensionDescriptor to descriptor pool is deprecated. Please use Add()
  # or AddSerializedFile() to add a FileDescriptorProto instead.
  @_Deprecated
  def AddExtensionDescriptor(self, extension):
    # Deprecated public entry point; delegates to the internal helper.
    self._AddExtensionDescriptor(extension)

  # Never call this method. It is for internal usage only.
  def _AddExtensionDescriptor(self, extension):
    """Adds a FieldDescriptor describing an extension to the pool.

    Args:
      extension: A FieldDescriptor.

    Raises:
      AssertionError: when another extension with the same number extends the
        same message.
      TypeError: when the specified extension is not a
        descriptor.FieldDescriptor.
    """
    if not (isinstance(extension, descriptor.FieldDescriptor) and
            extension.is_extension):
      raise TypeError('Expected an extension descriptor.')

    # Only extensions declared at file scope (no enclosing message) are
    # indexed by full name in _toplevel_extensions.
    if extension.extension_scope is None:
      self._CheckConflictRegister(
          extension, extension.full_name, extension.file.name)
      self._toplevel_extensions[extension.full_name] = extension

    # A different extension re-using an occupied tag number on the same
    # extended message is an error; re-adding the identical descriptor is
    # a no-op.
    try:
      existing_desc = self._extensions_by_number[
          extension.containing_type][extension.number]
    except KeyError:
      pass
    else:
      if extension is not existing_desc:
        raise AssertionError(
            'Extensions "%s" and "%s" both try to extend message type "%s" '
            'with field number %d.' %
            (extension.full_name, existing_desc.full_name,
             extension.containing_type.full_name, extension.number))

    self._extensions_by_number[extension.containing_type][
        extension.number] = extension
    self._extensions_by_name[extension.containing_type][
        extension.full_name] = extension

    # Also register MessageSet extensions with the type name.
    if _IsMessageSetExtension(extension):
      self._extensions_by_name[extension.containing_type][
          extension.message_type.full_name] = extension

    # If the extended message already has a generated class, attach the
    # field helpers so the extension is usable on instances immediately.
    if hasattr(extension.containing_type, '_concrete_class'):
      python_message._AttachFieldHelpers(
          extension.containing_type._concrete_class, extension)
|
| 337 |
+
  @_Deprecated
  def AddFileDescriptor(self, file_desc):
    # Deprecated public entry point; delegates to the internal helper.
    self._InternalAddFileDescriptor(file_desc)

  # Never call this method. It is for internal usage only.
  def _InternalAddFileDescriptor(self, file_desc):
    """Adds a FileDescriptor to the pool, non-recursively.

    If the FileDescriptor contains messages or enums, the caller must explicitly
    register them.

    Args:
      file_desc: A FileDescriptor.
    """

    self._AddFileDescriptor(file_desc)

  def _AddFileDescriptor(self, file_desc):
    """Adds a FileDescriptor to the pool, non-recursively.

    If the FileDescriptor contains messages or enums, the caller must explicitly
    register them.

    Args:
      file_desc: A FileDescriptor.

    Raises:
      TypeError: if file_desc is not a descriptor.FileDescriptor.
    """

    if not isinstance(file_desc, descriptor.FileDescriptor):
      raise TypeError('Expected instance of descriptor.FileDescriptor.')
    self._file_descriptors[file_desc.name] = file_desc
|
| 368 |
+
def FindFileByName(self, file_name):
|
| 369 |
+
"""Gets a FileDescriptor by file name.
|
| 370 |
+
|
| 371 |
+
Args:
|
| 372 |
+
file_name (str): The path to the file to get a descriptor for.
|
| 373 |
+
|
| 374 |
+
Returns:
|
| 375 |
+
FileDescriptor: The descriptor for the named file.
|
| 376 |
+
|
| 377 |
+
Raises:
|
| 378 |
+
KeyError: if the file cannot be found in the pool.
|
| 379 |
+
"""
|
| 380 |
+
|
| 381 |
+
try:
|
| 382 |
+
return self._file_descriptors[file_name]
|
| 383 |
+
except KeyError:
|
| 384 |
+
pass
|
| 385 |
+
|
| 386 |
+
try:
|
| 387 |
+
file_proto = self._internal_db.FindFileByName(file_name)
|
| 388 |
+
except KeyError as error:
|
| 389 |
+
if self._descriptor_db:
|
| 390 |
+
file_proto = self._descriptor_db.FindFileByName(file_name)
|
| 391 |
+
else:
|
| 392 |
+
raise error
|
| 393 |
+
if not file_proto:
|
| 394 |
+
raise KeyError('Cannot find a file named %s' % file_name)
|
| 395 |
+
return self._ConvertFileProtoToFileDescriptor(file_proto)
|
| 396 |
+
|
| 397 |
+
def FindFileContainingSymbol(self, symbol):
|
| 398 |
+
"""Gets the FileDescriptor for the file containing the specified symbol.
|
| 399 |
+
|
| 400 |
+
Args:
|
| 401 |
+
symbol (str): The name of the symbol to search for.
|
| 402 |
+
|
| 403 |
+
Returns:
|
| 404 |
+
FileDescriptor: Descriptor for the file that contains the specified
|
| 405 |
+
symbol.
|
| 406 |
+
|
| 407 |
+
Raises:
|
| 408 |
+
KeyError: if the file cannot be found in the pool.
|
| 409 |
+
"""
|
| 410 |
+
|
| 411 |
+
symbol = _NormalizeFullyQualifiedName(symbol)
|
| 412 |
+
try:
|
| 413 |
+
return self._InternalFindFileContainingSymbol(symbol)
|
| 414 |
+
except KeyError:
|
| 415 |
+
pass
|
| 416 |
+
|
| 417 |
+
try:
|
| 418 |
+
# Try fallback database. Build and find again if possible.
|
| 419 |
+
self._FindFileContainingSymbolInDb(symbol)
|
| 420 |
+
return self._InternalFindFileContainingSymbol(symbol)
|
| 421 |
+
except KeyError:
|
| 422 |
+
raise KeyError('Cannot find a file containing %s' % symbol)
|
| 423 |
+
|
| 424 |
+
def _InternalFindFileContainingSymbol(self, symbol):
|
| 425 |
+
"""Gets the already built FileDescriptor containing the specified symbol.
|
| 426 |
+
|
| 427 |
+
Args:
|
| 428 |
+
symbol (str): The name of the symbol to search for.
|
| 429 |
+
|
| 430 |
+
Returns:
|
| 431 |
+
FileDescriptor: Descriptor for the file that contains the specified
|
| 432 |
+
symbol.
|
| 433 |
+
|
| 434 |
+
Raises:
|
| 435 |
+
KeyError: if the file cannot be found in the pool.
|
| 436 |
+
"""
|
| 437 |
+
try:
|
| 438 |
+
return self._descriptors[symbol].file
|
| 439 |
+
except KeyError:
|
| 440 |
+
pass
|
| 441 |
+
|
| 442 |
+
try:
|
| 443 |
+
return self._enum_descriptors[symbol].file
|
| 444 |
+
except KeyError:
|
| 445 |
+
pass
|
| 446 |
+
|
| 447 |
+
try:
|
| 448 |
+
return self._service_descriptors[symbol].file
|
| 449 |
+
except KeyError:
|
| 450 |
+
pass
|
| 451 |
+
|
| 452 |
+
try:
|
| 453 |
+
return self._top_enum_values[symbol].type.file
|
| 454 |
+
except KeyError:
|
| 455 |
+
pass
|
| 456 |
+
|
| 457 |
+
try:
|
| 458 |
+
return self._toplevel_extensions[symbol].file
|
| 459 |
+
except KeyError:
|
| 460 |
+
pass
|
| 461 |
+
|
| 462 |
+
# Try fields, enum values and nested extensions inside a message.
|
| 463 |
+
top_name, _, sub_name = symbol.rpartition('.')
|
| 464 |
+
try:
|
| 465 |
+
message = self.FindMessageTypeByName(top_name)
|
| 466 |
+
assert (sub_name in message.extensions_by_name or
|
| 467 |
+
sub_name in message.fields_by_name or
|
| 468 |
+
sub_name in message.enum_values_by_name)
|
| 469 |
+
return message.file
|
| 470 |
+
except (KeyError, AssertionError):
|
| 471 |
+
raise KeyError('Cannot find a file containing %s' % symbol)
|
| 472 |
+
|
| 473 |
+
def FindMessageTypeByName(self, full_name):
|
| 474 |
+
"""Loads the named descriptor from the pool.
|
| 475 |
+
|
| 476 |
+
Args:
|
| 477 |
+
full_name (str): The full name of the descriptor to load.
|
| 478 |
+
|
| 479 |
+
Returns:
|
| 480 |
+
Descriptor: The descriptor for the named type.
|
| 481 |
+
|
| 482 |
+
Raises:
|
| 483 |
+
KeyError: if the message cannot be found in the pool.
|
| 484 |
+
"""
|
| 485 |
+
|
| 486 |
+
full_name = _NormalizeFullyQualifiedName(full_name)
|
| 487 |
+
if full_name not in self._descriptors:
|
| 488 |
+
self._FindFileContainingSymbolInDb(full_name)
|
| 489 |
+
return self._descriptors[full_name]
|
| 490 |
+
|
| 491 |
+
def FindEnumTypeByName(self, full_name):
|
| 492 |
+
"""Loads the named enum descriptor from the pool.
|
| 493 |
+
|
| 494 |
+
Args:
|
| 495 |
+
full_name (str): The full name of the enum descriptor to load.
|
| 496 |
+
|
| 497 |
+
Returns:
|
| 498 |
+
EnumDescriptor: The enum descriptor for the named type.
|
| 499 |
+
|
| 500 |
+
Raises:
|
| 501 |
+
KeyError: if the enum cannot be found in the pool.
|
| 502 |
+
"""
|
| 503 |
+
|
| 504 |
+
full_name = _NormalizeFullyQualifiedName(full_name)
|
| 505 |
+
if full_name not in self._enum_descriptors:
|
| 506 |
+
self._FindFileContainingSymbolInDb(full_name)
|
| 507 |
+
return self._enum_descriptors[full_name]
|
| 508 |
+
|
| 509 |
+
def FindFieldByName(self, full_name):
|
| 510 |
+
"""Loads the named field descriptor from the pool.
|
| 511 |
+
|
| 512 |
+
Args:
|
| 513 |
+
full_name (str): The full name of the field descriptor to load.
|
| 514 |
+
|
| 515 |
+
Returns:
|
| 516 |
+
FieldDescriptor: The field descriptor for the named field.
|
| 517 |
+
|
| 518 |
+
Raises:
|
| 519 |
+
KeyError: if the field cannot be found in the pool.
|
| 520 |
+
"""
|
| 521 |
+
full_name = _NormalizeFullyQualifiedName(full_name)
|
| 522 |
+
message_name, _, field_name = full_name.rpartition('.')
|
| 523 |
+
message_descriptor = self.FindMessageTypeByName(message_name)
|
| 524 |
+
return message_descriptor.fields_by_name[field_name]
|
| 525 |
+
|
| 526 |
+
def FindOneofByName(self, full_name):
|
| 527 |
+
"""Loads the named oneof descriptor from the pool.
|
| 528 |
+
|
| 529 |
+
Args:
|
| 530 |
+
full_name (str): The full name of the oneof descriptor to load.
|
| 531 |
+
|
| 532 |
+
Returns:
|
| 533 |
+
OneofDescriptor: The oneof descriptor for the named oneof.
|
| 534 |
+
|
| 535 |
+
Raises:
|
| 536 |
+
KeyError: if the oneof cannot be found in the pool.
|
| 537 |
+
"""
|
| 538 |
+
full_name = _NormalizeFullyQualifiedName(full_name)
|
| 539 |
+
message_name, _, oneof_name = full_name.rpartition('.')
|
| 540 |
+
message_descriptor = self.FindMessageTypeByName(message_name)
|
| 541 |
+
return message_descriptor.oneofs_by_name[oneof_name]
|
| 542 |
+
|
| 543 |
+
def FindExtensionByName(self, full_name):
|
| 544 |
+
"""Loads the named extension descriptor from the pool.
|
| 545 |
+
|
| 546 |
+
Args:
|
| 547 |
+
full_name (str): The full name of the extension descriptor to load.
|
| 548 |
+
|
| 549 |
+
Returns:
|
| 550 |
+
FieldDescriptor: The field descriptor for the named extension.
|
| 551 |
+
|
| 552 |
+
Raises:
|
| 553 |
+
KeyError: if the extension cannot be found in the pool.
|
| 554 |
+
"""
|
| 555 |
+
full_name = _NormalizeFullyQualifiedName(full_name)
|
| 556 |
+
try:
|
| 557 |
+
# The proto compiler does not give any link between the FileDescriptor
|
| 558 |
+
# and top-level extensions unless the FileDescriptorProto is added to
|
| 559 |
+
# the DescriptorDatabase, but this can impact memory usage.
|
| 560 |
+
# So we registered these extensions by name explicitly.
|
| 561 |
+
return self._toplevel_extensions[full_name]
|
| 562 |
+
except KeyError:
|
| 563 |
+
pass
|
| 564 |
+
message_name, _, extension_name = full_name.rpartition('.')
|
| 565 |
+
try:
|
| 566 |
+
# Most extensions are nested inside a message.
|
| 567 |
+
scope = self.FindMessageTypeByName(message_name)
|
| 568 |
+
except KeyError:
|
| 569 |
+
# Some extensions are defined at file scope.
|
| 570 |
+
scope = self._FindFileContainingSymbolInDb(full_name)
|
| 571 |
+
return scope.extensions_by_name[extension_name]
|
| 572 |
+
|
| 573 |
+
def FindExtensionByNumber(self, message_descriptor, number):
|
| 574 |
+
"""Gets the extension of the specified message with the specified number.
|
| 575 |
+
|
| 576 |
+
Extensions have to be registered to this pool by calling :func:`Add` or
|
| 577 |
+
:func:`AddExtensionDescriptor`.
|
| 578 |
+
|
| 579 |
+
Args:
|
| 580 |
+
message_descriptor (Descriptor): descriptor of the extended message.
|
| 581 |
+
number (int): Number of the extension field.
|
| 582 |
+
|
| 583 |
+
Returns:
|
| 584 |
+
FieldDescriptor: The descriptor for the extension.
|
| 585 |
+
|
| 586 |
+
Raises:
|
| 587 |
+
KeyError: when no extension with the given number is known for the
|
| 588 |
+
specified message.
|
| 589 |
+
"""
|
| 590 |
+
try:
|
| 591 |
+
return self._extensions_by_number[message_descriptor][number]
|
| 592 |
+
except KeyError:
|
| 593 |
+
self._TryLoadExtensionFromDB(message_descriptor, number)
|
| 594 |
+
return self._extensions_by_number[message_descriptor][number]
|
| 595 |
+
|
| 596 |
+
def FindAllExtensions(self, message_descriptor):
|
| 597 |
+
"""Gets all the known extensions of a given message.
|
| 598 |
+
|
| 599 |
+
Extensions have to be registered to this pool by build related
|
| 600 |
+
:func:`Add` or :func:`AddExtensionDescriptor`.
|
| 601 |
+
|
| 602 |
+
Args:
|
| 603 |
+
message_descriptor (Descriptor): Descriptor of the extended message.
|
| 604 |
+
|
| 605 |
+
Returns:
|
| 606 |
+
list[FieldDescriptor]: Field descriptors describing the extensions.
|
| 607 |
+
"""
|
| 608 |
+
# Fallback to descriptor db if FindAllExtensionNumbers is provided.
|
| 609 |
+
if self._descriptor_db and hasattr(
|
| 610 |
+
self._descriptor_db, 'FindAllExtensionNumbers'):
|
| 611 |
+
full_name = message_descriptor.full_name
|
| 612 |
+
all_numbers = self._descriptor_db.FindAllExtensionNumbers(full_name)
|
| 613 |
+
for number in all_numbers:
|
| 614 |
+
if number in self._extensions_by_number[message_descriptor]:
|
| 615 |
+
continue
|
| 616 |
+
self._TryLoadExtensionFromDB(message_descriptor, number)
|
| 617 |
+
|
| 618 |
+
return list(self._extensions_by_number[message_descriptor].values())
|
| 619 |
+
|
| 620 |
+
def _TryLoadExtensionFromDB(self, message_descriptor, number):
|
| 621 |
+
"""Try to Load extensions from descriptor db.
|
| 622 |
+
|
| 623 |
+
Args:
|
| 624 |
+
message_descriptor: descriptor of the extended message.
|
| 625 |
+
number: the extension number that needs to be loaded.
|
| 626 |
+
"""
|
| 627 |
+
if not self._descriptor_db:
|
| 628 |
+
return
|
| 629 |
+
# Only supported when FindFileContainingExtension is provided.
|
| 630 |
+
if not hasattr(
|
| 631 |
+
self._descriptor_db, 'FindFileContainingExtension'):
|
| 632 |
+
return
|
| 633 |
+
|
| 634 |
+
full_name = message_descriptor.full_name
|
| 635 |
+
file_proto = self._descriptor_db.FindFileContainingExtension(
|
| 636 |
+
full_name, number)
|
| 637 |
+
|
| 638 |
+
if file_proto is None:
|
| 639 |
+
return
|
| 640 |
+
|
| 641 |
+
try:
|
| 642 |
+
self._ConvertFileProtoToFileDescriptor(file_proto)
|
| 643 |
+
except:
|
| 644 |
+
warn_msg = ('Unable to load proto file %s for extension number %d.' %
|
| 645 |
+
(file_proto.name, number))
|
| 646 |
+
warnings.warn(warn_msg, RuntimeWarning)
|
| 647 |
+
|
| 648 |
+
def FindServiceByName(self, full_name):
|
| 649 |
+
"""Loads the named service descriptor from the pool.
|
| 650 |
+
|
| 651 |
+
Args:
|
| 652 |
+
full_name (str): The full name of the service descriptor to load.
|
| 653 |
+
|
| 654 |
+
Returns:
|
| 655 |
+
ServiceDescriptor: The service descriptor for the named service.
|
| 656 |
+
|
| 657 |
+
Raises:
|
| 658 |
+
KeyError: if the service cannot be found in the pool.
|
| 659 |
+
"""
|
| 660 |
+
full_name = _NormalizeFullyQualifiedName(full_name)
|
| 661 |
+
if full_name not in self._service_descriptors:
|
| 662 |
+
self._FindFileContainingSymbolInDb(full_name)
|
| 663 |
+
return self._service_descriptors[full_name]
|
| 664 |
+
|
| 665 |
+
def FindMethodByName(self, full_name):
|
| 666 |
+
"""Loads the named service method descriptor from the pool.
|
| 667 |
+
|
| 668 |
+
Args:
|
| 669 |
+
full_name (str): The full name of the method descriptor to load.
|
| 670 |
+
|
| 671 |
+
Returns:
|
| 672 |
+
MethodDescriptor: The method descriptor for the service method.
|
| 673 |
+
|
| 674 |
+
Raises:
|
| 675 |
+
KeyError: if the method cannot be found in the pool.
|
| 676 |
+
"""
|
| 677 |
+
full_name = _NormalizeFullyQualifiedName(full_name)
|
| 678 |
+
service_name, _, method_name = full_name.rpartition('.')
|
| 679 |
+
service_descriptor = self.FindServiceByName(service_name)
|
| 680 |
+
return service_descriptor.methods_by_name[method_name]
|
| 681 |
+
|
| 682 |
+
def _FindFileContainingSymbolInDb(self, symbol):
|
| 683 |
+
"""Finds the file in descriptor DB containing the specified symbol.
|
| 684 |
+
|
| 685 |
+
Args:
|
| 686 |
+
symbol (str): The name of the symbol to search for.
|
| 687 |
+
|
| 688 |
+
Returns:
|
| 689 |
+
FileDescriptor: The file that contains the specified symbol.
|
| 690 |
+
|
| 691 |
+
Raises:
|
| 692 |
+
KeyError: if the file cannot be found in the descriptor database.
|
| 693 |
+
"""
|
| 694 |
+
try:
|
| 695 |
+
file_proto = self._internal_db.FindFileContainingSymbol(symbol)
|
| 696 |
+
except KeyError as error:
|
| 697 |
+
if self._descriptor_db:
|
| 698 |
+
file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
|
| 699 |
+
else:
|
| 700 |
+
raise error
|
| 701 |
+
if not file_proto:
|
| 702 |
+
raise KeyError('Cannot find a file containing %s' % symbol)
|
| 703 |
+
return self._ConvertFileProtoToFileDescriptor(file_proto)
|
| 704 |
+
|
| 705 |
+
def _ConvertFileProtoToFileDescriptor(self, file_proto):
|
| 706 |
+
"""Creates a FileDescriptor from a proto or returns a cached copy.
|
| 707 |
+
|
| 708 |
+
This method also has the side effect of loading all the symbols found in
|
| 709 |
+
the file into the appropriate dictionaries in the pool.
|
| 710 |
+
|
| 711 |
+
Args:
|
| 712 |
+
file_proto: The proto to convert.
|
| 713 |
+
|
| 714 |
+
Returns:
|
| 715 |
+
A FileDescriptor matching the passed in proto.
|
| 716 |
+
"""
|
| 717 |
+
if file_proto.name not in self._file_descriptors:
|
| 718 |
+
built_deps = list(self._GetDeps(file_proto.dependency))
|
| 719 |
+
direct_deps = [self.FindFileByName(n) for n in file_proto.dependency]
|
| 720 |
+
public_deps = [direct_deps[i] for i in file_proto.public_dependency]
|
| 721 |
+
|
| 722 |
+
file_descriptor = descriptor.FileDescriptor(
|
| 723 |
+
pool=self,
|
| 724 |
+
name=file_proto.name,
|
| 725 |
+
package=file_proto.package,
|
| 726 |
+
syntax=file_proto.syntax,
|
| 727 |
+
options=_OptionsOrNone(file_proto),
|
| 728 |
+
serialized_pb=file_proto.SerializeToString(),
|
| 729 |
+
dependencies=direct_deps,
|
| 730 |
+
public_dependencies=public_deps,
|
| 731 |
+
# pylint: disable=protected-access
|
| 732 |
+
create_key=descriptor._internal_create_key)
|
| 733 |
+
scope = {}
|
| 734 |
+
|
| 735 |
+
# This loop extracts all the message and enum types from all the
|
| 736 |
+
# dependencies of the file_proto. This is necessary to create the
|
| 737 |
+
# scope of available message types when defining the passed in
|
| 738 |
+
# file proto.
|
| 739 |
+
for dependency in built_deps:
|
| 740 |
+
scope.update(self._ExtractSymbols(
|
| 741 |
+
dependency.message_types_by_name.values()))
|
| 742 |
+
scope.update((_PrefixWithDot(enum.full_name), enum)
|
| 743 |
+
for enum in dependency.enum_types_by_name.values())
|
| 744 |
+
|
| 745 |
+
for message_type in file_proto.message_type:
|
| 746 |
+
message_desc = self._ConvertMessageDescriptor(
|
| 747 |
+
message_type, file_proto.package, file_descriptor, scope,
|
| 748 |
+
file_proto.syntax)
|
| 749 |
+
file_descriptor.message_types_by_name[message_desc.name] = (
|
| 750 |
+
message_desc)
|
| 751 |
+
|
| 752 |
+
for enum_type in file_proto.enum_type:
|
| 753 |
+
file_descriptor.enum_types_by_name[enum_type.name] = (
|
| 754 |
+
self._ConvertEnumDescriptor(enum_type, file_proto.package,
|
| 755 |
+
file_descriptor, None, scope, True))
|
| 756 |
+
|
| 757 |
+
for index, extension_proto in enumerate(file_proto.extension):
|
| 758 |
+
extension_desc = self._MakeFieldDescriptor(
|
| 759 |
+
extension_proto, file_proto.package, index, file_descriptor,
|
| 760 |
+
is_extension=True)
|
| 761 |
+
extension_desc.containing_type = self._GetTypeFromScope(
|
| 762 |
+
file_descriptor.package, extension_proto.extendee, scope)
|
| 763 |
+
self._SetFieldType(extension_proto, extension_desc,
|
| 764 |
+
file_descriptor.package, scope)
|
| 765 |
+
file_descriptor.extensions_by_name[extension_desc.name] = (
|
| 766 |
+
extension_desc)
|
| 767 |
+
|
| 768 |
+
for desc_proto in file_proto.message_type:
|
| 769 |
+
self._SetAllFieldTypes(file_proto.package, desc_proto, scope)
|
| 770 |
+
|
| 771 |
+
if file_proto.package:
|
| 772 |
+
desc_proto_prefix = _PrefixWithDot(file_proto.package)
|
| 773 |
+
else:
|
| 774 |
+
desc_proto_prefix = ''
|
| 775 |
+
|
| 776 |
+
for desc_proto in file_proto.message_type:
|
| 777 |
+
desc = self._GetTypeFromScope(
|
| 778 |
+
desc_proto_prefix, desc_proto.name, scope)
|
| 779 |
+
file_descriptor.message_types_by_name[desc_proto.name] = desc
|
| 780 |
+
|
| 781 |
+
for index, service_proto in enumerate(file_proto.service):
|
| 782 |
+
file_descriptor.services_by_name[service_proto.name] = (
|
| 783 |
+
self._MakeServiceDescriptor(service_proto, index, scope,
|
| 784 |
+
file_proto.package, file_descriptor))
|
| 785 |
+
|
| 786 |
+
self._file_descriptors[file_proto.name] = file_descriptor
|
| 787 |
+
|
| 788 |
+
# Add extensions to the pool
|
| 789 |
+
def AddExtensionForNested(message_type):
|
| 790 |
+
for nested in message_type.nested_types:
|
| 791 |
+
AddExtensionForNested(nested)
|
| 792 |
+
for extension in message_type.extensions:
|
| 793 |
+
self._AddExtensionDescriptor(extension)
|
| 794 |
+
|
| 795 |
+
file_desc = self._file_descriptors[file_proto.name]
|
| 796 |
+
for extension in file_desc.extensions_by_name.values():
|
| 797 |
+
self._AddExtensionDescriptor(extension)
|
| 798 |
+
for message_type in file_desc.message_types_by_name.values():
|
| 799 |
+
AddExtensionForNested(message_type)
|
| 800 |
+
|
| 801 |
+
return file_desc
|
| 802 |
+
|
| 803 |
+
def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
|
| 804 |
+
scope=None, syntax=None):
|
| 805 |
+
"""Adds the proto to the pool in the specified package.
|
| 806 |
+
|
| 807 |
+
Args:
|
| 808 |
+
desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
|
| 809 |
+
package: The package the proto should be located in.
|
| 810 |
+
file_desc: The file containing this message.
|
| 811 |
+
scope: Dict mapping short and full symbols to message and enum types.
|
| 812 |
+
syntax: string indicating syntax of the file ("proto2" or "proto3")
|
| 813 |
+
|
| 814 |
+
Returns:
|
| 815 |
+
The added descriptor.
|
| 816 |
+
"""
|
| 817 |
+
|
| 818 |
+
if package:
|
| 819 |
+
desc_name = '.'.join((package, desc_proto.name))
|
| 820 |
+
else:
|
| 821 |
+
desc_name = desc_proto.name
|
| 822 |
+
|
| 823 |
+
if file_desc is None:
|
| 824 |
+
file_name = None
|
| 825 |
+
else:
|
| 826 |
+
file_name = file_desc.name
|
| 827 |
+
|
| 828 |
+
if scope is None:
|
| 829 |
+
scope = {}
|
| 830 |
+
|
| 831 |
+
nested = [
|
| 832 |
+
self._ConvertMessageDescriptor(
|
| 833 |
+
nested, desc_name, file_desc, scope, syntax)
|
| 834 |
+
for nested in desc_proto.nested_type]
|
| 835 |
+
enums = [
|
| 836 |
+
self._ConvertEnumDescriptor(enum, desc_name, file_desc, None,
|
| 837 |
+
scope, False)
|
| 838 |
+
for enum in desc_proto.enum_type]
|
| 839 |
+
fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc)
|
| 840 |
+
for index, field in enumerate(desc_proto.field)]
|
| 841 |
+
extensions = [
|
| 842 |
+
self._MakeFieldDescriptor(extension, desc_name, index, file_desc,
|
| 843 |
+
is_extension=True)
|
| 844 |
+
for index, extension in enumerate(desc_proto.extension)]
|
| 845 |
+
oneofs = [
|
| 846 |
+
# pylint: disable=g-complex-comprehension
|
| 847 |
+
descriptor.OneofDescriptor(
|
| 848 |
+
desc.name,
|
| 849 |
+
'.'.join((desc_name, desc.name)),
|
| 850 |
+
index,
|
| 851 |
+
None,
|
| 852 |
+
[],
|
| 853 |
+
_OptionsOrNone(desc),
|
| 854 |
+
# pylint: disable=protected-access
|
| 855 |
+
create_key=descriptor._internal_create_key)
|
| 856 |
+
for index, desc in enumerate(desc_proto.oneof_decl)
|
| 857 |
+
]
|
| 858 |
+
extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
|
| 859 |
+
if extension_ranges:
|
| 860 |
+
is_extendable = True
|
| 861 |
+
else:
|
| 862 |
+
is_extendable = False
|
| 863 |
+
desc = descriptor.Descriptor(
|
| 864 |
+
name=desc_proto.name,
|
| 865 |
+
full_name=desc_name,
|
| 866 |
+
filename=file_name,
|
| 867 |
+
containing_type=None,
|
| 868 |
+
fields=fields,
|
| 869 |
+
oneofs=oneofs,
|
| 870 |
+
nested_types=nested,
|
| 871 |
+
enum_types=enums,
|
| 872 |
+
extensions=extensions,
|
| 873 |
+
options=_OptionsOrNone(desc_proto),
|
| 874 |
+
is_extendable=is_extendable,
|
| 875 |
+
extension_ranges=extension_ranges,
|
| 876 |
+
file=file_desc,
|
| 877 |
+
serialized_start=None,
|
| 878 |
+
serialized_end=None,
|
| 879 |
+
syntax=syntax,
|
| 880 |
+
is_map_entry=desc_proto.options.map_entry,
|
| 881 |
+
# pylint: disable=protected-access
|
| 882 |
+
create_key=descriptor._internal_create_key)
|
| 883 |
+
for nested in desc.nested_types:
|
| 884 |
+
nested.containing_type = desc
|
| 885 |
+
for enum in desc.enum_types:
|
| 886 |
+
enum.containing_type = desc
|
| 887 |
+
for field_index, field_desc in enumerate(desc_proto.field):
|
| 888 |
+
if field_desc.HasField('oneof_index'):
|
| 889 |
+
oneof_index = field_desc.oneof_index
|
| 890 |
+
oneofs[oneof_index].fields.append(fields[field_index])
|
| 891 |
+
fields[field_index].containing_oneof = oneofs[oneof_index]
|
| 892 |
+
|
| 893 |
+
scope[_PrefixWithDot(desc_name)] = desc
|
| 894 |
+
self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
|
| 895 |
+
self._descriptors[desc_name] = desc
|
| 896 |
+
return desc
|
| 897 |
+
|
| 898 |
+
def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
|
| 899 |
+
containing_type=None, scope=None, top_level=False):
|
| 900 |
+
"""Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.
|
| 901 |
+
|
| 902 |
+
Args:
|
| 903 |
+
enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
|
| 904 |
+
package: Optional package name for the new message EnumDescriptor.
|
| 905 |
+
file_desc: The file containing the enum descriptor.
|
| 906 |
+
containing_type: The type containing this enum.
|
| 907 |
+
scope: Scope containing available types.
|
| 908 |
+
top_level: If True, the enum is a top level symbol. If False, the enum
|
| 909 |
+
is defined inside a message.
|
| 910 |
+
|
| 911 |
+
Returns:
|
| 912 |
+
The added descriptor
|
| 913 |
+
"""
|
| 914 |
+
|
| 915 |
+
if package:
|
| 916 |
+
enum_name = '.'.join((package, enum_proto.name))
|
| 917 |
+
else:
|
| 918 |
+
enum_name = enum_proto.name
|
| 919 |
+
|
| 920 |
+
if file_desc is None:
|
| 921 |
+
file_name = None
|
| 922 |
+
else:
|
| 923 |
+
file_name = file_desc.name
|
| 924 |
+
|
| 925 |
+
values = [self._MakeEnumValueDescriptor(value, index)
|
| 926 |
+
for index, value in enumerate(enum_proto.value)]
|
| 927 |
+
desc = descriptor.EnumDescriptor(name=enum_proto.name,
|
| 928 |
+
full_name=enum_name,
|
| 929 |
+
filename=file_name,
|
| 930 |
+
file=file_desc,
|
| 931 |
+
values=values,
|
| 932 |
+
containing_type=containing_type,
|
| 933 |
+
options=_OptionsOrNone(enum_proto),
|
| 934 |
+
# pylint: disable=protected-access
|
| 935 |
+
create_key=descriptor._internal_create_key)
|
| 936 |
+
scope['.%s' % enum_name] = desc
|
| 937 |
+
self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
|
| 938 |
+
self._enum_descriptors[enum_name] = desc
|
| 939 |
+
|
| 940 |
+
# Add top level enum values.
|
| 941 |
+
if top_level:
|
| 942 |
+
for value in values:
|
| 943 |
+
full_name = _NormalizeFullyQualifiedName(
|
| 944 |
+
'.'.join((package, value.name)))
|
| 945 |
+
self._CheckConflictRegister(value, full_name, file_name)
|
| 946 |
+
self._top_enum_values[full_name] = value
|
| 947 |
+
|
| 948 |
+
return desc
|
| 949 |
+
|
| 950 |
+
def _MakeFieldDescriptor(self, field_proto, message_name, index,
|
| 951 |
+
file_desc, is_extension=False):
|
| 952 |
+
"""Creates a field descriptor from a FieldDescriptorProto.
|
| 953 |
+
|
| 954 |
+
For message and enum type fields, this method will do a look up
|
| 955 |
+
in the pool for the appropriate descriptor for that type. If it
|
| 956 |
+
is unavailable, it will fall back to the _source function to
|
| 957 |
+
create it. If this type is still unavailable, construction will
|
| 958 |
+
fail.
|
| 959 |
+
|
| 960 |
+
Args:
|
| 961 |
+
field_proto: The proto describing the field.
|
| 962 |
+
message_name: The name of the containing message.
|
| 963 |
+
index: Index of the field
|
| 964 |
+
file_desc: The file containing the field descriptor.
|
| 965 |
+
is_extension: Indication that this field is for an extension.
|
| 966 |
+
|
| 967 |
+
Returns:
|
| 968 |
+
An initialized FieldDescriptor object
|
| 969 |
+
"""
|
| 970 |
+
|
| 971 |
+
if message_name:
|
| 972 |
+
full_name = '.'.join((message_name, field_proto.name))
|
| 973 |
+
else:
|
| 974 |
+
full_name = field_proto.name
|
| 975 |
+
|
| 976 |
+
if field_proto.json_name:
|
| 977 |
+
json_name = field_proto.json_name
|
| 978 |
+
else:
|
| 979 |
+
json_name = None
|
| 980 |
+
|
| 981 |
+
return descriptor.FieldDescriptor(
|
| 982 |
+
name=field_proto.name,
|
| 983 |
+
full_name=full_name,
|
| 984 |
+
index=index,
|
| 985 |
+
number=field_proto.number,
|
| 986 |
+
type=field_proto.type,
|
| 987 |
+
cpp_type=None,
|
| 988 |
+
message_type=None,
|
| 989 |
+
enum_type=None,
|
| 990 |
+
containing_type=None,
|
| 991 |
+
label=field_proto.label,
|
| 992 |
+
has_default_value=False,
|
| 993 |
+
default_value=None,
|
| 994 |
+
is_extension=is_extension,
|
| 995 |
+
extension_scope=None,
|
| 996 |
+
options=_OptionsOrNone(field_proto),
|
| 997 |
+
json_name=json_name,
|
| 998 |
+
file=file_desc,
|
| 999 |
+
# pylint: disable=protected-access
|
| 1000 |
+
create_key=descriptor._internal_create_key)
|
| 1001 |
+
|
| 1002 |
+
def _SetAllFieldTypes(self, package, desc_proto, scope):
|
| 1003 |
+
"""Sets all the descriptor's fields's types.
|
| 1004 |
+
|
| 1005 |
+
This method also sets the containing types on any extensions.
|
| 1006 |
+
|
| 1007 |
+
Args:
|
| 1008 |
+
package: The current package of desc_proto.
|
| 1009 |
+
desc_proto: The message descriptor to update.
|
| 1010 |
+
scope: Enclosing scope of available types.
|
| 1011 |
+
"""
|
| 1012 |
+
|
| 1013 |
+
package = _PrefixWithDot(package)
|
| 1014 |
+
|
| 1015 |
+
main_desc = self._GetTypeFromScope(package, desc_proto.name, scope)
|
| 1016 |
+
|
| 1017 |
+
if package == '.':
|
| 1018 |
+
nested_package = _PrefixWithDot(desc_proto.name)
|
| 1019 |
+
else:
|
| 1020 |
+
nested_package = '.'.join([package, desc_proto.name])
|
| 1021 |
+
|
| 1022 |
+
for field_proto, field_desc in zip(desc_proto.field, main_desc.fields):
|
| 1023 |
+
self._SetFieldType(field_proto, field_desc, nested_package, scope)
|
| 1024 |
+
|
| 1025 |
+
for extension_proto, extension_desc in (
|
| 1026 |
+
zip(desc_proto.extension, main_desc.extensions)):
|
| 1027 |
+
extension_desc.containing_type = self._GetTypeFromScope(
|
| 1028 |
+
nested_package, extension_proto.extendee, scope)
|
| 1029 |
+
self._SetFieldType(extension_proto, extension_desc, nested_package, scope)
|
| 1030 |
+
|
| 1031 |
+
for nested_type in desc_proto.nested_type:
|
| 1032 |
+
self._SetAllFieldTypes(nested_package, nested_type, scope)
|
| 1033 |
+
|
| 1034 |
+
def _SetFieldType(self, field_proto, field_desc, package, scope):
|
| 1035 |
+
"""Sets the field's type, cpp_type, message_type and enum_type.
|
| 1036 |
+
|
| 1037 |
+
Args:
|
| 1038 |
+
field_proto: Data about the field in proto format.
|
| 1039 |
+
field_desc: The descriptor to modify.
|
| 1040 |
+
package: The package the field's container is in.
|
| 1041 |
+
scope: Enclosing scope of available types.
|
| 1042 |
+
"""
|
| 1043 |
+
if field_proto.type_name:
|
| 1044 |
+
desc = self._GetTypeFromScope(package, field_proto.type_name, scope)
|
| 1045 |
+
else:
|
| 1046 |
+
desc = None
|
| 1047 |
+
|
| 1048 |
+
if not field_proto.HasField('type'):
|
| 1049 |
+
if isinstance(desc, descriptor.Descriptor):
|
| 1050 |
+
field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
|
| 1051 |
+
else:
|
| 1052 |
+
field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM
|
| 1053 |
+
|
| 1054 |
+
field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
|
| 1055 |
+
field_proto.type)
|
| 1056 |
+
|
| 1057 |
+
if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
|
| 1058 |
+
or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
|
| 1059 |
+
field_desc.message_type = desc
|
| 1060 |
+
|
| 1061 |
+
if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
|
| 1062 |
+
field_desc.enum_type = desc
|
| 1063 |
+
|
| 1064 |
+
if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
|
| 1065 |
+
field_desc.has_default_value = False
|
| 1066 |
+
field_desc.default_value = []
|
| 1067 |
+
elif field_proto.HasField('default_value'):
|
| 1068 |
+
field_desc.has_default_value = True
|
| 1069 |
+
if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
|
| 1070 |
+
field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
|
| 1071 |
+
field_desc.default_value = float(field_proto.default_value)
|
| 1072 |
+
elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
|
| 1073 |
+
field_desc.default_value = field_proto.default_value
|
| 1074 |
+
elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
|
| 1075 |
+
field_desc.default_value = field_proto.default_value.lower() == 'true'
|
| 1076 |
+
elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
|
| 1077 |
+
field_desc.default_value = field_desc.enum_type.values_by_name[
|
| 1078 |
+
field_proto.default_value].number
|
| 1079 |
+
elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
|
| 1080 |
+
field_desc.default_value = text_encoding.CUnescape(
|
| 1081 |
+
field_proto.default_value)
|
| 1082 |
+
elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE:
|
| 1083 |
+
field_desc.default_value = None
|
| 1084 |
+
else:
|
| 1085 |
+
# All other types are of the "int" type.
|
| 1086 |
+
field_desc.default_value = int(field_proto.default_value)
|
| 1087 |
+
else:
|
| 1088 |
+
field_desc.has_default_value = False
|
| 1089 |
+
if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
|
| 1090 |
+
field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
|
| 1091 |
+
field_desc.default_value = 0.0
|
| 1092 |
+
elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
|
| 1093 |
+
field_desc.default_value = u''
|
| 1094 |
+
elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
|
| 1095 |
+
field_desc.default_value = False
|
| 1096 |
+
elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
|
| 1097 |
+
field_desc.default_value = field_desc.enum_type.values[0].number
|
| 1098 |
+
elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
|
| 1099 |
+
field_desc.default_value = b''
|
| 1100 |
+
elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE:
|
| 1101 |
+
field_desc.default_value = None
|
| 1102 |
+
elif field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP:
|
| 1103 |
+
field_desc.default_value = None
|
| 1104 |
+
else:
|
| 1105 |
+
# All other types are of the "int" type.
|
| 1106 |
+
field_desc.default_value = 0
|
| 1107 |
+
|
| 1108 |
+
field_desc.type = field_proto.type
|
| 1109 |
+
|
| 1110 |
+
def _MakeEnumValueDescriptor(self, value_proto, index):
|
| 1111 |
+
"""Creates a enum value descriptor object from a enum value proto.
|
| 1112 |
+
|
| 1113 |
+
Args:
|
| 1114 |
+
value_proto: The proto describing the enum value.
|
| 1115 |
+
index: The index of the enum value.
|
| 1116 |
+
|
| 1117 |
+
Returns:
|
| 1118 |
+
An initialized EnumValueDescriptor object.
|
| 1119 |
+
"""
|
| 1120 |
+
|
| 1121 |
+
return descriptor.EnumValueDescriptor(
|
| 1122 |
+
name=value_proto.name,
|
| 1123 |
+
index=index,
|
| 1124 |
+
number=value_proto.number,
|
| 1125 |
+
options=_OptionsOrNone(value_proto),
|
| 1126 |
+
type=None,
|
| 1127 |
+
# pylint: disable=protected-access
|
| 1128 |
+
create_key=descriptor._internal_create_key)
|
| 1129 |
+
|
| 1130 |
+
def _MakeServiceDescriptor(self, service_proto, service_index, scope,
                           package, file_desc):
  """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto.

  Args:
    service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message.
    service_index: The index of the service in the File.
    scope: Dict mapping short and full symbols to message and enum types.
    package: Optional package name for the new message EnumDescriptor.
    file_desc: The file containing the service descriptor.

  Returns:
    The added descriptor.
  """
  service_name = (
      '.'.join((package, service_proto.name)) if package
      else service_proto.name)

  # Build one MethodDescriptor per method, preserving declaration order.
  methods = []
  for method_index, method_proto in enumerate(service_proto.method):
    methods.append(self._MakeMethodDescriptor(
        method_proto, service_name, package, scope, method_index))

  desc = descriptor.ServiceDescriptor(
      name=service_proto.name,
      full_name=service_name,
      index=service_index,
      methods=methods,
      options=_OptionsOrNone(service_proto),
      file=file_desc,
      # pylint: disable=protected-access
      create_key=descriptor._internal_create_key)
  # Fail early on duplicate registrations before mutating the registry.
  self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
  self._service_descriptors[service_name] = desc
  return desc
|
| 1165 |
+
|
| 1166 |
+
def _MakeMethodDescriptor(self, method_proto, service_name, package, scope,
                          index):
  """Creates a method descriptor from a MethodDescriptorProto.

  Args:
    method_proto: The proto describing the method.
    service_name: The name of the containing service.
    package: Optional package name to look up for types.
    scope: Scope containing available types.
    index: Index of the method in the service.

  Returns:
    An initialized MethodDescriptor object.
  """
  method_full_name = '.'.join((service_name, method_proto.name))
  # Resolve the request/response message types against the current scope.
  request_type = self._GetTypeFromScope(
      package, method_proto.input_type, scope)
  response_type = self._GetTypeFromScope(
      package, method_proto.output_type, scope)
  return descriptor.MethodDescriptor(
      name=method_proto.name,
      full_name=method_full_name,
      index=index,
      containing_service=None,
      input_type=request_type,
      output_type=response_type,
      client_streaming=method_proto.client_streaming,
      server_streaming=method_proto.server_streaming,
      options=_OptionsOrNone(method_proto),
      # pylint: disable=protected-access
      create_key=descriptor._internal_create_key)
|
| 1197 |
+
|
| 1198 |
+
def _ExtractSymbols(self, descriptors):
  """Pulls out all the symbols from descriptor protos.

  Args:
    descriptors: The messages to extract descriptors from.
  Yields:
    A two element tuple of the type name and descriptor object.
  """
  for message_desc in descriptors:
    yield (_PrefixWithDot(message_desc.full_name), message_desc)
    # Recurse into nested message types.
    yield from self._ExtractSymbols(message_desc.nested_types)
    for enum_desc in message_desc.enum_types:
      yield (_PrefixWithDot(enum_desc.full_name), enum_desc)
|
| 1213 |
+
|
| 1214 |
+
def _GetDeps(self, dependencies, visited=None):
  """Recursively finds dependencies for file protos.

  Args:
    dependencies: The names of the files being depended on.
    visited: The names of files already found.

  Yields:
    Each direct and indirect dependency.
  """
  visited = visited or set()
  for dep_name in dependencies:
    if dep_name in visited:
      continue
    visited.add(dep_name)
    dep_desc = self.FindFileByName(dep_name)
    yield dep_desc
    # Only public dependencies are re-exported transitively.
    public_names = [d.name for d in dep_desc.public_dependencies]
    yield from self._GetDeps(public_names, visited)
|
| 1233 |
+
|
| 1234 |
+
def _GetTypeFromScope(self, package, type_name, scope):
  """Finds a given type name in the current scope.

  Args:
    package: The package the proto should be located in.
    type_name: The name of the type to be found in the scope.
    scope: Dict mapping short and full symbols to message and enum types.

  Returns:
    The descriptor for the requested type.
  """
  if type_name not in scope:
    # Walk outward from the innermost package component, trying each
    # enclosing namespace until a matching fully qualified name is found.
    components = _PrefixWithDot(package).split('.')
    while components:
      candidate = '.'.join(components + [type_name])
      if candidate in scope:
        type_name = candidate
        break
      components.pop()
  return scope[type_name]
|
| 1255 |
+
|
| 1256 |
+
|
| 1257 |
+
def _PrefixWithDot(name):
|
| 1258 |
+
return name if name.startswith('.') else '.%s' % name
|
| 1259 |
+
|
| 1260 |
+
|
| 1261 |
+
# Process-wide default descriptor pool, shared by all generated modules.
if _USE_C_DESCRIPTORS:
  # TODO: This pool could be constructed from Python code, when we
  # support a flag like 'use_cpp_generated_pool=True'.
  # pylint: disable=protected-access
  _DEFAULT = descriptor._message.default_pool
else:
  _DEFAULT = DescriptorPool()


def Default():
  """Returns the process-wide default DescriptorPool instance."""
  return _DEFAULT
|
lib/python3.10/site-packages/google/protobuf/duration_pb2.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/duration.proto
# Protobuf Python Version: 4.25.6
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




# Register this file's serialized FileDescriptorProto with the default pool.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\":\n\x08\x44uration\x12\x18\n\x07seconds\x18\x01 \x01(\x03R\x07seconds\x12\x14\n\x05nanos\x18\x02 \x01(\x05R\x05nanosB\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

# Build descriptors and message classes into this module's namespace.
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
  # Pure-Python descriptors: attach serialized options and byte offsets.
  _globals['DESCRIPTOR']._options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _globals['_DURATION']._serialized_start=51
  _globals['_DURATION']._serialized_end=109
# @@protoc_insertion_point(module_scope)
|
lib/python3.10/site-packages/google/protobuf/empty_pb2.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/empty.proto
# Protobuf Python Version: 4.25.6
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




# Register this file's serialized FileDescriptorProto with the default pool.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

# Build descriptors and message classes into this module's namespace.
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
  # Pure-Python descriptors: attach serialized options and byte offsets.
  _globals['DESCRIPTOR']._options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _globals['_EMPTY']._serialized_start=48
  _globals['_EMPTY']._serialized_end=55
# @@protoc_insertion_point(module_scope)
|
lib/python3.10/site-packages/google/protobuf/field_mask_pb2.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/field_mask.proto
# Protobuf Python Version: 4.25.6
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




# Register this file's serialized FileDescriptorProto with the default pool.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"!\n\tFieldMask\x12\x14\n\x05paths\x18\x01 \x03(\tR\x05pathsB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

# Build descriptors and message classes into this module's namespace.
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
  # Pure-Python descriptors: attach serialized options and byte offsets.
  _globals['DESCRIPTOR']._options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _globals['_FIELDMASK']._serialized_start=53
  _globals['_FIELDMASK']._serialized_end=86
# @@protoc_insertion_point(module_scope)
|
lib/python3.10/site-packages/google/protobuf/internal/api_implementation.py
ADDED
|
@@ -0,0 +1,140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
"""Determine which implementation of the protobuf API is used in this process.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import importlib
|
| 12 |
+
import os
|
| 13 |
+
import sys
|
| 14 |
+
import warnings
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def _ApiVersionToImplementationType(api_version):
|
| 18 |
+
if api_version == 2:
|
| 19 |
+
return 'cpp'
|
| 20 |
+
if api_version == 1:
|
| 21 |
+
raise ValueError('api_version=1 is no longer supported.')
|
| 22 |
+
if api_version == 0:
|
| 23 |
+
return 'python'
|
| 24 |
+
return None
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
# Implementation selected so far; None means "not decided at build time".
_implementation_type = None
try:
  # pylint: disable=g-import-not-at-top
  from google.protobuf.internal import _api_implementation
  # The compile-time constants in the _api_implementation module can be used to
  # switch to a certain implementation of the Python API at build time.
  _implementation_type = _ApiVersionToImplementationType(
      _api_implementation.api_version)
except ImportError:
  pass  # Unspecified by compiler flags.
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def _CanImport(mod_name):
|
| 40 |
+
try:
|
| 41 |
+
mod = importlib.import_module(mod_name)
|
| 42 |
+
# Work around a known issue in the classic bootstrap .par import hook.
|
| 43 |
+
if not mod:
|
| 44 |
+
raise ImportError(mod_name + ' import succeeded but was None')
|
| 45 |
+
return True
|
| 46 |
+
except ImportError:
|
| 47 |
+
return False
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
# No build-time choice was made: probe for the fastest available backend,
# preferring upb, then cpp, then the pure-Python fallback.
if _implementation_type is None:
  if _CanImport('google._upb._message'):
    _implementation_type = 'upb'
  elif _CanImport('google.protobuf.pyext._message'):
    _implementation_type = 'cpp'
  else:
    _implementation_type = 'python'


# This environment variable can be used to switch to a certain implementation
# of the Python API, overriding the compile-time constants in the
# _api_implementation module. Right now only 'python', 'cpp' and 'upb' are
# valid values. Any other value will raise error.
_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
                                 _implementation_type)

if _implementation_type not in ('python', 'cpp', 'upb'):
  raise ValueError('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION {0} is not '
                   'supported. Please set to \'python\', \'cpp\' or '
                   '\'upb\'.'.format(_implementation_type))

if 'PyPy' in sys.version and _implementation_type == 'cpp':
  warnings.warn('PyPy does not work yet with cpp protocol buffers. '
                'Falling back to the python implementation.')
  _implementation_type = 'python'

# Native extension module backing the selected implementation (None for
# pure Python).
_c_module = None

if _implementation_type == 'cpp':
  try:
    # pylint: disable=g-import-not-at-top
    from google.protobuf.pyext import _message
    # Alias under the internal google3 name for compatibility.
    sys.modules['google3.net.proto2.python.internal.cpp._message'] = _message
    _c_module = _message
    del _message
  except ImportError:
    # TODO: fail back to python
    warnings.warn(
        'Selected implementation cpp is not available.')
    pass

if _implementation_type == 'upb':
  try:
    # pylint: disable=g-import-not-at-top
    from google._upb import _message
    _c_module = _message
    del _message
  except ImportError:
    warnings.warn('Selected implementation upb is not available. '
                  'Falling back to the python implementation.')
    _implementation_type = 'python'
    pass

# Detect if serialization should be deterministic by default
try:
  # The presence of this module in a build allows the proto implementation to
  # be upgraded merely via build deps.
  #
  # NOTE: Merely importing this automatically enables deterministic proto
  # serialization for C++ code, but we still need to export it as a boolean so
  # that we can do the same for `_implementation_type == 'python'`.
  #
  # NOTE2: It is possible for C++ code to enable deterministic serialization by
  # default _without_ affecting Python code, if the C++ implementation is not in
  # use by this module. That is intended behavior, so we don't actually expose
  # this boolean outside of this module.
  #
  # pylint: disable=g-import-not-at-top,unused-import
  from google.protobuf import enable_deterministic_proto_serialization
  _python_deterministic_proto_serialization = True
except ImportError:
  _python_deterministic_proto_serialization = False
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
# Usage of this function is discouraged. Clients shouldn't care which
# implementation of the API is in use. Note that there is no guarantee
# that differences between APIs will be maintained.
# Please don't use this function if possible.
def Type():
  """Returns the selected implementation name: 'python', 'cpp' or 'upb'."""
  return _implementation_type
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
# See comment on 'Type' above.
# TODO: Remove the API, it returns a constant. b/228102101
def Version():
  """Returns the (constant) API version number."""
  return 2
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
# For internal use only
def IsPythonDefaultSerializationDeterministic():
  """Returns whether pure-Python serialization defaults to deterministic."""
  return _python_deterministic_proto_serialization
|
lib/python3.10/site-packages/google/protobuf/internal/encoder.py
ADDED
|
@@ -0,0 +1,806 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
"""Code for encoding protocol message primitives.
|
| 9 |
+
|
| 10 |
+
Contains the logic for encoding every logical protocol field type
|
| 11 |
+
into one of the 5 physical wire types.
|
| 12 |
+
|
| 13 |
+
This code is designed to push the Python interpreter's performance to the
|
| 14 |
+
limits.
|
| 15 |
+
|
| 16 |
+
The basic idea is that at startup time, for every field (i.e. every
|
| 17 |
+
FieldDescriptor) we construct two functions: a "sizer" and an "encoder". The
|
| 18 |
+
sizer takes a value of this field's type and computes its byte size. The
|
| 19 |
+
encoder takes a writer function and a value. It encodes the value into byte
|
| 20 |
+
strings and invokes the writer function to write those strings. Typically the
|
| 21 |
+
writer function is the write() method of a BytesIO.
|
| 22 |
+
|
| 23 |
+
We try to do as much work as possible when constructing the writer and the
|
| 24 |
+
sizer rather than when calling them. In particular:
|
| 25 |
+
* We copy any needed global functions to local variables, so that we do not need
|
| 26 |
+
to do costly global table lookups at runtime.
|
| 27 |
+
* Similarly, we try to do any attribute lookups at startup time if possible.
|
| 28 |
+
* Every field's tag is encoded to bytes at startup, since it can't change at
|
| 29 |
+
runtime.
|
| 30 |
+
* Whatever component of the field size we can compute at startup, we do.
|
| 31 |
+
* We *avoid* sharing code if doing so would make the code slower and not sharing
|
| 32 |
+
does not burden us too much. For example, encoders for repeated fields do
|
| 33 |
+
not just call the encoders for singular fields in a loop because this would
|
| 34 |
+
add an extra function call overhead for every loop iteration; instead, we
|
| 35 |
+
manually inline the single-value encoder into the loop.
|
| 36 |
+
* If a Python function lacks a return statement, Python actually generates
|
| 37 |
+
instructions to pop the result of the last statement off the stack, push
|
| 38 |
+
None onto the stack, and then return that. If we really don't care what
|
| 39 |
+
value is returned, then we can save two instructions by returning the
|
| 40 |
+
result of the last statement. It looks funny but it helps.
|
| 41 |
+
* We assume that type and bounds checking has happened at a higher level.
|
| 42 |
+
"""
|
| 43 |
+
|
| 44 |
+
__author__ = 'kenton@google.com (Kenton Varda)'
|
| 45 |
+
|
| 46 |
+
import struct
|
| 47 |
+
|
| 48 |
+
from google.protobuf.internal import wire_format
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
# This will overflow and thus become IEEE-754 "infinity". We would use
# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
_POS_INF = 1e10000
# Negative infinity, derived from the positive constant above.
_NEG_INF = -_POS_INF
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def _VarintSize(value):
|
| 58 |
+
"""Compute the size of a varint value."""
|
| 59 |
+
if value <= 0x7f: return 1
|
| 60 |
+
if value <= 0x3fff: return 2
|
| 61 |
+
if value <= 0x1fffff: return 3
|
| 62 |
+
if value <= 0xfffffff: return 4
|
| 63 |
+
if value <= 0x7ffffffff: return 5
|
| 64 |
+
if value <= 0x3ffffffffff: return 6
|
| 65 |
+
if value <= 0x1ffffffffffff: return 7
|
| 66 |
+
if value <= 0xffffffffffffff: return 8
|
| 67 |
+
if value <= 0x7fffffffffffffff: return 9
|
| 68 |
+
return 10
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def _SignedVarintSize(value):
|
| 72 |
+
"""Compute the size of a signed varint value."""
|
| 73 |
+
if value < 0: return 10
|
| 74 |
+
if value <= 0x7f: return 1
|
| 75 |
+
if value <= 0x3fff: return 2
|
| 76 |
+
if value <= 0x1fffff: return 3
|
| 77 |
+
if value <= 0xfffffff: return 4
|
| 78 |
+
if value <= 0x7ffffffff: return 5
|
| 79 |
+
if value <= 0x3ffffffffff: return 6
|
| 80 |
+
if value <= 0x1ffffffffffff: return 7
|
| 81 |
+
if value <= 0xffffffffffffff: return 8
|
| 82 |
+
if value <= 0x7fffffffffffffff: return 9
|
| 83 |
+
return 10
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def _TagSize(field_number):
  """Returns the number of bytes required to serialize a tag with this field
  number."""
  # The wire type does not affect the encoded tag size, so type 0 is used.
  tag = wire_format.PackTag(field_number, 0)
  return _VarintSize(tag)
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
# --------------------------------------------------------------------
|
| 94 |
+
# In this section we define some generic sizers. Each of these functions
|
| 95 |
+
# takes parameters specific to a particular field type, e.g. int32 or fixed64.
|
| 96 |
+
# It returns another function which in turn takes parameters specific to a
|
| 97 |
+
# particular field, e.g. the field number and whether it is repeated or packed.
|
| 98 |
+
# Look at the next section to see how these are used.
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def _SimpleSizer(compute_value_size):
  """A sizer factory which uses compute_value_size for each element.

  Typically compute_value_size is _VarintSize. Returns a function taking
  (field_number, is_repeated, is_packed) that builds the concrete sizer.
  """

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)

    if is_packed:
      local_VarintSize = _VarintSize

      def PackedFieldSize(value):
        # Packed: one tag, a length varint, then the concatenated payload.
        payload = sum(compute_value_size(element) for element in value)
        return payload + local_VarintSize(payload) + tag_size

      return PackedFieldSize

    if is_repeated:

      def RepeatedFieldSize(value):
        # Repeated (unpacked): each element carries its own tag.
        return tag_size * len(value) + sum(
            compute_value_size(element) for element in value)

      return RepeatedFieldSize

    def FieldSize(value):
      return tag_size + compute_value_size(value)

    return FieldSize

  return SpecificSizer
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def _ModifiedSizer(compute_value_size, modify_value):
  """Like _SimpleSizer, but modify_value is applied to each value first.

  modify_value is typically ZigZagEncode; its result is what gets measured
  by compute_value_size.
  """

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)

    if is_packed:
      local_VarintSize = _VarintSize

      def PackedFieldSize(value):
        # Packed: one tag, a length varint, then the concatenated payload.
        payload = sum(
            compute_value_size(modify_value(element)) for element in value)
        return payload + local_VarintSize(payload) + tag_size

      return PackedFieldSize

    if is_repeated:

      def RepeatedFieldSize(value):
        # Repeated (unpacked): each element carries its own tag.
        return tag_size * len(value) + sum(
            compute_value_size(modify_value(element)) for element in value)

      return RepeatedFieldSize

    def FieldSize(value):
      return tag_size + compute_value_size(modify_value(value))

    return FieldSize

  return SpecificSizer
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
def _FixedSizer(value_size):
  """Like _SimpleSizer except for a fixed-size field. The input is the size
  of one value."""

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)

    if is_packed:
      local_VarintSize = _VarintSize

      def PackedFieldSize(value):
        # Packed: one tag, a length varint, then fixed-width payload.
        payload = len(value) * value_size
        return payload + local_VarintSize(payload) + tag_size

      return PackedFieldSize

    if is_repeated:
      # Each element costs the same, so precompute the per-element size.
      per_element = value_size + tag_size

      def RepeatedFieldSize(value):
        return len(value) * per_element

      return RepeatedFieldSize

    # Scalar field: the size is a constant regardless of the value.
    fixed_total = value_size + tag_size

    def FieldSize(value):
      return fixed_total

    return FieldSize

  return SpecificSizer
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
# ====================================================================
|
| 186 |
+
# Here we declare a sizer constructor for each field type. Each "sizer
|
| 187 |
+
# constructor" is a function that takes (field_number, is_repeated, is_packed)
|
| 188 |
+
# as parameters and returns a sizer, which in turn takes a field value as
|
| 189 |
+
# a parameter and returns its encoded size.
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
# Varint-encoded signed types: int32/int64/enum use the sign-extended varint
# size (negative values always occupy ten bytes on the wire).
Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize)

UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize)

# sint32/sint64 are ZigZag-transformed before sizing.
SInt32Sizer = SInt64Sizer = _ModifiedSizer(
    _SignedVarintSize, wire_format.ZigZagEncode)

# Fixed-width wire types: 4 bytes, 8 bytes, and 1 byte (bool) respectively.
Fixed32Sizer = SFixed32Sizer = FloatSizer = _FixedSizer(4)
Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)

BoolSizer = _FixedSizer(1)
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
def StringSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a string field."""

  tag_size = _TagSize(field_number)
  varint_size = _VarintSize
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      total = tag_size * len(value)
      for item in value:
        # Size is measured on the UTF-8 encoding, not the character count.
        byte_len = len(item.encode('utf-8'))
        total += varint_size(byte_len) + byte_len
      return total
    return RepeatedFieldSize
  def FieldSize(value):
    byte_len = len(value.encode('utf-8'))
    return tag_size + varint_size(byte_len) + byte_len
  return FieldSize
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
def BytesSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a bytes field."""

  tag_size = _TagSize(field_number)
  varint_size = _VarintSize
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      total = tag_size * len(value)
      for item in value:
        byte_len = len(item)
        total += varint_size(byte_len) + byte_len
      return total
    return RepeatedFieldSize
  def FieldSize(value):
    byte_len = len(value)
    return tag_size + varint_size(byte_len) + byte_len
  return FieldSize
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
def GroupSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a group field."""

  # Groups are framed by START_GROUP/END_GROUP tags: two tags per value.
  tag_size = _TagSize(field_number) * 2
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      return tag_size * len(value) + sum(e.ByteSize() for e in value)
    return RepeatedFieldSize
  def FieldSize(value):
    return tag_size + value.ByteSize()
  return FieldSize
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
def MessageSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a message field."""

  tag_size = _TagSize(field_number)
  varint_size = _VarintSize
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      total = tag_size * len(value)
      for item in value:
        # Length-delimited: each message costs a length varint plus its body.
        sub_size = item.ByteSize()
        total += varint_size(sub_size) + sub_size
      return total
    return RepeatedFieldSize
  def FieldSize(value):
    sub_size = value.ByteSize()
    return tag_size + varint_size(sub_size) + sub_size
  return FieldSize
|
| 286 |
+
|
| 287 |
+
|
| 288 |
+
# --------------------------------------------------------------------
|
| 289 |
+
# MessageSet is special: it needs custom logic to compute its size properly.
|
| 290 |
+
|
| 291 |
+
|
| 292 |
+
def MessageSetItemSizer(field_number):
  """Returns a sizer for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # Bytes that are the same for every value: the Item group's start and end
  # tags, the type_id tag plus its varint payload (this extension's field
  # number), and the message field's tag.
  static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) +
                 _TagSize(3))
  local_VarintSize = _VarintSize  # Bind to a local for faster lookup.

  def FieldSize(value):
    l = value.ByteSize()
    # Length-delimited payload: length varint followed by the message bytes.
    return static_size + local_VarintSize(l) + l

  return FieldSize
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
# --------------------------------------------------------------------
|
| 315 |
+
# Map is special: it needs custom logic to compute its size properly.
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
def MapSizer(field_descriptor, is_message_map):
  """Returns a sizer for a map field."""

  # Can't look at field_descriptor.message_type._concrete_class because it may
  # not have been initialized yet.
  message_type = field_descriptor.message_type
  # Each map entry is sized exactly like a singular embedded message.
  message_sizer = MessageSizer(field_descriptor.number, False, False)

  def FieldSize(map_value):
    total = 0
    for key in map_value:
      value = map_value[key]
      # It's wasteful to create the messages and throw them away one second
      # later since we'll do the same for the actual encode. But there's not an
      # obvious way to avoid this within the current design without tons of code
      # duplication. For message map, value.ByteSize() should be called to
      # update the status.
      entry_msg = message_type._concrete_class(key=key, value=value)
      total += message_sizer(entry_msg)
      if is_message_map:
        value.ByteSize()
    return total

  return FieldSize
|
| 342 |
+
|
| 343 |
+
# ====================================================================
|
| 344 |
+
# Encoders!
|
| 345 |
+
|
| 346 |
+
|
| 347 |
+
def _VarintEncoder():
|
| 348 |
+
"""Return an encoder for a basic varint value (does not include tag)."""
|
| 349 |
+
|
| 350 |
+
local_int2byte = struct.Struct('>B').pack
|
| 351 |
+
|
| 352 |
+
def EncodeVarint(write, value, unused_deterministic=None):
|
| 353 |
+
bits = value & 0x7f
|
| 354 |
+
value >>= 7
|
| 355 |
+
while value:
|
| 356 |
+
write(local_int2byte(0x80|bits))
|
| 357 |
+
bits = value & 0x7f
|
| 358 |
+
value >>= 7
|
| 359 |
+
return write(local_int2byte(bits))
|
| 360 |
+
|
| 361 |
+
return EncodeVarint
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
def _SignedVarintEncoder():
|
| 365 |
+
"""Return an encoder for a basic signed varint value (does not include
|
| 366 |
+
tag)."""
|
| 367 |
+
|
| 368 |
+
local_int2byte = struct.Struct('>B').pack
|
| 369 |
+
|
| 370 |
+
def EncodeSignedVarint(write, value, unused_deterministic=None):
|
| 371 |
+
if value < 0:
|
| 372 |
+
value += (1 << 64)
|
| 373 |
+
bits = value & 0x7f
|
| 374 |
+
value >>= 7
|
| 375 |
+
while value:
|
| 376 |
+
write(local_int2byte(0x80|bits))
|
| 377 |
+
bits = value & 0x7f
|
| 378 |
+
value >>= 7
|
| 379 |
+
return write(local_int2byte(bits))
|
| 380 |
+
|
| 381 |
+
return EncodeSignedVarint
|
| 382 |
+
|
| 383 |
+
|
| 384 |
+
# Module-level encoder instances shared by all the tag/field encoders below.
_EncodeVarint = _VarintEncoder()
_EncodeSignedVarint = _SignedVarintEncoder()
|
| 386 |
+
|
| 387 |
+
|
| 388 |
+
def _VarintBytes(value):
  """Encode the given integer as a varint and return the bytes. This is only
  called at startup time so it doesn't need to be fast."""

  out = bytearray()
  _EncodeVarint(out.extend, value, True)
  return bytes(out)
|
| 395 |
+
|
| 396 |
+
|
| 397 |
+
def TagBytes(field_number, wire_type):
  """Encode the given tag and return the bytes. Only called at startup."""

  packed_tag = wire_format.PackTag(field_number, wire_type)
  return bytes(_VarintBytes(packed_tag))
|
| 401 |
+
|
| 402 |
+
# --------------------------------------------------------------------
|
| 403 |
+
# As with sizers (see above), we have a number of common encoder
|
| 404 |
+
# implementations.
|
| 405 |
+
|
| 406 |
+
|
| 407 |
+
def _SimpleEncoder(wire_type, encode_value, compute_value_size):
  """Return a constructor for an encoder for fields of a particular type.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      encode_value:  A function which encodes an individual value, e.g.
        _EncodeVarint().
      compute_value_size:  A function which computes the size of an individual
        value, e.g. _VarintSize().
  """

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint  # Bind to a local for faster lookup.
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        # First pass: compute the payload size for the length prefix...
        size = 0
        for element in value:
          size += compute_value_size(element)
        local_EncodeVarint(write, size, deterministic)
        # ...second pass: write the elements themselves.
        for element in value:
          encode_value(write, element, deterministic)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, deterministic):
        for element in value:
          write(tag_bytes)
          encode_value(write, element, deterministic)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, deterministic):
        write(tag_bytes)
        return encode_value(write, value, deterministic)
      return EncodeField

  return SpecificEncoder
|
| 446 |
+
|
| 447 |
+
|
| 448 |
+
def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
  """Like SimpleEncoder but additionally invokes modify_value on every value
  before passing it to encode_value. Usually modify_value is ZigZagEncode."""

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint  # Bind to a local for faster lookup.
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        # First pass: total transformed payload size for the length prefix.
        size = 0
        for element in value:
          size += compute_value_size(modify_value(element))
        local_EncodeVarint(write, size, deterministic)
        # Second pass: the transformed elements themselves.
        for element in value:
          encode_value(write, modify_value(element), deterministic)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, deterministic):
        for element in value:
          write(tag_bytes)
          encode_value(write, modify_value(element), deterministic)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, deterministic):
        write(tag_bytes)
        return encode_value(write, modify_value(value), deterministic)
      return EncodeField

  return SpecificEncoder
|
| 480 |
+
|
| 481 |
+
|
| 482 |
+
def _StructPackEncoder(wire_type, format):
  """Return a constructor for an encoder for a fixed-width field.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      format:  The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack  # Bind to a local for faster lookup.
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        # Fixed-width elements make the payload length a simple product.
        local_EncodeVarint(write, len(value) * value_size, deterministic)
        for element in value:
          write(local_struct_pack(format, element))
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, unused_deterministic=None):
        for element in value:
          write(tag_bytes)
          write(local_struct_pack(format, element))
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, unused_deterministic=None):
        write(tag_bytes)
        return write(local_struct_pack(format, value))
      return EncodeField

  return SpecificEncoder
|
| 518 |
+
|
| 519 |
+
|
| 520 |
+
def _FloatingPointEncoder(wire_type, format):
  """Return a constructor for an encoder for float fields.

  This is like StructPackEncoder, but catches errors that may be due to
  passing non-finite floating-point values to struct.pack, and makes a
  second attempt to encode those values.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      format:  The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)
  if value_size == 4:
    def EncodeNonFiniteOrRaise(write, value):
      # Remember that the serialized form uses little-endian byte order.
      if value == _POS_INF:
        write(b'\x00\x00\x80\x7F')
      elif value == _NEG_INF:
        write(b'\x00\x00\x80\xFF')
      elif value != value:  # NaN never compares equal to itself.
        write(b'\x00\x00\xC0\x7F')
      else:
        # Value is finite after all: re-raise the original struct.pack error.
        # (This function is only ever called from inside an except block, so
        # the bare raise has an active exception context.)
        raise
  elif value_size == 8:
    def EncodeNonFiniteOrRaise(write, value):
      if value == _POS_INF:
        write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F')
      elif value == _NEG_INF:
        write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF')
      elif value != value:  # NaN
        write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F')
      else:
        raise
  else:
    raise ValueError('Can\'t encode floating-point values that are '
                     '%d bytes long (only 4 or 8)' % value_size)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack  # Bind to a local for faster lookup.
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        local_EncodeVarint(write, len(value) * value_size, deterministic)
        for element in value:
          # This try/except block is going to be faster than any code that
          # we could write to check whether element is finite.
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, unused_deterministic=None):
        for element in value:
          write(tag_bytes)
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, unused_deterministic=None):
        write(tag_bytes)
        try:
          write(local_struct_pack(format, value))
        except SystemError:
          EncodeNonFiniteOrRaise(write, value)
      return EncodeField

  return SpecificEncoder
|
| 595 |
+
|
| 596 |
+
|
| 597 |
+
# ====================================================================
|
| 598 |
+
# Here we declare an encoder constructor for each field type. These work
|
| 599 |
+
# very similarly to sizer constructors, described earlier.
|
| 600 |
+
|
| 601 |
+
|
| 602 |
+
# Varint encoders: int32/int64/enum use the sign-extended form, uint32/uint64
# the plain form, and sint32/sint64 ZigZag-transform each value first.
Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)

UInt32Encoder = UInt64Encoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)

SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
    wire_format.ZigZagEncode)

# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
DoubleEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')
|
| 622 |
+
|
| 623 |
+
|
| 624 |
+
def BoolEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a boolean field."""

  false_byte = b'\x00'
  true_byte = b'\x01'
  if is_packed:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
    encode_varint = _EncodeVarint
    def EncodePackedField(write, value, deterministic):
      write(tag_bytes)
      # Each bool is exactly one byte, so the payload length is len(value).
      encode_varint(write, len(value), deterministic)
      for element in value:
        write(true_byte if element else false_byte)
    return EncodePackedField
  if is_repeated:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    def EncodeRepeatedField(write, value, unused_deterministic=None):
      for element in value:
        write(tag_bytes)
        write(true_byte if element else false_byte)
    return EncodeRepeatedField
  tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
  def EncodeField(write, value, unused_deterministic=None):
    write(tag_bytes)
    return write(true_byte if value else false_byte)
  return EncodeField
|
| 659 |
+
|
| 660 |
+
|
| 661 |
+
def StringEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a string field."""

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  encode_varint = _EncodeVarint
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        # Encode first: the length prefix counts UTF-8 bytes, not characters.
        data = element.encode('utf-8')
        write(tag)
        encode_varint(write, len(data), deterministic)
        write(data)
    return EncodeRepeatedField
  def EncodeField(write, value, deterministic):
    data = value.encode('utf-8')
    write(tag)
    encode_varint(write, len(data), deterministic)
    return write(data)
  return EncodeField
|
| 683 |
+
|
| 684 |
+
|
| 685 |
+
def BytesEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a bytes field."""

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  encode_varint = _EncodeVarint
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for chunk in value:
        write(tag)
        encode_varint(write, len(chunk), deterministic)
        write(chunk)
    return EncodeRepeatedField
  def EncodeField(write, value, deterministic):
    write(tag)
    encode_varint(write, len(value), deterministic)
    return write(value)
  return EncodeField
|
| 705 |
+
|
| 706 |
+
|
| 707 |
+
def GroupEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a group field."""

  # Groups are delimited by start/end tags rather than a length prefix.
  start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
  end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for group in value:
        write(start_tag)
        group._InternalSerialize(write, deterministic)
        write(end_tag)
    return EncodeRepeatedField
  def EncodeField(write, value, deterministic):
    write(start_tag)
    value._InternalSerialize(write, deterministic)
    return write(end_tag)
  return EncodeField
|
| 726 |
+
|
| 727 |
+
|
| 728 |
+
def MessageEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a message field."""

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  encode_varint = _EncodeVarint
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for msg in value:
        write(tag)
        # ByteSize() supplies the length prefix for the nested message.
        encode_varint(write, msg.ByteSize(), deterministic)
        msg._InternalSerialize(write, deterministic)
    return EncodeRepeatedField
  def EncodeField(write, value, deterministic):
    write(tag)
    encode_varint(write, value.ByteSize(), deterministic)
    return value._InternalSerialize(write, deterministic)
  return EncodeField
|
| 747 |
+
|
| 748 |
+
|
| 749 |
+
# --------------------------------------------------------------------
|
| 750 |
+
# As before, MessageSet is special.
|
| 751 |
+
|
| 752 |
+
|
| 753 |
+
def MessageSetItemEncoder(field_number):
  """Encoder for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # Everything before the message payload is constant per field number: the
  # Item group's start tag, the type_id tag and value, and the message
  # field's tag.
  start_bytes = b"".join([
      TagBytes(1, wire_format.WIRETYPE_START_GROUP),
      TagBytes(2, wire_format.WIRETYPE_VARINT),
      _VarintBytes(field_number),
      TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)])
  end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
  local_EncodeVarint = _EncodeVarint  # Bind to a local for faster lookup.

  def EncodeField(write, value, deterministic):
    write(start_bytes)
    # Length-prefixed message payload, then close the Item group.
    local_EncodeVarint(write, value.ByteSize(), deterministic)
    value._InternalSerialize(write, deterministic)
    return write(end_bytes)

  return EncodeField
|
| 779 |
+
|
| 780 |
+
|
| 781 |
+
# --------------------------------------------------------------------
|
| 782 |
+
# As before, Map is special.
|
| 783 |
+
|
| 784 |
+
|
| 785 |
+
def MapEncoder(field_descriptor):
  """Encoder for map fields.

  Maps always have a wire format like this:
    message MapEntry {
      key_type key = 1;
      value_type value = 2;
    }
    repeated MapEntry map = N;
  """
  # Can't look at field_descriptor.message_type._concrete_class because it may
  # not have been initialized yet.
  message_type = field_descriptor.message_type
  # Each entry is serialized exactly like a singular embedded message.
  encode_message = MessageEncoder(field_descriptor.number, False, False)

  def EncodeField(write, value, deterministic):
    # Sorting the keys makes deterministic serialization reproducible.
    value_keys = sorted(value.keys()) if deterministic else value
    for key in value_keys:
      entry_msg = message_type._concrete_class(key=key, value=value[key])
      encode_message(write, entry_msg, deterministic)

  return EncodeField
|
lib/python3.10/site-packages/google/protobuf/internal/extension_dict.py
ADDED
|
@@ -0,0 +1,194 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
"""Contains _ExtensionDict class to represent extensions.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from google.protobuf.internal import type_checkers
|
| 12 |
+
from google.protobuf.descriptor import FieldDescriptor
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def _VerifyExtensionHandle(message, extension_handle):
  """Verify that the given extension handle is valid."""

  # All failure modes raise KeyError so dict-style callers see one type.
  if not isinstance(extension_handle, FieldDescriptor):
    raise KeyError('HasExtension() expects an extension handle, got: %s' %
                   extension_handle)

  if not extension_handle.is_extension:
    raise KeyError('"%s" is not an extension.' % extension_handle.full_name)

  if not extension_handle.containing_type:
    raise KeyError('"%s" is missing a containing_type.'
                   % extension_handle.full_name)

  # Identity comparison: the handle must extend exactly this message's
  # descriptor object, not merely an equal one.
  if extension_handle.containing_type is not message.DESCRIPTOR:
    raise KeyError('Extension "%s" extends message type "%s", but this '
                   'message is of type "%s".' %
                   (extension_handle.full_name,
                    extension_handle.containing_type.full_name,
                    message.DESCRIPTOR.full_name))
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
# TODO: Unify error handling of "unknown extension" crap.
|
| 38 |
+
# TODO: Support iteritems()-style iteration over all
|
| 39 |
+
# extensions with the "has" bits turned on?
|
| 40 |
+
class _ExtensionDict(object):
|
| 41 |
+
|
| 42 |
+
"""Dict-like container for Extension fields on proto instances.
|
| 43 |
+
|
| 44 |
+
Note that in all cases we expect extension handles to be
|
| 45 |
+
FieldDescriptors.
|
| 46 |
+
"""
|
| 47 |
+
|
| 48 |
+
  def __init__(self, extended_message):
    """
    Args:
      extended_message: Message instance for which we are the Extensions dict.
    """
    # The message whose extension fields this dict-like object exposes.
    self._extended_message = extended_message
|
| 54 |
+
|
| 55 |
+
def __getitem__(self, extension_handle):
|
| 56 |
+
"""Returns the current value of the given extension handle."""
|
| 57 |
+
|
| 58 |
+
_VerifyExtensionHandle(self._extended_message, extension_handle)
|
| 59 |
+
|
| 60 |
+
result = self._extended_message._fields.get(extension_handle)
|
| 61 |
+
if result is not None:
|
| 62 |
+
return result
|
| 63 |
+
|
| 64 |
+
if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
|
| 65 |
+
result = extension_handle._default_constructor(self._extended_message)
|
| 66 |
+
elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
|
| 67 |
+
message_type = extension_handle.message_type
|
| 68 |
+
if not hasattr(message_type, '_concrete_class'):
|
| 69 |
+
# pylint: disable=g-import-not-at-top
|
| 70 |
+
from google.protobuf import message_factory
|
| 71 |
+
message_factory.GetMessageClass(message_type)
|
| 72 |
+
if not hasattr(extension_handle.message_type, '_concrete_class'):
|
| 73 |
+
from google.protobuf import message_factory
|
| 74 |
+
message_factory.GetMessageClass(extension_handle.message_type)
|
| 75 |
+
result = extension_handle.message_type._concrete_class()
|
| 76 |
+
try:
|
| 77 |
+
result._SetListener(self._extended_message._listener_for_children)
|
| 78 |
+
except ReferenceError:
|
| 79 |
+
pass
|
| 80 |
+
else:
|
| 81 |
+
# Singular scalar -- just return the default without inserting into the
|
| 82 |
+
# dict.
|
| 83 |
+
return extension_handle.default_value
|
| 84 |
+
|
| 85 |
+
# Atomically check if another thread has preempted us and, if not, swap
|
| 86 |
+
# in the new object we just created. If someone has preempted us, we
|
| 87 |
+
# take that object and discard ours.
|
| 88 |
+
# WARNING: We are relying on setdefault() being atomic. This is true
|
| 89 |
+
# in CPython but we haven't investigated others. This warning appears
|
| 90 |
+
# in several other locations in this file.
|
| 91 |
+
result = self._extended_message._fields.setdefault(
|
| 92 |
+
extension_handle, result)
|
| 93 |
+
|
| 94 |
+
return result
|
| 95 |
+
|
| 96 |
+
def __eq__(self, other):
|
| 97 |
+
if not isinstance(other, self.__class__):
|
| 98 |
+
return False
|
| 99 |
+
|
| 100 |
+
my_fields = self._extended_message.ListFields()
|
| 101 |
+
other_fields = other._extended_message.ListFields()
|
| 102 |
+
|
| 103 |
+
# Get rid of non-extension fields.
|
| 104 |
+
my_fields = [field for field in my_fields if field.is_extension]
|
| 105 |
+
other_fields = [field for field in other_fields if field.is_extension]
|
| 106 |
+
|
| 107 |
+
return my_fields == other_fields
|
| 108 |
+
|
| 109 |
+
def __ne__(self, other):
|
| 110 |
+
return not self == other
|
| 111 |
+
|
| 112 |
+
def __len__(self):
|
| 113 |
+
fields = self._extended_message.ListFields()
|
| 114 |
+
# Get rid of non-extension fields.
|
| 115 |
+
extension_fields = [field for field in fields if field[0].is_extension]
|
| 116 |
+
return len(extension_fields)
|
| 117 |
+
|
| 118 |
+
def __hash__(self):
|
| 119 |
+
raise TypeError('unhashable object')
|
| 120 |
+
|
| 121 |
+
# Note that this is only meaningful for non-repeated, scalar extension
|
| 122 |
+
# fields. Note also that we may have to call _Modified() when we do
|
| 123 |
+
# successfully set a field this way, to set any necessary "has" bits in the
|
| 124 |
+
# ancestors of the extended message.
|
| 125 |
+
def __setitem__(self, extension_handle, value):
|
| 126 |
+
"""If extension_handle specifies a non-repeated, scalar extension
|
| 127 |
+
field, sets the value of that field.
|
| 128 |
+
"""
|
| 129 |
+
|
| 130 |
+
_VerifyExtensionHandle(self._extended_message, extension_handle)
|
| 131 |
+
|
| 132 |
+
if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or
|
| 133 |
+
extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE):
|
| 134 |
+
raise TypeError(
|
| 135 |
+
'Cannot assign to extension "%s" because it is a repeated or '
|
| 136 |
+
'composite type.' % extension_handle.full_name)
|
| 137 |
+
|
| 138 |
+
# It's slightly wasteful to lookup the type checker each time,
|
| 139 |
+
# but we expect this to be a vanishingly uncommon case anyway.
|
| 140 |
+
type_checker = type_checkers.GetTypeChecker(extension_handle)
|
| 141 |
+
# pylint: disable=protected-access
|
| 142 |
+
self._extended_message._fields[extension_handle] = (
|
| 143 |
+
type_checker.CheckValue(value))
|
| 144 |
+
self._extended_message._Modified()
|
| 145 |
+
|
| 146 |
+
def __delitem__(self, extension_handle):
|
| 147 |
+
self._extended_message.ClearExtension(extension_handle)
|
| 148 |
+
|
| 149 |
+
def _FindExtensionByName(self, name):
|
| 150 |
+
"""Tries to find a known extension with the specified name.
|
| 151 |
+
|
| 152 |
+
Args:
|
| 153 |
+
name: Extension full name.
|
| 154 |
+
|
| 155 |
+
Returns:
|
| 156 |
+
Extension field descriptor.
|
| 157 |
+
"""
|
| 158 |
+
descriptor = self._extended_message.DESCRIPTOR
|
| 159 |
+
extensions = descriptor.file.pool._extensions_by_name[descriptor]
|
| 160 |
+
return extensions.get(name, None)
|
| 161 |
+
|
| 162 |
+
def _FindExtensionByNumber(self, number):
|
| 163 |
+
"""Tries to find a known extension with the field number.
|
| 164 |
+
|
| 165 |
+
Args:
|
| 166 |
+
number: Extension field number.
|
| 167 |
+
|
| 168 |
+
Returns:
|
| 169 |
+
Extension field descriptor.
|
| 170 |
+
"""
|
| 171 |
+
descriptor = self._extended_message.DESCRIPTOR
|
| 172 |
+
extensions = descriptor.file.pool._extensions_by_number[descriptor]
|
| 173 |
+
return extensions.get(number, None)
|
| 174 |
+
|
| 175 |
+
def __iter__(self):
|
| 176 |
+
# Return a generator over the populated extension fields
|
| 177 |
+
return (f[0] for f in self._extended_message.ListFields()
|
| 178 |
+
if f[0].is_extension)
|
| 179 |
+
|
| 180 |
+
def __contains__(self, extension_handle):
|
| 181 |
+
_VerifyExtensionHandle(self._extended_message, extension_handle)
|
| 182 |
+
|
| 183 |
+
if extension_handle not in self._extended_message._fields:
|
| 184 |
+
return False
|
| 185 |
+
|
| 186 |
+
if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
|
| 187 |
+
return bool(self._extended_message._fields.get(extension_handle))
|
| 188 |
+
|
| 189 |
+
if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
|
| 190 |
+
value = self._extended_message._fields.get(extension_handle)
|
| 191 |
+
# pylint: disable=protected-access
|
| 192 |
+
return value is not None and value._is_present_in_parent
|
| 193 |
+
|
| 194 |
+
return True
|
lib/python3.10/site-packages/google/protobuf/internal/message_listener.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
"""Defines a listener interface for observing certain
|
| 9 |
+
state transitions on Message objects.
|
| 10 |
+
|
| 11 |
+
Also defines a null implementation of this interface.
|
| 12 |
+
"""
|
| 13 |
+
|
| 14 |
+
__author__ = 'robinson@google.com (Will Robinson)'
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class MessageListener(object):
|
| 18 |
+
|
| 19 |
+
"""Listens for modifications made to a message. Meant to be registered via
|
| 20 |
+
Message._SetListener().
|
| 21 |
+
|
| 22 |
+
Attributes:
|
| 23 |
+
dirty: If True, then calling Modified() would be a no-op. This can be
|
| 24 |
+
used to avoid these calls entirely in the common case.
|
| 25 |
+
"""
|
| 26 |
+
|
| 27 |
+
def Modified(self):
|
| 28 |
+
"""Called every time the message is modified in such a way that the parent
|
| 29 |
+
message may need to be updated. This currently means either:
|
| 30 |
+
(a) The message was modified for the first time, so the parent message
|
| 31 |
+
should henceforth mark the message as present.
|
| 32 |
+
(b) The message's cached byte size became dirty -- i.e. the message was
|
| 33 |
+
modified for the first time after a previous call to ByteSize().
|
| 34 |
+
Therefore the parent should also mark its byte size as dirty.
|
| 35 |
+
Note that (a) implies (b), since new objects start out with a client cached
|
| 36 |
+
size (zero). However, we document (a) explicitly because it is important.
|
| 37 |
+
|
| 38 |
+
Modified() will *only* be called in response to one of these two events --
|
| 39 |
+
not every time the sub-message is modified.
|
| 40 |
+
|
| 41 |
+
Note that if the listener's |dirty| attribute is true, then calling
|
| 42 |
+
Modified at the moment would be a no-op, so it can be skipped. Performance-
|
| 43 |
+
sensitive callers should check this attribute directly before calling since
|
| 44 |
+
it will be true most of the time.
|
| 45 |
+
"""
|
| 46 |
+
|
| 47 |
+
raise NotImplementedError
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class NullMessageListener(object):
|
| 51 |
+
|
| 52 |
+
"""No-op MessageListener implementation."""
|
| 53 |
+
|
| 54 |
+
def Modified(self):
|
| 55 |
+
pass
|
lib/python3.10/site-packages/google/protobuf/internal/python_message.py
ADDED
|
@@ -0,0 +1,1546 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
# This code is meant to work on Python 2.4 and above only.
|
| 9 |
+
#
|
| 10 |
+
# TODO: Helpers for verbose, common checks like seeing if a
|
| 11 |
+
# descriptor's cpp_type is CPPTYPE_MESSAGE.
|
| 12 |
+
|
| 13 |
+
"""Contains a metaclass and helper functions used to create
|
| 14 |
+
protocol message classes from Descriptor objects at runtime.
|
| 15 |
+
|
| 16 |
+
Recall that a metaclass is the "type" of a class.
|
| 17 |
+
(A class is to a metaclass what an instance is to a class.)
|
| 18 |
+
|
| 19 |
+
In this case, we use the GeneratedProtocolMessageType metaclass
|
| 20 |
+
to inject all the useful functionality into the classes
|
| 21 |
+
output by the protocol compiler at compile-time.
|
| 22 |
+
|
| 23 |
+
The upshot of all this is that the real implementation
|
| 24 |
+
details for ALL pure-Python protocol buffers are *here in
|
| 25 |
+
this file*.
|
| 26 |
+
"""
|
| 27 |
+
|
| 28 |
+
__author__ = 'robinson@google.com (Will Robinson)'
|
| 29 |
+
|
| 30 |
+
from io import BytesIO
|
| 31 |
+
import struct
|
| 32 |
+
import sys
|
| 33 |
+
import warnings
|
| 34 |
+
import weakref
|
| 35 |
+
|
| 36 |
+
from google.protobuf import descriptor as descriptor_mod
|
| 37 |
+
from google.protobuf import message as message_mod
|
| 38 |
+
from google.protobuf import text_format
|
| 39 |
+
# We use "as" to avoid name collisions with variables.
|
| 40 |
+
from google.protobuf.internal import api_implementation
|
| 41 |
+
from google.protobuf.internal import containers
|
| 42 |
+
from google.protobuf.internal import decoder
|
| 43 |
+
from google.protobuf.internal import encoder
|
| 44 |
+
from google.protobuf.internal import enum_type_wrapper
|
| 45 |
+
from google.protobuf.internal import extension_dict
|
| 46 |
+
from google.protobuf.internal import message_listener as message_listener_mod
|
| 47 |
+
from google.protobuf.internal import type_checkers
|
| 48 |
+
from google.protobuf.internal import well_known_types
|
| 49 |
+
from google.protobuf.internal import wire_format
|
| 50 |
+
|
| 51 |
+
_FieldDescriptor = descriptor_mod.FieldDescriptor
|
| 52 |
+
_AnyFullTypeName = 'google.protobuf.Any'
|
| 53 |
+
_ExtensionDict = extension_dict._ExtensionDict
|
| 54 |
+
|
| 55 |
+
class GeneratedProtocolMessageType(type):
|
| 56 |
+
|
| 57 |
+
"""Metaclass for protocol message classes created at runtime from Descriptors.
|
| 58 |
+
|
| 59 |
+
We add implementations for all methods described in the Message class. We
|
| 60 |
+
also create properties to allow getting/setting all fields in the protocol
|
| 61 |
+
message. Finally, we create slots to prevent users from accidentally
|
| 62 |
+
"setting" nonexistent fields in the protocol message, which then wouldn't get
|
| 63 |
+
serialized / deserialized properly.
|
| 64 |
+
|
| 65 |
+
The protocol compiler currently uses this metaclass to create protocol
|
| 66 |
+
message classes at runtime. Clients can also manually create their own
|
| 67 |
+
classes at runtime, as in this example:
|
| 68 |
+
|
| 69 |
+
mydescriptor = Descriptor(.....)
|
| 70 |
+
factory = symbol_database.Default()
|
| 71 |
+
factory.pool.AddDescriptor(mydescriptor)
|
| 72 |
+
MyProtoClass = factory.GetPrototype(mydescriptor)
|
| 73 |
+
myproto_instance = MyProtoClass()
|
| 74 |
+
myproto.foo_field = 23
|
| 75 |
+
...
|
| 76 |
+
"""
|
| 77 |
+
|
| 78 |
+
# Must be consistent with the protocol-compiler code in
|
| 79 |
+
# proto2/compiler/internal/generator.*.
|
| 80 |
+
_DESCRIPTOR_KEY = 'DESCRIPTOR'
|
| 81 |
+
|
| 82 |
+
def __new__(cls, name, bases, dictionary):
|
| 83 |
+
"""Custom allocation for runtime-generated class types.
|
| 84 |
+
|
| 85 |
+
We override __new__ because this is apparently the only place
|
| 86 |
+
where we can meaningfully set __slots__ on the class we're creating(?).
|
| 87 |
+
(The interplay between metaclasses and slots is not very well-documented).
|
| 88 |
+
|
| 89 |
+
Args:
|
| 90 |
+
name: Name of the class (ignored, but required by the
|
| 91 |
+
metaclass protocol).
|
| 92 |
+
bases: Base classes of the class we're constructing.
|
| 93 |
+
(Should be message.Message). We ignore this field, but
|
| 94 |
+
it's required by the metaclass protocol
|
| 95 |
+
dictionary: The class dictionary of the class we're
|
| 96 |
+
constructing. dictionary[_DESCRIPTOR_KEY] must contain
|
| 97 |
+
a Descriptor object describing this protocol message
|
| 98 |
+
type.
|
| 99 |
+
|
| 100 |
+
Returns:
|
| 101 |
+
Newly-allocated class.
|
| 102 |
+
|
| 103 |
+
Raises:
|
| 104 |
+
RuntimeError: Generated code only work with python cpp extension.
|
| 105 |
+
"""
|
| 106 |
+
descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
|
| 107 |
+
|
| 108 |
+
if isinstance(descriptor, str):
|
| 109 |
+
raise RuntimeError('The generated code only work with python cpp '
|
| 110 |
+
'extension, but it is using pure python runtime.')
|
| 111 |
+
|
| 112 |
+
# If a concrete class already exists for this descriptor, don't try to
|
| 113 |
+
# create another. Doing so will break any messages that already exist with
|
| 114 |
+
# the existing class.
|
| 115 |
+
#
|
| 116 |
+
# The C++ implementation appears to have its own internal `PyMessageFactory`
|
| 117 |
+
# to achieve similar results.
|
| 118 |
+
#
|
| 119 |
+
# This most commonly happens in `text_format.py` when using descriptors from
|
| 120 |
+
# a custom pool; it calls symbol_database.Global().getPrototype() on a
|
| 121 |
+
# descriptor which already has an existing concrete class.
|
| 122 |
+
new_class = getattr(descriptor, '_concrete_class', None)
|
| 123 |
+
if new_class:
|
| 124 |
+
return new_class
|
| 125 |
+
|
| 126 |
+
if descriptor.full_name in well_known_types.WKTBASES:
|
| 127 |
+
bases += (well_known_types.WKTBASES[descriptor.full_name],)
|
| 128 |
+
_AddClassAttributesForNestedExtensions(descriptor, dictionary)
|
| 129 |
+
_AddSlots(descriptor, dictionary)
|
| 130 |
+
|
| 131 |
+
superclass = super(GeneratedProtocolMessageType, cls)
|
| 132 |
+
new_class = superclass.__new__(cls, name, bases, dictionary)
|
| 133 |
+
return new_class
|
| 134 |
+
|
| 135 |
+
def __init__(cls, name, bases, dictionary):
|
| 136 |
+
"""Here we perform the majority of our work on the class.
|
| 137 |
+
We add enum getters, an __init__ method, implementations
|
| 138 |
+
of all Message methods, and properties for all fields
|
| 139 |
+
in the protocol type.
|
| 140 |
+
|
| 141 |
+
Args:
|
| 142 |
+
name: Name of the class (ignored, but required by the
|
| 143 |
+
metaclass protocol).
|
| 144 |
+
bases: Base classes of the class we're constructing.
|
| 145 |
+
(Should be message.Message). We ignore this field, but
|
| 146 |
+
it's required by the metaclass protocol
|
| 147 |
+
dictionary: The class dictionary of the class we're
|
| 148 |
+
constructing. dictionary[_DESCRIPTOR_KEY] must contain
|
| 149 |
+
a Descriptor object describing this protocol message
|
| 150 |
+
type.
|
| 151 |
+
"""
|
| 152 |
+
descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
|
| 153 |
+
|
| 154 |
+
# If this is an _existing_ class looked up via `_concrete_class` in the
|
| 155 |
+
# __new__ method above, then we don't need to re-initialize anything.
|
| 156 |
+
existing_class = getattr(descriptor, '_concrete_class', None)
|
| 157 |
+
if existing_class:
|
| 158 |
+
assert existing_class is cls, (
|
| 159 |
+
'Duplicate `GeneratedProtocolMessageType` created for descriptor %r'
|
| 160 |
+
% (descriptor.full_name))
|
| 161 |
+
return
|
| 162 |
+
|
| 163 |
+
cls._message_set_decoders_by_tag = {}
|
| 164 |
+
cls._fields_by_tag = {}
|
| 165 |
+
if (descriptor.has_options and
|
| 166 |
+
descriptor.GetOptions().message_set_wire_format):
|
| 167 |
+
cls._message_set_decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = (
|
| 168 |
+
decoder.MessageSetItemDecoder(descriptor),
|
| 169 |
+
None,
|
| 170 |
+
)
|
| 171 |
+
|
| 172 |
+
# Attach stuff to each FieldDescriptor for quick lookup later on.
|
| 173 |
+
for field in descriptor.fields:
|
| 174 |
+
_AttachFieldHelpers(cls, field)
|
| 175 |
+
|
| 176 |
+
if descriptor.is_extendable and hasattr(descriptor.file, 'pool'):
|
| 177 |
+
extensions = descriptor.file.pool.FindAllExtensions(descriptor)
|
| 178 |
+
for ext in extensions:
|
| 179 |
+
_AttachFieldHelpers(cls, ext)
|
| 180 |
+
|
| 181 |
+
descriptor._concrete_class = cls # pylint: disable=protected-access
|
| 182 |
+
_AddEnumValues(descriptor, cls)
|
| 183 |
+
_AddInitMethod(descriptor, cls)
|
| 184 |
+
_AddPropertiesForFields(descriptor, cls)
|
| 185 |
+
_AddPropertiesForExtensions(descriptor, cls)
|
| 186 |
+
_AddStaticMethods(cls)
|
| 187 |
+
_AddMessageMethods(descriptor, cls)
|
| 188 |
+
_AddPrivateHelperMethods(descriptor, cls)
|
| 189 |
+
|
| 190 |
+
superclass = super(GeneratedProtocolMessageType, cls)
|
| 191 |
+
superclass.__init__(name, bases, dictionary)
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
# Stateless helpers for GeneratedProtocolMessageType below.
|
| 195 |
+
# Outside clients should not access these directly.
|
| 196 |
+
#
|
| 197 |
+
# I opted not to make any of these methods on the metaclass, to make it more
|
| 198 |
+
# clear that I'm not really using any state there and to keep clients from
|
| 199 |
+
# thinking that they have direct access to these construction helpers.
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
def _PropertyName(proto_field_name):
|
| 203 |
+
"""Returns the name of the public property attribute which
|
| 204 |
+
clients can use to get and (in some cases) set the value
|
| 205 |
+
of a protocol message field.
|
| 206 |
+
|
| 207 |
+
Args:
|
| 208 |
+
proto_field_name: The protocol message field name, exactly
|
| 209 |
+
as it appears (or would appear) in a .proto file.
|
| 210 |
+
"""
|
| 211 |
+
# TODO: Escape Python keywords (e.g., yield), and test this support.
|
| 212 |
+
# nnorwitz makes my day by writing:
|
| 213 |
+
# """
|
| 214 |
+
# FYI. See the keyword module in the stdlib. This could be as simple as:
|
| 215 |
+
#
|
| 216 |
+
# if keyword.iskeyword(proto_field_name):
|
| 217 |
+
# return proto_field_name + "_"
|
| 218 |
+
# return proto_field_name
|
| 219 |
+
# """
|
| 220 |
+
# Kenton says: The above is a BAD IDEA. People rely on being able to use
|
| 221 |
+
# getattr() and setattr() to reflectively manipulate field values. If we
|
| 222 |
+
# rename the properties, then every such user has to also make sure to apply
|
| 223 |
+
# the same transformation. Note that currently if you name a field "yield",
|
| 224 |
+
# you can still access it just fine using getattr/setattr -- it's not even
|
| 225 |
+
# that cumbersome to do so.
|
| 226 |
+
# TODO: Remove this method entirely if/when everyone agrees with my
|
| 227 |
+
# position.
|
| 228 |
+
return proto_field_name
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
def _AddSlots(message_descriptor, dictionary):
|
| 232 |
+
"""Adds a __slots__ entry to dictionary, containing the names of all valid
|
| 233 |
+
attributes for this message type.
|
| 234 |
+
|
| 235 |
+
Args:
|
| 236 |
+
message_descriptor: A Descriptor instance describing this message type.
|
| 237 |
+
dictionary: Class dictionary to which we'll add a '__slots__' entry.
|
| 238 |
+
"""
|
| 239 |
+
dictionary['__slots__'] = ['_cached_byte_size',
|
| 240 |
+
'_cached_byte_size_dirty',
|
| 241 |
+
'_fields',
|
| 242 |
+
'_unknown_fields',
|
| 243 |
+
'_unknown_field_set',
|
| 244 |
+
'_is_present_in_parent',
|
| 245 |
+
'_listener',
|
| 246 |
+
'_listener_for_children',
|
| 247 |
+
'__weakref__',
|
| 248 |
+
'_oneofs']
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
def _IsMessageSetExtension(field):
|
| 252 |
+
return (field.is_extension and
|
| 253 |
+
field.containing_type.has_options and
|
| 254 |
+
field.containing_type.GetOptions().message_set_wire_format and
|
| 255 |
+
field.type == _FieldDescriptor.TYPE_MESSAGE and
|
| 256 |
+
field.label == _FieldDescriptor.LABEL_OPTIONAL)
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
def _IsMapField(field):
|
| 260 |
+
return (field.type == _FieldDescriptor.TYPE_MESSAGE and
|
| 261 |
+
field.message_type._is_map_entry)
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
def _IsMessageMapField(field):
|
| 265 |
+
value_type = field.message_type.fields_by_name['value']
|
| 266 |
+
return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE
|
| 267 |
+
|
| 268 |
+
def _AttachFieldHelpers(cls, field_descriptor):
|
| 269 |
+
is_repeated = field_descriptor.label == _FieldDescriptor.LABEL_REPEATED
|
| 270 |
+
field_descriptor._default_constructor = _DefaultValueConstructorForField(
|
| 271 |
+
field_descriptor
|
| 272 |
+
)
|
| 273 |
+
|
| 274 |
+
def AddFieldByTag(wiretype, is_packed):
|
| 275 |
+
tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype)
|
| 276 |
+
cls._fields_by_tag[tag_bytes] = (field_descriptor, is_packed)
|
| 277 |
+
|
| 278 |
+
AddFieldByTag(
|
| 279 |
+
type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type], False
|
| 280 |
+
)
|
| 281 |
+
|
| 282 |
+
if is_repeated and wire_format.IsTypePackable(field_descriptor.type):
|
| 283 |
+
# To support wire compatibility of adding packed = true, add a decoder for
|
| 284 |
+
# packed values regardless of the field's options.
|
| 285 |
+
AddFieldByTag(wire_format.WIRETYPE_LENGTH_DELIMITED, True)
|
| 286 |
+
|
| 287 |
+
|
| 288 |
+
def _MaybeAddEncoder(cls, field_descriptor):
|
| 289 |
+
if hasattr(field_descriptor, '_encoder'):
|
| 290 |
+
return
|
| 291 |
+
is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED)
|
| 292 |
+
is_map_entry = _IsMapField(field_descriptor)
|
| 293 |
+
is_packed = field_descriptor.is_packed
|
| 294 |
+
|
| 295 |
+
if is_map_entry:
|
| 296 |
+
field_encoder = encoder.MapEncoder(field_descriptor)
|
| 297 |
+
sizer = encoder.MapSizer(field_descriptor,
|
| 298 |
+
_IsMessageMapField(field_descriptor))
|
| 299 |
+
elif _IsMessageSetExtension(field_descriptor):
|
| 300 |
+
field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number)
|
| 301 |
+
sizer = encoder.MessageSetItemSizer(field_descriptor.number)
|
| 302 |
+
else:
|
| 303 |
+
field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type](
|
| 304 |
+
field_descriptor.number, is_repeated, is_packed)
|
| 305 |
+
sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type](
|
| 306 |
+
field_descriptor.number, is_repeated, is_packed)
|
| 307 |
+
|
| 308 |
+
field_descriptor._sizer = sizer
|
| 309 |
+
field_descriptor._encoder = field_encoder
|
| 310 |
+
|
| 311 |
+
|
| 312 |
+
def _MaybeAddDecoder(cls, field_descriptor):
  """Lazily attaches a wire-format decoder table to the field descriptor.

  Populates ``field_descriptor._decoders``, a dict keyed by is_packed (bool),
  so that both packed and unpacked encodings of a packable repeated field can
  be parsed.  No-op if the decoders were already attached.

  Args:
    cls: The message class being constructed (unused here, kept for symmetry
      with _MaybeAddEncoder).
    field_descriptor: The FieldDescriptor to augment.
  """
  if hasattr(field_descriptor, '_decoders'):
    return

  is_repeated = field_descriptor.label == _FieldDescriptor.LABEL_REPEATED
  is_map_entry = _IsMapField(field_descriptor)
  helper_decoders = {}

  def AddDecoder(is_packed):
    """Builds one decoder for this field and records it under is_packed."""
    decode_type = field_descriptor.type
    # Open enums decode as int32 so unknown enum values are preserved.
    if (decode_type == _FieldDescriptor.TYPE_ENUM and
        not field_descriptor.enum_type.is_closed):
      decode_type = _FieldDescriptor.TYPE_INT32

    if is_map_entry:
      is_message_map = _IsMessageMapField(field_descriptor)

      field_decoder = decoder.MapDecoder(
          field_descriptor, _GetInitializeDefaultForMap(field_descriptor),
          is_message_map)
    elif decode_type == _FieldDescriptor.TYPE_STRING:
      field_decoder = decoder.StringDecoder(
          field_descriptor.number, is_repeated, is_packed,
          field_descriptor, field_descriptor._default_constructor,
          not field_descriptor.has_presence)
    elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
      field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
          field_descriptor.number, is_repeated, is_packed,
          field_descriptor, field_descriptor._default_constructor)
    else:
      field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
          field_descriptor.number, is_repeated, is_packed,
          # pylint: disable=protected-access
          field_descriptor, field_descriptor._default_constructor,
          not field_descriptor.has_presence)

    helper_decoders[is_packed] = field_decoder

  AddDecoder(False)

  if is_repeated and wire_format.IsTypePackable(field_descriptor.type):
    # To support wire compatibility of adding packed = true, add a decoder for
    # packed values regardless of the field's options.
    AddDecoder(True)

  field_descriptor._decoders = helper_decoders
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
def _AddClassAttributesForNestedExtensions(descriptor, dictionary):
|
| 365 |
+
extensions = descriptor.extensions_by_name
|
| 366 |
+
for extension_name, extension_field in extensions.items():
|
| 367 |
+
assert extension_name not in dictionary
|
| 368 |
+
dictionary[extension_name] = extension_field
|
| 369 |
+
|
| 370 |
+
|
| 371 |
+
def _AddEnumValues(descriptor, cls):
|
| 372 |
+
"""Sets class-level attributes for all enum fields defined in this message.
|
| 373 |
+
|
| 374 |
+
Also exporting a class-level object that can name enum values.
|
| 375 |
+
|
| 376 |
+
Args:
|
| 377 |
+
descriptor: Descriptor object for this message type.
|
| 378 |
+
cls: Class we're constructing for this message type.
|
| 379 |
+
"""
|
| 380 |
+
for enum_type in descriptor.enum_types:
|
| 381 |
+
setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type))
|
| 382 |
+
for enum_value in enum_type.values:
|
| 383 |
+
setattr(cls, enum_value.name, enum_value.number)
|
| 384 |
+
|
| 385 |
+
|
| 386 |
+
def _GetInitializeDefaultForMap(field):
  """Returns a constructor for this map field's default container.

  The returned callable takes the owning message instance and builds either a
  MessageMap or a ScalarMap wired to that message's child listener.

  Args:
    field: FieldDescriptor for a map field (a repeated map-entry message).

  Raises:
    ValueError: If the field is not repeated (map entries must be).
  """
  if field.label != _FieldDescriptor.LABEL_REPEATED:
    raise ValueError('map_entry set on non-repeated field %s' % (
        field.name))
  # Map entries are synthetic messages with exactly 'key' and 'value' fields.
  fields_by_name = field.message_type.fields_by_name
  key_checker = type_checkers.GetTypeChecker(fields_by_name['key'])

  value_field = fields_by_name['value']
  if _IsMessageMapField(field):
    def MakeMessageMapDefault(message):
      # Values are sub-messages; the container needs the value message type.
      return containers.MessageMap(
          message._listener_for_children, value_field.message_type, key_checker,
          field.message_type)
    return MakeMessageMapDefault
  else:
    value_checker = type_checkers.GetTypeChecker(value_field)
    def MakePrimitiveMapDefault(message):
      # Scalar values are validated by value_checker on insertion.
      return containers.ScalarMap(
          message._listener_for_children, key_checker, value_checker,
          field.message_type)
    return MakePrimitiveMapDefault
|
| 407 |
+
|
| 408 |
+
def _DefaultValueConstructorForField(field):
  """Returns a function which returns a default value for a field.

  Args:
    field: FieldDescriptor object for this field.

  The returned function has one argument:
    message: Message instance containing this field, or a weakref proxy
      of same.

  That function in turn returns a default value for this field.  The default
    value may refer back to |message| via a weak reference.
  """

  if _IsMapField(field):
    return _GetInitializeDefaultForMap(field)

  if field.label == _FieldDescriptor.LABEL_REPEATED:
    if field.has_default_value and field.default_value != []:
      raise ValueError('Repeated field default value not empty list: %s' % (
          field.default_value))
    if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
      # We can't look at _concrete_class yet since it might not have
      # been set.  (Depends on order in which we initialize the classes.)
      # The closure therefore captures the descriptor, not the class.
      def MakeRepeatedMessageDefault(message):
        return containers.RepeatedCompositeFieldContainer(
            message._listener_for_children, field.message_type)
      return MakeRepeatedMessageDefault
    else:
      type_checker = type_checkers.GetTypeChecker(field)
      def MakeRepeatedScalarDefault(message):
        return containers.RepeatedScalarFieldContainer(
            message._listener_for_children, type_checker)
      return MakeRepeatedScalarDefault

  if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
    message_type = field.message_type
    def MakeSubMessageDefault(message):
      # _concrete_class may not yet be initialized.
      if not hasattr(message_type, '_concrete_class'):
        from google.protobuf import message_factory
        message_factory.GetMessageClass(message_type)
      result = message_type._concrete_class()
      # Oneof members get a listener that also updates oneof bookkeeping.
      result._SetListener(
          _OneofListener(message, field)
          if field.containing_oneof is not None
          else message._listener_for_children)
      return result
    return MakeSubMessageDefault

  def MakeScalarDefault(message):
    # TODO: This may be broken since there may not be
    # default_value.  Combine with has_default_value somehow.
    return field.default_value
  return MakeScalarDefault
|
| 464 |
+
|
| 465 |
+
|
| 466 |
+
def _ReraiseTypeErrorWithFieldName(message_name, field_name):
|
| 467 |
+
"""Re-raise the currently-handled TypeError with the field name added."""
|
| 468 |
+
exc = sys.exc_info()[1]
|
| 469 |
+
if len(exc.args) == 1 and type(exc) is TypeError:
|
| 470 |
+
# simple TypeError; add field name to exception message
|
| 471 |
+
exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name))
|
| 472 |
+
|
| 473 |
+
# re-raise possibly-amended exception with original traceback:
|
| 474 |
+
raise exc.with_traceback(sys.exc_info()[2])
|
| 475 |
+
|
| 476 |
+
|
| 477 |
+
def _AddInitMethod(message_descriptor, cls):
  """Adds an __init__ method to cls."""

  def _GetIntegerEnumValue(enum_type, value):
    """Convert a string or integer enum value to an integer.

    If the value is a string, it is converted to the enum value in
    enum_type with the same name.  If the value is not a string, it's
    returned as-is.  (No conversion or bounds-checking is done.)
    """
    if isinstance(value, str):
      try:
        return enum_type.values_by_name[value].number
      except KeyError:
        raise ValueError('Enum type %s: unknown label "%s"' % (
            enum_type.full_name, value))
    return value

  def init(self, **kwargs):
    # Byte-size cache starts clean only when no kwargs mutate the message.
    self._cached_byte_size = 0
    self._cached_byte_size_dirty = len(kwargs) > 0
    self._fields = {}
    # Contains a mapping from oneof field descriptors to the descriptor
    # of the currently set field in that oneof field.
    self._oneofs = {}

    # _unknown_fields is () when empty for efficiency, and will be turned into
    # a list if fields are added.
    self._unknown_fields = ()
    # _unknown_field_set is None when empty for efficiency, and will be
    # turned into UnknownFieldSet struct if fields are added.
    self._unknown_field_set = None      # pylint: disable=protected-access
    self._is_present_in_parent = False
    self._listener = message_listener_mod.NullMessageListener()
    self._listener_for_children = _Listener(self)
    for field_name, field_value in kwargs.items():
      field = _GetFieldByName(message_descriptor, field_name)
      if field is None:
        raise TypeError('%s() got an unexpected keyword argument "%s"' %
                        (message_descriptor.name, field_name))
      if field_value is None:
        # field=None is the same as no field at all.
        continue
      if field.label == _FieldDescriptor.LABEL_REPEATED:
        copy = field._default_constructor(self)
        if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:  # Composite
          if _IsMapField(field):
            if _IsMessageMapField(field):
              # Message-valued maps: merge each value into a fresh entry.
              for key in field_value:
                copy[key].MergeFrom(field_value[key])
            else:
              copy.update(field_value)
          else:
            # Repeated sub-messages accept either dicts (kwargs for the
            # sub-message) or message instances to merge.
            for val in field_value:
              if isinstance(val, dict):
                copy.add(**val)
              else:
                copy.add().MergeFrom(val)
        else:  # Scalar
          if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
            field_value = [_GetIntegerEnumValue(field.enum_type, val)
                           for val in field_value]
          copy.extend(field_value)
        self._fields[field] = copy
      elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        copy = field._default_constructor(self)
        new_val = field_value
        if isinstance(field_value, dict):
          # A dict initializes the sub-message via its own constructor.
          new_val = field.message_type._concrete_class(**field_value)
        try:
          copy.MergeFrom(new_val)
        except TypeError:
          _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
        self._fields[field] = copy
      else:
        if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
          field_value = _GetIntegerEnumValue(field.enum_type, field_value)
        try:
          # Route through the property so type-checking and presence
          # bookkeeping happen exactly as for a normal assignment.
          setattr(self, field_name, field_value)
        except TypeError:
          _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)

  init.__module__ = None
  init.__doc__ = None
  cls.__init__ = init
|
| 562 |
+
|
| 563 |
+
|
| 564 |
+
def _GetFieldByName(message_descriptor, field_name):
|
| 565 |
+
"""Returns a field descriptor by field name.
|
| 566 |
+
|
| 567 |
+
Args:
|
| 568 |
+
message_descriptor: A Descriptor describing all fields in message.
|
| 569 |
+
field_name: The name of the field to retrieve.
|
| 570 |
+
Returns:
|
| 571 |
+
The field descriptor associated with the field name.
|
| 572 |
+
"""
|
| 573 |
+
try:
|
| 574 |
+
return message_descriptor.fields_by_name[field_name]
|
| 575 |
+
except KeyError:
|
| 576 |
+
raise ValueError('Protocol message %s has no "%s" field.' %
|
| 577 |
+
(message_descriptor.name, field_name))
|
| 578 |
+
|
| 579 |
+
|
| 580 |
+
def _AddPropertiesForFields(descriptor, cls):
|
| 581 |
+
"""Adds properties for all fields in this protocol message type."""
|
| 582 |
+
for field in descriptor.fields:
|
| 583 |
+
_AddPropertiesForField(field, cls)
|
| 584 |
+
|
| 585 |
+
if descriptor.is_extendable:
|
| 586 |
+
# _ExtensionDict is just an adaptor with no state so we allocate a new one
|
| 587 |
+
# every time it is accessed.
|
| 588 |
+
cls.Extensions = property(lambda self: _ExtensionDict(self))
|
| 589 |
+
|
| 590 |
+
|
| 591 |
+
def _AddPropertiesForField(field, cls):
  """Adds a public property for a protocol message field.

  Clients can use the property to get and (for non-repeated scalar fields
  only) directly set the value of the field.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  # Catch it if we add other types that we should
  # handle specially here.
  assert _FieldDescriptor.MAX_CPPTYPE == 10

  # Every field also exposes its tag number as FOO_FIELD_NUMBER.
  setattr(cls, field.name.upper() + '_FIELD_NUMBER', field.number)

  if field.label == _FieldDescriptor.LABEL_REPEATED:
    _AddPropertiesForRepeatedField(field, cls)
  elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
    _AddPropertiesForNonRepeatedCompositeField(field, cls)
  else:
    _AddPropertiesForNonRepeatedScalarField(field, cls)
|
| 614 |
+
|
| 615 |
+
|
| 616 |
+
class _FieldProperty(property):
|
| 617 |
+
__slots__ = ('DESCRIPTOR',)
|
| 618 |
+
|
| 619 |
+
def __init__(self, descriptor, getter, setter, doc):
|
| 620 |
+
property.__init__(self, getter, setter, doc=doc)
|
| 621 |
+
self.DESCRIPTOR = descriptor
|
| 622 |
+
|
| 623 |
+
|
| 624 |
+
def _AddPropertiesForRepeatedField(field, cls):
  """Adds a public property for a "repeated" protocol message field.  Clients
  can use this property to get the value of the field, which will be either a
  RepeatedScalarFieldContainer or RepeatedCompositeFieldContainer (see
  below).

  Note that when clients add values to these containers, we perform
  type-checking in the case of repeated scalar fields, and we also set any
  necessary "has" bits as a side-effect.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  proto_field_name = field.name
  property_name = _PropertyName(proto_field_name)

  def getter(self):
    field_value = self._fields.get(field)
    if field_value is None:
      # Construct a new object to represent this field.
      field_value = field._default_constructor(self)

      # Atomically check if another thread has preempted us and, if not, swap
      # in the new object we just created.  If someone has preempted us, we
      # take that object and discard ours.
      # WARNING:  We are relying on setdefault() being atomic.  This is true
      #   in CPython but we haven't investigated others.  This warning appears
      #   in several other locations in this file.
      field_value = self._fields.setdefault(field, field_value)
    return field_value
  # Hide module/doc so generated accessors don't leak implementation details.
  getter.__module__ = None
  getter.__doc__ = 'Getter for %s.' % proto_field_name

  # We define a setter just so we can throw an exception with a more
  # helpful error message.
  def setter(self, new_value):
    raise AttributeError('Assignment not allowed to repeated field '
                         '"%s" in protocol message object.' % proto_field_name)

  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
  setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
|
| 666 |
+
|
| 667 |
+
|
| 668 |
+
def _AddPropertiesForNonRepeatedScalarField(field, cls):
  """Adds a public property for a nonrepeated, scalar protocol message field.
  Clients can use this property to get and directly set the value of the field.
  Note that when the client sets the value of a field by using this property,
  all necessary "has" bits are set as a side-effect, and we also perform
  type-checking.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  proto_field_name = field.name
  property_name = _PropertyName(proto_field_name)
  # Hoisted out of the accessors: both are invoked on every get/set.
  type_checker = type_checkers.GetTypeChecker(field)
  default_value = field.default_value

  def getter(self):
    # TODO: This may be broken since there may not be
    # default_value.  Combine with has_default_value somehow.
    return self._fields.get(field, default_value)
  getter.__module__ = None
  getter.__doc__ = 'Getter for %s.' % proto_field_name

  def field_setter(self, new_value):
    # pylint: disable=protected-access
    # Testing the value for truthiness captures all of the proto3 defaults
    # (0, 0.0, enum 0, and False).
    try:
      new_value = type_checker.CheckValue(new_value)
    except TypeError as e:
      raise TypeError(
          'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e))
    if not field.has_presence and not new_value:
      # Fields without presence semantics store nothing for default values.
      self._fields.pop(field, None)
    else:
      self._fields[field] = new_value
    # Check _cached_byte_size_dirty inline to improve performance, since scalar
    # setters are called frequently.
    if not self._cached_byte_size_dirty:
      self._Modified()

  if field.containing_oneof:
    # Oneof members must additionally record themselves as the active field.
    def setter(self, new_value):
      field_setter(self, new_value)
      self._UpdateOneofState(field)
  else:
    setter = field_setter

  setter.__module__ = None
  setter.__doc__ = 'Setter for %s.' % proto_field_name

  # Add a property to encapsulate the getter/setter.
  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
  setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
|
| 722 |
+
|
| 723 |
+
|
| 724 |
+
def _AddPropertiesForNonRepeatedCompositeField(field, cls):
  """Adds a public property for a nonrepeated, composite protocol message field.
  A composite field is a "group" or "message" field.

  Clients can use this property to get the value of the field, but cannot
  assign to the property directly.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  # TODO: Remove duplication with similar method
  # for non-repeated scalars.
  proto_field_name = field.name
  property_name = _PropertyName(proto_field_name)

  def getter(self):
    field_value = self._fields.get(field)
    if field_value is None:
      # Construct a new object to represent this field.
      field_value = field._default_constructor(self)

      # Atomically check if another thread has preempted us and, if not, swap
      # in the new object we just created.  If someone has preempted us, we
      # take that object and discard ours.
      # WARNING:  We are relying on setdefault() being atomic.  This is true
      #   in CPython but we haven't investigated others.  This warning appears
      #   in several other locations in this file.
      field_value = self._fields.setdefault(field, field_value)
    return field_value
  # Hide module/doc so generated accessors don't leak implementation details.
  getter.__module__ = None
  getter.__doc__ = 'Getter for %s.' % proto_field_name

  # We define a setter just so we can throw an exception with a more
  # helpful error message.
  def setter(self, new_value):
    raise AttributeError('Assignment not allowed to composite field '
                         '"%s" in protocol message object.' % proto_field_name)

  # Add a property to encapsulate the getter.
  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
  setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
|
| 766 |
+
|
| 767 |
+
|
| 768 |
+
def _AddPropertiesForExtensions(descriptor, cls):
|
| 769 |
+
"""Adds properties for all fields in this protocol message type."""
|
| 770 |
+
extensions = descriptor.extensions_by_name
|
| 771 |
+
for extension_name, extension_field in extensions.items():
|
| 772 |
+
constant_name = extension_name.upper() + '_FIELD_NUMBER'
|
| 773 |
+
setattr(cls, constant_name, extension_field.number)
|
| 774 |
+
|
| 775 |
+
# TODO: Migrate all users of these attributes to functions like
|
| 776 |
+
# pool.FindExtensionByNumber(descriptor).
|
| 777 |
+
if descriptor.file is not None:
|
| 778 |
+
# TODO: Use cls.MESSAGE_FACTORY.pool when available.
|
| 779 |
+
pool = descriptor.file.pool
|
| 780 |
+
|
| 781 |
+
def _AddStaticMethods(cls):
|
| 782 |
+
# TODO: This probably needs to be thread-safe(?)
|
| 783 |
+
def RegisterExtension(field_descriptor):
|
| 784 |
+
field_descriptor.containing_type = cls.DESCRIPTOR
|
| 785 |
+
# TODO: Use cls.MESSAGE_FACTORY.pool when available.
|
| 786 |
+
# pylint: disable=protected-access
|
| 787 |
+
cls.DESCRIPTOR.file.pool._AddExtensionDescriptor(field_descriptor)
|
| 788 |
+
_AttachFieldHelpers(cls, field_descriptor)
|
| 789 |
+
cls.RegisterExtension = staticmethod(RegisterExtension)
|
| 790 |
+
|
| 791 |
+
def FromString(s):
|
| 792 |
+
message = cls()
|
| 793 |
+
message.MergeFromString(s)
|
| 794 |
+
return message
|
| 795 |
+
cls.FromString = staticmethod(FromString)
|
| 796 |
+
|
| 797 |
+
|
| 798 |
+
def _IsPresent(item):
  """Returns True if a (FieldDescriptor, value) pair from _fields should be
  included in the list produced by ListFields()."""
  field, value = item
  if field.label == _FieldDescriptor.LABEL_REPEATED:
    # Repeated fields count as present only when non-empty.
    return bool(value)
  if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
    # Sub-messages track presence explicitly.
    return value._is_present_in_parent
  # Any stored scalar is present by definition.
  return True
|
| 808 |
+
|
| 809 |
+
|
| 810 |
+
def _AddListFieldsMethod(message_descriptor, cls):
|
| 811 |
+
"""Helper for _AddMessageMethods()."""
|
| 812 |
+
|
| 813 |
+
def ListFields(self):
|
| 814 |
+
all_fields = [item for item in self._fields.items() if _IsPresent(item)]
|
| 815 |
+
all_fields.sort(key = lambda item: item[0].number)
|
| 816 |
+
return all_fields
|
| 817 |
+
|
| 818 |
+
cls.ListFields = ListFields
|
| 819 |
+
|
| 820 |
+
|
| 821 |
+
def _AddHasFieldMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs HasField().

  HasField() is only valid for singular fields that carry presence and for
  oneof names; the valid names are precomputed here once per class.
  """

  hassable_fields = {}
  for field in message_descriptor.fields:
    if field.label == _FieldDescriptor.LABEL_REPEATED:
      continue
    # For proto3, only submessages and fields inside a oneof have presence.
    if not field.has_presence:
      continue
    hassable_fields[field.name] = field

  # Has methods are supported for oneof descriptors.
  for oneof in message_descriptor.oneofs:
    hassable_fields[oneof.name] = oneof

  def HasField(self, field_name):
    try:
      field = hassable_fields[field_name]
    except KeyError as exc:
      raise ValueError('Protocol message %s has no non-repeated field "%s" '
                       'nor has presence is not available for this field.' % (
                           message_descriptor.full_name, field_name)) from exc

    if isinstance(field, descriptor_mod.OneofDescriptor):
      # For a oneof name, presence means "some member field is set and
      # that member itself is present" -- recurse on the active member.
      try:
        return HasField(self, self._oneofs[field].name)
      except KeyError:
        return False
    else:
      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        value = self._fields.get(field)
        return value is not None and value._is_present_in_parent
      else:
        return field in self._fields

  cls.HasField = HasField
|
| 858 |
+
|
| 859 |
+
|
| 860 |
+
def _AddClearFieldMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs ClearField().

  ClearField() accepts either a field name or a oneof name; clearing a oneof
  clears whichever member is currently set (or does nothing if none is).
  """
  def ClearField(self, field_name):
    try:
      field = message_descriptor.fields_by_name[field_name]
    except KeyError:
      try:
        field = message_descriptor.oneofs_by_name[field_name]
        if field in self._oneofs:
          # Redirect to the member field currently set in this oneof.
          field = self._oneofs[field]
        else:
          return
      except KeyError:
        raise ValueError('Protocol message %s has no "%s" field.' %
                         (message_descriptor.name, field_name))

    if field in self._fields:
      # To match the C++ implementation, we need to invalidate iterators
      # for map fields when ClearField() happens.
      if hasattr(self._fields[field], 'InvalidateIterators'):
        self._fields[field].InvalidateIterators()

      # Note:  If the field is a sub-message, its listener will still point
      #   at us.  That's fine, because the worst than can happen is that it
      #   will call _Modified() and invalidate our byte size.  Big deal.
      del self._fields[field]

      # Also clear the oneof bookkeeping if this field was the active member.
      if self._oneofs.get(field.containing_oneof, None) is field:
        del self._oneofs[field.containing_oneof]

    # Always call _Modified() -- even if nothing was changed, this is
    # a mutating method, and thus calling it should cause the field to become
    # present in the parent message.
    self._Modified()

  cls.ClearField = ClearField
|
| 896 |
+
|
| 897 |
+
|
| 898 |
+
def _AddClearExtensionMethod(cls):
|
| 899 |
+
"""Helper for _AddMessageMethods()."""
|
| 900 |
+
def ClearExtension(self, field_descriptor):
|
| 901 |
+
extension_dict._VerifyExtensionHandle(self, field_descriptor)
|
| 902 |
+
|
| 903 |
+
# Similar to ClearField(), above.
|
| 904 |
+
if field_descriptor in self._fields:
|
| 905 |
+
del self._fields[field_descriptor]
|
| 906 |
+
self._Modified()
|
| 907 |
+
cls.ClearExtension = ClearExtension
|
| 908 |
+
|
| 909 |
+
|
| 910 |
+
def _AddHasExtensionMethod(cls):
|
| 911 |
+
"""Helper for _AddMessageMethods()."""
|
| 912 |
+
def HasExtension(self, field_descriptor):
|
| 913 |
+
extension_dict._VerifyExtensionHandle(self, field_descriptor)
|
| 914 |
+
if field_descriptor.label == _FieldDescriptor.LABEL_REPEATED:
|
| 915 |
+
raise KeyError('"%s" is repeated.' % field_descriptor.full_name)
|
| 916 |
+
|
| 917 |
+
if field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
|
| 918 |
+
value = self._fields.get(field_descriptor)
|
| 919 |
+
return value is not None and value._is_present_in_parent
|
| 920 |
+
else:
|
| 921 |
+
return field_descriptor in self._fields
|
| 922 |
+
cls.HasExtension = HasExtension
|
| 923 |
+
|
| 924 |
+
def _InternalUnpackAny(msg):
  """Unpacks Any message and returns the unpacked message.

  This internal method is different from public Any Unpack method which takes
  the target message as argument. _InternalUnpackAny method does not have
  target message type and need to find the message type in descriptor pool.

  Args:
    msg: An Any message to be unpacked.

  Returns:
    The unpacked message, or None if the type URL is empty or the type is
    not found in the default pool.
  """
  # TODO: Don't use the factory of generated messages.
  # To make Any work with custom factories, use the message factory of the
  # parent message.
  # pylint: disable=g-import-not-at-top
  from google.protobuf import symbol_database
  factory = symbol_database.Default()

  type_url = msg.type_url

  if not type_url:
    return None

  # TODO: For now we just strip the hostname.  Better logic will be
  # required.
  type_name = type_url.split('/')[-1]
  descriptor = factory.pool.FindMessageTypeByName(type_name)

  if descriptor is None:
    return None

  # Instantiate the concrete class for the packed type and parse the payload.
  message_class = factory.GetPrototype(descriptor)
  message = message_class()

  message.ParseFromString(msg.value)
  return message
|
| 962 |
+
|
| 963 |
+
|
| 964 |
+
def _AddEqualsMethod(message_descriptor, cls):
|
| 965 |
+
"""Helper for _AddMessageMethods()."""
|
| 966 |
+
def __eq__(self, other):
|
| 967 |
+
if (not isinstance(other, message_mod.Message) or
|
| 968 |
+
other.DESCRIPTOR != self.DESCRIPTOR):
|
| 969 |
+
return NotImplemented
|
| 970 |
+
|
| 971 |
+
if self is other:
|
| 972 |
+
return True
|
| 973 |
+
|
| 974 |
+
if self.DESCRIPTOR.full_name == _AnyFullTypeName:
|
| 975 |
+
any_a = _InternalUnpackAny(self)
|
| 976 |
+
any_b = _InternalUnpackAny(other)
|
| 977 |
+
if any_a and any_b:
|
| 978 |
+
return any_a == any_b
|
| 979 |
+
|
| 980 |
+
if not self.ListFields() == other.ListFields():
|
| 981 |
+
return False
|
| 982 |
+
|
| 983 |
+
# TODO: Fix UnknownFieldSet to consider MessageSet extensions,
|
| 984 |
+
# then use it for the comparison.
|
| 985 |
+
unknown_fields = list(self._unknown_fields)
|
| 986 |
+
unknown_fields.sort()
|
| 987 |
+
other_unknown_fields = list(other._unknown_fields)
|
| 988 |
+
other_unknown_fields.sort()
|
| 989 |
+
return unknown_fields == other_unknown_fields
|
| 990 |
+
|
| 991 |
+
cls.__eq__ = __eq__
|
| 992 |
+
|
| 993 |
+
|
| 994 |
+
def _AddStrMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs a text-format __str__()."""
  def __str__(self):
    # Human-readable canonical text-format rendering of the message.
    return text_format.MessageToString(self)
  cls.__str__ = __str__
|
| 999 |
+
|
| 1000 |
+
|
| 1001 |
+
def _AddReprMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs __repr__().

  Uses the same text-format rendering as __str__.
  """
  def __repr__(self):
    return text_format.MessageToString(self)
  cls.__repr__ = __repr__
|
| 1006 |
+
|
| 1007 |
+
|
| 1008 |
+
def _AddUnicodeMethod(unused_message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs a legacy __unicode__()."""

  def __unicode__(self):
    # Render as UTF-8 text format, then decode back to a text string.
    return text_format.MessageToString(self, as_utf8=True).decode('utf-8')
  cls.__unicode__ = __unicode__
|
| 1014 |
+
|
| 1015 |
+
|
| 1016 |
+
def _BytesForNonRepeatedElement(value, field_number, field_type):
  """Returns the number of bytes needed to serialize a non-repeated element.
  The returned byte count includes space for tag information and any
  other additional space associated with serializing value.

  Args:
    value: Value we're serializing.
    field_number: Field number of this value.  (Since the field number
      is stored as part of a varint-encoded tag, this has an impact
      on the total bytes required to serialize the value).
    field_type: The type of the field.  One of the TYPE_* constants
      within FieldDescriptor.

  Raises:
    message_mod.EncodeError: If field_type is not a known field type.
  """
  try:
    fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type]
  except KeyError:
    # Only an unknown field *type* is an encoding error; exceptions raised
    # by the size function itself propagate unchanged.
    raise message_mod.EncodeError('Unrecognized field type: %d' % field_type)
  return fn(field_number, value)
|
| 1034 |
+
|
| 1035 |
+
|
| 1036 |
+
def _AddByteSizeMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs ByteSize() on cls."""

  def ByteSize(self):
    # The serialized size is cached; recompute only after a modification
    # (see _Modified(), which sets the dirty bit).
    if not self._cached_byte_size_dirty:
      return self._cached_byte_size

    size = 0
    descriptor = self.DESCRIPTOR
    if descriptor._is_map_entry:
      # Fields of map entry should always be serialized.
      key_field = descriptor.fields_by_name['key']
      _MaybeAddEncoder(cls, key_field)
      size = key_field._sizer(self.key)
      value_field = descriptor.fields_by_name['value']
      _MaybeAddEncoder(cls, value_field)
      size += value_field._sizer(self.value)
    else:
      for field_descriptor, field_value in self.ListFields():
        _MaybeAddEncoder(cls, field_descriptor)
        size += field_descriptor._sizer(field_value)
      # Unknown fields are serialized verbatim, so their size is just the
      # raw byte length of tag plus payload.
      for tag_bytes, value_bytes in self._unknown_fields:
        size += len(tag_bytes) + len(value_bytes)

    self._cached_byte_size = size
    self._cached_byte_size_dirty = False
    # Children are considered clean as well until the next modification.
    self._listener_for_children.dirty = False
    return size

  cls.ByteSize = ByteSize
|
| 1067 |
+
|
| 1068 |
+
def _AddSerializeToStringMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs SerializeToString() on cls."""

  def SerializeToString(self, **kwargs):
    # Check if the message has all of its required fields set.  Unlike the
    # "partial" variant, this raises when required fields are missing.
    if not self.IsInitialized():
      raise message_mod.EncodeError(
          'Message %s is missing required fields: %s' % (
          self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors())))
    return self.SerializePartialToString(**kwargs)
  cls.SerializeToString = SerializeToString
|
| 1080 |
+
|
| 1081 |
+
def _AddSerializePartialToStringMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs SerializePartialToString()
  and the internal _InternalSerialize() on cls.

  "Partial" serialization does not verify that required fields are set.
  """

  def SerializePartialToString(self, **kwargs):
    out = BytesIO()
    self._InternalSerialize(out.write, **kwargs)
    return out.getvalue()
  cls.SerializePartialToString = SerializePartialToString

  def InternalSerialize(self, write_bytes, deterministic=None):
    # Resolve the deterministic flag once: default comes from the
    # implementation-wide setting; anything else is coerced to bool.
    if deterministic is None:
      deterministic = (
          api_implementation.IsPythonDefaultSerializationDeterministic())
    else:
      deterministic = bool(deterministic)

    descriptor = self.DESCRIPTOR
    if descriptor._is_map_entry:
      # Fields of map entry should always be serialized.
      key_field = descriptor.fields_by_name['key']
      _MaybeAddEncoder(cls, key_field)
      key_field._encoder(write_bytes, self.key, deterministic)
      value_field = descriptor.fields_by_name['value']
      _MaybeAddEncoder(cls, value_field)
      value_field._encoder(write_bytes, self.value, deterministic)
    else:
      for field_descriptor, field_value in self.ListFields():
        _MaybeAddEncoder(cls, field_descriptor)
        field_descriptor._encoder(write_bytes, field_value, deterministic)
      # Unknown fields are re-emitted verbatim after the known fields.
      for tag_bytes, value_bytes in self._unknown_fields:
        write_bytes(tag_bytes)
        write_bytes(value_bytes)
  cls._InternalSerialize = InternalSerialize
|
| 1115 |
+
|
| 1116 |
+
def _AddMergeFromStringMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs MergeFromString() and the
  internal wire-format parser _InternalParse() on cls."""
  def MergeFromString(self, serialized):
    serialized = memoryview(serialized)
    length = len(serialized)
    try:
      if self._InternalParse(serialized, 0, length) != length:
        # The only reason _InternalParse would return early is if it
        # encountered an end-group tag.
        raise message_mod.DecodeError('Unexpected end-group tag.')
    except (IndexError, TypeError):
      # Now ord(buf[p:p+1]) == ord('') gets TypeError.
      raise message_mod.DecodeError('Truncated message.')
    except struct.error as e:
      raise message_mod.DecodeError(e)
    return length   # Return this for legacy reasons.
  cls.MergeFromString = MergeFromString

  # Bind frequently used helpers to locals so the hot parse loop uses
  # fast local lookups.
  local_ReadTag = decoder.ReadTag
  local_SkipField = decoder.SkipField
  fields_by_tag = cls._fields_by_tag
  message_set_decoders_by_tag = cls._message_set_decoders_by_tag

  def InternalParse(self, buffer, pos, end):
    """Create a message from serialized bytes.

    Args:
      self: Message, instance of the proto message object.
      buffer: memoryview of the serialized data.
      pos: int, position to start in the serialized data.
      end: int, end position of the serialized data.

    Returns:
      Message object.
    """
    # Guard against internal misuse, since this function is called internally
    # quite extensively, and its easy to accidentally pass bytes.
    assert isinstance(buffer, memoryview)
    self._Modified()
    field_dict = self._fields
    # pylint: disable=protected-access
    unknown_field_set = self._unknown_field_set
    while pos != end:
      (tag_bytes, new_pos) = local_ReadTag(buffer, pos)
      # MessageSet extensions are dispatched through a dedicated table.
      field_decoder, field_des = message_set_decoders_by_tag.get(
          tag_bytes, (None, None)
      )
      if field_decoder:
        pos = field_decoder(buffer, new_pos, end, self, field_dict)
        continue
      field_des, is_packed = fields_by_tag.get(tag_bytes, (None, None))
      if field_des is None:
        # Unknown field: record it in both the legacy _unknown_fields list
        # and the structured UnknownFieldSet.
        if not self._unknown_fields:   # pylint: disable=protected-access
          self._unknown_fields = []   # pylint: disable=protected-access
        if unknown_field_set is None:
          # pylint: disable=protected-access
          self._unknown_field_set = containers.UnknownFieldSet()
          # pylint: disable=protected-access
          unknown_field_set = self._unknown_field_set
        # pylint: disable=protected-access
        (tag, _) = decoder._DecodeVarint(tag_bytes, 0)
        field_number, wire_type = wire_format.UnpackTag(tag)
        if field_number == 0:
          raise message_mod.DecodeError('Field number 0 is illegal.')
        # TODO: remove old_pos.
        old_pos = new_pos
        (data, new_pos) = decoder._DecodeUnknownField(
            buffer, new_pos, wire_type)  # pylint: disable=protected-access
        if new_pos == -1:
          # End-group tag: stop and report the position before the tag.
          return pos
        # pylint: disable=protected-access
        unknown_field_set._add(field_number, wire_type, data)
        # TODO: remove _unknown_fields.
        new_pos = local_SkipField(buffer, old_pos, end, tag_bytes)
        if new_pos == -1:
          return pos
        self._unknown_fields.append(
            (tag_bytes, buffer[old_pos:new_pos].tobytes()))
        pos = new_pos
      else:
        # Known field: decoders are created lazily per field.
        _MaybeAddDecoder(cls, field_des)
        field_decoder = field_des._decoders[is_packed]
        pos = field_decoder(buffer, new_pos, end, self, field_dict)
        if field_des.containing_oneof:
          self._UpdateOneofState(field_des)
    return pos
  cls._InternalParse = InternalParse
|
| 1204 |
+
|
| 1205 |
+
def _AddIsInitializedMethod(message_descriptor, cls):
  """Adds the IsInitialized and FindInitializationError methods to the
  protocol message class."""

  # Precomputed once per class: only LABEL_REQUIRED fields can be missing.
  required_fields = [field for field in message_descriptor.fields
                     if field.label == _FieldDescriptor.LABEL_REQUIRED]

  def IsInitialized(self, errors=None):
    """Checks if all required fields of a message are set.

    Args:
      errors:  A list which, if provided, will be populated with the field
               paths of all missing required fields.

    Returns:
      True iff the specified message has all required fields set.
    """

    # Performance is critical so we avoid HasField() and ListFields().

    for field in required_fields:
      if (field not in self._fields or
          (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and
           not self._fields[field]._is_present_in_parent)):
        if errors is not None:
          errors.extend(self.FindInitializationErrors())
        return False

    for field, value in list(self._fields.items()):  # dict can change size!
      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        if field.label == _FieldDescriptor.LABEL_REPEATED:
          if (field.message_type._is_map_entry):
            # Map values are checked via the map's own entry messages;
            # scalar map entries cannot be uninitialized.
            continue
          for element in value:
            if not element.IsInitialized():
              if errors is not None:
                errors.extend(self.FindInitializationErrors())
              return False
        elif value._is_present_in_parent and not value.IsInitialized():
          if errors is not None:
            errors.extend(self.FindInitializationErrors())
          return False

    return True

  cls.IsInitialized = IsInitialized

  def FindInitializationErrors(self):
    """Finds required fields which are not initialized.

    Returns:
      A list of strings.  Each string is a path to an uninitialized field from
      the top-level message, e.g. "foo.bar[5].baz".
    """

    errors = []  # simplify things

    for field in required_fields:
      if not self.HasField(field.name):
        errors.append(field.name)

    for field, value in self.ListFields():
      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        if field.is_extension:
          name = '(%s)' % field.full_name
        else:
          name = field.name

        if _IsMapField(field):
          if _IsMessageMapField(field):
            for key in value:
              element = value[key]
              prefix = '%s[%s].' % (name, key)
              sub_errors = element.FindInitializationErrors()
              errors += [prefix + error for error in sub_errors]
          else:
            # ScalarMaps can't have any initialization errors.
            pass
        elif field.label == _FieldDescriptor.LABEL_REPEATED:
          for i in range(len(value)):
            element = value[i]
            prefix = '%s[%d].' % (name, i)
            sub_errors = element.FindInitializationErrors()
            errors += [prefix + error for error in sub_errors]
        else:
          prefix = name + '.'
          sub_errors = value.FindInitializationErrors()
          errors += [prefix + error for error in sub_errors]

    return errors

  cls.FindInitializationErrors = FindInitializationErrors
|
| 1298 |
+
|
| 1299 |
+
def _FullyQualifiedClassName(klass):
  """Return klass's dotted name qualified by its module.

  Builtins (and classes with no module) are returned unqualified.
  """
  qualname = getattr(klass, '__qualname__', klass.__name__)
  module = klass.__module__
  if module in (None, 'builtins', '__builtin__'):
    return qualname
  return '{}.{}'.format(module, qualname)
|
| 1306 |
+
|
| 1307 |
+
def _AddMergeFromMethod(cls):
  """Helper for _AddMessageMethods(): installs MergeFrom() on cls."""
  LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED
  CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE

  def MergeFrom(self, msg):
    """Merges the contents of msg (an instance of the same class) into self."""
    if not isinstance(msg, cls):
      raise TypeError(
          'Parameter to MergeFrom() must be instance of same class: '
          'expected %s got %s.' % (_FullyQualifiedClassName(cls),
                                   _FullyQualifiedClassName(msg.__class__)))

    assert msg is not self
    self._Modified()

    fields = self._fields

    for field, value in msg._fields.items():
      if field.label == LABEL_REPEATED:
        # Repeated fields are concatenated/merged element-wise.
        field_value = fields.get(field)
        if field_value is None:
          # Construct a new object to represent this field.
          field_value = field._default_constructor(self)
          fields[field] = field_value
        field_value.MergeFrom(value)
      elif field.cpp_type == CPPTYPE_MESSAGE:
        # Singular submessages merge recursively, but only when the source
        # submessage is actually present.
        if value._is_present_in_parent:
          field_value = fields.get(field)
          if field_value is None:
            # Construct a new object to represent this field.
            field_value = field._default_constructor(self)
            fields[field] = field_value
          field_value.MergeFrom(value)
      else:
        # Scalar singular field: the source value simply overwrites ours.
        self._fields[field] = value
        if field.containing_oneof:
          self._UpdateOneofState(field)

    if msg._unknown_fields:
      if not self._unknown_fields:
        self._unknown_fields = []
      self._unknown_fields.extend(msg._unknown_fields)
      # pylint: disable=protected-access
      if self._unknown_field_set is None:
        self._unknown_field_set = containers.UnknownFieldSet()
      self._unknown_field_set._extend(msg._unknown_field_set)

  cls.MergeFrom = MergeFrom
|
| 1355 |
+
|
| 1356 |
+
def _AddWhichOneofMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs WhichOneof() on cls."""
  def WhichOneof(self, oneof_name):
    """Returns the name of the currently set field inside a oneof, or None."""
    field = message_descriptor.oneofs_by_name.get(oneof_name)
    if field is None:
      raise ValueError(
          'Protocol message has no oneof "%s" field.' % oneof_name)

    active = self._oneofs.get(field, None)
    # A recorded field only counts if it is still actually present.
    if active is None or not self.HasField(active.name):
      return None
    return active.name

  cls.WhichOneof = WhichOneof
|
| 1373 |
+
|
| 1374 |
+
def _Clear(self):
  """Clears all fields, oneof state and unknown fields of the message."""
  # Clear fields.
  self._fields = {}
  self._unknown_fields = ()
  # pylint: disable=protected-access
  if self._unknown_field_set is not None:
    self._unknown_field_set._clear()
    self._unknown_field_set = None

  self._oneofs = {}
  # Invalidate the cached byte size and notify our listener/parent.
  self._Modified()
|
| 1386 |
+
|
| 1387 |
+
def _UnknownFields(self):
  """Deprecated accessor returning this message's UnknownFieldSet."""
  warnings.warn(
      'message.UnknownFields() is deprecated. Please use the add one '
      'feature unknown_fields.UnknownFieldSet(message) in '
      'unknown_fields.py instead.'
  )
  # Lazily create the set so unmodified messages pay nothing.
  if self._unknown_field_set is None:  # pylint: disable=protected-access
    # pylint: disable=protected-access
    self._unknown_field_set = containers.UnknownFieldSet()
  return self._unknown_field_set  # pylint: disable=protected-access
|
| 1398 |
+
|
| 1399 |
+
def _DiscardUnknownFields(self):
  """Drops unknown fields from this message and, recursively, all submessages."""
  self._unknown_fields = []
  self._unknown_field_set = None   # pylint: disable=protected-access
  for field, value in self.ListFields():
    if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
      if _IsMapField(field):
        # Only message-valued maps can carry unknown fields.
        if _IsMessageMapField(field):
          for key in value:
            value[key].DiscardUnknownFields()
      elif field.label == _FieldDescriptor.LABEL_REPEATED:
        for sub_message in value:
          sub_message.DiscardUnknownFields()
      else:
        value.DiscardUnknownFields()
|
| 1414 |
+
|
| 1415 |
+
def _SetListener(self, listener):
  """Installs *listener*; passing None installs a no-op NullMessageListener."""
  if listener is not None:
    self._listener = listener
  else:
    self._listener = message_listener_mod.NullMessageListener()
|
| 1421 |
+
|
| 1422 |
+
def _AddMessageMethods(message_descriptor, cls):
  """Adds implementations of all Message methods to cls."""
  _AddListFieldsMethod(message_descriptor, cls)
  _AddHasFieldMethod(message_descriptor, cls)
  _AddClearFieldMethod(message_descriptor, cls)
  # Extension accessors only exist on extendable (proto2) messages.
  if message_descriptor.is_extendable:
    _AddClearExtensionMethod(cls)
    _AddHasExtensionMethod(cls)
  _AddEqualsMethod(message_descriptor, cls)
  _AddStrMethod(message_descriptor, cls)
  _AddReprMethod(message_descriptor, cls)
  _AddUnicodeMethod(message_descriptor, cls)
  _AddByteSizeMethod(message_descriptor, cls)
  _AddSerializeToStringMethod(message_descriptor, cls)
  _AddSerializePartialToStringMethod(message_descriptor, cls)
  _AddMergeFromStringMethod(message_descriptor, cls)
  _AddIsInitializedMethod(message_descriptor, cls)
  _AddMergeFromMethod(cls)
  _AddWhichOneofMethod(message_descriptor, cls)
  # Adds methods which do not depend on cls.
  cls.Clear = _Clear
  cls.UnknownFields = _UnknownFields
  cls.DiscardUnknownFields = _DiscardUnknownFields
  cls._SetListener = _SetListener
|
| 1447 |
+
|
| 1448 |
+
def _AddPrivateHelperMethods(message_descriptor, cls):
  """Adds implementation of private helper methods to cls."""

  def Modified(self):
    """Sets the _cached_byte_size_dirty bit to true,
    and propagates this to our listener iff this was a state change.
    """

    # Note:  Some callers check _cached_byte_size_dirty before calling
    #   _Modified() as an extra optimization.  So, if this method is ever
    #   changed such that it does stuff even when _cached_byte_size_dirty is
    #   already true, the callers need to be updated.
    if not self._cached_byte_size_dirty:
      self._cached_byte_size_dirty = True
      self._listener_for_children.dirty = True
      self._is_present_in_parent = True
      self._listener.Modified()

  def _UpdateOneofState(self, field):
    """Sets field as the active field in its containing oneof.

    Will also delete currently active field in the oneof, if it is different
    from the argument. Does not mark the message as modified.
    """
    # setdefault returns the previously active field, or registers `field`
    # if the oneof had none.
    other_field = self._oneofs.setdefault(field.containing_oneof, field)
    if other_field is not field:
      del self._fields[other_field]
      self._oneofs[field.containing_oneof] = field

  cls._Modified = Modified
  # SetInParent is the public alias for the same state transition.
  cls.SetInParent = Modified
  cls._UpdateOneofState = _UpdateOneofState
|
| 1481 |
+
|
| 1482 |
+
class _Listener(object):

  """MessageListener implementation that a parent message registers with its
  child message.

  In order to support semantics like:

    foo.bar.baz.moo = 23
    assert foo.HasField('bar')

  ...child objects must have back references to their parents.
  This helper class is at the heart of this support.
  """

  def __init__(self, parent_message):
    """Args:
      parent_message: The message whose _Modified() method we should call when
        we receive Modified() messages.
    """
    # This listener establishes a back reference from a child (contained) object
    # to its parent (containing) object.  We make this a weak reference to avoid
    # creating cyclic garbage when the client finishes with the 'parent' object
    # in the tree.
    if isinstance(parent_message, weakref.ProxyType):
      # Already a proxy; wrapping again would proxy the proxy.
      self._parent_message_weakref = parent_message
    else:
      self._parent_message_weakref = weakref.proxy(parent_message)

    # As an optimization, we also indicate directly on the listener whether
    # or not the parent message is dirty.  This way we can avoid traversing
    # up the tree in the common case.
    self.dirty = False

  def Modified(self):
    """Propagates a modification notification to the parent, at most once."""
    if self.dirty:
      return
    try:
      # Propagate the signal to our parents iff this is the first field set.
      self._parent_message_weakref._Modified()
    except ReferenceError:
      # We can get here if a client has kept a reference to a child object,
      # and is now setting a field on it, but the child's parent has been
      # garbage-collected.  This is not an error.
      pass
|
| 1527 |
+
|
| 1528 |
+
class _OneofListener(_Listener):
  """Special listener implementation for setting composite oneof fields."""

  def __init__(self, parent_message, field):
    """Args:
      parent_message: The message whose _Modified() method we should call when
        we receive Modified() messages.
      field: The descriptor of the field being set in the parent message.
    """
    super(_OneofListener, self).__init__(parent_message)
    self._field = field

  def Modified(self):
    """Also updates the state of the containing oneof in the parent message."""
    try:
      self._parent_message_weakref._UpdateOneofState(self._field)
      super(_OneofListener, self).Modified()
    except ReferenceError:
      # Parent has been garbage-collected; nothing left to update.
      pass
|
lib/python3.10/site-packages/google/protobuf/internal/well_known_types.py
ADDED
|
@@ -0,0 +1,567 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
"""Contains well known classes.
|
| 9 |
+
|
| 10 |
+
This files defines well known classes which need extra maintenance including:
|
| 11 |
+
- Any
|
| 12 |
+
- Duration
|
| 13 |
+
- FieldMask
|
| 14 |
+
- Struct
|
| 15 |
+
- Timestamp
|
| 16 |
+
"""
|
| 17 |
+
|
| 18 |
+
__author__ = 'jieluo@google.com (Jie Luo)'
|
| 19 |
+
|
| 20 |
+
import calendar
|
| 21 |
+
import collections.abc
|
| 22 |
+
import datetime
|
| 23 |
+
|
| 24 |
+
from google.protobuf.internal import field_mask
|
| 25 |
+
|
| 26 |
+
FieldMask = field_mask.FieldMask
|
| 27 |
+
|
| 28 |
+
_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
|
| 29 |
+
_NANOS_PER_SECOND = 1000000000
|
| 30 |
+
_NANOS_PER_MILLISECOND = 1000000
|
| 31 |
+
_NANOS_PER_MICROSECOND = 1000
|
| 32 |
+
_MILLIS_PER_SECOND = 1000
|
| 33 |
+
_MICROS_PER_SECOND = 1000000
|
| 34 |
+
_SECONDS_PER_DAY = 24 * 3600
|
| 35 |
+
_DURATION_SECONDS_MAX = 315576000000
|
| 36 |
+
|
| 37 |
+
_EPOCH_DATETIME_NAIVE = datetime.datetime(1970, 1, 1, tzinfo=None)
|
| 38 |
+
_EPOCH_DATETIME_AWARE = _EPOCH_DATETIME_NAIVE.replace(
|
| 39 |
+
tzinfo=datetime.timezone.utc
|
| 40 |
+
)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class Any(object):
  """Class for Any Message type."""

  __slots__ = ()

  def Pack(self, msg, type_url_prefix='type.googleapis.com/',
           deterministic=None):
    """Packs the specified message into current Any message."""
    full_name = msg.DESCRIPTOR.full_name
    if type_url_prefix.endswith('/'):
      self.type_url = type_url_prefix + full_name
    else:
      # Empty prefixes and prefixes without a trailing slash get one added.
      self.type_url = type_url_prefix + '/' + full_name
    self.value = msg.SerializeToString(deterministic=deterministic)

  def Unpack(self, msg):
    """Unpacks the current Any message into specified message."""
    if not self.Is(msg.DESCRIPTOR):
      return False
    msg.ParseFromString(self.value)
    return True

  def TypeName(self):
    """Returns the protobuf type name of the inner message."""
    # Only last part is to be used: b/25630112
    return self.type_url.rpartition('/')[2]

  def Is(self, descriptor):
    """Checks if this Any represents the given protobuf type."""
    if '/' not in self.type_url:
      return False
    return self.TypeName() == descriptor.full_name
|
| 74 |
+
|
| 75 |
+
class Timestamp(object):
|
| 76 |
+
"""Class for Timestamp message type."""
|
| 77 |
+
|
| 78 |
+
__slots__ = ()
|
| 79 |
+
|
| 80 |
+
  def ToJsonString(self):
    """Converts Timestamp to RFC 3339 date string format.

    Returns:
      A string converted from timestamp. The string is always Z-normalized
      and uses 3, 6 or 9 fractional digits as required to represent the
      exact time. Example of the return format: '1972-01-01T10:00:20.021Z'
    """
    # Normalize so that 0 <= nanos < 1e9; any carry moves into total_sec.
    nanos = self.nanos % _NANOS_PER_SECOND
    total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND
    seconds = total_sec % _SECONDS_PER_DAY
    days = (total_sec - seconds) // _SECONDS_PER_DAY
    dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds)

    result = dt.isoformat()
    if (nanos % 1e9) == 0:
      # If there are 0 fractional digits, the fractional
      # point '.' should be omitted when serializing.
      return result + 'Z'
    if (nanos % 1e6) == 0:
      # Serialize 3 fractional digits.
      return result + '.%03dZ' % (nanos / 1e6)
    if (nanos % 1e3) == 0:
      # Serialize 6 fractional digits.
      return result + '.%06dZ' % (nanos / 1e3)
    # Serialize 9 fractional digits.
    return result + '.%09dZ' % nanos
|
| 108 |
+
  def FromJsonString(self, value):
    """Parse a RFC 3339 date string format to Timestamp.

    Args:
      value: A date string. Any fractional digits (or none) and any offset are
          accepted as long as they fit into nano-seconds precision.
          Example of accepted format: '1972-01-01T10:00:20.021-05:00'

    Raises:
      ValueError: On parsing problems.
    """
    if not isinstance(value, str):
      raise ValueError('Timestamp JSON value not a string: {!r}'.format(value))
    # Locate the timezone designator: 'Z', '+HH:MM' or '-HH:MM' (rfind for
    # '-' so date separators are not mistaken for a negative offset).
    timezone_offset = value.find('Z')
    if timezone_offset == -1:
      timezone_offset = value.find('+')
      if timezone_offset == -1:
        timezone_offset = value.rfind('-')
        if timezone_offset == -1:
          raise ValueError(
              'Failed to parse timestamp: missing valid timezone offset.')
    time_value = value[0:timezone_offset]
    # Parse datetime and nanos.
    point_position = time_value.find('.')
    if point_position == -1:
      second_value = time_value
      nano_value = ''
    else:
      second_value = time_value[:point_position]
      nano_value = time_value[point_position + 1:]
    if 't' in second_value:
      raise ValueError(
          'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', '
          'lowercase \'t\' is not accepted'.format(second_value))
    date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFOMAT)
    td = date_object - datetime.datetime(1970, 1, 1)
    seconds = td.seconds + td.days * _SECONDS_PER_DAY
    if len(nano_value) > 9:
      raise ValueError(
          'Failed to parse Timestamp: nanos {0} more than '
          '9 fractional digits.'.format(nano_value))
    if nano_value:
      # Treat the fraction as a decimal and scale to integer nanoseconds.
      nanos = round(float('0.' + nano_value) * 1e9)
    else:
      nanos = 0
    # Parse timezone offsets.
    if value[timezone_offset] == 'Z':
      if len(value) != timezone_offset + 1:
        raise ValueError('Failed to parse timestamp: invalid trailing'
                         ' data {0}.'.format(value))
    else:
      timezone = value[timezone_offset:]
      pos = timezone.find(':')
      if pos == -1:
        raise ValueError(
            'Invalid timezone offset value: {0}.'.format(timezone))
      # A positive offset means local time is ahead of UTC, so subtract it
      # to normalize back to UTC epoch seconds (and vice versa).
      if timezone[0] == '+':
        seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
      else:
        seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
    # Set seconds and nanos
    self.seconds = int(seconds)
    self.nanos = int(nanos)
|
| 172 |
+
def GetCurrentTime(self):
  """Sets this Timestamp to the current UTC wall-clock time.

  Uses a timezone-aware "now" instead of datetime.datetime.utcnow(),
  which is deprecated since Python 3.12. FromDatetime handles aware
  datetimes correctly (it goes through utctimetuple()), so the stored
  seconds/nanos are identical to the previous behavior.
  """
  self.FromDatetime(datetime.datetime.now(tz=datetime.timezone.utc))
|
| 175 |
+
|
| 176 |
+
def ToNanoseconds(self):
  """Returns this timestamp as nanoseconds since the Unix epoch."""
  whole = self.seconds * _NANOS_PER_SECOND
  return whole + self.nanos
|
| 179 |
+
|
| 180 |
+
def ToMicroseconds(self):
  """Returns this timestamp as microseconds since the Unix epoch.

  Sub-microsecond precision in nanos is discarded (floor division).
  """
  micros = self.nanos // _NANOS_PER_MICROSECOND
  return self.seconds * _MICROS_PER_SECOND + micros
|
| 184 |
+
|
| 185 |
+
def ToMilliseconds(self):
  """Returns this timestamp as milliseconds since the Unix epoch.

  Sub-millisecond precision in nanos is discarded (floor division).
  """
  millis = self.nanos // _NANOS_PER_MILLISECOND
  return self.seconds * _MILLIS_PER_SECOND + millis
|
| 189 |
+
|
| 190 |
+
def ToSeconds(self):
  """Returns the whole-second part of this timestamp (nanos ignored)."""
  return self.seconds
|
| 193 |
+
|
| 194 |
+
def FromNanoseconds(self, nanos):
  """Sets this timestamp from nanoseconds since the Unix epoch."""
  # divmod is equivalent to the separate // and % the long way around.
  self.seconds, self.nanos = divmod(nanos, _NANOS_PER_SECOND)
|
| 198 |
+
|
| 199 |
+
def FromMicroseconds(self, micros):
  """Sets this timestamp from microseconds since the Unix epoch."""
  whole_seconds, fractional_micros = divmod(micros, _MICROS_PER_SECOND)
  self.seconds = whole_seconds
  self.nanos = fractional_micros * _NANOS_PER_MICROSECOND
|
| 203 |
+
|
| 204 |
+
def FromMilliseconds(self, millis):
  """Sets this timestamp from milliseconds since the Unix epoch."""
  whole_seconds, fractional_millis = divmod(millis, _MILLIS_PER_SECOND)
  self.seconds = whole_seconds
  self.nanos = fractional_millis * _NANOS_PER_MILLISECOND
|
| 208 |
+
|
| 209 |
+
def FromSeconds(self, seconds):
  """Sets this timestamp from whole seconds since the Unix epoch."""
  self.nanos = 0
  self.seconds = seconds
|
| 213 |
+
|
| 214 |
+
def ToDatetime(self, tzinfo=None):
  """Converts this Timestamp to a datetime.

  Args:
    tzinfo: A datetime.tzinfo subclass; defaults to None.

  Returns:
    If tzinfo is None, a timezone-naive UTC datetime (carrying no timezone
    information, i.e. not aware that it is UTC). Otherwise, a
    timezone-aware datetime converted into the given timezone.
  """
  # datetime.fromtimestamp would avoid building this intermediate
  # timedelta, but it does not handle the full precision on all platforms
  # (https://github.com/python/cpython/issues/109849) nor the full range
  # on some (https://github.com/python/cpython/issues/110042).
  offset = datetime.timedelta(
      seconds=self.seconds,
      microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND),
  )
  if tzinfo is None:
    return _EPOCH_DATETIME_NAIVE + offset
  # The timezone conversion must happen after the timedelta arithmetic.
  return (_EPOCH_DATETIME_AWARE + offset).astimezone(tzinfo)
|
| 241 |
+
|
| 242 |
+
def FromDatetime(self, dt):
  """Sets this Timestamp from a datetime.

  Args:
    dt: A datetime. A timezone-naive value is assumed to be in UTC.
  """
  # utctimetuple() yields a UTC struct_time (normalizing aware datetimes),
  # and calendar.timegm interprets its argument as UTC, so no local-time
  # conversion sneaks in. See http://wiki.python.org/moin/WorkingWithTime
  # and http://docs.python.org/library/time.html.
  self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND
  self.seconds = calendar.timegm(dt.utctimetuple())
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
class Duration(object):
  """Class for Duration message type.

  Helper mixin providing conversions between a Duration message
  (seconds/nanos pair) and JSON strings, integer units, and timedelta.
  Negative durations keep seconds and nanos with the same sign.
  """

  __slots__ = ()

  def ToJsonString(self):
    """Converts Duration to string format.

    Returns:
      A string converted from self. The string format will contains
      3, 6, or 9 fractional digits depending on the precision required to
      represent the exact Duration value. For example: "1s", "1.010s",
      "1.000000100s", "-3.100s"
    """
    _CheckDurationValid(self.seconds, self.nanos)
    if self.seconds < 0 or self.nanos < 0:
      # Emit a single leading '-' and work with the magnitudes; the modulo
      # against 1e9 below is float arithmetic, matching the positive branch.
      result = '-'
      seconds = - self.seconds + int((0 - self.nanos) // 1e9)
      nanos = (0 - self.nanos) % 1e9
    else:
      result = ''
      seconds = self.seconds + int(self.nanos // 1e9)
      nanos = self.nanos % 1e9
    result += '%d' % seconds
    if (nanos % 1e9) == 0:
      # If there are 0 fractional digits, the fractional
      # point '.' should be omitted when serializing.
      return result + 's'
    if (nanos % 1e6) == 0:
      # Serialize 3 fractional digits.
      return result + '.%03ds' % (nanos / 1e6)
    if (nanos % 1e3) == 0:
      # Serialize 6 fractional digits.
      return result + '.%06ds' % (nanos / 1e3)
    # Serialize 9 fractional digits.
    return result + '.%09ds' % nanos

  def FromJsonString(self, value):
    """Converts a string to Duration.

    Args:
      value: A string to be converted. The string must end with 's'. Any
        fractional digits (or none) are accepted as long as they fit into
        precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s

    Raises:
      ValueError: On parsing problems.
    """
    if not isinstance(value, str):
      raise ValueError('Duration JSON value not a string: {!r}'.format(value))
    if len(value) < 1 or value[-1] != 's':
      raise ValueError(
          'Duration must end with letter "s": {0}.'.format(value))
    try:
      pos = value.find('.')
      if pos == -1:
        # No fractional part: everything before the trailing 's' is seconds.
        seconds = int(value[:-1])
        nanos = 0
      else:
        seconds = int(value[:pos])
        if value[0] == '-':
          # Build the fraction as "-0.<digits>" so the nanos carry the
          # same sign as the seconds.
          nanos = int(round(float('-0{0}'.format(value[pos: -1])) * 1e9))
        else:
          nanos = int(round(float('0{0}'.format(value[pos: -1])) * 1e9))
      _CheckDurationValid(seconds, nanos)
      self.seconds = seconds
      self.nanos = nanos
    except ValueError as e:
      raise ValueError(
          'Couldn\'t parse duration: {0} : {1}.'.format(value, e))

  def ToNanoseconds(self):
    """Converts a Duration to nanoseconds."""
    return self.seconds * _NANOS_PER_SECOND + self.nanos

  def ToMicroseconds(self):
    """Converts a Duration to microseconds (rounding nanos toward zero)."""
    micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)
    return self.seconds * _MICROS_PER_SECOND + micros

  def ToMilliseconds(self):
    """Converts a Duration to milliseconds (rounding nanos toward zero)."""
    millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND)
    return self.seconds * _MILLIS_PER_SECOND + millis

  def ToSeconds(self):
    """Converts a Duration to seconds (nanos discarded)."""
    return self.seconds

  def FromNanoseconds(self, nanos):
    """Converts nanoseconds to Duration."""
    self._NormalizeDuration(nanos // _NANOS_PER_SECOND,
                            nanos % _NANOS_PER_SECOND)

  def FromMicroseconds(self, micros):
    """Converts microseconds to Duration."""
    self._NormalizeDuration(
        micros // _MICROS_PER_SECOND,
        (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND)

  def FromMilliseconds(self, millis):
    """Converts milliseconds to Duration."""
    self._NormalizeDuration(
        millis // _MILLIS_PER_SECOND,
        (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND)

  def FromSeconds(self, seconds):
    """Converts seconds to Duration."""
    self.seconds = seconds
    self.nanos = 0

  def ToTimedelta(self):
    """Converts Duration to timedelta (rounding nanos toward zero)."""
    return datetime.timedelta(
        seconds=self.seconds, microseconds=_RoundTowardZero(
            self.nanos, _NANOS_PER_MICROSECOND))

  def FromTimedelta(self, td):
    """Converts timedelta to Duration."""
    # Fold the days component into seconds before normalizing the signs.
    self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY,
                            td.microseconds * _NANOS_PER_MICROSECOND)

  def _NormalizeDuration(self, seconds, nanos):
    """Set Duration by seconds and nanos."""
    # Force nanos to be negative if the duration is negative.
    if seconds < 0 and nanos > 0:
      seconds += 1
      nanos -= _NANOS_PER_SECOND
    self.seconds = seconds
    self.nanos = nanos
|
| 389 |
+
|
| 390 |
+
|
| 391 |
+
def _CheckDurationValid(seconds, nanos):
  """Raises ValueError unless (seconds, nanos) is a valid Duration."""
  if not -_DURATION_SECONDS_MAX <= seconds <= _DURATION_SECONDS_MAX:
    raise ValueError(
        'Duration is not valid: Seconds {0} must be in range '
        '[-315576000000, 315576000000].'.format(seconds))
  if not -_NANOS_PER_SECOND < nanos < _NANOS_PER_SECOND:
    raise ValueError(
        'Duration is not valid: Nanos {0} must be in range '
        '[-999999999, 999999999].'.format(nanos))
  # seconds and nanos must not have opposite signs.
  if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0):
    raise ValueError(
        'Duration is not valid: Sign mismatch.')
|
| 403 |
+
|
| 404 |
+
|
| 405 |
+
def _RoundTowardZero(value, divider):
|
| 406 |
+
"""Truncates the remainder part after division."""
|
| 407 |
+
# For some languages, the sign of the remainder is implementation
|
| 408 |
+
# dependent if any of the operands is negative. Here we enforce
|
| 409 |
+
# "rounded toward zero" semantics. For example, for (-5) / 2 an
|
| 410 |
+
# implementation may give -3 as the result with the remainder being
|
| 411 |
+
# 1. This function ensures we always return -2 (closer to zero).
|
| 412 |
+
result = value // divider
|
| 413 |
+
remainder = value % divider
|
| 414 |
+
if result < 0 and remainder > 0:
|
| 415 |
+
return result + 1
|
| 416 |
+
else:
|
| 417 |
+
return result
|
| 418 |
+
|
| 419 |
+
|
| 420 |
+
def _SetStructValue(struct_value, value):
  """Stores a Python value into a google.protobuf.Value message.

  Args:
    struct_value: The Value message to populate (one of its oneof fields
      is set according to the Python type of `value`).
    value: None, bool, str, int, float, dict/Struct, or
      list/tuple/ListValue.

  Raises:
    ValueError: If `value` has an unsupported type.
  """
  if value is None:
    struct_value.null_value = 0
  elif isinstance(value, bool):
    # Note: this check must come before the number check because in Python
    # True and False are also considered numbers.
    struct_value.bool_value = value
  elif isinstance(value, str):
    struct_value.string_value = value
  elif isinstance(value, (int, float)):
    struct_value.number_value = value
  elif isinstance(value, (dict, Struct)):
    # Clear first so stale keys from a previously-set struct don't survive.
    struct_value.struct_value.Clear()
    struct_value.struct_value.update(value)
  elif isinstance(value, (list, tuple, ListValue)):
    # Clear first so stale elements from a previously-set list don't survive.
    struct_value.list_value.Clear()
    struct_value.list_value.extend(value)
  else:
    raise ValueError('Unexpected type')
|
| 439 |
+
|
| 440 |
+
|
| 441 |
+
def _GetStructValue(struct_value):
|
| 442 |
+
which = struct_value.WhichOneof('kind')
|
| 443 |
+
if which == 'struct_value':
|
| 444 |
+
return struct_value.struct_value
|
| 445 |
+
elif which == 'null_value':
|
| 446 |
+
return None
|
| 447 |
+
elif which == 'number_value':
|
| 448 |
+
return struct_value.number_value
|
| 449 |
+
elif which == 'string_value':
|
| 450 |
+
return struct_value.string_value
|
| 451 |
+
elif which == 'bool_value':
|
| 452 |
+
return struct_value.bool_value
|
| 453 |
+
elif which == 'list_value':
|
| 454 |
+
return struct_value.list_value
|
| 455 |
+
elif which is None:
|
| 456 |
+
raise ValueError('Value not set')
|
| 457 |
+
|
| 458 |
+
|
| 459 |
+
class Struct(object):
  """Class for Struct message type.

  Provides a dict-like interface over the underlying `fields` map,
  converting between Python values and Value messages on access.
  """

  __slots__ = ()

  def __getitem__(self, key):
    return _GetStructValue(self.fields[key])

  def __setitem__(self, key, value):
    _SetStructValue(self.fields[key], value)

  def __delitem__(self, key):
    del self.fields[key]

  def __contains__(self, item):
    return item in self.fields

  def __len__(self):
    return len(self.fields)

  def __iter__(self):
    return iter(self.fields)

  def keys(self):  # pylint: disable=invalid-name
    return self.fields.keys()

  def values(self):  # pylint: disable=invalid-name
    return [self[key] for key in self]

  def items(self):  # pylint: disable=invalid-name
    return [(key, self[key]) for key in self]

  def get_or_create_list(self, key):
    """Returns a list for this key, creating it if it didn't exist already."""
    entry = self.fields[key]
    if not entry.HasField('list_value'):
      # Clear marks list_value as modified, which materializes the list.
      entry.list_value.Clear()
    return entry.list_value

  def get_or_create_struct(self, key):
    """Returns a struct for this key, creating it if it didn't exist already."""
    entry = self.fields[key]
    if not entry.HasField('struct_value'):
      # Clear marks struct_value as modified, which materializes the struct.
      entry.struct_value.Clear()
    return entry.struct_value

  def update(self, dictionary):  # pylint: disable=invalid-name
    """Copies every key/value pair from `dictionary` into this struct."""
    for key, value in dictionary.items():
      _SetStructValue(self.fields[key], value)
|
| 508 |
+
|
| 509 |
+
collections.abc.MutableMapping.register(Struct)
|
| 510 |
+
|
| 511 |
+
|
| 512 |
+
class ListValue(object):
  """Class for ListValue message type.

  Provides a list-like interface over the underlying `values` field,
  converting between Python values and Value messages on access.
  """

  __slots__ = ()

  def __len__(self):
    return len(self.values)

  def __getitem__(self, index):
    """Retrieves item by the specified index."""
    return _GetStructValue(self.values.__getitem__(index))

  def __setitem__(self, index, value):
    _SetStructValue(self.values.__getitem__(index), value)

  def __delitem__(self, key):
    del self.values[key]

  def append(self, value):
    _SetStructValue(self.values.add(), value)

  def extend(self, elem_seq):
    for elem in elem_seq:
      self.append(elem)

  def items(self):
    """Yields each element converted to a Python value."""
    for index in range(len(self)):
      yield self[index]

  def add_struct(self):
    """Appends and returns a struct value as the next value in the list."""
    struct_value = self.values.add().struct_value
    # Clear marks struct_value as modified, which materializes the struct.
    struct_value.Clear()
    return struct_value

  def add_list(self):
    """Appends and returns a list value as the next value in the list."""
    list_value = self.values.add().list_value
    # Clear marks list_value as modified, which materializes the list.
    list_value.Clear()
    return list_value
|
| 554 |
+
|
| 555 |
+
collections.abc.MutableSequence.register(ListValue)
|
| 556 |
+
|
| 557 |
+
|
| 558 |
+
# LINT.IfChange(wktbases)
|
| 559 |
+
# Maps well-known-type full names to the helper classes defined above.
# NOTE(review): presumably generated message classes for these types mix in
# the corresponding helper to gain ToJsonString/FromJsonString etc. —
# confirm against the code generator referenced by the LINT marker below.
WKTBASES = {
    'google.protobuf.Any': Any,
    'google.protobuf.Duration': Duration,
    'google.protobuf.FieldMask': FieldMask,
    'google.protobuf.ListValue': ListValue,
    'google.protobuf.Struct': Struct,
    'google.protobuf.Timestamp': Timestamp,
}
|
| 567 |
+
# LINT.ThenChange(//depot/google.protobuf/compiler/python/pyi_generator.cc:wktbases)
|
lib/python3.10/site-packages/google/protobuf/json_format.py
ADDED
|
@@ -0,0 +1,904 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
"""Contains routines for printing protocol messages in JSON format.
|
| 9 |
+
|
| 10 |
+
Simple usage example:
|
| 11 |
+
|
| 12 |
+
# Create a proto object and serialize it to a json format string.
|
| 13 |
+
message = my_proto_pb2.MyMessage(foo='bar')
|
| 14 |
+
json_string = json_format.MessageToJson(message)
|
| 15 |
+
|
| 16 |
+
# Parse a json format string to proto object.
|
| 17 |
+
message = json_format.Parse(json_string, my_proto_pb2.MyMessage())
|
| 18 |
+
"""
|
| 19 |
+
|
| 20 |
+
__author__ = 'jieluo@google.com (Jie Luo)'
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
import base64
|
| 24 |
+
from collections import OrderedDict
|
| 25 |
+
import json
|
| 26 |
+
import math
|
| 27 |
+
from operator import methodcaller
|
| 28 |
+
import re
|
| 29 |
+
|
| 30 |
+
from google.protobuf.internal import type_checkers
|
| 31 |
+
from google.protobuf import descriptor
|
| 32 |
+
from google.protobuf import message_factory
|
| 33 |
+
from google.protobuf import symbol_database
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
# C++-type groupings used when dispatching scalar fields to JSON forms.
_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32,
                        descriptor.FieldDescriptor.CPPTYPE_UINT32,
                        descriptor.FieldDescriptor.CPPTYPE_INT64,
                        descriptor.FieldDescriptor.CPPTYPE_UINT64])
# 64-bit integer fields are emitted as JSON strings (see _FieldToJsonObject).
_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64,
                          descriptor.FieldDescriptor.CPPTYPE_UINT64])
_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT,
                          descriptor.FieldDescriptor.CPPTYPE_DOUBLE])
# Spellings used for non-finite float values in proto3 JSON.
_INFINITY = 'Infinity'
_NEG_INFINITY = '-Infinity'
_NAN = 'NaN'

# Matches a lone (unpaired) UTF-16 surrogate code point.
_UNPAIRED_SURROGATE_PATTERN = re.compile(
    u'[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]')

# Extension field names appear in JSON keyed as "[full.field.name]".
_VALID_EXTENSION_NAME = re.compile(r'\[[a-zA-Z0-9\._]*\]$')
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
# Exception hierarchy: Error is the module's catch-all base; serialization
# and parsing failures derive from it so callers can catch either level.
class Error(Exception):
  """Top-level module error for json_format."""


class SerializeToJsonError(Error):
  """Thrown if serialization to JSON fails."""


class ParseError(Error):
  """Thrown in case of parsing error."""
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
def MessageToJson(
    message,
    including_default_value_fields=False,
    preserving_proto_field_name=False,
    indent=2,
    sort_keys=False,
    use_integers_for_enums=False,
    descriptor_pool=None,
    float_precision=None,
    ensure_ascii=True):
  """Serializes a protobuf message to a proto3-JSON-format string.

  Args:
    message: The protocol buffers message instance to serialize.
    including_default_value_fields: If True, always serialize singular
      primitive fields, repeated fields, and map fields, even when empty.
      Singular message fields and oneof fields are not affected.
    preserving_proto_field_name: If True, keep the original proto field
      names as defined in the .proto file; otherwise convert them to
      lowerCamelCase.
    indent: Pretty-print indent level. 0 or negative only inserts
      newlines; None disables newlines entirely.
    sort_keys: If True, sort the output by field names.
    use_integers_for_enums: If True, print integers instead of enum names.
    descriptor_pool: A Descriptor Pool for resolving types; None selects
      the default pool.
    float_precision: If set, the number of valid digits for float fields.
    ensure_ascii: If True, escape non-ASCII characters in strings; if
      False, return Unicode text unchanged.

  Returns:
    A string containing the JSON formatted protocol buffer message.
  """
  printer = _Printer(
      including_default_value_fields=including_default_value_fields,
      preserving_proto_field_name=preserving_proto_field_name,
      use_integers_for_enums=use_integers_for_enums,
      descriptor_pool=descriptor_pool,
      float_precision=float_precision)
  return printer.ToJsonString(message, indent, sort_keys, ensure_ascii)
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def MessageToDict(
    message,
    including_default_value_fields=False,
    preserving_proto_field_name=False,
    use_integers_for_enums=False,
    descriptor_pool=None,
    float_precision=None):
  """Converts a protobuf message to a dictionary.

  When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.

  Args:
    message: The protocol buffers message instance to serialize.
    including_default_value_fields: If True, always serialize singular
      primitive fields, repeated fields, and map fields, even when empty.
      Singular message fields and oneof fields are not affected.
    preserving_proto_field_name: If True, keep the original proto field
      names as defined in the .proto file; otherwise convert them to
      lowerCamelCase.
    use_integers_for_enums: If True, print integers instead of enum names.
    descriptor_pool: A Descriptor Pool for resolving types; None selects
      the default pool.
    float_precision: If set, the number of valid digits for float fields.

  Returns:
    A dict representation of the protocol buffer message.
  """
  printer = _Printer(
      including_default_value_fields=including_default_value_fields,
      preserving_proto_field_name=preserving_proto_field_name,
      use_integers_for_enums=use_integers_for_enums,
      descriptor_pool=descriptor_pool,
      float_precision=float_precision)
  # pylint: disable=protected-access
  return printer._MessageToJsonObject(message)
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def _IsMapEntry(field):
  """Returns True when `field` is the synthetic entry message of a map."""
  if field.type != descriptor.FieldDescriptor.TYPE_MESSAGE:
    return False
  entry_type = field.message_type
  return entry_type.has_options and entry_type.GetOptions().map_entry
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
class _Printer(object):
|
| 155 |
+
"""JSON format printer for protocol message."""
|
| 156 |
+
|
| 157 |
+
def __init__(
    self,
    including_default_value_fields=False,
    preserving_proto_field_name=False,
    use_integers_for_enums=False,
    descriptor_pool=None,
    float_precision=None):
  """Stores the serialization options consulted by the printing helpers."""
  self.including_default_value_fields = including_default_value_fields
  self.preserving_proto_field_name = preserving_proto_field_name
  self.use_integers_for_enums = use_integers_for_enums
  self.descriptor_pool = descriptor_pool
  # A format spec such as '.8g' when a precision was requested, else None.
  self.float_format = (
      '.{}g'.format(float_precision) if float_precision else None)
|
| 172 |
+
|
| 173 |
+
def ToJsonString(self, message, indent, sort_keys, ensure_ascii):
  """Converts the message to a JSON object, then dumps it with json.dumps."""
  json_object = self._MessageToJsonObject(message)
  return json.dumps(json_object, indent=indent, sort_keys=sort_keys,
                    ensure_ascii=ensure_ascii)
|
| 177 |
+
|
| 178 |
+
def _MessageToJsonObject(self, message):
  """Converts message to an object according to Proto3 JSON Specification."""
  message_descriptor = message.DESCRIPTOR
  if _IsWrapperMessage(message_descriptor):
    return self._WrapperMessageToJsonObject(message)
  wkt_methods = _WKTJSONMETHODS.get(message_descriptor.full_name)
  if wkt_methods is not None:
    # Well-known types are handled by dedicated printer methods.
    return methodcaller(wkt_methods[0], message)(self)
  return self._RegularMessageToJsonObject(message, {})
|
| 188 |
+
|
| 189 |
+
def _RegularMessageToJsonObject(self, message, js):
  """Converts normal message according to Proto3 JSON Specification.

  Args:
    message: The message instance being converted.
    js: The dict to populate; also returned.

  Returns:
    js, with one entry per serialized field.

  Raises:
    SerializeToJsonError: If converting any field value raises ValueError.
  """
  fields = message.ListFields()

  try:
    for field, value in fields:
      if self.preserving_proto_field_name:
        name = field.name
      else:
        name = field.json_name
      if _IsMapEntry(field):
        # Convert a map field.
        v_field = field.message_type.fields_by_name['value']
        js_map = {}
        for key in value:
          if isinstance(key, bool):
            # JSON map keys are strings; booleans must be spelled
            # 'true'/'false', not Python's str(True)/str(False).
            if key:
              recorded_key = 'true'
            else:
              recorded_key = 'false'
          else:
            recorded_key = str(key)
          js_map[recorded_key] = self._FieldToJsonObject(
              v_field, value[key])
        js[name] = js_map
      elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
        # Convert a repeated field.
        js[name] = [self._FieldToJsonObject(field, k)
                    for k in value]
      elif field.is_extension:
        # Extensions are keyed by their bracketed full name.
        name = '[%s]' % field.full_name
        js[name] = self._FieldToJsonObject(field, value)
      else:
        js[name] = self._FieldToJsonObject(field, value)

    # Serialize default value if including_default_value_fields is True.
    if self.including_default_value_fields:
      message_descriptor = message.DESCRIPTOR
      for field in message_descriptor.fields:
        # Singular message fields and oneof fields will not be affected.
        if ((field.label != descriptor.FieldDescriptor.LABEL_REPEATED and
             field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE) or
            field.containing_oneof):
          continue
        if self.preserving_proto_field_name:
          name = field.name
        else:
          name = field.json_name
        if name in js:
          # Skip the field which has been serialized already.
          continue
        if _IsMapEntry(field):
          js[name] = {}
        elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
          js[name] = []
        else:
          js[name] = self._FieldToJsonObject(field, field.default_value)

  except ValueError as e:
    # 'field' is whichever field was being converted when the ValueError
    # escaped one of the loops above, so the error names the culprit.
    raise SerializeToJsonError(
        'Failed to serialize {0} field: {1}.'.format(field.name, e)) from e

  return js
|
| 252 |
+
|
| 253 |
+
def _FieldToJsonObject(self, field, value):
  """Converts field value according to Proto3 JSON Specification.

  Args:
    field: The FieldDescriptor of the field being converted.
    value: The Python value currently held by that field.

  Returns:
    The JSON-compatible representation of ``value``.

  Raises:
    SerializeToJsonError: If a closed enum holds a number that has no
      corresponding named value.
  """
  if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
    return self._MessageToJsonObject(value)
  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
    if self.use_integers_for_enums:
      return value
    if field.enum_type.full_name == 'google.protobuf.NullValue':
      return None
    enum_value = field.enum_type.values_by_number.get(value, None)
    if enum_value is not None:
      return enum_value.name
    else:
      if field.enum_type.is_closed:
        raise SerializeToJsonError('Enum field contains an integer value '
                                   'which can not be mapped to an enum value.')
      else:
        # Open enums serialize unknown numeric values as-is.
        return value
  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
    if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
      # Use base64 Data encoding for bytes
      return base64.b64encode(value).decode('utf-8')
    else:
      return value
  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
    return bool(value)
  elif field.cpp_type in _INT64_TYPES:
    # Per the proto3 JSON mapping, 64-bit integers are emitted as strings.
    return str(value)
  elif field.cpp_type in _FLOAT_TYPES:
    # Non-finite floats use the quoted spellings "Infinity"/"-Infinity"/"NaN".
    if math.isinf(value):
      if value < 0.0:
        return _NEG_INFINITY
      else:
        return _INFINITY
    if math.isnan(value):
      return _NAN
    if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
      if self.float_format:
        return float(format(value, self.float_format))
      else:
        return type_checkers.ToShortestFloat(value)

  return value
|
| 296 |
+
|
| 297 |
+
def _AnyMessageToJsonObject(self, message):
  """Converts Any message according to Proto3 JSON Specification.

  Args:
    message: A google.protobuf.Any message.

  Returns:
    A dict whose first key is '@type'; the payload follows either inline
    (regular messages) or under a single 'value' key (well-known types).
  """
  if not message.ListFields():
    # An unset Any serializes as an empty JSON object.
    return {}
  # Must print @type first, use OrderedDict instead of {}
  js = OrderedDict()
  type_url = message.type_url
  js['@type'] = type_url
  # Materialize the packed payload so its fields can be serialized.
  sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
  sub_message.ParseFromString(message.value)
  message_descriptor = sub_message.DESCRIPTOR
  full_name = message_descriptor.full_name
  if _IsWrapperMessage(message_descriptor):
    # Wrapper types nest their scalar under an explicit 'value' key.
    js['value'] = self._WrapperMessageToJsonObject(sub_message)
    return js
  if full_name in _WKTJSONMETHODS:
    # Other well-known types dispatch to their dedicated serializer method.
    js['value'] = methodcaller(_WKTJSONMETHODS[full_name][0],
                               sub_message)(self)
    return js
  return self._RegularMessageToJsonObject(sub_message, js)
|
| 317 |
+
|
| 318 |
+
def _GenericMessageToJsonObject(self, message):
|
| 319 |
+
"""Converts message according to Proto3 JSON Specification."""
|
| 320 |
+
# Duration, Timestamp and FieldMask have ToJsonString method to do the
|
| 321 |
+
# convert. Users can also call the method directly.
|
| 322 |
+
return message.ToJsonString()
|
| 323 |
+
|
| 324 |
+
def _ValueMessageToJsonObject(self, message):
  """Converts Value message according to Proto3 JSON Specification.

  Raises:
    ValueError: If number_value holds Infinity or NaN, which JSON numbers
      cannot represent without changing type on round-trip.
  """
  which = message.WhichOneof('kind')
  # If the Value message is not set treat as null_value when serialize
  # to JSON. The parse back result will be different from original message.
  if which is None or which == 'null_value':
    return None
  if which == 'list_value':
    return self._ListValueMessageToJsonObject(message.list_value)
  if which == 'number_value':
    value = message.number_value
    # The quoted spellings would parse back as string_value, so refuse.
    if math.isinf(value):
      raise ValueError('Fail to serialize Infinity for Value.number_value, '
                       'which would parse as string_value')
    if math.isnan(value):
      raise ValueError('Fail to serialize NaN for Value.number_value, '
                       'which would parse as string_value')
  else:
    value = getattr(message, which)
  # Finite numbers and the remaining scalar kinds share the generic path.
  oneof_descriptor = message.DESCRIPTOR.fields_by_name[which]
  return self._FieldToJsonObject(oneof_descriptor, value)
|
| 345 |
+
|
| 346 |
+
def _ListValueMessageToJsonObject(self, message):
|
| 347 |
+
"""Converts ListValue message according to Proto3 JSON Specification."""
|
| 348 |
+
return [self._ValueMessageToJsonObject(value)
|
| 349 |
+
for value in message.values]
|
| 350 |
+
|
| 351 |
+
def _StructMessageToJsonObject(self, message):
|
| 352 |
+
"""Converts Struct message according to Proto3 JSON Specification."""
|
| 353 |
+
fields = message.fields
|
| 354 |
+
ret = {}
|
| 355 |
+
for key in fields:
|
| 356 |
+
ret[key] = self._ValueMessageToJsonObject(fields[key])
|
| 357 |
+
return ret
|
| 358 |
+
|
| 359 |
+
def _WrapperMessageToJsonObject(self, message):
|
| 360 |
+
return self._FieldToJsonObject(
|
| 361 |
+
message.DESCRIPTOR.fields_by_name['value'], message.value)
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
def _IsWrapperMessage(message_descriptor):
|
| 365 |
+
return message_descriptor.file.name == 'google/protobuf/wrappers.proto'
|
| 366 |
+
|
| 367 |
+
|
| 368 |
+
def _DuplicateChecker(js):
|
| 369 |
+
result = {}
|
| 370 |
+
for name, value in js:
|
| 371 |
+
if name in result:
|
| 372 |
+
raise ParseError('Failed to load JSON: duplicate key {0}.'.format(name))
|
| 373 |
+
result[name] = value
|
| 374 |
+
return result
|
| 375 |
+
|
| 376 |
+
|
| 377 |
+
def _CreateMessageFromTypeUrl(type_url, descriptor_pool):
  """Creates a message from a type URL.

  Args:
    type_url: An Any type URL, e.g. 'type.googleapis.com/pkg.Message'.
    descriptor_pool: Pool used to resolve the type; when None, the default
      symbol database's pool is used instead.

  Returns:
    A new, empty instance of the resolved message class.

  Raises:
    TypeError: If the type name cannot be found in the pool.
  """
  db = symbol_database.Default()
  pool = db.pool if descriptor_pool is None else descriptor_pool
  # Only the last '/'-separated segment of the URL names the message type.
  type_name = type_url.split('/')[-1]
  try:
    message_descriptor = pool.FindMessageTypeByName(type_name)
  except KeyError as e:
    raise TypeError(
        'Can not find message descriptor by type_url: {0}'.format(type_url)
    ) from e
  message_class = message_factory.GetMessageClass(message_descriptor)
  return message_class()
|
| 390 |
+
|
| 391 |
+
|
| 392 |
+
def Parse(text,
          message,
          ignore_unknown_fields=False,
          descriptor_pool=None,
          max_recursion_depth=100):
  """Parses a JSON representation of a protocol message into a message.

  Args:
    text: Message JSON representation.
    message: A protocol buffer message to merge into.
    ignore_unknown_fields: If True, do not raise errors for unknown fields.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
      default.
    max_recursion_depth: max recursion depth of JSON message to be
      deserialized. JSON messages over this depth will fail to be
      deserialized. Default value is 100.

  Returns:
    The same message passed as argument.

  Raises:
    ParseError: On JSON parsing problems.
  """
  # Accept bytes as well as str input; JSON text is decoded as UTF-8.
  if not isinstance(text, str):
    text = text.decode('utf-8')
  try:
    # _DuplicateChecker rejects duplicate object keys while decoding.
    js = json.loads(text, object_pairs_hook=_DuplicateChecker)
  except ValueError as e:
    raise ParseError('Failed to load JSON: {0}.'.format(str(e))) from e
  return ParseDict(js, message, ignore_unknown_fields, descriptor_pool,
                   max_recursion_depth)
|
| 423 |
+
|
| 424 |
+
|
| 425 |
+
def ParseDict(js_dict,
              message,
              ignore_unknown_fields=False,
              descriptor_pool=None,
              max_recursion_depth=100):
  """Parses a JSON dictionary representation into a message.

  Args:
    js_dict: Dict representation of a JSON message.
    message: A protocol buffer message to merge into.
    ignore_unknown_fields: If True, do not raise errors for unknown fields.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
      default.
    max_recursion_depth: max recursion depth of JSON message to be
      deserialized. JSON messages over this depth will fail to be
      deserialized. Default value is 100.

  Returns:
    The same message passed as argument.

  Raises:
    ParseError: In case of convert problems.
  """
  parser = _Parser(ignore_unknown_fields, descriptor_pool, max_recursion_depth)
  # Empty path: the parser falls back to the message type's short name
  # when building error messages.
  parser.ConvertMessage(js_dict, message, '')
  return message
|
| 448 |
+
|
| 449 |
+
|
| 450 |
+
# JSON scalar number types accepted for Value.number_value. bool (a subclass
# of int) is excluded by an explicit isinstance check that runs first in
# _Parser._ConvertValueMessage.
_INT_OR_FLOAT = (int, float)
|
| 451 |
+
|
| 452 |
+
|
| 453 |
+
class _Parser(object):
  """JSON format parser for protocol message.

  One instance handles a single top-level ParseDict call; recursion depth is
  tracked on the instance, so instances are not safe to share across threads.
  """

  def __init__(self, ignore_unknown_fields, descriptor_pool,
               max_recursion_depth):
    # If True, unknown JSON keys are skipped instead of raising ParseError.
    self.ignore_unknown_fields = ignore_unknown_fields
    # Pool used to resolve Any payload types; None means the default pool.
    self.descriptor_pool = descriptor_pool
    # Hard limit on message nesting to guard against hostile deep JSON.
    self.max_recursion_depth = max_recursion_depth
    # Current nesting level, incremented/decremented by ConvertMessage.
    self.recursion_depth = 0

  def ConvertMessage(self, value, message, path):
    """Convert a JSON object into a message.

    Args:
      value: A JSON object.
      message: A WKT or regular protocol message to record the data.
      path: parent path to log parse error info.

    Raises:
      ParseError: In case of convert problems.
    """
    self.recursion_depth += 1
    if self.recursion_depth > self.max_recursion_depth:
      raise ParseError('Message too deep. Max recursion depth is {0}'.format(
          self.max_recursion_depth))
    message_descriptor = message.DESCRIPTOR
    full_name = message_descriptor.full_name
    if not path:
      # Top-level call: use the message type name as the error-path root.
      path = message_descriptor.name
    # Dispatch: wrapper types, then other well-known types, then regular
    # field-by-field conversion.
    if _IsWrapperMessage(message_descriptor):
      self._ConvertWrapperMessage(value, message, path)
    elif full_name in _WKTJSONMETHODS:
      methodcaller(_WKTJSONMETHODS[full_name][1], value, message, path)(self)
    else:
      self._ConvertFieldValuePair(value, message, path)
    self.recursion_depth -= 1

  def _ConvertFieldValuePair(self, js, message, path):
    """Convert field value pairs into regular message.

    Args:
      js: A JSON object to convert the field value pairs.
      message: A regular protocol message to record the data.
      path: parent path to log parse error info.

    Raises:
      ParseError: In case of problems converting.
    """
    # Tracks field (and oneof) names already consumed, to reject duplicates.
    names = []
    message_descriptor = message.DESCRIPTOR
    fields_by_json_name = dict((f.json_name, f)
                               for f in message_descriptor.fields)
    for name in js:
      try:
        # Resolution order: JSON name, then original proto field name, then
        # extension syntax '[full.name]'.
        field = fields_by_json_name.get(name, None)
        if not field:
          field = message_descriptor.fields_by_name.get(name, None)
        if not field and _VALID_EXTENSION_NAME.match(name):
          if not message_descriptor.is_extendable:
            raise ParseError(
                'Message type {0} does not have extensions at {1}'.format(
                    message_descriptor.full_name, path))
          identifier = name[1:-1]  # strip [] brackets
          # pylint: disable=protected-access
          field = message.Extensions._FindExtensionByName(identifier)
          # pylint: enable=protected-access
          if not field:
            # Try looking for extension by the message type name, dropping the
            # field name following the final . separator in full_name.
            identifier = '.'.join(identifier.split('.')[:-1])
            # pylint: disable=protected-access
            field = message.Extensions._FindExtensionByName(identifier)
            # pylint: enable=protected-access
        if not field:
          if self.ignore_unknown_fields:
            continue
          raise ParseError(
              ('Message type "{0}" has no field named "{1}" at "{2}".\n'
               ' Available Fields(except extensions): "{3}"').format(
                   message_descriptor.full_name, name, path,
                   [f.json_name for f in message_descriptor.fields]))
        if name in names:
          raise ParseError('Message type "{0}" should not have multiple '
                           '"{1}" fields at "{2}".'.format(
                               message.DESCRIPTOR.full_name, name, path))
        names.append(name)
        value = js[name]
        # Check no other oneof field is parsed.
        if field.containing_oneof is not None and value is not None:
          oneof_name = field.containing_oneof.name
          if oneof_name in names:
            raise ParseError('Message type "{0}" should not have multiple '
                             '"{1}" oneof fields at "{2}".'.format(
                                 message.DESCRIPTOR.full_name, oneof_name,
                                 path))
          names.append(oneof_name)

        if value is None:
          # JSON null normally clears the field, except for the two types
          # that can actually represent a null value.
          if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE
              and field.message_type.full_name == 'google.protobuf.Value'):
            sub_message = getattr(message, field.name)
            sub_message.null_value = 0
          elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM
                and field.enum_type.full_name == 'google.protobuf.NullValue'):
            setattr(message, field.name, 0)
          else:
            message.ClearField(field.name)
          continue

        # Parse field value.
        if _IsMapEntry(field):
          message.ClearField(field.name)
          self._ConvertMapFieldValue(value, message, field,
                                     '{0}.{1}'.format(path, name))
        elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
          message.ClearField(field.name)
          if not isinstance(value, list):
            raise ParseError('repeated field {0} must be in [] which is '
                             '{1} at {2}'.format(name, value, path))
          if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
            # Repeated message field.
            for index, item in enumerate(value):
              sub_message = getattr(message, field.name).add()
              # None is a null_value in Value.
              if (item is None and
                  sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'):
                raise ParseError('null is not allowed to be used as an element'
                                 ' in a repeated field at {0}.{1}[{2}]'.format(
                                     path, name, index))
              self.ConvertMessage(item, sub_message,
                                  '{0}.{1}[{2}]'.format(path, name, index))
          else:
            # Repeated scalar field.
            for index, item in enumerate(value):
              if item is None:
                raise ParseError('null is not allowed to be used as an element'
                                 ' in a repeated field at {0}.{1}[{2}]'.format(
                                     path, name, index))
              getattr(message, field.name).append(
                  _ConvertScalarFieldValue(
                      item, field, '{0}.{1}[{2}]'.format(path, name, index)))
        elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
          if field.is_extension:
            sub_message = message.Extensions[field]
          else:
            sub_message = getattr(message, field.name)
          # Mark presence even when the sub-message ends up empty.
          sub_message.SetInParent()
          self.ConvertMessage(value, sub_message, '{0}.{1}'.format(path, name))
        else:
          if field.is_extension:
            message.Extensions[field] = _ConvertScalarFieldValue(
                value, field, '{0}.{1}'.format(path, name))
          else:
            setattr(
                message, field.name,
                _ConvertScalarFieldValue(value, field,
                                         '{0}.{1}'.format(path, name)))
      except ParseError as e:
        # Oneof-related ParseErrors already carry full context; others are
        # wrapped with the offending field name.
        if field and field.containing_oneof is None:
          raise ParseError(
              'Failed to parse {0} field: {1}.'.format(name, e)
          ) from e
        else:
          raise ParseError(str(e)) from e
      except ValueError as e:
        raise ParseError(
            'Failed to parse {0} field: {1}.'.format(name, e)
        ) from e
      except TypeError as e:
        raise ParseError(
            'Failed to parse {0} field: {1}.'.format(name, e)
        ) from e

  def _ConvertAnyMessage(self, value, message, path):
    """Convert a JSON representation into Any message."""
    if isinstance(value, dict) and not value:
      # {} is a valid, unset Any.
      return
    try:
      type_url = value['@type']
    except KeyError as e:
      raise ParseError(
          '@type is missing when parsing any message at {0}'.format(path)
      ) from e

    try:
      sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
    except TypeError as e:
      raise ParseError('{0} at {1}'.format(e, path)) from e
    message_descriptor = sub_message.DESCRIPTOR
    full_name = message_descriptor.full_name
    if _IsWrapperMessage(message_descriptor):
      self._ConvertWrapperMessage(value['value'], sub_message,
                                  '{0}.value'.format(path))
    elif full_name in _WKTJSONMETHODS:
      methodcaller(_WKTJSONMETHODS[full_name][1], value['value'], sub_message,
                   '{0}.value'.format(path))(
                       self)
    else:
      # Temporarily drop '@type' so the remaining keys parse as fields of
      # the payload type; restore it afterwards to leave the input intact.
      del value['@type']
      self._ConvertFieldValuePair(value, sub_message, path)
      value['@type'] = type_url
    # Sets Any message
    message.value = sub_message.SerializeToString()
    message.type_url = type_url

  def _ConvertGenericMessage(self, value, message, path):
    """Convert a JSON representation into message with FromJsonString."""
    # Duration, Timestamp, FieldMask have a FromJsonString method to do the
    # conversion. Users can also call the method directly.
    try:
      message.FromJsonString(value)
    except ValueError as e:
      raise ParseError('{0} at {1}'.format(e, path)) from e

  def _ConvertValueMessage(self, value, message, path):
    """Convert a JSON representation into Value message."""
    if isinstance(value, dict):
      self._ConvertStructMessage(value, message.struct_value, path)
    elif isinstance(value, list):
      self._ConvertListValueMessage(value, message.list_value, path)
    elif value is None:
      message.null_value = 0
    elif isinstance(value, bool):
      # Must precede the _INT_OR_FLOAT check: bool is a subclass of int.
      message.bool_value = value
    elif isinstance(value, str):
      message.string_value = value
    elif isinstance(value, _INT_OR_FLOAT):
      message.number_value = value
    else:
      raise ParseError('Value {0} has unexpected type {1} at {2}'.format(
          value, type(value), path))

  def _ConvertListValueMessage(self, value, message, path):
    """Convert a JSON representation into ListValue message."""
    if not isinstance(value, list):
      raise ParseError('ListValue must be in [] which is {0} at {1}'.format(
          value, path))
    message.ClearField('values')
    for index, item in enumerate(value):
      self._ConvertValueMessage(item, message.values.add(),
                                '{0}[{1}]'.format(path, index))

  def _ConvertStructMessage(self, value, message, path):
    """Convert a JSON representation into Struct message."""
    if not isinstance(value, dict):
      raise ParseError('Struct must be in a dict which is {0} at {1}'.format(
          value, path))
    # Clear will mark the struct as modified so it will be created even if
    # there are no values.
    message.Clear()
    for key in value:
      self._ConvertValueMessage(value[key], message.fields[key],
                                '{0}.{1}'.format(path, key))
    return

  def _ConvertWrapperMessage(self, value, message, path):
    """Convert a JSON representation into Wrapper message."""
    field = message.DESCRIPTOR.fields_by_name['value']
    setattr(
        message, 'value',
        _ConvertScalarFieldValue(value, field, path='{0}.value'.format(path)))

  def _ConvertMapFieldValue(self, value, message, field, path):
    """Convert map field value for a message map field.

    Args:
      value: A JSON object to convert the map field value.
      message: A protocol message to record the converted data.
      field: The descriptor of the map field to be converted.
      path: parent path to log parse error info.

    Raises:
      ParseError: In case of convert problems.
    """
    if not isinstance(value, dict):
      raise ParseError(
          'Map field {0} must be in a dict which is {1} at {2}'.format(
              field.name, value, path))
    key_field = field.message_type.fields_by_name['key']
    value_field = field.message_type.fields_by_name['value']
    for key in value:
      # JSON map keys are always strings; require_str=True enforces that for
      # bool-keyed maps ('true'/'false').
      key_value = _ConvertScalarFieldValue(key, key_field,
                                           '{0}.key'.format(path), True)
      if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
        self.ConvertMessage(value[key],
                            getattr(message, field.name)[key_value],
                            '{0}[{1}]'.format(path, key_value))
      else:
        getattr(message, field.name)[key_value] = _ConvertScalarFieldValue(
            value[key], value_field, path='{0}[{1}]'.format(path, key_value))
|
| 743 |
+
|
| 744 |
+
|
| 745 |
+
def _ConvertScalarFieldValue(value, field, path, require_str=False):
  """Convert a single scalar field value.

  Args:
    value: A scalar value to convert the scalar field value.
    field: The descriptor of the field to convert.
    path: parent path to log parse error info.
    require_str: If True, the field value must be a str.

  Returns:
    The converted scalar field value

  Raises:
    ParseError: In case of convert problems.
  """
  try:
    if field.cpp_type in _INT_TYPES:
      return _ConvertInteger(value)
    elif field.cpp_type in _FLOAT_TYPES:
      return _ConvertFloat(value, field)
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
      return _ConvertBool(value, require_str)
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
      if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
        if isinstance(value, str):
          encoded = value.encode('utf-8')
        else:
          encoded = value
        # Add extra padding '='
        padded_value = encoded + b'=' * (4 - len(encoded) % 4)
        # urlsafe decoding accepts both the standard and URL-safe alphabets.
        return base64.urlsafe_b64decode(padded_value)
      else:
        # Checking for unpaired surrogates appears to be unreliable,
        # depending on the specific Python version, so we check manually.
        if _UNPAIRED_SURROGATE_PATTERN.search(value):
          raise ParseError('Unpaired surrogate')
        return value
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
      # Convert an enum value: first by name, then by number.
      enum_value = field.enum_type.values_by_name.get(value, None)
      if enum_value is None:
        try:
          number = int(value)
          enum_value = field.enum_type.values_by_number.get(number, None)
        except ValueError as e:
          raise ParseError('Invalid enum value {0} for enum type {1}'.format(
              value, field.enum_type.full_name)) from e
        if enum_value is None:
          if field.enum_type.is_closed:
            raise ParseError('Invalid enum value {0} for enum type {1}'.format(
                value, field.enum_type.full_name))
          else:
            # Open enums accept and preserve unknown numeric values.
            return number
      return enum_value.number
  except ParseError as e:
    # Append the field path so the error pinpoints its location.
    raise ParseError('{0} at {1}'.format(e, path)) from e
|
| 801 |
+
|
| 802 |
+
|
| 803 |
+
def _ConvertInteger(value):
|
| 804 |
+
"""Convert an integer.
|
| 805 |
+
|
| 806 |
+
Args:
|
| 807 |
+
value: A scalar value to convert.
|
| 808 |
+
|
| 809 |
+
Returns:
|
| 810 |
+
The integer value.
|
| 811 |
+
|
| 812 |
+
Raises:
|
| 813 |
+
ParseError: If an integer couldn't be consumed.
|
| 814 |
+
"""
|
| 815 |
+
if isinstance(value, float) and not value.is_integer():
|
| 816 |
+
raise ParseError('Couldn\'t parse integer: {0}'.format(value))
|
| 817 |
+
|
| 818 |
+
if isinstance(value, str) and value.find(' ') != -1:
|
| 819 |
+
raise ParseError('Couldn\'t parse integer: "{0}"'.format(value))
|
| 820 |
+
|
| 821 |
+
if isinstance(value, bool):
|
| 822 |
+
raise ParseError('Bool value {0} is not acceptable for '
|
| 823 |
+
'integer field'.format(value))
|
| 824 |
+
|
| 825 |
+
return int(value)
|
| 826 |
+
|
| 827 |
+
|
| 828 |
+
def _ConvertFloat(value, field):
  """Convert a floating point number.

  Args:
    value: A scalar value to convert (float, int, or str).
    field: The descriptor of the float/double field being filled.

  Returns:
    The float value.

  Raises:
    ParseError: If a float couldn't be consumed, or if an unquoted
      non-finite value or an out-of-range 32-bit float was given.
  """
  if isinstance(value, float):
    # Unquoted non-finite literals are rejected; only the quoted spellings
    # ("NaN", "Infinity", "-Infinity") are valid proto3 JSON.
    if math.isnan(value):
      raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead')
    if math.isinf(value):
      if value > 0:
        raise ParseError('Couldn\'t parse Infinity or value too large, '
                         'use quoted "Infinity" instead')
      else:
        raise ParseError('Couldn\'t parse -Infinity or value too small, '
                         'use quoted "-Infinity" instead')
    if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
      # Range-check 32-bit float fields against the single-precision limits.
      # pylint: disable=protected-access
      if value > type_checkers._FLOAT_MAX:
        raise ParseError('Float value too large')
      # pylint: disable=protected-access
      if value < type_checkers._FLOAT_MIN:
        raise ParseError('Float value too small')
  # Python's float() would accept 'nan'; the spec requires the exact "NaN".
  if value == 'nan':
    raise ParseError('Couldn\'t parse float "nan", use "NaN" instead')
  try:
    # Assume Python compatible syntax.
    return float(value)
  except ValueError as e:
    # Check alternative spellings.
    if value == _NEG_INFINITY:
      return float('-inf')
    elif value == _INFINITY:
      return float('inf')
    elif value == _NAN:
      return float('nan')
    else:
      raise ParseError('Couldn\'t parse float: {0}'.format(value)) from e
|
| 862 |
+
|
| 863 |
+
|
| 864 |
+
def _ConvertBool(value, require_str):
|
| 865 |
+
"""Convert a boolean value.
|
| 866 |
+
|
| 867 |
+
Args:
|
| 868 |
+
value: A scalar value to convert.
|
| 869 |
+
require_str: If True, value must be a str.
|
| 870 |
+
|
| 871 |
+
Returns:
|
| 872 |
+
The bool parsed.
|
| 873 |
+
|
| 874 |
+
Raises:
|
| 875 |
+
ParseError: If a boolean value couldn't be consumed.
|
| 876 |
+
"""
|
| 877 |
+
if require_str:
|
| 878 |
+
if value == 'true':
|
| 879 |
+
return True
|
| 880 |
+
elif value == 'false':
|
| 881 |
+
return False
|
| 882 |
+
else:
|
| 883 |
+
raise ParseError('Expected "true" or "false", not {0}'.format(value))
|
| 884 |
+
|
| 885 |
+
if not isinstance(value, bool):
|
| 886 |
+
raise ParseError('Expected true or false without quotes')
|
| 887 |
+
return value
|
| 888 |
+
|
| 889 |
+
# Maps a well-known type's full name to a [serializer, parser] pair of
# method names, invoked via operator.methodcaller on the printer/parser
# instance (see _AnyMessageToJsonObject and _Parser.ConvertMessage).
_WKTJSONMETHODS = {
    'google.protobuf.Any': ['_AnyMessageToJsonObject',
                            '_ConvertAnyMessage'],
    'google.protobuf.Duration': ['_GenericMessageToJsonObject',
                                 '_ConvertGenericMessage'],
    'google.protobuf.FieldMask': ['_GenericMessageToJsonObject',
                                  '_ConvertGenericMessage'],
    'google.protobuf.ListValue': ['_ListValueMessageToJsonObject',
                                  '_ConvertListValueMessage'],
    'google.protobuf.Struct': ['_StructMessageToJsonObject',
                               '_ConvertStructMessage'],
    'google.protobuf.Timestamp': ['_GenericMessageToJsonObject',
                                  '_ConvertGenericMessage'],
    'google.protobuf.Value': ['_ValueMessageToJsonObject',
                              '_ConvertValueMessage']
}
|
lib/python3.10/site-packages/google/protobuf/message.py
ADDED
|
@@ -0,0 +1,399 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
# TODO: We should just make these methods all "pure-virtual" and move
|
| 9 |
+
# all implementation out, into reflection.py for now.
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
"""Contains an abstract base class for protocol messages."""
|
| 13 |
+
|
| 14 |
+
__author__ = 'robinson@google.com (Will Robinson)'
|
| 15 |
+
|
| 16 |
+
class Error(Exception):
  """Root of the exception hierarchy for this module."""


class DecodeError(Error):
  """Raised when serialized data cannot be parsed into a message."""


class EncodeError(Error):
  """Raised when a message cannot be serialized."""
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class Message(object):

  """Abstract base class for protocol messages.

  Protocol message classes are almost always generated by the protocol
  compiler. These generated types subclass Message and implement the methods
  shown below.
  """

  # TODO: Link to an HTML document here.

  # TODO: Document that instances of this class will also
  # have an Extensions attribute with __getitem__ and __setitem__.
  # Again, not sure how to best convey this.

  # TODO: Document these fields and methods.

  # No per-instance __dict__; concrete subclasses declare their own storage.
  __slots__ = []

  #: The :class:`google.protobuf.Descriptor`
  # for this message type.
  DESCRIPTOR = None

  def __deepcopy__(self, memo=None):
    """Returns an independent copy built by merging self into a new instance."""
    clone = type(self)()
    clone.MergeFrom(self)
    return clone

  def __eq__(self, other_msg):
    """Recursively compares two messages by value and structure."""
    raise NotImplementedError

  def __ne__(self, other_msg):
    # Can't just say self != other_msg, since that would infinitely recurse. :)
    return not self == other_msg

  def __hash__(self):
    # Messages are mutable, so they are deliberately unhashable.
    raise TypeError('unhashable object')

  def __str__(self):
    """Outputs a human-readable representation of the message."""
    raise NotImplementedError

  def __unicode__(self):
    """Outputs a human-readable representation of the message."""
    raise NotImplementedError

  def MergeFrom(self, other_msg):
    """Merges the contents of the specified message into current message.

    This method merges the contents of the specified message into the current
    message. Singular fields that are set in the specified message overwrite
    the corresponding fields in the current message. Repeated fields are
    appended. Singular sub-messages and groups are recursively merged.

    Args:
      other_msg (Message): A message to merge into the current message.
    """
    raise NotImplementedError

  def CopyFrom(self, other_msg):
    """Copies the content of the specified message into the current message.

    The method clears the current message and then merges the specified
    message using MergeFrom.

    Args:
      other_msg (Message): A message to copy into the current one.
    """
    # Copying a message onto itself would clear it and then merge from the
    # now-empty message, losing all data — so it is a no-op instead.
    if self is other_msg:
      return
    self.Clear()
    self.MergeFrom(other_msg)

  def Clear(self):
    """Clears all data that was set in the message."""
    raise NotImplementedError

  def SetInParent(self):
    """Mark this as present in the parent.

    This normally happens automatically when you assign a field of a
    sub-message, but sometimes you want to make the sub-message
    present while keeping it empty. If you find yourself using this,
    you may want to reconsider your design.
    """
    raise NotImplementedError

  def IsInitialized(self):
    """Checks if the message is initialized.

    Returns:
      bool: The method returns True if the message is initialized (i.e. all of
      its required fields are set).
    """
    raise NotImplementedError

  # TODO: MergeFromString() should probably return None and be
  # implemented in terms of a helper that returns the # of bytes read. Our
  # deserialization routines would use the helper when recursively
  # deserializing, but the end user would almost always just want the no-return
  # MergeFromString().

  def MergeFromString(self, serialized):
    """Merges serialized protocol buffer data into this message.

    When we find a field in `serialized` that is already present
    in this message:

    - If it's a "repeated" field, we append to the end of our list.
    - Else, if it's a scalar, we overwrite our field.
    - Else, (it's a nonrepeated composite), we recursively merge
      into the existing composite.

    Args:
      serialized (bytes): Any object that allows us to call
        ``memoryview(serialized)`` to access a string of bytes using the
        buffer interface.

    Returns:
      int: The number of bytes read from `serialized`.
      For non-group messages, this will always be `len(serialized)`,
      but for messages which are actually groups, this will
      generally be less than `len(serialized)`, since we must
      stop when we reach an ``END_GROUP`` tag. Note that if
      we *do* stop because of an ``END_GROUP`` tag, the number
      of bytes returned does not include the bytes
      for the ``END_GROUP`` tag information.

    Raises:
      DecodeError: if the input cannot be parsed.
    """
    # TODO: Document handling of unknown fields.
    # TODO: When we switch to a helper, this will return None.
    raise NotImplementedError

  def ParseFromString(self, serialized):
    """Parse serialized protocol buffer data in binary form into this message.

    Like :func:`MergeFromString()`, except we clear the object first.

    Raises:
      message.DecodeError if the input cannot be parsed.
    """
    self.Clear()
    return self.MergeFromString(serialized)

  def SerializeToString(self, **kwargs):
    """Serializes the protocol message to a binary string.

    Keyword Args:
      deterministic (bool): If true, requests deterministic serialization
        of the protobuf, with predictable ordering of map keys.

    Returns:
      A binary string representation of the message if all of the required
      fields in the message are set (i.e. the message is initialized).

    Raises:
      EncodeError: if the message isn't initialized (see :func:`IsInitialized`).
    """
    raise NotImplementedError

  def SerializePartialToString(self, **kwargs):
    """Serializes the protocol message to a binary string.

    This method is similar to SerializeToString but doesn't check if the
    message is initialized.

    Keyword Args:
      deterministic (bool): If true, requests deterministic serialization
        of the protobuf, with predictable ordering of map keys.

    Returns:
      bytes: A serialized representation of the partial message.
    """
    raise NotImplementedError

  # TODO: Decide whether we like these better
  # than auto-generated has_foo() and clear_foo() methods
  # on the instances themselves. This way is less consistent
  # with C++, but it makes reflection-type access easier and
  # reduces the number of magically autogenerated things.
  #
  # TODO: Be sure to document (and test) exactly
  # which field names are accepted here. Are we case-sensitive?
  # What do we do with fields that share names with Python keywords
  # like 'lambda' and 'yield'?
  #
  # nnorwitz says:
  # """
  # Typically (in python), an underscore is appended to names that are
  # keywords. So they would become lambda_ or yield_.
  # """
  def ListFields(self):
    """Returns a list of (FieldDescriptor, value) tuples for present fields.

    A message field is non-empty if HasField() would return true. A singular
    primitive field is non-empty if HasField() would return true in proto2 or it
    is non zero in proto3. A repeated field is non-empty if it contains at least
    one element. The fields are ordered by field number.

    Returns:
      list[tuple(FieldDescriptor, value)]: field descriptors and values
      for all fields in the message which are not empty. The values vary by
      field type.
    """
    raise NotImplementedError

  def HasField(self, field_name):
    """Checks if a certain field is set for the message.

    For a oneof group, checks if any field inside is set. Note that if the
    field_name is not defined in the message descriptor, :exc:`ValueError` will
    be raised.

    Args:
      field_name (str): The name of the field to check for presence.

    Returns:
      bool: Whether a value has been set for the named field.

    Raises:
      ValueError: if the `field_name` is not a member of this message.
    """
    raise NotImplementedError

  def ClearField(self, field_name):
    """Clears the contents of a given field.

    Inside a oneof group, clears the field set. If the name neither refers to a
    defined field or oneof group, :exc:`ValueError` is raised.

    Args:
      field_name (str): The name of the field to check for presence.

    Raises:
      ValueError: if the `field_name` is not a member of this message.
    """
    raise NotImplementedError

  def WhichOneof(self, oneof_group):
    """Returns the name of the field that is set inside a oneof group.

    If no field is set, returns None.

    Args:
      oneof_group (str): the name of the oneof group to check.

    Returns:
      str or None: The name of the group that is set, or None.

    Raises:
      ValueError: no group with the given name exists
    """
    raise NotImplementedError

  def HasExtension(self, field_descriptor):
    """Checks if a certain extension is present for this message.

    Extensions are retrieved using the :attr:`Extensions` mapping (if present).

    Args:
      field_descriptor: The field descriptor for the extension to check.

    Returns:
      bool: Whether the extension is present for this message.

    Raises:
      KeyError: if the extension is repeated. Similar to repeated fields,
        there is no separate notion of presence: a "not present" repeated
        extension is an empty list.
    """
    raise NotImplementedError

  def ClearExtension(self, field_descriptor):
    """Clears the contents of a given extension.

    Args:
      field_descriptor: The field descriptor for the extension to clear.
    """
    raise NotImplementedError

  def UnknownFields(self):
    """Returns the UnknownFieldSet.

    Returns:
      UnknownFieldSet: The unknown fields stored in this message.
    """
    raise NotImplementedError

  def DiscardUnknownFields(self):
    """Clears all fields in the :class:`UnknownFieldSet`.

    This operation is recursive for nested message.
    """
    raise NotImplementedError

  def ByteSize(self):
    """Returns the serialized size of this message.

    Recursively calls ByteSize() on all contained messages.

    Returns:
      int: The number of bytes required to serialize this message.
    """
    raise NotImplementedError

  @classmethod
  def FromString(cls, s):
    """Parses `s` and returns a new message instance of this type."""
    raise NotImplementedError

  # TODO: Remove it in OSS
  @staticmethod
  def RegisterExtension(field_descriptor):
    """Registers an extension FieldDescriptor with this message class."""
    raise NotImplementedError

  def _SetListener(self, message_listener):
    """Internal method used by the protocol message implementation.
    Clients should not call this directly.

    Sets a listener that this message will call on certain state transitions.

    The purpose of this method is to register back-edges from children to
    parents at runtime, for the purpose of setting "has" bits and
    byte-size-dirty bits in the parent and ancestor objects whenever a child or
    descendant object is modified.

    If the client wants to disconnect this Message from the object tree, she
    explicitly sets callback to None.

    If message_listener is None, unregisters any existing listener. Otherwise,
    message_listener must implement the MessageListener interface in
    internal/message_listener.py, and we discard any listener registered
    via a previous _SetListener() call.
    """
    raise NotImplementedError

  def __getstate__(self):
    """Support the pickle protocol."""
    # Pickle the wire-format bytes rather than internal state; the partial
    # variant is used so an uninitialized message can still be pickled.
    return dict(serialized=self.SerializePartialToString())

  def __setstate__(self, state):
    """Support the pickle protocol."""
    self.__init__()
    serialized = state['serialized']
    # On Python 3, using encoding='latin1' is required for unpickling
    # protos pickled by Python 2.
    if not isinstance(serialized, bytes):
      serialized = serialized.encode('latin1')
    self.ParseFromString(serialized)

  def __reduce__(self):
    message_descriptor = self.DESCRIPTOR
    if message_descriptor.containing_type is None:
      # Top-level message types are importable, so the class itself can be
      # pickled directly.
      return type(self), (), self.__getstate__()
    # the message type must be nested.
    # Python does not pickle nested classes; use the symbol_database on the
    # receiving end.
    container = message_descriptor
    return (_InternalConstructMessage, (container.full_name,),
            self.__getstate__())
|
| 393 |
+
|
| 394 |
+
|
| 395 |
+
def _InternalConstructMessage(full_name):
  """Constructs a nested message."""
  # Imported lazily to avoid a circular import at module load time.
  from google.protobuf import symbol_database  # pylint:disable=g-import-not-at-top

  message_class = symbol_database.Default().GetSymbol(full_name)
  return message_class()
|
lib/python3.10/site-packages/google/protobuf/message_factory.py
ADDED
|
@@ -0,0 +1,233 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
"""Provides a factory class for generating dynamic messages.
|
| 9 |
+
|
| 10 |
+
The easiest way to use this class is if you have access to the FileDescriptor
|
| 11 |
+
protos containing the messages you want to create you can just do the following:
|
| 12 |
+
|
| 13 |
+
message_classes = message_factory.GetMessages(iterable_of_file_descriptors)
|
| 14 |
+
my_proto_instance = message_classes['some.proto.package.MessageName']()
|
| 15 |
+
"""
|
| 16 |
+
|
| 17 |
+
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
| 18 |
+
|
| 19 |
+
import warnings
|
| 20 |
+
|
| 21 |
+
from google.protobuf.internal import api_implementation
|
| 22 |
+
from google.protobuf import descriptor_pool
|
| 23 |
+
from google.protobuf import message
|
| 24 |
+
|
| 25 |
+
if api_implementation.Type() == 'python':
|
| 26 |
+
from google.protobuf.internal import python_message as message_impl
|
| 27 |
+
else:
|
| 28 |
+
from google.protobuf.pyext import cpp_message as message_impl # pylint: disable=g-import-not-at-top
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
# The type of all Message classes.
|
| 32 |
+
_GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def GetMessageClass(descriptor):
  """Obtains a proto2 message class based on the passed in descriptor.

  Passing a descriptor with a fully qualified name matching a previous
  invocation will cause the same class to be returned.

  Args:
    descriptor: The descriptor to build from.

  Returns:
    A class describing the passed in descriptor.
  """
  # Descriptors remember the class built for them; reuse it when available.
  existing = getattr(descriptor, '_concrete_class', None)
  return existing if existing else _InternalCreateMessageClass(descriptor)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def GetMessageClassesForFiles(files, pool):
  """Gets all the messages from specified files.

  This will find and resolve dependencies, failing if the descriptor
  pool cannot satisfy them.

  Args:
    files: The file names to extract messages from.
    pool: The descriptor pool to find the files including the dependent
      files.

  Returns:
    A dictionary mapping proto names to the message classes.
  """
  classes_by_name = {}
  for file_name in files:
    file_desc = pool.FindFileByName(file_name)
    for msg_desc in file_desc.message_types_by_name.values():
      classes_by_name[msg_desc.full_name] = GetMessageClass(msg_desc)

    # While the extension FieldDescriptors are created by the descriptor pool,
    # the python classes created in the factory need them to be registered
    # explicitly, which is done below.
    #
    # The call to RegisterExtension will specifically check if the
    # extension was already registered on the object and either
    # ignore the registration if the original was the same, or raise
    # an error if they were different.
    for extension in file_desc.extensions_by_name.values():
      GetMessageClass(extension.containing_type)
      if api_implementation.Type() != 'python':
        # TODO: Remove this check here. Duplicate extension
        # register check should be in descriptor_pool.
        registered = pool.FindExtensionByNumber(
            extension.containing_type, extension.number)
        if extension is not registered:
          raise ValueError('Double registration of Extensions')
      # Recursively load protos for extension field, in order to be able to
      # fully represent the extension. This matches the behavior for regular
      # fields too.
      if extension.message_type:
        GetMessageClass(extension.message_type)
  return classes_by_name
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def _InternalCreateMessageClass(descriptor):
  """Builds a proto2 message class based on the passed in descriptor.

  Args:
    descriptor: The descriptor to build from.

  Returns:
    A class describing the passed in descriptor.
  """
  class_attributes = {
      'DESCRIPTOR': descriptor,
      # If module not set, it wrongly points to message_factory module.
      '__module__': None,
  }
  result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE(
      descriptor.name, (message.Message,), class_attributes)
  # Build classes for message-typed fields up front so the new class can
  # fully represent its sub-messages.
  for field in descriptor.fields:
    if field.message_type:
      GetMessageClass(field.message_type)
  for extension in result_class.DESCRIPTOR.extensions:
    GetMessageClass(extension.containing_type)
    if api_implementation.Type() != 'python':
      # TODO: Remove this check here. Duplicate extension
      # register check should be in descriptor_pool.
      pool = extension.containing_type.file.pool
      registered = pool.FindExtensionByNumber(
          extension.containing_type, extension.number)
      if extension is not registered:
        raise ValueError('Double registration of Extensions')
    if extension.message_type:
      GetMessageClass(extension.message_type)
  return result_class
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
# Deprecated. Please use GetMessageClass() or GetMessageClassesForFiles()
|
| 136 |
+
# method above instead.
|
| 137 |
+
class MessageFactory(object):
  """Factory for creating Proto2 messages from descriptors in a pool."""

  def __init__(self, pool=None):
    """Initializes a new factory."""
    # Fall back to a private pool when the caller does not supply one.
    self.pool = pool or descriptor_pool.DescriptorPool()

  def GetPrototype(self, descriptor):
    """Obtains a proto2 message class based on the passed in descriptor.

    Passing a descriptor with a fully qualified name matching a previous
    invocation will cause the same class to be returned.

    Args:
      descriptor: The descriptor to build from.

    Returns:
      A class describing the passed in descriptor.
    """
    deprecation_message = (
        'MessageFactory class is deprecated. Please use '
        'GetMessageClass() instead of MessageFactory.GetPrototype. '
        'MessageFactory class will be removed after 2024.')
    warnings.warn(deprecation_message, stacklevel=2)
    return GetMessageClass(descriptor)

  def CreatePrototype(self, descriptor):
    """Builds a proto2 message class based on the passed in descriptor.

    Don't call this function directly, it always creates a new class. Call
    GetMessageClass() instead.

    Args:
      descriptor: The descriptor to build from.

    Returns:
      A class describing the passed in descriptor.
    """
    deprecation_message = (
        'Directly call CreatePrototype is wrong. Please use '
        'GetMessageClass() method instead. Directly use '
        'CreatePrototype will raise error after July 2023.')
    warnings.warn(deprecation_message, stacklevel=2)
    return _InternalCreateMessageClass(descriptor)

  def GetMessages(self, files):
    """Gets all the messages from a specified file.

    This will find and resolve dependencies, failing if the descriptor
    pool cannot satisfy them.

    Args:
      files: The file names to extract messages from.

    Returns:
      A dictionary mapping proto names to the message classes. This will include
      any dependent messages as well as any messages defined in the same file as
      a specified message.
    """
    deprecation_message = (
        'MessageFactory class is deprecated. Please use '
        'GetMessageClassesForFiles() instead of '
        'MessageFactory.GetMessages(). MessageFactory class '
        'will be removed after 2024.')
    warnings.warn(deprecation_message, stacklevel=2)
    return GetMessageClassesForFiles(files, self.pool)
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
def GetMessages(file_protos, pool=None):
  """Builds a dictionary of all the messages available in a set of files.

  Args:
    file_protos: Iterable of FileDescriptorProto to build messages out of.
    pool: The descriptor pool to add the file protos.

  Returns:
    A dictionary mapping proto names to the message classes. This will include
    any dependent messages as well as any messages defined in the same file as
    a specified message.
  """
  # Materialize the iterable once: it is consumed twice below (to build
  # `file_by_name` and again for the final name list), so a one-shot
  # iterator such as a generator would otherwise be silently empty on the
  # second pass.
  file_protos = list(file_protos)
  # The cpp implementation of the protocol buffer library requires to add the
  # message in topological order of the dependency graph.
  des_pool = pool or descriptor_pool.DescriptorPool()
  file_by_name = {file_proto.name: file_proto for file_proto in file_protos}

  def _AddFile(file_proto):
    """Adds `file_proto` to the pool after all of its dependencies."""
    for dependency in file_proto.dependency:
      if dependency in file_by_name:
        # Remove from elements to be visited, in order to cut cycles.
        _AddFile(file_by_name.pop(dependency))
    des_pool.Add(file_proto)

  while file_by_name:
    _AddFile(file_by_name.popitem()[1])
  return GetMessageClassesForFiles(
      [file_proto.name for file_proto in file_protos], des_pool)
|
lib/python3.10/site-packages/google/protobuf/proto_builder.py
ADDED
|
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
"""Dynamic Protobuf class creator."""
|
| 9 |
+
|
| 10 |
+
from collections import OrderedDict
|
| 11 |
+
import hashlib
|
| 12 |
+
import os
|
| 13 |
+
|
| 14 |
+
from google.protobuf import descriptor_pb2
|
| 15 |
+
from google.protobuf import descriptor
|
| 16 |
+
from google.protobuf import descriptor_pool
|
| 17 |
+
from google.protobuf import message_factory
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def _GetMessageFromFactory(pool, full_name):
  """Get a proto class from the MessageFactory by name.

  Args:
    pool: a descriptor pool.
    full_name: str, the fully qualified name of the proto type.
  Returns:
    A class, for the type identified by full_name.
  Raises:
    KeyError, if the proto is not found in the factory's descriptor pool.
  """
  # FindMessageTypeByName raises KeyError for unknown names; let it propagate.
  return message_factory.GetMessageClass(
      pool.FindMessageTypeByName(full_name))
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def MakeSimpleProtoClass(fields, full_name=None, pool=None):
  """Create a Protobuf class whose fields are basic types.

  Note: this doesn't validate field names!

  Args:
    fields: dict of {name: field_type} mappings for each field in the proto. If
        this is an OrderedDict the order will be maintained, otherwise the
        fields will be sorted by name.
    full_name: optional str, the fully-qualified name of the proto type.
    pool: optional DescriptorPool instance.
  Returns:
    a class, the new protobuf class with a FileDescriptor.
  """
  pool_instance = pool or descriptor_pool.DescriptorPool()
  if full_name is not None:
    try:
      return _GetMessageFromFactory(pool_instance, full_name)
    except KeyError:
      # The factory's DescriptorPool doesn't know about this class yet.
      pass

  # Get a list of (name, field_type) tuples from the fields dict. If fields was
  # an OrderedDict we keep the order, but otherwise we sort the field to ensure
  # consistent ordering.
  if isinstance(fields, OrderedDict):
    field_items = list(fields.items())
  else:
    field_items = sorted(fields.items())

  # Use a consistent file name that is unlikely to conflict with any imported
  # proto files.
  fields_hash = hashlib.sha1()
  for f_name, f_type in field_items:
    fields_hash.update(f_name.encode('utf-8'))
    fields_hash.update(str(f_type).encode('utf-8'))
  proto_file_name = fields_hash.hexdigest() + '.proto'

  # If the proto is anonymous, use the same hash to name it.
  if full_name is None:
    full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' +
                 fields_hash.hexdigest())
  try:
    return _GetMessageFromFactory(pool_instance, full_name)
  except KeyError:
    # The factory's DescriptorPool doesn't know about this class yet.
    pass

  # This is the first time we see this proto: add a new descriptor to the pool.
  pool_instance.Add(
      _MakeFileDescriptorProto(proto_file_name, full_name, field_items))
  return _GetMessageFromFactory(pool_instance, full_name)
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def _MakeFileDescriptorProto(proto_file_name, full_name, field_items):
  """Populate FileDescriptorProto for MessageFactory's DescriptorPool.

  Args:
    proto_file_name: Basename to use for the synthesized .proto file.
    full_name: Fully-qualified name (package.Message) of the message type.
    field_items: Iterable of (field_name, field_type) pairs, in field order.

  Returns:
    A descriptor_pb2.FileDescriptorProto describing one message type whose
    optional fields are numbered sequentially, skipping protobuf's reserved
    field-number range.
  """
  package, message_name = full_name.rsplit('.', 1)
  file_proto = descriptor_pb2.FileDescriptorProto()
  file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name)
  file_proto.package = package
  message_proto = file_proto.message_type.add()
  message_proto.name = message_name
  # Width of the reserved field-number range, hoisted out of the loop.
  first_reserved = descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER
  reserved_span = (
      descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER -
      first_reserved + 1)
  for number, (field_name, field_type) in enumerate(field_items, start=1):
    entry = message_proto.field.add()
    entry.name = field_name
    # Numbers that land in the reserved range are shifted to the first
    # legal number past it, preserving sequential assignment.
    if number >= first_reserved:
      number += reserved_span
    entry.number = number
    entry.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
    entry.type = field_type
  return file_proto
|
lib/python3.10/site-packages/google/protobuf/pyext/__init__.py
ADDED
|
File without changes
|
lib/python3.10/site-packages/google/protobuf/pyext/cpp_message.py
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
"""Protocol message implementation hooks for C++ implementation.
|
| 9 |
+
|
| 10 |
+
Contains helper functions used to create protocol message classes from
|
| 11 |
+
Descriptor objects at runtime backed by the protocol buffer C++ API.
|
| 12 |
+
"""
|
| 13 |
+
|
| 14 |
+
__author__ = 'tibell@google.com (Johan Tibell)'
|
| 15 |
+
|
| 16 |
+
from google.protobuf.internal import api_implementation
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
# pylint: disable=protected-access
|
| 20 |
+
_message = api_implementation._c_module
|
| 21 |
+
# TODO: Remove this import after fix api_implementation
|
| 22 |
+
if _message is None:
|
| 23 |
+
from google.protobuf.pyext import _message
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class GeneratedProtocolMessageType(_message.MessageMeta):

  """Metaclass for protocol message classes built at runtime from Descriptors.

  The protocol compiler uses this metaclass to create protocol message
  classes at runtime. Clients may also build their own classes manually,
  as in this example:

    mydescriptor = Descriptor(.....)
    factory = symbol_database.Default()
    factory.pool.AddDescriptor(mydescriptor)
    MyProtoClass = factory.GetPrototype(mydescriptor)
    myproto_instance = MyProtoClass()
    myproto.foo_field = 23
    ...

  Note that the example above does not handle nested types. To include
  those, build the class through reflection.MakeClass() instead of
  instantiating it by hand, so that the full class structure is produced.
  """

  # Keep in sync with the protocol-compiler code in
  # proto2/compiler/internal/generator.*.
  _DESCRIPTOR_KEY = 'DESCRIPTOR'
|
lib/python3.10/site-packages/google/protobuf/reflection.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
# This code is meant to work on Python 2.4 and above only.
|
| 9 |
+
|
| 10 |
+
"""Contains a metaclass and helper functions used to create
|
| 11 |
+
protocol message classes from Descriptor objects at runtime.
|
| 12 |
+
|
| 13 |
+
Recall that a metaclass is the "type" of a class.
|
| 14 |
+
(A class is to a metaclass what an instance is to a class.)
|
| 15 |
+
|
| 16 |
+
In this case, we use the GeneratedProtocolMessageType metaclass
|
| 17 |
+
to inject all the useful functionality into the classes
|
| 18 |
+
output by the protocol compiler at compile-time.
|
| 19 |
+
|
| 20 |
+
The upshot of all this is that the real implementation
|
| 21 |
+
details for ALL pure-Python protocol buffers are *here in
|
| 22 |
+
this file*.
|
| 23 |
+
"""
|
| 24 |
+
|
| 25 |
+
__author__ = 'robinson@google.com (Will Robinson)'
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
from google.protobuf import message_factory
|
| 29 |
+
from google.protobuf import symbol_database
|
| 30 |
+
|
| 31 |
+
# The type of all Message classes.
|
| 32 |
+
# Part of the public interface, but normally only used by message factories.
|
| 33 |
+
GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE
|
| 34 |
+
|
| 35 |
+
MESSAGE_CLASS_CACHE = {}
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
# Deprecated. Please NEVER use reflection.ParseMessage().
|
| 39 |
+
# Deprecated. Please NEVER use reflection.ParseMessage().
def ParseMessage(descriptor, byte_str):
  """Generate a new Message instance from this Descriptor and a byte string.

  DEPRECATED: ParseMessage is deprecated because it is using MakeClass().
  Please use MessageFactory.GetPrototype() instead.

  Args:
    descriptor: Protobuf Descriptor object
    byte_str: Serialized protocol buffer byte string

  Returns:
    Newly created protobuf Message object.
  """
  message = MakeClass(descriptor)()
  message.ParseFromString(byte_str)
  return message
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
# Deprecated. Please NEVER use reflection.MakeClass().
|
| 59 |
+
# Deprecated. Please NEVER use reflection.MakeClass().
def MakeClass(descriptor):
  """Construct a class object for a protobuf described by descriptor.

  DEPRECATED: use MessageFactory.GetPrototype() instead.

  Args:
    descriptor: A descriptor.Descriptor object describing the protobuf.
  Returns:
    The Message class object described by the descriptor.
  """
  # Building the class here directly would create duplicate message classes
  # (which interact badly with extensions) and would lack message-factory
  # bookkeeping, so delegate to message_factory instead.
  return message_factory.GetMessageClass(descriptor)
|
lib/python3.10/site-packages/google/protobuf/service.py
ADDED
|
@@ -0,0 +1,205 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
"""DEPRECATED: Declares the RPC service interfaces.
|
| 9 |
+
|
| 10 |
+
This module declares the abstract interfaces underlying proto2 RPC
|
| 11 |
+
services. These are intended to be independent of any particular RPC
|
| 12 |
+
implementation, so that proto2 services can be used on top of a variety
|
| 13 |
+
of implementations. Starting with version 2.3.0, RPC implementations should
|
| 14 |
+
not try to build on these, but should instead provide code generator plugins
|
| 15 |
+
which generate code specific to the particular RPC implementation. This way
|
| 16 |
+
the generated code can be more appropriate for the implementation in use
|
| 17 |
+
and can avoid unnecessary layers of indirection.
|
| 18 |
+
"""
|
| 19 |
+
|
| 20 |
+
__author__ = 'petar@google.com (Petar Petrov)'
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class RpcException(Exception):
  """Raised when a blocking RPC method call fails."""
  pass
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class Service(object):

  """Abstract base interface for protocol-buffer-based RPC services.

  Concrete services (server implementations as well as client stubs) are
  subclasses of this interface. Its methods allow a service's methods to be
  invoked without knowing the concrete service type at compile time,
  analogous to the Message interface.
  """

  # NOTE: declared without `self` in the original interface; generated
  # implementations replace this with a class-level (static) function.
  def GetDescriptor():
    """Retrieves this service's descriptor."""
    raise NotImplementedError

  def CallMethod(self, method_descriptor, rpc_controller,
                 request, done):
    """Calls a method of the service specified by method_descriptor.

    When "done" is None the call blocks and the response message is
    returned directly; otherwise the call is asynchronous and "done" is
    invoked later with the response value. In the blocking case,
    RpcException is raised on error.

    Preconditions:

    * method_descriptor.service == GetDescriptor
    * request is of the exact same classes as returned by
      GetRequestClass(method).
    * After the call has started, the request must not be modified.
    * "rpc_controller" is of the correct type for the RPC implementation
      being used by this Service. For stubs, the "correct type" depends
      on the RpcChannel which the stub is using.

    Postconditions:

    * "done" will be called when the method is complete. This may be
      before CallMethod() returns or it may be at some point in the future.
    * If the RPC failed, the response value passed to "done" will be None.
      Further details about the failure can be found by querying the
      RpcController.
    """
    raise NotImplementedError

  def GetRequestClass(self, method_descriptor):
    """Returns the class of the request message for the specified method.

    CallMethod() requires a request of a particular Message subclass;
    this returns that required class.

    Example:
      method = service.GetDescriptor().FindMethodByName("Foo")
      request = stub.GetRequestClass(method)()
      request.ParseFromString(input)
      service.CallMethod(method, request, callback)
    """
    raise NotImplementedError

  def GetResponseClass(self, method_descriptor):
    """Returns the class of the response message for the specified method.

    Not strictly necessary, since the RpcChannel's CallMethod constructs
    the response message itself, but provided in case the caller wants to
    know the response type in advance.
    """
    raise NotImplementedError
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
class RpcController(object):

  """Mediates a single RPC method call.

  The controller's primary purpose is to expose settings specific to the
  RPC implementation and to surface RPC-level errors. The methods here are
  a "least common denominator" feature set that every implementation is
  expected to support; specific implementations may add more (e.g.
  deadline propagation).
  """

  # ---- Client-side methods ----

  def Reset(self):
    """Restores the controller to its initial state so it can be reused.

    Must not be invoked while an RPC is in progress.
    """
    raise NotImplementedError

  def Failed(self):
    """Reports whether the finished call ended in failure.

    The possible failure reasons depend on the RPC implementation.
    Failed() must not be called before a call has finished. When it
    returns true, the contents of the response message are undefined.
    """
    raise NotImplementedError

  def ErrorText(self):
    """If Failed is true, returns a human-readable description of the error."""
    raise NotImplementedError

  def StartCancel(self):
    """Initiate cancellation.

    Advises the RPC system that the caller wants the call canceled. The
    system may cancel immediately, after a delay, or not at all. If the
    call is canceled, the "done" callback still fires and the controller
    then reports the call as failed.
    """
    raise NotImplementedError

  # ---- Server-side methods ----

  def SetFailed(self, reason):
    """Sets a failure reason.

    Causes Failed() to return true on the client side, with "reason"
    incorporated into the ErrorText() message. Machine-readable failure
    information belongs in the response protocol buffer instead — do NOT
    use SetFailed() for that.
    """
    raise NotImplementedError

  def IsCanceled(self):
    """Checks if the client cancelled the RPC.

    When true, the server may as well stop working on a reply, but it
    must still invoke the final "done" callback.
    """
    raise NotImplementedError

  def NotifyOnCancel(self, callback):
    """Sets a callback to invoke on cancel.

    The callback fires exactly once: when the RPC is canceled, after
    completion if it was never canceled, or immediately if it was already
    canceled when NotifyOnCancel() is called.

    NotifyOnCancel() must be called no more than once per request.
    """
    raise NotImplementedError
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
class RpcChannel(object):

  """Abstract interface for an RPC channel.

  An RpcChannel is a communication line to a service (possibly on another
  machine) through which that service's methods can be invoked. Rather
  than using a channel directly, one normally wraps it in a stub
  {@link Service}:

    RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234")
    RpcController controller = rpcImpl.Controller()
    MyService service = MyService_Stub(channel)
    service.MyMethod(controller, request, callback)
  """

  def CallMethod(self, method_descriptor, rpc_controller,
                 request, response_class, done):
    """Calls the method identified by the descriptor.

    Invokes the given method of the remote service. The signature mirrors
    Service.CallMethod() with one relaxed requirement: the request may be
    of any class, as long as its descriptor is method.input_type.
    """
    raise NotImplementedError
|
lib/python3.10/site-packages/google/protobuf/service_reflection.py
ADDED
|
@@ -0,0 +1,272 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
"""Contains metaclasses used to create protocol service and service stub
|
| 9 |
+
classes from ServiceDescriptor objects at runtime.
|
| 10 |
+
|
| 11 |
+
The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to
|
| 12 |
+
inject all useful functionality into the classes output by the protocol
|
| 13 |
+
compiler at compile-time.
|
| 14 |
+
"""
|
| 15 |
+
|
| 16 |
+
__author__ = 'petar@google.com (Petar Petrov)'
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class GeneratedServiceType(type):

  """Metaclass for service classes created at runtime from ServiceDescriptors.

  Adds implementations of every method declared on the abstract Service
  interface, plus accessors for the service descriptor. The protocol
  compiler uses this metaclass for its generated service classes; clients
  may also build classes manually at runtime::

    mydescriptor = ServiceDescriptor(.....)
    class MyProtoService(service.Service):
      __metaclass__ = GeneratedServiceType
      DESCRIPTOR = mydescriptor
    myservice_instance = MyProtoService()
    # ...
  """

  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __init__(cls, name, bases, dictionary):
    """Creates a message service class.

    Args:
      name: Name of the class (ignored, but required by the metaclass
        protocol).
      bases: Base classes of the class being constructed.
      dictionary: The class dictionary of the class being constructed.
        dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor
        object describing this protocol service type.
    """
    # Subclasses of an already-built service carry no descriptor of their
    # own, so there is nothing to construct for them.
    if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary:
      return

    descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY]
    _ServiceBuilder(descriptor).BuildService(cls)
    cls.DESCRIPTOR = descriptor
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class GeneratedServiceStubType(GeneratedServiceType):

  """Metaclass for service stubs created at runtime from ServiceDescriptors.

  Mirrors GeneratedServiceType, but builds stub classes (type-safe
  wrappers around an RpcChannel) instead of service implementations.
  """

  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __init__(cls, name, bases, dictionary):
    """Creates a message service stub class.

    Args:
      name: Name of the class (ignored, here).
      bases: Base classes of the class being constructed.
      dictionary: The class dictionary of the class being constructed.
        dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor
        object describing this protocol service type.
    """
    super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary)
    # Subclasses of an existing stub have no descriptor entry of their own;
    # skip stub construction for them.
    if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary:
      return

    descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY]
    _ServiceStubBuilder(descriptor).BuildServiceStub(cls)
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
class _ServiceBuilder(object):

  """Builds a protocol service class from a service descriptor.

  Each builder instance constructs exactly one service class, so all
  instances of that class share the same builder.
  """

  def __init__(self, service_descriptor):
    """Initializes an instance of the service class builder.

    Args:
      service_descriptor: ServiceDescriptor to use when constructing the
        service class.
    """
    self.descriptor = service_descriptor

  def BuildService(builder, cls):
    """Constructs the service class.

    Args:
      cls: The class that will be constructed.
    """

    # These wrappers close over `builder` so that the real logic (which
    # lives on the builder, not the service class) is reachable from the
    # functions attached to the class. Argument names deliberately match
    # the abstract interface in service.py so the signatures line up.
    def _WrapCallMethod(self, method_descriptor, rpc_controller, request, done):
      return builder._CallMethod(self, method_descriptor, rpc_controller,
                                 request, done)

    def _WrapGetRequestClass(self, method_descriptor):
      return builder._GetRequestClass(method_descriptor)

    def _WrapGetResponseClass(self, method_descriptor):
      return builder._GetResponseClass(method_descriptor)

    builder.cls = cls
    cls.GetDescriptor = staticmethod(lambda: builder.descriptor)
    cls.GetDescriptor.__doc__ = 'Returns the service descriptor.'
    cls.CallMethod = _WrapCallMethod
    cls.GetRequestClass = _WrapGetRequestClass
    cls.GetResponseClass = _WrapGetResponseClass
    # Every declared service method starts out as a stub that reports
    # "not implemented"; concrete services override these.
    for method_descriptor in builder.descriptor.methods:
      setattr(cls, method_descriptor.name,
              builder._GenerateNonImplementedMethod(method_descriptor))

  def _CallMethod(self, srvc, method_descriptor,
                  rpc_controller, request, callback):
    """Calls the method described by a given method descriptor.

    Args:
      srvc: Instance of the service for which this method is called.
      method_descriptor: Descriptor that represent the method to call.
      rpc_controller: RPC controller to use for this method's execution.
      request: Request protocol message.
      callback: A callback to invoke after the method has completed.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'CallMethod() given method descriptor for wrong service type.')
    return getattr(srvc, method_descriptor.name)(
        rpc_controller, request, callback)

  def _GetRequestClass(self, method_descriptor):
    """Returns the class of the request protocol message.

    Args:
      method_descriptor: Descriptor of the method for which to return the
        request protocol message class.

    Returns:
      A class that represents the input protocol message of the specified
      method.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'GetRequestClass() given method descriptor for wrong service type.')
    return method_descriptor.input_type._concrete_class

  def _GetResponseClass(self, method_descriptor):
    """Returns the class of the response protocol message.

    Args:
      method_descriptor: Descriptor of the method for which to return the
        response protocol message class.

    Returns:
      A class that represents the output protocol message of the specified
      method.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'GetResponseClass() given method descriptor for wrong service type.')
    return method_descriptor.output_type._concrete_class

  def _GenerateNonImplementedMethod(self, method):
    """Generates and returns a placeholder for one service method.

    Args:
      method: Descriptor of the service method for which a method is to be
        generated.

    Returns:
      A method that can be added to the service class.
    """
    def _NotImplemented(inst, rpc_controller, request, callback):
      return self._NonImplementedMethod(method.name, rpc_controller, callback)
    return _NotImplemented

  def _NonImplementedMethod(self, method_name, rpc_controller, callback):
    """The body of all methods in the generated service class.

    Args:
      method_name: Name of the method being executed.
      rpc_controller: RPC controller used to execute this method.
      callback: A callback which will be invoked when the method finishes.
    """
    rpc_controller.SetFailed('Method %s not implemented.' % method_name)
    callback(None)
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
class _ServiceStubBuilder(object):

  """Constructs a protocol service stub class from a service descriptor.

  A stub is a type-safe wrapper around an RpcChannel that emulates a local
  implementation of the service. One builder instance constructs exactly
  one stub class, so all instances of that class share the same builder.
  """

  def __init__(self, service_descriptor):
    """Initializes an instance of the service stub class builder.

    Args:
      service_descriptor: ServiceDescriptor to use when constructing the
        stub class.
    """
    self.descriptor = service_descriptor

  def BuildServiceStub(self, cls):
    """Constructs the stub class.

    Args:
      cls: The class that will be constructed.
    """

    def _ServiceStubInit(stub, rpc_channel):
      # A stub carries no state beyond the channel it forwards calls to.
      stub.rpc_channel = rpc_channel

    self.cls = cls
    cls.__init__ = _ServiceStubInit
    for method_descriptor in self.descriptor.methods:
      setattr(cls, method_descriptor.name,
              self._GenerateStubMethod(method_descriptor))

  def _GenerateStubMethod(self, method):
    # Bind the method descriptor into a forwarding function; `callback`
    # defaults to None so blocking calls can omit it.
    def _StubCall(inst, rpc_controller, request, callback=None):
      return self._StubMethod(inst, method, rpc_controller, request, callback)
    return _StubCall

  def _StubMethod(self, stub, method_descriptor,
                  rpc_controller, request, callback):
    """The body of all service methods in the generated stub class.

    Args:
      stub: Stub instance.
      method_descriptor: Descriptor of the invoked method.
      rpc_controller: Rpc controller to execute the method.
      request: Request protocol message.
      callback: A callback to execute when the method finishes.
    Returns:
      Response message (in case of blocking call).
    """
    response_class = method_descriptor.output_type._concrete_class
    return stub.rpc_channel.CallMethod(
        method_descriptor, rpc_controller, request, response_class, callback)
|
lib/python3.10/site-packages/google/protobuf/source_context_pb2.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
| 3 |
+
# source: google/protobuf/source_context.proto
|
| 4 |
+
# Protobuf Python Version: 4.25.6
|
| 5 |
+
"""Generated protocol buffer code."""
|
| 6 |
+
from google.protobuf import descriptor as _descriptor
|
| 7 |
+
from google.protobuf import descriptor_pool as _descriptor_pool
|
| 8 |
+
from google.protobuf import symbol_database as _symbol_database
|
| 9 |
+
from google.protobuf.internal import builder as _builder
|
| 10 |
+
# @@protoc_insertion_point(imports)
|
| 11 |
+
|
| 12 |
+
_sym_db = _symbol_database.Default()
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\",\n\rSourceContext\x12\x1b\n\tfile_name\x18\x01 \x01(\tR\x08\x66ileNameB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
|
| 18 |
+
|
| 19 |
+
_globals = globals()
|
| 20 |
+
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
|
| 21 |
+
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', _globals)
|
| 22 |
+
if _descriptor._USE_C_DESCRIPTORS == False:
|
| 23 |
+
_globals['DESCRIPTOR']._options = None
|
| 24 |
+
_globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
|
| 25 |
+
_globals['_SOURCECONTEXT']._serialized_start=57
|
| 26 |
+
_globals['_SOURCECONTEXT']._serialized_end=101
|
| 27 |
+
# @@protoc_insertion_point(module_scope)
|
lib/python3.10/site-packages/google/protobuf/struct_pb2.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
| 3 |
+
# source: google/protobuf/struct.proto
|
| 4 |
+
# Protobuf Python Version: 4.25.6
|
| 5 |
+
"""Generated protocol buffer code."""
|
| 6 |
+
from google.protobuf import descriptor as _descriptor
|
| 7 |
+
from google.protobuf import descriptor_pool as _descriptor_pool
|
| 8 |
+
from google.protobuf import symbol_database as _symbol_database
|
| 9 |
+
from google.protobuf.internal import builder as _builder
|
| 10 |
+
# @@protoc_insertion_point(imports)
|
| 11 |
+
|
| 12 |
+
_sym_db = _symbol_database.Default()
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x98\x01\n\x06Struct\x12;\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntryR\x06\x66ields\x1aQ\n\x0b\x46ieldsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x05value:\x02\x38\x01\"\xb2\x02\n\x05Value\x12;\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00R\tnullValue\x12#\n\x0cnumber_value\x18\x02 \x01(\x01H\x00R\x0bnumberValue\x12#\n\x0cstring_value\x18\x03 \x01(\tH\x00R\x0bstringValue\x12\x1f\n\nbool_value\x18\x04 \x01(\x08H\x00R\tboolValue\x12<\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00R\x0bstructValue\x12;\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00R\tlistValueB\x06\n\x04kind\";\n\tListValue\x12.\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.ValueR\x06values*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
|
| 18 |
+
|
| 19 |
+
_globals = globals()
|
| 20 |
+
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
|
| 21 |
+
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', _globals)
|
| 22 |
+
if _descriptor._USE_C_DESCRIPTORS == False:
|
| 23 |
+
_globals['DESCRIPTOR']._options = None
|
| 24 |
+
_globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
|
| 25 |
+
_globals['_STRUCT_FIELDSENTRY']._options = None
|
| 26 |
+
_globals['_STRUCT_FIELDSENTRY']._serialized_options = b'8\001'
|
| 27 |
+
_globals['_NULLVALUE']._serialized_start=574
|
| 28 |
+
_globals['_NULLVALUE']._serialized_end=601
|
| 29 |
+
_globals['_STRUCT']._serialized_start=50
|
| 30 |
+
_globals['_STRUCT']._serialized_end=202
|
| 31 |
+
_globals['_STRUCT_FIELDSENTRY']._serialized_start=121
|
| 32 |
+
_globals['_STRUCT_FIELDSENTRY']._serialized_end=202
|
| 33 |
+
_globals['_VALUE']._serialized_start=205
|
| 34 |
+
_globals['_VALUE']._serialized_end=511
|
| 35 |
+
_globals['_LISTVALUE']._serialized_start=513
|
| 36 |
+
_globals['_LISTVALUE']._serialized_end=572
|
| 37 |
+
# @@protoc_insertion_point(module_scope)
|
lib/python3.10/site-packages/google/protobuf/symbol_database.py
ADDED
|
@@ -0,0 +1,197 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
"""A database of Python protocol buffer generated symbols.
|
| 9 |
+
|
| 10 |
+
SymbolDatabase is the MessageFactory for messages generated at compile time,
|
| 11 |
+
and makes it easy to create new instances of a registered type, given only the
|
| 12 |
+
type's protocol buffer symbol name.
|
| 13 |
+
|
| 14 |
+
Example usage::
|
| 15 |
+
|
| 16 |
+
db = symbol_database.SymbolDatabase()
|
| 17 |
+
|
| 18 |
+
# Register symbols of interest, from one or multiple files.
|
| 19 |
+
db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
|
| 20 |
+
db.RegisterMessage(my_proto_pb2.MyMessage)
|
| 21 |
+
db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR)
|
| 22 |
+
|
| 23 |
+
# The database can be used as a MessageFactory, to generate types based on
|
| 24 |
+
# their name:
|
| 25 |
+
types = db.GetMessages(['my_proto.proto'])
|
| 26 |
+
my_message_instance = types['MyMessage']()
|
| 27 |
+
|
| 28 |
+
# The database's underlying descriptor pool can be queried, so it's not
|
| 29 |
+
# necessary to know a type's filename to be able to generate it:
|
| 30 |
+
filename = db.pool.FindFileContainingSymbol('MyMessage')
|
| 31 |
+
my_message_instance = db.GetMessages([filename])['MyMessage']()
|
| 32 |
+
|
| 33 |
+
# This functionality is also provided directly via a convenience method:
|
| 34 |
+
my_message_instance = db.GetSymbol('MyMessage')()
|
| 35 |
+
"""
|
| 36 |
+
|
| 37 |
+
import warnings
|
| 38 |
+
|
| 39 |
+
from google.protobuf.internal import api_implementation
|
| 40 |
+
from google.protobuf import descriptor_pool
|
| 41 |
+
from google.protobuf import message_factory
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class SymbolDatabase():
|
| 45 |
+
"""A database of Python generated symbols."""
|
| 46 |
+
|
| 47 |
+
# local cache of registered classes.
|
| 48 |
+
_classes = {}
|
| 49 |
+
|
| 50 |
+
def __init__(self, pool=None):
|
| 51 |
+
"""Initializes a new SymbolDatabase."""
|
| 52 |
+
self.pool = pool or descriptor_pool.DescriptorPool()
|
| 53 |
+
|
| 54 |
+
def GetPrototype(self, descriptor):
|
| 55 |
+
warnings.warn('SymbolDatabase.GetPrototype() is deprecated. Please '
|
| 56 |
+
'use message_factory.GetMessageClass() instead. '
|
| 57 |
+
'SymbolDatabase.GetPrototype() will be removed soon.')
|
| 58 |
+
return message_factory.GetMessageClass(descriptor)
|
| 59 |
+
|
| 60 |
+
def CreatePrototype(self, descriptor):
|
| 61 |
+
warnings.warn('Directly call CreatePrototype() is wrong. Please use '
|
| 62 |
+
'message_factory.GetMessageClass() instead. '
|
| 63 |
+
'SymbolDatabase.CreatePrototype() will be removed soon.')
|
| 64 |
+
return message_factory._InternalCreateMessageClass(descriptor)
|
| 65 |
+
|
| 66 |
+
def GetMessages(self, files):
|
| 67 |
+
warnings.warn('SymbolDatabase.GetMessages() is deprecated. Please use '
|
| 68 |
+
'message_factory.GetMessageClassedForFiles() instead. '
|
| 69 |
+
'SymbolDatabase.GetMessages() will be removed soon.')
|
| 70 |
+
return message_factory.GetMessageClassedForFiles(files, self.pool)
|
| 71 |
+
|
| 72 |
+
def RegisterMessage(self, message):
|
| 73 |
+
"""Registers the given message type in the local database.
|
| 74 |
+
|
| 75 |
+
Calls to GetSymbol() and GetMessages() will return messages registered here.
|
| 76 |
+
|
| 77 |
+
Args:
|
| 78 |
+
message: A :class:`google.protobuf.message.Message` subclass (or
|
| 79 |
+
instance); its descriptor will be registered.
|
| 80 |
+
|
| 81 |
+
Returns:
|
| 82 |
+
The provided message.
|
| 83 |
+
"""
|
| 84 |
+
|
| 85 |
+
desc = message.DESCRIPTOR
|
| 86 |
+
self._classes[desc] = message
|
| 87 |
+
self.RegisterMessageDescriptor(desc)
|
| 88 |
+
return message
|
| 89 |
+
|
| 90 |
+
def RegisterMessageDescriptor(self, message_descriptor):
|
| 91 |
+
"""Registers the given message descriptor in the local database.
|
| 92 |
+
|
| 93 |
+
Args:
|
| 94 |
+
message_descriptor (Descriptor): the message descriptor to add.
|
| 95 |
+
"""
|
| 96 |
+
if api_implementation.Type() == 'python':
|
| 97 |
+
# pylint: disable=protected-access
|
| 98 |
+
self.pool._AddDescriptor(message_descriptor)
|
| 99 |
+
|
| 100 |
+
def RegisterEnumDescriptor(self, enum_descriptor):
|
| 101 |
+
"""Registers the given enum descriptor in the local database.
|
| 102 |
+
|
| 103 |
+
Args:
|
| 104 |
+
enum_descriptor (EnumDescriptor): The enum descriptor to register.
|
| 105 |
+
|
| 106 |
+
Returns:
|
| 107 |
+
EnumDescriptor: The provided descriptor.
|
| 108 |
+
"""
|
| 109 |
+
if api_implementation.Type() == 'python':
|
| 110 |
+
# pylint: disable=protected-access
|
| 111 |
+
self.pool._AddEnumDescriptor(enum_descriptor)
|
| 112 |
+
return enum_descriptor
|
| 113 |
+
|
| 114 |
+
def RegisterServiceDescriptor(self, service_descriptor):
|
| 115 |
+
"""Registers the given service descriptor in the local database.
|
| 116 |
+
|
| 117 |
+
Args:
|
| 118 |
+
service_descriptor (ServiceDescriptor): the service descriptor to
|
| 119 |
+
register.
|
| 120 |
+
"""
|
| 121 |
+
if api_implementation.Type() == 'python':
|
| 122 |
+
# pylint: disable=protected-access
|
| 123 |
+
self.pool._AddServiceDescriptor(service_descriptor)
|
| 124 |
+
|
| 125 |
+
def RegisterFileDescriptor(self, file_descriptor):
|
| 126 |
+
"""Registers the given file descriptor in the local database.
|
| 127 |
+
|
| 128 |
+
Args:
|
| 129 |
+
file_descriptor (FileDescriptor): The file descriptor to register.
|
| 130 |
+
"""
|
| 131 |
+
if api_implementation.Type() == 'python':
|
| 132 |
+
# pylint: disable=protected-access
|
| 133 |
+
self.pool._InternalAddFileDescriptor(file_descriptor)
|
| 134 |
+
|
| 135 |
+
def GetSymbol(self, symbol):
|
| 136 |
+
"""Tries to find a symbol in the local database.
|
| 137 |
+
|
| 138 |
+
Currently, this method only returns message.Message instances, however, if
|
| 139 |
+
may be extended in future to support other symbol types.
|
| 140 |
+
|
| 141 |
+
Args:
|
| 142 |
+
symbol (str): a protocol buffer symbol.
|
| 143 |
+
|
| 144 |
+
Returns:
|
| 145 |
+
A Python class corresponding to the symbol.
|
| 146 |
+
|
| 147 |
+
Raises:
|
| 148 |
+
KeyError: if the symbol could not be found.
|
| 149 |
+
"""
|
| 150 |
+
|
| 151 |
+
return self._classes[self.pool.FindMessageTypeByName(symbol)]
|
| 152 |
+
|
| 153 |
+
def GetMessages(self, files):
|
| 154 |
+
# TODO: Fix the differences with MessageFactory.
|
| 155 |
+
"""Gets all registered messages from a specified file.
|
| 156 |
+
|
| 157 |
+
Only messages already created and registered will be returned; (this is the
|
| 158 |
+
case for imported _pb2 modules)
|
| 159 |
+
But unlike MessageFactory, this version also returns already defined nested
|
| 160 |
+
messages, but does not register any message extensions.
|
| 161 |
+
|
| 162 |
+
Args:
|
| 163 |
+
files (list[str]): The file names to extract messages from.
|
| 164 |
+
|
| 165 |
+
Returns:
|
| 166 |
+
A dictionary mapping proto names to the message classes.
|
| 167 |
+
|
| 168 |
+
Raises:
|
| 169 |
+
KeyError: if a file could not be found.
|
| 170 |
+
"""
|
| 171 |
+
|
| 172 |
+
def _GetAllMessages(desc):
|
| 173 |
+
"""Walk a message Descriptor and recursively yields all message names."""
|
| 174 |
+
yield desc
|
| 175 |
+
for msg_desc in desc.nested_types:
|
| 176 |
+
for nested_desc in _GetAllMessages(msg_desc):
|
| 177 |
+
yield nested_desc
|
| 178 |
+
|
| 179 |
+
result = {}
|
| 180 |
+
for file_name in files:
|
| 181 |
+
file_desc = self.pool.FindFileByName(file_name)
|
| 182 |
+
for msg_desc in file_desc.message_types_by_name.values():
|
| 183 |
+
for desc in _GetAllMessages(msg_desc):
|
| 184 |
+
try:
|
| 185 |
+
result[desc.full_name] = self._classes[desc]
|
| 186 |
+
except KeyError:
|
| 187 |
+
# This descriptor has no registered class, skip it.
|
| 188 |
+
pass
|
| 189 |
+
return result
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default())
|
| 193 |
+
|
| 194 |
+
|
| 195 |
+
def Default():
|
| 196 |
+
"""Returns the default SymbolDatabase."""
|
| 197 |
+
return _DEFAULT
|
lib/python3.10/site-packages/google/protobuf/testdata/__init__.py
ADDED
|
File without changes
|
lib/python3.10/site-packages/google/protobuf/text_encoding.py
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Use of this source code is governed by a BSD-style
|
| 5 |
+
# license that can be found in the LICENSE file or at
|
| 6 |
+
# https://developers.google.com/open-source/licenses/bsd
|
| 7 |
+
|
| 8 |
+
"""Encoding related utilities."""
|
| 9 |
+
import re
|
| 10 |
+
|
| 11 |
+
_cescape_chr_to_symbol_map = {}
|
| 12 |
+
_cescape_chr_to_symbol_map[9] = r'\t' # optional escape
|
| 13 |
+
_cescape_chr_to_symbol_map[10] = r'\n' # optional escape
|
| 14 |
+
_cescape_chr_to_symbol_map[13] = r'\r' # optional escape
|
| 15 |
+
_cescape_chr_to_symbol_map[34] = r'\"' # necessary escape
|
| 16 |
+
_cescape_chr_to_symbol_map[39] = r"\'" # optional escape
|
| 17 |
+
_cescape_chr_to_symbol_map[92] = r'\\' # necessary escape
|
| 18 |
+
|
| 19 |
+
# Lookup table for unicode
|
| 20 |
+
_cescape_unicode_to_str = [chr(i) for i in range(0, 256)]
|
| 21 |
+
for byte, string in _cescape_chr_to_symbol_map.items():
|
| 22 |
+
_cescape_unicode_to_str[byte] = string
|
| 23 |
+
|
| 24 |
+
# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32)
|
| 25 |
+
_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] +
|
| 26 |
+
[chr(i) for i in range(32, 127)] +
|
| 27 |
+
[r'\%03o' % i for i in range(127, 256)])
|
| 28 |
+
for byte, string in _cescape_chr_to_symbol_map.items():
|
| 29 |
+
_cescape_byte_to_str[byte] = string
|
| 30 |
+
del byte, string
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def CEscape(text, as_utf8) -> str:
|
| 34 |
+
"""Escape a bytes string for use in an text protocol buffer.
|
| 35 |
+
|
| 36 |
+
Args:
|
| 37 |
+
text: A byte string to be escaped.
|
| 38 |
+
as_utf8: Specifies if result may contain non-ASCII characters.
|
| 39 |
+
In Python 3 this allows unescaped non-ASCII Unicode characters.
|
| 40 |
+
In Python 2 the return value will be valid UTF-8 rather than only ASCII.
|
| 41 |
+
Returns:
|
| 42 |
+
Escaped string (str).
|
| 43 |
+
"""
|
| 44 |
+
# Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not
|
| 45 |
+
# satisfy our needs; they encodes unprintable characters using two-digit hex
|
| 46 |
+
# escapes whereas our C++ unescaping function allows hex escapes to be any
|
| 47 |
+
# length. So, "\0011".encode('string_escape') ends up being "\\x011", which
|
| 48 |
+
# will be decoded in C++ as a single-character string with char code 0x11.
|
| 49 |
+
text_is_unicode = isinstance(text, str)
|
| 50 |
+
if as_utf8 and text_is_unicode:
|
| 51 |
+
# We're already unicode, no processing beyond control char escapes.
|
| 52 |
+
return text.translate(_cescape_chr_to_symbol_map)
|
| 53 |
+
ord_ = ord if text_is_unicode else lambda x: x # bytes iterate as ints.
|
| 54 |
+
if as_utf8:
|
| 55 |
+
return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text)
|
| 56 |
+
return ''.join(_cescape_byte_to_str[ord_(c)] for c in text)
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])')
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def CUnescape(text: str) -> bytes:
|
| 63 |
+
"""Unescape a text string with C-style escape sequences to UTF-8 bytes.
|
| 64 |
+
|
| 65 |
+
Args:
|
| 66 |
+
text: The data to parse in a str.
|
| 67 |
+
Returns:
|
| 68 |
+
A byte string.
|
| 69 |
+
"""
|
| 70 |
+
|
| 71 |
+
def ReplaceHex(m):
|
| 72 |
+
# Only replace the match if the number of leading back slashes is odd. i.e.
|
| 73 |
+
# the slash itself is not escaped.
|
| 74 |
+
if len(m.group(1)) & 1:
|
| 75 |
+
return m.group(1) + 'x0' + m.group(2)
|
| 76 |
+
return m.group(0)
|
| 77 |
+
|
| 78 |
+
# This is required because the 'string_escape' encoding doesn't
|
| 79 |
+
# allow single-digit hex escapes (like '\xf').
|
| 80 |
+
result = _CUNESCAPE_HEX.sub(ReplaceHex, text)
|
| 81 |
+
|
| 82 |
+
return (result.encode('utf-8') # Make it bytes to allow decode.
|
| 83 |
+
.decode('unicode_escape')
|
| 84 |
+
# Make it bytes again to return the proper type.
|
| 85 |
+
.encode('raw_unicode_escape'))
|