Update dashboard, memory, root +2 more (+3 ~5)

This commit is contained in:
Echo
2026-02-02 16:21:41 +00:00
parent 2e8d47353b
commit 84701a062e
2212 changed files with 2938184 additions and 37 deletions

View File

@@ -0,0 +1,57 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Auth Library for Python."""
import logging
import sys
import warnings
from google.auth import version as google_auth_version
from google.auth._default import (
default,
load_credentials_from_dict,
load_credentials_from_file,
)
__version__ = google_auth_version.__version__
__all__ = ["default", "load_credentials_from_file", "load_credentials_from_dict"]
class Python37DeprecationWarning(DeprecationWarning): # pragma: NO COVER
"""
Deprecation warning raised when Python 3.7 runtime is detected.
Python 3.7 support will be dropped after January 1, 2024.
"""
pass
# Raise warnings for deprecated versions
eol_message = (
"You are using a Python version {} past its end of life. Google will update "
"google-auth with critical bug fixes on a best-effort basis, but not "
"with any other fixes or features. Please upgrade your Python version, "
"and then update google-auth."
)
if sys.version_info.major == 3 and sys.version_info.minor == 8: # pragma: NO COVER
warnings.warn(eol_message.format("3.8"), FutureWarning)
elif sys.version_info.major == 3 and sys.version_info.minor == 9: # pragma: NO COVER
warnings.warn(eol_message.format("3.9"), FutureWarning)
# Set default logging handler to avoid "No handler found" warnings.
logging.getLogger(__name__).addHandler(logging.NullHandler())

View File

@@ -0,0 +1,272 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for Agent Identity credentials."""
import base64
import hashlib
import logging
import os
import re
import time
from urllib.parse import quote, urlparse
from google.auth import environment_vars
from google.auth import exceptions
_LOGGER = logging.getLogger(__name__)
CRYPTOGRAPHY_NOT_FOUND_ERROR = (
"The cryptography library is required for certificate-based authentication."
"Please install it with `pip install google-auth[cryptography]`."
)
# SPIFFE trust domain patterns for Agent Identities.
_AGENT_IDENTITY_SPIFFE_TRUST_DOMAIN_PATTERNS = [
r"^agents\.global\.org-\d+\.system\.id\.goog$",
r"^agents\.global\.proj-\d+\.system\.id\.goog$",
]
_WELL_KNOWN_CERT_PATH = "/var/run/secrets/workload-spiffe-credentials/certificates.pem"
# Constants for polling the certificate file.
_FAST_POLL_CYCLES = 50
_FAST_POLL_INTERVAL = 0.1 # 100ms
_SLOW_POLL_INTERVAL = 0.5 # 500ms
_TOTAL_TIMEOUT = 30 # seconds
# Calculate the number of slow poll cycles based on the total timeout.
_SLOW_POLL_CYCLES = int(
(_TOTAL_TIMEOUT - (_FAST_POLL_CYCLES * _FAST_POLL_INTERVAL)) / _SLOW_POLL_INTERVAL
)
_POLLING_INTERVALS = ([_FAST_POLL_INTERVAL] * _FAST_POLL_CYCLES) + (
[_SLOW_POLL_INTERVAL] * _SLOW_POLL_CYCLES
)
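# Illustrative arithmetic for the schedule above: 50 fast cycles at 0.1 s cover
# the first 5 seconds, leaving 25 seconds for slow polling at 0.5 s, so
# _SLOW_POLL_CYCLES == 50 and the full schedule spans
# 50 * 0.1 + 50 * 0.5 == 30 seconds == _TOTAL_TIMEOUT.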
def _is_certificate_file_ready(path):
"""Checks if a file exists and is not empty."""
return path and os.path.exists(path) and os.path.getsize(path) > 0
def get_agent_identity_certificate_path():
"""Gets the certificate path from the certificate config file.
The path to the certificate config file is read from the
GOOGLE_API_CERTIFICATE_CONFIG environment variable. This function
implements a retry mechanism to handle cases where the environment
variable is set before the files are available on the filesystem.
Returns:
str: The path to the leaf certificate file.
Raises:
google.auth.exceptions.RefreshError: If the certificate config file
or the certificate file cannot be found after retries.
"""
import json
cert_config_path = os.environ.get(environment_vars.GOOGLE_API_CERTIFICATE_CONFIG)
if not cert_config_path:
return None
has_logged_warning = False
for interval in _POLLING_INTERVALS:
try:
with open(cert_config_path, "r") as f:
cert_config = json.load(f)
cert_path = (
cert_config.get("cert_configs", {})
.get("workload", {})
.get("cert_path")
)
if _is_certificate_file_ready(cert_path):
return cert_path
except (IOError, ValueError, KeyError):
if not has_logged_warning:
_LOGGER.warning(
"Certificate config file not found at %s (from %s environment "
"variable). Retrying for up to %s seconds.",
cert_config_path,
environment_vars.GOOGLE_API_CERTIFICATE_CONFIG,
_TOTAL_TIMEOUT,
)
has_logged_warning = True
pass
# As a fallback, check the well-known certificate path.
if _is_certificate_file_ready(_WELL_KNOWN_CERT_PATH):
return _WELL_KNOWN_CERT_PATH
# A sleep is required in two cases:
# 1. The config file is not found (the except block).
# 2. The config file is found, but the certificate is not yet available.
# In both cases, we need to poll, so we sleep on every iteration
# that doesn't return a certificate.
time.sleep(interval)
raise exceptions.RefreshError(
"Certificate config or certificate file not found after multiple retries. "
f"Token binding protection is failing. You can turn off this protection by setting "
f"{environment_vars.GOOGLE_API_PREVENT_AGENT_TOKEN_SHARING_FOR_GCP_SERVICES} to false "
"to fall back to unbound tokens."
)
def get_and_parse_agent_identity_certificate():
"""Gets and parses the agent identity certificate if not opted out.
Checks if the user has opted out of certificate-bound tokens. If not,
it gets the certificate path, reads the file, and parses it.
Returns:
The parsed certificate object if found and not opted out, otherwise None.
"""
# If the user has opted out of cert bound tokens, there is no need to
# look up the certificate.
is_opted_out = (
os.environ.get(
environment_vars.GOOGLE_API_PREVENT_AGENT_TOKEN_SHARING_FOR_GCP_SERVICES,
"true",
).lower()
== "false"
)
if is_opted_out:
return None
cert_path = get_agent_identity_certificate_path()
if not cert_path:
return None
with open(cert_path, "rb") as cert_file:
cert_bytes = cert_file.read()
return parse_certificate(cert_bytes)
def parse_certificate(cert_bytes):
"""Parses a PEM-encoded certificate.
Args:
cert_bytes (bytes): The PEM-encoded certificate bytes.
Returns:
cryptography.x509.Certificate: The parsed certificate object.
"""
try:
from cryptography import x509
return x509.load_pem_x509_certificate(cert_bytes)
except ImportError as e:
raise ImportError(CRYPTOGRAPHY_NOT_FOUND_ERROR) from e
def _is_agent_identity_certificate(cert):
"""Checks if a certificate is an Agent Identity certificate.
This is determined by checking the Subject Alternative Name (SAN) for a
SPIFFE ID with a trust domain matching Agent Identity patterns.
Args:
cert (cryptography.x509.Certificate): The parsed certificate object.
Returns:
bool: True if the certificate is an Agent Identity certificate,
False otherwise.
"""
try:
from cryptography import x509
from cryptography.x509.oid import ExtensionOID
try:
ext = cert.extensions.get_extension_for_oid(
ExtensionOID.SUBJECT_ALTERNATIVE_NAME
)
except x509.ExtensionNotFound:
return False
uris = ext.value.get_values_for_type(x509.UniformResourceIdentifier)
for uri in uris:
parsed_uri = urlparse(uri)
if parsed_uri.scheme == "spiffe":
trust_domain = parsed_uri.netloc
for pattern in _AGENT_IDENTITY_SPIFFE_TRUST_DOMAIN_PATTERNS:
if re.match(pattern, trust_domain):
return True
return False
except ImportError as e:
raise ImportError(CRYPTOGRAPHY_NOT_FOUND_ERROR) from e
def calculate_certificate_fingerprint(cert):
"""Calculates the URL-encoded, unpadded, base64-encoded SHA256 hash of a
DER-encoded certificate.
Args:
cert (cryptography.x509.Certificate): The parsed certificate object.
Returns:
str: The URL-encoded, unpadded, base64-encoded SHA256 fingerprint.
"""
try:
from cryptography.hazmat.primitives import serialization
der_cert = cert.public_bytes(serialization.Encoding.DER)
fingerprint = hashlib.sha256(der_cert).digest()
# The certificate fingerprint is generated in two steps to align with GFE's
# expectations and ensure proper URL transmission:
# 1. Standard base64 encoding is applied, and padding ('=') is removed.
# 2. The resulting string is then URL-encoded to handle special characters
# ('+', '/') that would otherwise be misinterpreted in URL parameters.
base64_fingerprint = base64.b64encode(fingerprint).decode("utf-8")
unpadded_base64_fingerprint = base64_fingerprint.rstrip("=")
return quote(unpadded_base64_fingerprint)
except ImportError as e:
raise ImportError(CRYPTOGRAPHY_NOT_FOUND_ERROR) from e
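# Illustrative sketch of the two encoding steps above (the digest string is
# hypothetical, not a real certificate fingerprint): a raw base64 value such as
# "Ab+cd/Ef0=" first has its padding stripped to give "Ab+cd/Ef0", and
# urllib.parse.quote then yields "Ab%2Bcd/Ef0" -- '+' is percent-encoded while
# '/' is left untouched by quote's default safe characters.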
def should_request_bound_token(cert):
"""Determines if a bound token should be requested.
This is based on the GOOGLE_API_PREVENT_AGENT_TOKEN_SHARING_FOR_GCP_SERVICES
environment variable and whether the certificate is an agent identity cert.
Args:
cert (cryptography.x509.Certificate): The parsed certificate object.
Returns:
bool: True if a bound token should be requested, False otherwise.
"""
is_agent_cert = _is_agent_identity_certificate(cert)
is_opted_in = (
os.environ.get(
environment_vars.GOOGLE_API_PREVENT_AGENT_TOKEN_SHARING_FOR_GCP_SERVICES,
"true",
).lower()
== "true"
)
return is_agent_cert and is_opted_in
def get_cached_cert_fingerprint(cached_cert):
"""Returns the fingerprint of the cached certificate."""
if cached_cert:
cert_obj = parse_certificate(cached_cert)
cached_cert_fingerprint = calculate_certificate_fingerprint(cert_obj)
else:
raise ValueError("mTLS connection is not configured.")
return cached_cert_fingerprint
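# Illustrative usage sketch combining the helpers defined in this module. The
# flow below is an assumption about how a caller might wire them together, not
# part of the module itself:
#
#     cert = get_and_parse_agent_identity_certificate()
#     if cert is not None and should_request_bound_token(cert):
#         fingerprint = calculate_certificate_fingerprint(cert)
#         # ...attach the fingerprint to the token request as required.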

View File

@@ -0,0 +1,64 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import OrderedDict
class LRUCache(dict):
def __init__(self, maxsize):
super().__init__()
self._order = OrderedDict()
self.maxsize = maxsize
def clear(self):
super().clear()
self._order.clear()
def get(self, key, default=None):
try:
value = super().__getitem__(key)
self._update(key)
return value
except KeyError:
return default
def __getitem__(self, key):
value = super().__getitem__(key)
self._update(key)
return value
def __setitem__(self, key, value):
maxsize = self.maxsize
if maxsize <= 0:
return
if key not in self:
while len(self) >= maxsize:
self.popitem()
super().__setitem__(key, value)
self._update(key)
def __delitem__(self, key):
super().__delitem__(key)
del self._order[key]
def popitem(self):
"""Remove and return the least recently used key-value pair."""
key, _ = self._order.popitem(last=False)
return key, super().pop(key)
def _update(self, key):
try:
self._order.move_to_end(key)
except KeyError:
self._order[key] = None
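# Illustrative usage sketch of the cache above (keys and values are
# hypothetical):
#
#     cache = LRUCache(maxsize=2)
#     cache["a"] = 1
#     cache["b"] = 2
#     cache["a"]          # touching "a" makes "b" the least recently used
#     cache["c"] = 3      # evicts "b" via popitem()
#     assert "b" not in cache and cache.get("a") == 1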

View File

@@ -0,0 +1,153 @@
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for reading the Google Cloud SDK's configuration."""
import os
import subprocess
from google.auth import _helpers
from google.auth import environment_vars
from google.auth import exceptions
# The ~/.config subdirectory containing gcloud credentials.
_CONFIG_DIRECTORY = "gcloud"
# Windows systems store config at %APPDATA%\gcloud
_WINDOWS_CONFIG_ROOT_ENV_VAR = "APPDATA"
# The name of the file in the Cloud SDK config that contains default
# credentials.
_CREDENTIALS_FILENAME = "application_default_credentials.json"
# The name of the Cloud SDK shell script
_CLOUD_SDK_POSIX_COMMAND = "gcloud"
_CLOUD_SDK_WINDOWS_COMMAND = "gcloud.cmd"
# The command to get the Cloud SDK configuration
_CLOUD_SDK_CONFIG_GET_PROJECT_COMMAND = ("config", "get", "project")
# The command to get google user access token
_CLOUD_SDK_USER_ACCESS_TOKEN_COMMAND = ("auth", "print-access-token")
# Cloud SDK's application-default client ID
CLOUD_SDK_CLIENT_ID = (
"764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com"
)
def get_config_path():
"""Returns the absolute path the the Cloud SDK's configuration directory.
Returns:
str: The Cloud SDK config path.
"""
# If the path is explicitly set, return that.
try:
return os.environ[environment_vars.CLOUD_SDK_CONFIG_DIR]
except KeyError:
pass
# Non-windows systems store this at ~/.config/gcloud
if os.name != "nt":
return os.path.join(os.path.expanduser("~"), ".config", _CONFIG_DIRECTORY)
# Windows systems store config at %APPDATA%\gcloud
else:
try:
return os.path.join(
os.environ[_WINDOWS_CONFIG_ROOT_ENV_VAR], _CONFIG_DIRECTORY
)
except KeyError:
# This should never happen unless someone is really
# messing with things, but we'll cover the case anyway.
drive = os.environ.get("SystemDrive", "C:")
return os.path.join(drive, "\\", _CONFIG_DIRECTORY)
def get_application_default_credentials_path():
"""Gets the path to the application default credentials file.
The path may or may not exist.
Returns:
str: The full path to application default credentials.
"""
config_path = get_config_path()
return os.path.join(config_path, _CREDENTIALS_FILENAME)
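# For example, on a Linux system with no environment_vars.CLOUD_SDK_CONFIG_DIR
# override this resolves to ~/.config/gcloud/application_default_credentials.json,
# and on Windows to %APPDATA%\gcloud\application_default_credentials.json.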
def _run_subprocess_ignore_stderr(command):
"""Return subprocess.check_output with the given command and ignores stderr."""
with open(os.devnull, "w") as devnull:
output = subprocess.check_output(command, stderr=devnull)
return output
def get_project_id():
"""Gets the project ID from the Cloud SDK.
Returns:
Optional[str]: The project ID.
"""
if os.name == "nt":
command = _CLOUD_SDK_WINDOWS_COMMAND
else:
command = _CLOUD_SDK_POSIX_COMMAND
try:
# Ignore the stderr coming from gcloud, so it won't be mixed into the output.
# https://github.com/googleapis/google-auth-library-python/issues/673
project = _run_subprocess_ignore_stderr(
(command,) + _CLOUD_SDK_CONFIG_GET_PROJECT_COMMAND
)
# Turn bytes into a string and remove "\n"
project = _helpers.from_bytes(project).strip()
return project if project else None
except (subprocess.CalledProcessError, OSError, IOError):
return None
def get_auth_access_token(account=None):
"""Load user access token with the ``gcloud auth print-access-token`` command.
Args:
account (Optional[str]): Account to get the access token for. If not
specified, the current active account will be used.
Returns:
str: The user access token.
Raises:
google.auth.exceptions.UserAccessTokenError: if failed to get access
token from gcloud.
"""
if os.name == "nt":
command = _CLOUD_SDK_WINDOWS_COMMAND
else:
command = _CLOUD_SDK_POSIX_COMMAND
try:
if account:
command = (
(command,)
+ _CLOUD_SDK_USER_ACCESS_TOKEN_COMMAND
+ ("--account=" + account,)
)
else:
command = (command,) + _CLOUD_SDK_USER_ACCESS_TOKEN_COMMAND
access_token = subprocess.check_output(command, stderr=subprocess.STDOUT)
# remove the trailing "\n"
return access_token.decode("utf-8").strip()
except (subprocess.CalledProcessError, OSError, IOError) as caught_exc:
new_exc = exceptions.UserAccessTokenError(
"Failed to obtain access token", caught_exc
)
raise new_exc from caught_exc
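# Illustrative example of the command tuple assembled above (the account value
# is hypothetical): on a POSIX system with account="dev@example.com" the call
# runs ("gcloud", "auth", "print-access-token", "--account=dev@example.com")
# via subprocess.check_output.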

View File

@@ -0,0 +1,5 @@
"""Shared constants."""
_SERVICE_ACCOUNT_TRUST_BOUNDARY_LOOKUP_ENDPOINT = "https://iamcredentials.{universe_domain}/v1/projects/-/serviceAccounts/{service_account_email}/allowedLocations"
_WORKFORCE_POOL_TRUST_BOUNDARY_LOOKUP_ENDPOINT = "https://iamcredentials.{universe_domain}/v1/locations/global/workforcePools/{pool_id}/allowedLocations"
_WORKLOAD_IDENTITY_POOL_TRUST_BOUNDARY_LOOKUP_ENDPOINT = "https://iamcredentials.{universe_domain}/v1/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id}/allowedLocations"
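# Illustrative example (assuming the default universe domain "googleapis.com"
# and a hypothetical service account email): the service account lookup
# endpoint above formats to
# https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/sa@my-project.iam.gserviceaccount.com/allowedLocations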

View File

@@ -0,0 +1,171 @@
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interfaces for credentials."""
import abc
import inspect
from google.auth import credentials
class Credentials(credentials.Credentials, metaclass=abc.ABCMeta):
"""Async inherited credentials class from google.auth.credentials.
The added functionality is the before_request call which requires
async/await syntax.
All credentials have a :attr:`token` that is used for authentication and
may also optionally set an :attr:`expiry` to indicate when the token will
no longer be valid.
Most credentials will be :attr:`invalid` until :meth:`refresh` is called.
Credentials can do this automatically before the first HTTP request in
:meth:`before_request`.
Although the token and expiration will change as the credentials are
:meth:`refreshed <refresh>` and used, credentials should be considered
immutable. Various credentials will accept configuration such as private
keys, scopes, and other options. These options are not changeable after
construction. Some classes will provide mechanisms to copy the credentials
with modifications such as :meth:`ScopedCredentials.with_scopes`.
"""
async def before_request(self, request, method, url, headers):
"""Performs credential-specific before request logic.
Refreshes the credentials if necessary, then calls :meth:`apply` to
apply the token to the authentication header.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
method (str): The request's HTTP method or the RPC method being
invoked.
url (str): The request's URI or the RPC service's URI.
headers (Mapping): The request's headers.
"""
# pylint: disable=unused-argument
# (Subclasses may use these arguments to ascertain information about
# the http request.)
if not self.valid:
if inspect.iscoroutinefunction(self.refresh):
await self.refresh(request)
else:
self.refresh(request)
self.apply(headers)
class CredentialsWithQuotaProject(credentials.CredentialsWithQuotaProject):
"""Abstract base for credentials supporting ``with_quota_project`` factory"""
class AnonymousCredentials(credentials.AnonymousCredentials, Credentials):
"""Credentials that do not provide any authentication information.
These are useful in the case of services that support anonymous access or
    local service emulators that do not use credentials. This class inherits
    from the synchronous anonymous credentials class and exists so that
    anonymous credentials can also be used through the async credentials
    interface.
"""
class ReadOnlyScoped(credentials.ReadOnlyScoped, metaclass=abc.ABCMeta):
"""Interface for credentials whose scopes can be queried.
OAuth 2.0-based credentials allow limiting access using scopes as described
in `RFC6749 Section 3.3`_.
    If a credential class implements this interface, then the credentials
    use scopes in their implementation.
Some credentials require scopes in order to obtain a token. You can check
if scoping is necessary with :attr:`requires_scopes`::
if credentials.requires_scopes:
# Scoping is required.
credentials = _credentials_async.with_scopes(scopes=['one', 'two'])
Credentials that require scopes must either be constructed with scopes::
credentials = SomeScopedCredentials(scopes=['one', 'two'])
Or must copy an existing instance using :meth:`with_scopes`::
scoped_credentials = _credentials_async.with_scopes(scopes=['one', 'two'])
    Some credentials have scopes but do not allow or require scopes to be set;
    these credentials can be used as-is.
.. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
"""
class Scoped(credentials.Scoped):
"""Interface for credentials whose scopes can be replaced while copying.
OAuth 2.0-based credentials allow limiting access using scopes as described
in `RFC6749 Section 3.3`_.
    If a credential class implements this interface, then the credentials
    use scopes in their implementation.
Some credentials require scopes in order to obtain a token. You can check
if scoping is necessary with :attr:`requires_scopes`::
if credentials.requires_scopes:
# Scoping is required.
credentials = _credentials_async.create_scoped(['one', 'two'])
Credentials that require scopes must either be constructed with scopes::
credentials = SomeScopedCredentials(scopes=['one', 'two'])
Or must copy an existing instance using :meth:`with_scopes`::
scoped_credentials = credentials.with_scopes(scopes=['one', 'two'])
    Some credentials have scopes but do not allow or require scopes to be set;
    these credentials can be used as-is.
.. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
"""
def with_scopes_if_required(credentials, scopes):
"""Creates a copy of the credentials with scopes if scoping is required.
This helper function is useful when you do not know (or care to know) the
specific type of credentials you are using (such as when you use
:func:`google.auth.default`). This function will call
:meth:`Scoped.with_scopes` if the credentials are scoped credentials and if
the credentials require scoping. Otherwise, it will return the credentials
as-is.
Args:
credentials (google.auth.credentials.Credentials): The credentials to
scope if necessary.
scopes (Sequence[str]): The list of scopes to use.
Returns:
google.auth._credentials_async.Credentials: Either a new set of scoped
credentials, or the passed in credentials instance if no scoping
was required.
"""
if isinstance(credentials, Scoped) and credentials.requires_scopes:
return credentials.with_scopes(scopes)
else:
return credentials
class Signing(credentials.Signing, metaclass=abc.ABCMeta):
"""Interface for credentials that can cryptographically sign messages."""

View File

@@ -0,0 +1,75 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interface for base credentials."""
import abc
from google.auth import _helpers
class _BaseCredentials(metaclass=abc.ABCMeta):
"""Base class for all credentials.
All credentials have a :attr:`token` that is used for authentication and
may also optionally set an :attr:`expiry` to indicate when the token will
no longer be valid.
Most credentials will be :attr:`invalid` until :meth:`refresh` is called.
Credentials can do this automatically before the first HTTP request in
:meth:`before_request`.
Although the token and expiration will change as the credentials are
:meth:`refreshed <refresh>` and used, credentials should be considered
immutable. Various credentials will accept configuration such as private
keys, scopes, and other options. These options are not changeable after
construction. Some classes will provide mechanisms to copy the credentials
with modifications such as :meth:`ScopedCredentials.with_scopes`.
Attributes:
token (Optional[str]): The bearer token that can be used in HTTP headers to make
authenticated requests.
"""
def __init__(self):
self.token = None
@abc.abstractmethod
def refresh(self, request):
"""Refreshes the access token.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
Raises:
google.auth.exceptions.RefreshError: If the credentials could
not be refreshed.
"""
# pylint: disable=missing-raises-doc
# (pylint doesn't recognize that this is abstract)
raise NotImplementedError("Refresh must be implemented")
def _apply(self, headers, token=None):
"""Apply the token to the authentication header.
Args:
headers (Mapping): The HTTP request headers.
token (Optional[str]): If specified, overrides the current access
token.
"""
headers["authorization"] = "Bearer {}".format(
_helpers.from_bytes(token or self.token)
)
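# For example, calling self._apply(headers) with a current token of
# "ya29.TOKEN" (hypothetical) on headers == {"content-type": "application/json"}
# leaves the mapping as
# {"content-type": "application/json", "authorization": "Bearer ya29.TOKEN"}.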

View File

@@ -0,0 +1,752 @@
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Application default credentials.
Implements application default credentials and project ID detection.
"""
from __future__ import annotations
import io
import json
import logging
import os
from typing import Optional, Sequence, TYPE_CHECKING
import warnings
from google.auth import environment_vars
from google.auth import exceptions
import google.auth.transport._http_client
if TYPE_CHECKING: # pragma: NO COVER
from google.auth.credentials import Credentials # noqa: F401
from google.auth.transport import Request # noqa: F401
_LOGGER = logging.getLogger(__name__)
# Valid types accepted for file-based credentials.
_AUTHORIZED_USER_TYPE = "authorized_user"
_SERVICE_ACCOUNT_TYPE = "service_account"
_EXTERNAL_ACCOUNT_TYPE = "external_account"
_EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = "external_account_authorized_user"
_IMPERSONATED_SERVICE_ACCOUNT_TYPE = "impersonated_service_account"
_GDCH_SERVICE_ACCOUNT_TYPE = "gdch_service_account"
_VALID_TYPES = (
_AUTHORIZED_USER_TYPE,
_SERVICE_ACCOUNT_TYPE,
_EXTERNAL_ACCOUNT_TYPE,
_EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE,
_IMPERSONATED_SERVICE_ACCOUNT_TYPE,
_GDCH_SERVICE_ACCOUNT_TYPE,
)
# Help message when no credentials can be found.
_CLOUD_SDK_MISSING_CREDENTIALS = """\
Your default credentials were not found. To set up Application Default Credentials, \
see https://cloud.google.com/docs/authentication/external/set-up-adc for more information.\
"""
# Warning when using Cloud SDK user credentials
_CLOUD_SDK_CREDENTIALS_WARNING = """\
Your application has authenticated using end user credentials from Google \
Cloud SDK without a quota project. You might receive a "quota exceeded" \
or "API not enabled" error. See the following page for troubleshooting: \
https://cloud.google.com/docs/authentication/adc-troubleshooting/user-creds. \
"""
_GENERIC_LOAD_METHOD_WARNING = """\
The {} method is deprecated because of a potential security risk.
This method does not validate the credential configuration. The security
risk occurs when a credential configuration is accepted from a source that
is not under your control and used without validation on your side.
If you know that you will be loading credential configurations of a
specific type, it is recommended to use a credential-type-specific
load method.
This will ensure that an unexpected credential type with potential for
malicious intent is not loaded unintentionally. You might still have to do
validation for certain credential types. Please follow the recommendations
for that method. For example, if you want to load only service accounts,
you can create the service account credentials explicitly:
```
from google.oauth2 import service_account
creds = service_account.Credentials.from_service_account_file(filename)
```
If you are loading your credential configuration from an untrusted source and have
not mitigated the risks (e.g. by validating the configuration yourself), make
these changes as soon as possible to prevent security risks to your environment.
Regardless of the method used, it is always your responsibility to validate
configurations received from external sources.
Refer to https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
for more details.
"""
# The subject token type used for AWS external_account credentials.
_AWS_SUBJECT_TOKEN_TYPE = "urn:ietf:params:aws:token-type:aws4_request"
def _warn_about_problematic_credentials(credentials):
"""Determines if the credentials are problematic.
Credentials from the Cloud SDK that are associated with Cloud SDK's project
are problematic because they may not have APIs enabled and have limited
quota. If this is the case, warn about it.
"""
from google.auth import _cloud_sdk
if credentials.client_id == _cloud_sdk.CLOUD_SDK_CLIENT_ID:
warnings.warn(_CLOUD_SDK_CREDENTIALS_WARNING)
def _warn_about_generic_load_method(method_name): # pragma: NO COVER
"""Warns that a generic load method is being used.
This is to discourage use of the generic load methods in favor of
more specific methods. The generic methods are more likely to lead to
security issues if the input is not validated.
Args:
method_name (str): The name of the method being used.
"""
warnings.warn(_GENERIC_LOAD_METHOD_WARNING.format(method_name), DeprecationWarning)
def load_credentials_from_file(
filename, scopes=None, default_scopes=None, quota_project_id=None, request=None
):
"""Loads Google credentials from a file.
The credentials file must be a service account key, stored authorized
user credentials, external account credentials, or impersonated service
account credentials.
.. warning::
Important: If you accept a credential configuration (credential JSON/File/Stream)
from an external source for authentication to Google Cloud Platform, you must
validate it before providing it to any Google API or client library. Providing an
unvalidated credential configuration to Google APIs or libraries can compromise
the security of your systems and data. For more information, refer to
`Validate credential configurations from external sources`_.
.. _Validate credential configurations from external sources:
https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
Args:
filename (str): The full path to the credentials file.
scopes (Optional[Sequence[str]]): The list of scopes for the credentials. If
specified, the credentials will automatically be scoped if
necessary
default_scopes (Optional[Sequence[str]]): Default scopes passed by a
Google client library. Use 'scopes' for user-defined scopes.
quota_project_id (Optional[str]): The project ID used for
quota and billing.
request (Optional[google.auth.transport.Request]): An object used to make
HTTP requests. This is used to determine the associated project ID
for a workload identity pool resource (external account credentials).
If not specified, then it will use a
google.auth.transport.requests.Request client to make requests.
Returns:
Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
credentials and the project ID. Authorized user credentials do not
have the project ID information. External account credentials project
IDs may not always be determined.
Raises:
google.auth.exceptions.DefaultCredentialsError: if the file is in the
wrong format or is missing.
"""
_warn_about_generic_load_method("load_credentials_from_file")
if not os.path.exists(filename):
raise exceptions.DefaultCredentialsError(
"File {} was not found.".format(filename)
)
with io.open(filename, "r") as file_obj:
try:
info = json.load(file_obj)
except ValueError as caught_exc:
new_exc = exceptions.DefaultCredentialsError(
"File {} is not a valid json file.".format(filename), caught_exc
)
raise new_exc from caught_exc
return _load_credentials_from_info(
filename, info, scopes, default_scopes, quota_project_id, request
)
def load_credentials_from_dict(
info, scopes=None, default_scopes=None, quota_project_id=None, request=None
):
"""Loads Google credentials from a dict.
The credentials file must be a service account key, stored authorized
user credentials, external account credentials, or impersonated service
account credentials.
.. warning::
Important: If you accept a credential configuration (credential JSON/File/Stream)
from an external source for authentication to Google Cloud Platform, you must
validate it before providing it to any Google API or client library. Providing an
unvalidated credential configuration to Google APIs or libraries can compromise
the security of your systems and data. For more information, refer to
`Validate credential configurations from external sources`_.
.. _Validate credential configurations from external sources:
https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
Args:
info (Dict[str, Any]): A dict object containing the credentials
scopes (Optional[Sequence[str]]): The list of scopes for the credentials. If
specified, the credentials will automatically be scoped if
necessary
default_scopes (Optional[Sequence[str]]): Default scopes passed by a
Google client library. Use 'scopes' for user-defined scopes.
quota_project_id (Optional[str]): The project ID used for
quota and billing.
request (Optional[google.auth.transport.Request]): An object used to make
HTTP requests. This is used to determine the associated project ID
for a workload identity pool resource (external account credentials).
If not specified, then it will use a
google.auth.transport.requests.Request client to make requests.
Returns:
Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
credentials and the project ID. Authorized user credentials do not
have the project ID information. External account credentials project
IDs may not always be determined.
Raises:
google.auth.exceptions.DefaultCredentialsError: if the file is in the
wrong format or is missing.
"""
_warn_about_generic_load_method("load_credentials_from_dict")
if not isinstance(info, dict):
raise exceptions.DefaultCredentialsError(
"info object was of type {} but dict type was expected.".format(type(info))
)
return _load_credentials_from_info(
"dict object", info, scopes, default_scopes, quota_project_id, request
)
def _load_credentials_from_info(
filename, info, scopes, default_scopes, quota_project_id, request
):
from google.auth.credentials import CredentialsWithQuotaProject
credential_type = info.get("type")
if credential_type == _AUTHORIZED_USER_TYPE:
credentials, project_id = _get_authorized_user_credentials(
filename, info, scopes
)
elif credential_type == _SERVICE_ACCOUNT_TYPE:
credentials, project_id = _get_service_account_credentials(
filename, info, scopes, default_scopes
)
elif credential_type == _EXTERNAL_ACCOUNT_TYPE:
credentials, project_id = _get_external_account_credentials(
info,
filename,
scopes=scopes,
default_scopes=default_scopes,
request=request,
)
elif credential_type == _EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE:
credentials, project_id = _get_external_account_authorized_user_credentials(
filename, info, request
)
elif credential_type == _IMPERSONATED_SERVICE_ACCOUNT_TYPE:
credentials, project_id = _get_impersonated_service_account_credentials(
filename, info, scopes
)
elif credential_type == _GDCH_SERVICE_ACCOUNT_TYPE:
credentials, project_id = _get_gdch_service_account_credentials(filename, info)
else:
raise exceptions.DefaultCredentialsError(
"The file {file} does not have a valid type. "
"Type is {type}, expected one of {valid_types}.".format(
file=filename, type=credential_type, valid_types=_VALID_TYPES
)
)
if isinstance(credentials, CredentialsWithQuotaProject):
credentials = _apply_quota_project_id(credentials, quota_project_id)
return credentials, project_id
def _get_gcloud_sdk_credentials(quota_project_id=None):
"""Gets the credentials and project ID from the Cloud SDK."""
from google.auth import _cloud_sdk
_LOGGER.debug("Checking Cloud SDK credentials as part of auth process...")
# Check if application default credentials exist.
credentials_filename = _cloud_sdk.get_application_default_credentials_path()
if not os.path.isfile(credentials_filename):
_LOGGER.debug("Cloud SDK credentials not found on disk; not using them")
return None, None
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
credentials, project_id = load_credentials_from_file(
credentials_filename, quota_project_id=quota_project_id
)
credentials._cred_file_path = credentials_filename
if not project_id:
project_id = _cloud_sdk.get_project_id()
return credentials, project_id
def _get_explicit_environ_credentials(quota_project_id=None):
"""Gets credentials from the GOOGLE_APPLICATION_CREDENTIALS environment
variable."""
from google.auth import _cloud_sdk
cloud_sdk_adc_path = _cloud_sdk.get_application_default_credentials_path()
explicit_file = os.environ.get(environment_vars.CREDENTIALS, "")
_LOGGER.debug(
"Checking '%s' for explicit credentials as part of auth process...",
explicit_file,
)
if explicit_file != "" and explicit_file == cloud_sdk_adc_path:
# Cloud sdk flow calls gcloud to fetch project id, so if the explicit
# file path is cloud sdk credentials path, then we should fall back
# to cloud sdk flow, otherwise project id cannot be obtained.
_LOGGER.debug(
"Explicit credentials path '%s' is the same as Cloud SDK credentials path, fall back to Cloud SDK credentials flow...",
explicit_file,
)
return _get_gcloud_sdk_credentials(quota_project_id=quota_project_id)
if explicit_file != "":
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
credentials, project_id = load_credentials_from_file(
os.environ[environment_vars.CREDENTIALS],
quota_project_id=quota_project_id,
)
credentials._cred_file_path = f"{explicit_file} file via the GOOGLE_APPLICATION_CREDENTIALS environment variable"
return credentials, project_id
else:
return None, None
def _get_gae_credentials():
"""Gets Google App Engine App Identity credentials and project ID."""
# If not GAE gen1, prefer the metadata service even if the GAE APIs are
# available as per https://google.aip.dev/auth/4115.
if os.environ.get(environment_vars.LEGACY_APPENGINE_RUNTIME) != "python27":
return None, None
# While this library is normally bundled with app_engine, there are
# some cases where it's not available, so we tolerate ImportError.
try:
_LOGGER.debug("Checking for App Engine runtime as part of auth process...")
import google.auth.app_engine as app_engine
except ImportError:
_LOGGER.warning("Import of App Engine auth library failed.")
return None, None
try:
credentials = app_engine.Credentials()
project_id = app_engine.get_project_id()
return credentials, project_id
except EnvironmentError:
_LOGGER.debug(
"No App Engine library was found so cannot authentication via App Engine Identity Credentials."
)
return None, None
def _get_gce_credentials(request=None, quota_project_id=None):
"""Gets credentials and project ID from the GCE Metadata Service."""
# Ping requires a transport, but we want application default credentials
# to require no arguments. So, we'll use the _http_client transport which
# uses http.client. This is only acceptable because the metadata server
# doesn't do SSL and never requires proxies.
# While this library is normally bundled with compute_engine, there are
# some cases where it's not available, so we tolerate ImportError.
try:
from google.auth import compute_engine
from google.auth.compute_engine import _metadata
except ImportError:
_LOGGER.warning("Import of Compute Engine auth library failed.")
return None, None
if request is None:
request = google.auth.transport._http_client.Request()
if _metadata.is_on_gce(request=request):
# Get the project ID.
try:
project_id = _metadata.get_project_id(request=request)
except exceptions.TransportError:
project_id = None
cred = compute_engine.Credentials()
cred = _apply_quota_project_id(cred, quota_project_id)
return cred, project_id
else:
_LOGGER.warning(
"Authentication failed using Compute Engine authentication due to unavailable metadata server."
)
return None, None
def _get_external_account_credentials(
info, filename, scopes=None, default_scopes=None, request=None
):
"""Loads external account Credentials from the parsed external account info.
    The credentials information must correspond to a supported external account
    credential type.
Args:
info (Mapping[str, str]): The external account info in Google format.
filename (str): The full path to the credentials file.
scopes (Optional[Sequence[str]]): The list of scopes for the credentials. If
specified, the credentials will automatically be scoped if
necessary.
default_scopes (Optional[Sequence[str]]): Default scopes passed by a
Google client library. Use 'scopes' for user-defined scopes.
request (Optional[google.auth.transport.Request]): An object used to make
HTTP requests. This is used to determine the associated project ID
for a workload identity pool resource (external account credentials).
If not specified, then it will use a
google.auth.transport.requests.Request client to make requests.
Returns:
Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
credentials and the project ID. External account credentials project
IDs may not always be determined.
Raises:
google.auth.exceptions.DefaultCredentialsError: if the info dictionary
is in the wrong format or is missing required information.
"""
# There are currently 3 types of external_account credentials.
if info.get("subject_token_type") == _AWS_SUBJECT_TOKEN_TYPE:
        # Check if the configuration corresponds to AWS credentials.
from google.auth import aws
credentials = aws.Credentials.from_info(
info, scopes=scopes, default_scopes=default_scopes
)
elif (
info.get("credential_source") is not None
and info.get("credential_source").get("executable") is not None
):
from google.auth import pluggable
credentials = pluggable.Credentials.from_info(
info, scopes=scopes, default_scopes=default_scopes
)
else:
try:
# Check if configuration corresponds to an Identity Pool credentials.
from google.auth import identity_pool
credentials = identity_pool.Credentials.from_info(
info, scopes=scopes, default_scopes=default_scopes
)
except ValueError:
# If the configuration is invalid or does not correspond to any
# supported external_account credentials, raise an error.
raise exceptions.DefaultCredentialsError(
"Failed to load external account credentials from {}".format(filename)
)
if request is None:
import google.auth.transport.requests
request = google.auth.transport.requests.Request()
return credentials, credentials.get_project_id(request=request)
def _get_external_account_authorized_user_credentials(
filename, info, scopes=None, default_scopes=None, request=None
):
try:
from google.auth import external_account_authorized_user
credentials = external_account_authorized_user.Credentials.from_info(info)
except ValueError:
raise exceptions.DefaultCredentialsError(
"Failed to load external account authorized user credentials from {}".format(
filename
)
)
return credentials, None
def _get_authorized_user_credentials(filename, info, scopes=None):
from google.oauth2 import credentials
try:
credentials = credentials.Credentials.from_authorized_user_info(
info, scopes=scopes
)
except ValueError as caught_exc:
msg = "Failed to load authorized user credentials from {}".format(filename)
new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
raise new_exc from caught_exc
return credentials, None
def _get_service_account_credentials(filename, info, scopes=None, default_scopes=None):
from google.oauth2 import service_account
try:
credentials = service_account.Credentials.from_service_account_info(
info, scopes=scopes, default_scopes=default_scopes
)
except ValueError as caught_exc:
msg = "Failed to load service account credentials from {}".format(filename)
new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
raise new_exc from caught_exc
return credentials, info.get("project_id")
def _get_impersonated_service_account_credentials(filename, info, scopes):
from google.auth import impersonated_credentials
try:
credentials = (
impersonated_credentials.Credentials.from_impersonated_service_account_info(
info, scopes=scopes
)
)
except ValueError as caught_exc:
msg = "Failed to load impersonated service account credentials from {}".format(
filename
)
new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
raise new_exc from caught_exc
return credentials, None
def _get_gdch_service_account_credentials(filename, info):
from google.oauth2 import gdch_credentials
try:
credentials = (
gdch_credentials.ServiceAccountCredentials.from_service_account_info(info)
)
except ValueError as caught_exc:
msg = "Failed to load GDCH service account credentials from {}".format(filename)
new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
raise new_exc from caught_exc
return credentials, info.get("project")
def get_api_key_credentials(key):
"""Return credentials with the given API key."""
from google.auth import api_key
return api_key.Credentials(key)
def _apply_quota_project_id(credentials, quota_project_id):
if quota_project_id:
credentials = credentials.with_quota_project(quota_project_id)
else:
credentials = credentials.with_quota_project_from_environment()
from google.oauth2 import credentials as authorized_user_credentials
if isinstance(credentials, authorized_user_credentials.Credentials) and (
not credentials.quota_project_id
):
_warn_about_problematic_credentials(credentials)
return credentials
def default(
scopes: Optional[Sequence[str]] = None,
request: Optional["google.auth.transport.Request"] = None,
quota_project_id: Optional[str] = None,
default_scopes: Optional[Sequence[str]] = None,
) -> tuple["google.auth.credentials.Credentials", Optional[str]]:
"""Gets the default credentials for the current environment.
`Application Default Credentials`_ provides an easy way to obtain
credentials to call Google APIs for server-to-server or local applications.
This function acquires credentials from the environment in the following
order:
1. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
to the path of a valid service account JSON private key file, then it is
loaded and returned. The project ID returned is the project ID defined
in the service account file if available (some older files do not
contain project ID information).
If the environment variable is set to the path of a valid external
account JSON configuration file (workload identity federation), then the
configuration file is used to determine and retrieve the external
credentials from the current environment (AWS, Azure, etc).
These will then be exchanged for Google access tokens via the Google STS
endpoint.
The project ID returned in this case is the one corresponding to the
underlying workload identity pool resource if determinable.
If the environment variable is set to the path of a valid GDCH service
account JSON file (`Google Distributed Cloud Hosted`_), then a GDCH
credential will be returned. The project ID returned is the project
specified in the JSON file.
2. If the `Google Cloud SDK`_ is installed and has application default
credentials set they are loaded and returned.
To enable application default credentials with the Cloud SDK run::
gcloud auth application-default login
If the Cloud SDK has an active project, the project ID is returned. The
active project can be set using::
gcloud config set project
3. If the application is running in the `App Engine standard environment`_
(first generation) then the credentials and project ID from the
`App Identity Service`_ are used.
4. If the application is running in `Compute Engine`_ or `Cloud Run`_ or
the `App Engine flexible environment`_ or the `App Engine standard
environment`_ (second generation) then the credentials and project ID
are obtained from the `Metadata Service`_.
5. If no credentials are found,
:class:`~google.auth.exceptions.DefaultCredentialsError` will be raised.
.. _Application Default Credentials: https://developers.google.com\
/identity/protocols/application-default-credentials
.. _Google Cloud SDK: https://cloud.google.com/sdk
.. _App Engine standard environment: https://cloud.google.com/appengine
.. _App Identity Service: https://cloud.google.com/appengine/docs/python\
/appidentity/
.. _Compute Engine: https://cloud.google.com/compute
.. _App Engine flexible environment: https://cloud.google.com\
/appengine/flexible
.. _Metadata Service: https://cloud.google.com/compute/docs\
/storing-retrieving-metadata
.. _Cloud Run: https://cloud.google.com/run
.. _Google Distributed Cloud Hosted: https://cloud.google.com/blog/topics\
/hybrid-cloud/announcing-google-distributed-cloud-edge-and-hosted
Example::
import google.auth
credentials, project_id = google.auth.default()
Args:
scopes (Sequence[str]): The list of scopes for the credentials. If
specified, the credentials will automatically be scoped if
necessary.
request (Optional[google.auth.transport.Request]): An object used to make
HTTP requests. This is used to either detect whether the application
is running on Compute Engine or to determine the associated project
ID for a workload identity pool resource (external account
credentials). If not specified, then it will either use the standard
library http client to make requests for Compute Engine credentials
or a google.auth.transport.requests.Request client for external
account credentials.
quota_project_id (Optional[str]): The project ID used for
quota and billing.
default_scopes (Optional[Sequence[str]]): Default scopes passed by a
Google client library. Use 'scopes' for user-defined scopes.
Returns:
Tuple[~google.auth.credentials.Credentials, Optional[str]]:
the current environment's credentials and project ID. Project ID
may be None, which indicates that the Project ID could not be
ascertained from the environment.
Raises:
~google.auth.exceptions.DefaultCredentialsError:
If no credentials were found, or if the credentials found were
invalid.
"""
from google.auth.credentials import with_scopes_if_required
from google.auth.credentials import CredentialsWithQuotaProject
explicit_project_id = os.environ.get(
environment_vars.PROJECT, os.environ.get(environment_vars.LEGACY_PROJECT)
)
checkers = (
# Avoid passing scopes here to prevent passing scopes to user credentials.
# with_scopes_if_required() below will ensure scopes/default scopes are
# safely set on the returned credentials since requires_scopes will
# guard against setting scopes on user credentials.
lambda: _get_explicit_environ_credentials(quota_project_id=quota_project_id),
lambda: _get_gcloud_sdk_credentials(quota_project_id=quota_project_id),
_get_gae_credentials,
lambda: _get_gce_credentials(request, quota_project_id=quota_project_id),
)
for checker in checkers:
credentials, project_id = checker()
if credentials is not None:
credentials = with_scopes_if_required(
credentials, scopes, default_scopes=default_scopes
)
effective_project_id = explicit_project_id or project_id
# For external account credentials, scopes are required to determine
# the project ID. Try to get the project ID again if not yet
# determined.
if not effective_project_id and callable(
getattr(credentials, "get_project_id", None)
):
if request is None:
import google.auth.transport.requests
request = google.auth.transport.requests.Request()
effective_project_id = credentials.get_project_id(request=request)
if quota_project_id and isinstance(
credentials, CredentialsWithQuotaProject
):
credentials = credentials.with_quota_project(quota_project_id)
if not effective_project_id:
_LOGGER.warning(
"No project ID could be determined. Consider running "
"`gcloud config set project` or setting the %s "
"environment variable",
environment_vars.PROJECT,
)
return credentials, effective_project_id
raise exceptions.DefaultCredentialsError(_CLOUD_SDK_MISSING_CREDENTIALS)
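# Illustrative usage sketch of default() with explicit scopes (the scope URL is
# the common cloud-platform scope; adjust for the API you call):
#
#     import google.auth
#
#     credentials, project_id = google.auth.default(
#         scopes=["https://www.googleapis.com/auth/cloud-platform"]
#     )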

View File

@@ -0,0 +1,288 @@
# Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Application default credentials.
Implements application default credentials and project ID detection.
"""
import io
import json
import os
import warnings
from google.auth import _default
from google.auth import environment_vars
from google.auth import exceptions
def load_credentials_from_file(filename, scopes=None, quota_project_id=None):
"""Loads Google credentials from a file.
The credentials file must be a service account key or stored authorized
user credentials.
Args:
filename (str): The full path to the credentials file.
scopes (Optional[Sequence[str]]): The list of scopes for the credentials. If
specified, the credentials will automatically be scoped if
necessary
quota_project_id (Optional[str]): The project ID used for
quota and billing.
Returns:
Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
credentials and the project ID. Authorized user credentials do not
have the project ID information.
Raises:
google.auth.exceptions.DefaultCredentialsError: if the file is in the
wrong format or is missing.
"""
if not os.path.exists(filename):
raise exceptions.DefaultCredentialsError(
"File {} was not found.".format(filename)
)
with io.open(filename, "r") as file_obj:
try:
info = json.load(file_obj)
except ValueError as caught_exc:
new_exc = exceptions.DefaultCredentialsError(
"File {} is not a valid json file.".format(filename), caught_exc
)
raise new_exc from caught_exc
# The type key should indicate that the file is either a service account
# credentials file or an authorized user credentials file.
credential_type = info.get("type")
if credential_type == _default._AUTHORIZED_USER_TYPE:
from google.oauth2 import _credentials_async as credentials
try:
credentials = credentials.Credentials.from_authorized_user_info(
info, scopes=scopes
)
except ValueError as caught_exc:
msg = "Failed to load authorized user credentials from {}".format(filename)
new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
raise new_exc from caught_exc
if quota_project_id:
credentials = credentials.with_quota_project(quota_project_id)
if not credentials.quota_project_id:
_default._warn_about_problematic_credentials(credentials)
return credentials, None
elif credential_type == _default._SERVICE_ACCOUNT_TYPE:
from google.oauth2 import _service_account_async as service_account
try:
credentials = service_account.Credentials.from_service_account_info(
info, scopes=scopes
).with_quota_project(quota_project_id)
except ValueError as caught_exc:
msg = "Failed to load service account credentials from {}".format(filename)
new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
raise new_exc from caught_exc
return credentials, info.get("project_id")
else:
raise exceptions.DefaultCredentialsError(
"The file {file} does not have a valid type. "
"Type is {type}, expected one of {valid_types}.".format(
file=filename, type=credential_type, valid_types=_default._VALID_TYPES
)
)
def _get_gcloud_sdk_credentials(quota_project_id=None):
"""Gets the credentials and project ID from the Cloud SDK."""
from google.auth import _cloud_sdk
# Check if application default credentials exist.
credentials_filename = _cloud_sdk.get_application_default_credentials_path()
if not os.path.isfile(credentials_filename):
return None, None
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
credentials, project_id = load_credentials_from_file(
credentials_filename, quota_project_id=quota_project_id
)
if not project_id:
project_id = _cloud_sdk.get_project_id()
return credentials, project_id
def _get_explicit_environ_credentials(quota_project_id=None):
"""Gets credentials from the GOOGLE_APPLICATION_CREDENTIALS environment
variable."""
from google.auth import _cloud_sdk
cloud_sdk_adc_path = _cloud_sdk.get_application_default_credentials_path()
explicit_file = os.environ.get(environment_vars.CREDENTIALS)
if explicit_file is not None and explicit_file == cloud_sdk_adc_path:
# Cloud sdk flow calls gcloud to fetch project id, so if the explicit
# file path is cloud sdk credentials path, then we should fall back
# to cloud sdk flow, otherwise project id cannot be obtained.
return _get_gcloud_sdk_credentials(quota_project_id=quota_project_id)
if explicit_file is not None:
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
credentials, project_id = load_credentials_from_file(
os.environ[environment_vars.CREDENTIALS],
quota_project_id=quota_project_id,
)
return credentials, project_id
else:
return None, None
def _get_gae_credentials():
"""Gets Google App Engine App Identity credentials and project ID."""
# While this library is normally bundled with app_engine, there are
# some cases where it's not available, so we tolerate ImportError.
return _default._get_gae_credentials()
def _get_gce_credentials(request=None):
"""Gets credentials and project ID from the GCE Metadata Service."""
# Ping requires a transport, but we want application default credentials
# to require no arguments. So, we'll use the _http_client transport which
# uses http.client. This is only acceptable because the metadata server
# doesn't do SSL and never requires proxies.
# While this library is normally bundled with compute_engine, there are
# some cases where it's not available, so we tolerate ImportError.
return _default._get_gce_credentials(request)
def default_async(scopes=None, request=None, quota_project_id=None):
"""Gets the default credentials for the current environment.
`Application Default Credentials`_ provides an easy way to obtain
credentials to call Google APIs for server-to-server or local applications.
This function acquires credentials from the environment in the following
order:
1. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
to the path of a valid service account JSON private key file, then it is
loaded and returned. The project ID returned is the project ID defined
in the service account file if available (some older files do not
contain project ID information).
2. If the `Google Cloud SDK`_ is installed and has application default
credentials set they are loaded and returned.
To enable application default credentials with the Cloud SDK run::
gcloud auth application-default login
If the Cloud SDK has an active project, the project ID is returned. The
active project can be set using::
gcloud config set project
3. If the application is running in the `App Engine standard environment`_
(first generation) then the credentials and project ID from the
`App Identity Service`_ are used.
4. If the application is running in `Compute Engine`_ or `Cloud Run`_ or
the `App Engine flexible environment`_ or the `App Engine standard
environment`_ (second generation) then the credentials and project ID
are obtained from the `Metadata Service`_.
5. If no credentials are found,
:class:`~google.auth.exceptions.DefaultCredentialsError` will be raised.
.. _Application Default Credentials: https://developers.google.com\
/identity/protocols/application-default-credentials
.. _Google Cloud SDK: https://cloud.google.com/sdk
.. _App Engine standard environment: https://cloud.google.com/appengine
.. _App Identity Service: https://cloud.google.com/appengine/docs/python\
/appidentity/
.. _Compute Engine: https://cloud.google.com/compute
.. _App Engine flexible environment: https://cloud.google.com\
/appengine/flexible
.. _Metadata Service: https://cloud.google.com/compute/docs\
/storing-retrieving-metadata
.. _Cloud Run: https://cloud.google.com/run
Example::
import google.auth
credentials, project_id = google.auth.default()
Args:
scopes (Sequence[str]): The list of scopes for the credentials. If
specified, the credentials will automatically be scoped if
necessary.
request (google.auth.transport.Request): An object used to make
HTTP requests. This is used to detect whether the application
is running on Compute Engine. If not specified, then it will
use the standard library http client to make requests.
quota_project_id (Optional[str]): The project ID used for
quota and billing.
Returns:
Tuple[~google.auth.credentials.Credentials, Optional[str]]:
the current environment's credentials and project ID. Project ID
may be None, which indicates that the Project ID could not be
ascertained from the environment.
Raises:
~google.auth.exceptions.DefaultCredentialsError:
If no credentials were found, or if the credentials found were
invalid.
"""
from google.auth._credentials_async import with_scopes_if_required
from google.auth.credentials import CredentialsWithQuotaProject
explicit_project_id = os.environ.get(
environment_vars.PROJECT, os.environ.get(environment_vars.LEGACY_PROJECT)
)
checkers = (
lambda: _get_explicit_environ_credentials(quota_project_id=quota_project_id),
lambda: _get_gcloud_sdk_credentials(quota_project_id=quota_project_id),
_get_gae_credentials,
lambda: _get_gce_credentials(request),
)
for checker in checkers:
credentials, project_id = checker()
if credentials is not None:
credentials = with_scopes_if_required(credentials, scopes)
if quota_project_id and isinstance(
credentials, CredentialsWithQuotaProject
):
credentials = credentials.with_quota_project(quota_project_id)
effective_project_id = explicit_project_id or project_id
if not effective_project_id:
_default._LOGGER.warning(
"No project ID could be determined. Consider running "
"`gcloud config set project` or setting the %s "
"environment variable",
environment_vars.PROJECT,
)
return credentials, effective_project_id
raise exceptions.DefaultCredentialsError(_default._CLOUD_SDK_MISSING_CREDENTIALS)
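A minimal usage sketch of the loader above, assuming this module is importable as ``google.auth._default_async`` (the diff does not show file paths); the scope value is only illustrative.

from google.auth import _default_async

# Walks the checker chain above: explicit GOOGLE_APPLICATION_CREDENTIALS,
# gcloud application-default credentials, App Engine, then the GCE metadata server.
credentials, project_id = _default_async.default_async(
    scopes=["https://www.googleapis.com/auth/cloud-platform"]
)
print(project_id, type(credentials).__name__)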

View File

@@ -0,0 +1,164 @@
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import random
import time
from google.auth import exceptions
# The default amount of retry attempts
_DEFAULT_RETRY_TOTAL_ATTEMPTS = 3
# The default initial backoff period (1.0 second).
_DEFAULT_INITIAL_INTERVAL_SECONDS = 1.0
# The default randomization factor (0.1 which results in a random period ranging
# between 10% below and 10% above the retry interval).
_DEFAULT_RANDOMIZATION_FACTOR = 0.1
# The default multiplier value (2 which is 100% increase per back off).
_DEFAULT_MULTIPLIER = 2.0
"""Exponential Backoff Utility
This is a private module that implements the exponential back off algorithm.
It can be used as a utility for code that needs to retry on failure, for example
an HTTP request.
"""
class _BaseExponentialBackoff:
"""An exponential backoff iterator base class.
Args:
total_attempts Optional[int]:
The maximum amount of retries that should happen.
The default value is 3 attempts.
initial_wait_seconds Optional[int]:
The amount of time to sleep in the first backoff. This parameter
should be in seconds.
The default value is 1 second.
randomization_factor Optional[float]:
The amount of jitter that should be in each backoff. For example,
a value of 0.1 will introduce a jitter range of 10% to the
current backoff period.
The default value is 0.1.
multiplier Optional[float]:
The backoff multiplier. This adjusts how much each backoff will
increase. For example, a value of 2.0 doubles the wait on each
attempt: if the initial_wait is 1.0 the sequence looks like
[1.0, 2.0, 4.0, 8.0].
The default value is 2.0.
"""
def __init__(
self,
total_attempts=_DEFAULT_RETRY_TOTAL_ATTEMPTS,
initial_wait_seconds=_DEFAULT_INITIAL_INTERVAL_SECONDS,
randomization_factor=_DEFAULT_RANDOMIZATION_FACTOR,
multiplier=_DEFAULT_MULTIPLIER,
):
if total_attempts < 1:
raise exceptions.InvalidValue(
f"total_attempts must be greater than or equal to 1 but was {total_attempts}"
)
self._total_attempts = total_attempts
self._initial_wait_seconds = initial_wait_seconds
self._current_wait_in_seconds = self._initial_wait_seconds
self._randomization_factor = randomization_factor
self._multiplier = multiplier
self._backoff_count = 0
@property
def total_attempts(self):
"""The total amount of backoff attempts that will be made."""
return self._total_attempts
@property
def backoff_count(self):
"""The current amount of backoff attempts that have been made."""
return self._backoff_count
def _reset(self):
self._backoff_count = 0
self._current_wait_in_seconds = self._initial_wait_seconds
def _calculate_jitter(self):
jitter_variance = self._current_wait_in_seconds * self._randomization_factor
jitter = random.uniform(
self._current_wait_in_seconds - jitter_variance,
self._current_wait_in_seconds + jitter_variance,
)
return jitter
class ExponentialBackoff(_BaseExponentialBackoff):
"""An exponential backoff iterator. This can be used in a for loop to
perform requests with exponential backoff.
"""
def __init__(self, *args, **kwargs):
super(ExponentialBackoff, self).__init__(*args, **kwargs)
def __iter__(self):
self._reset()
return self
def __next__(self):
if self._backoff_count >= self._total_attempts:
raise StopIteration
self._backoff_count += 1
if self._backoff_count <= 1:
return self._backoff_count
jitter = self._calculate_jitter()
time.sleep(jitter)
self._current_wait_in_seconds *= self._multiplier
return self._backoff_count
class AsyncExponentialBackoff(_BaseExponentialBackoff):
"""An async exponential backoff iterator. This can be used in a for loop to
perform async requests with exponential backoff.
"""
def __init__(self, *args, **kwargs):
super(AsyncExponentialBackoff, self).__init__(*args, **kwargs)
def __aiter__(self):
self._reset()
return self
async def __anext__(self):
if self._backoff_count >= self._total_attempts:
raise StopAsyncIteration
self._backoff_count += 1
if self._backoff_count <= 1:
return self._backoff_count
jitter = self._calculate_jitter()
await asyncio.sleep(jitter)
self._current_wait_in_seconds *= self._multiplier
return self._backoff_count
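A short sketch of driving a flaky operation with the two iterators above; the module path ``google.auth._exponential_backoff`` and the ``_flaky_call`` helper are assumptions made for the example.

import asyncio

from google.auth import _exponential_backoff


def _flaky_call(attempt):
    # Stand-in for an HTTP request: pretend the first two attempts fail.
    return attempt >= 3


def fetch_with_retries():
    backoff = _exponential_backoff.ExponentialBackoff(total_attempts=4)
    for attempt in backoff:  # sleeps with jitter before every attempt after the first
        if _flaky_call(attempt):
            return attempt
    raise RuntimeError("all retry attempts failed")


async def fetch_with_retries_async():
    backoff = _exponential_backoff.AsyncExponentialBackoff(total_attempts=4)
    async for attempt in backoff:  # awaits asyncio.sleep() between attempts
        if _flaky_call(attempt):
            return attempt
    raise RuntimeError("all retry attempts failed")


print(fetch_with_retries())
print(asyncio.run(fetch_with_retries_async()))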

View File

@@ -0,0 +1,575 @@
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions for commonly used utilities."""
import base64
import calendar
import datetime
from email.message import Message
import hashlib
import json
import logging
import os
import sys
from typing import Any, Dict, Mapping, Optional, Union
import urllib
from google.auth import exceptions
# _BASE_LOGGER_NAME is the base logger for all google-based loggers.
_BASE_LOGGER_NAME = "google"
# _LOGGING_INITIALIZED ensures that base logger is only configured once
# (unless already configured by the end-user).
_LOGGING_INITIALIZED = False
# The smallest MDS cache used by this library stores tokens until 4 minutes from
# expiry.
REFRESH_THRESHOLD = datetime.timedelta(minutes=3, seconds=45)
# TODO(https://github.com/googleapis/google-auth-library-python/issues/1684): Audit and update the list below.
_SENSITIVE_FIELDS = {
"accessToken",
"access_token",
"id_token",
"client_id",
"refresh_token",
"client_secret",
}
def copy_docstring(source_class):
"""Decorator that copies a method's docstring from another class.
Args:
source_class (type): The class that has the documented method.
Returns:
Callable: A decorator that will copy the docstring of the same
named method in the source class to the decorated method.
"""
def decorator(method):
"""Decorator implementation.
Args:
method (Callable): The method to copy the docstring to.
Returns:
Callable: the same method passed in with an updated docstring.
Raises:
google.auth.exceptions.InvalidOperation: if the method already has a docstring.
"""
if method.__doc__:
raise exceptions.InvalidOperation("Method already has a docstring.")
source_method = getattr(source_class, method.__name__)
method.__doc__ = source_method.__doc__
return method
return decorator
def parse_content_type(header_value):
"""Parse a 'content-type' header value to get just the plain media-type (without parameters).
This is done using the class Message from email.message as suggested in PEP 594
(because the cgi module is deprecated and was removed in Python 3.13,
see https://peps.python.org/pep-0594/#cgi).
Args:
header_value (str): The value of a 'content-type' header as a string.
Returns:
str: A string with just the lowercase media-type from the parsed 'content-type' header.
If the provided content-type is not parsable, returns 'text/plain',
the default value for textual files.
"""
m = Message()
m["content-type"] = header_value
return (
m.get_content_type()
) # Despite the name, actually returns just the media-type
def utcnow():
"""Returns the current UTC datetime.
Returns:
datetime: The current time in UTC.
"""
# datetime.utcnow() is deprecated as of Python 3.12, so we use
# datetime.now(timezone.utc) instead. "utcnow()" is offset-naive (no timezone
# info), while "now(timezone.utc)" is offset-aware (with timezone info), which
# would cause datetime comparison problems. For backward compatibility, we
# remove the timezone info.
now = datetime.datetime.now(datetime.timezone.utc)
now = now.replace(tzinfo=None)
return now
def utcfromtimestamp(timestamp):
"""Returns the UTC datetime from a timestamp.
Args:
timestamp (float): The timestamp to convert.
Returns:
datetime: The time in UTC.
"""
# datetime.utcfromtimestamp() is deprecated as of Python 3.12, so we use
# datetime.fromtimestamp(timestamp, timezone.utc) instead. "utcfromtimestamp()"
# is offset-naive (no timezone info), while "fromtimestamp(timestamp,
# timezone.utc)" is offset-aware (with timezone info), which would cause
# datetime comparison problems. For backward compatibility, we remove the
# timezone info.
dt = datetime.datetime.fromtimestamp(timestamp, tz=datetime.timezone.utc)
dt = dt.replace(tzinfo=None)
return dt
def datetime_to_secs(value):
"""Convert a datetime object to the number of seconds since the UNIX epoch.
Args:
value (datetime): The datetime to convert.
Returns:
int: The number of seconds since the UNIX epoch.
"""
return calendar.timegm(value.utctimetuple())
def to_bytes(value, encoding="utf-8"):
"""Converts a string value to bytes, if necessary.
Args:
value (Union[str, bytes]): The value to be converted.
encoding (str): The encoding to use to convert unicode to bytes.
Defaults to "utf-8".
Returns:
bytes: The original value converted to bytes (if unicode) or as
passed in if it started out as bytes.
Raises:
google.auth.exceptions.InvalidValue: If the value could not be converted to bytes.
"""
result = value.encode(encoding) if isinstance(value, str) else value
if isinstance(result, bytes):
return result
else:
raise exceptions.InvalidValue(
"{0!r} could not be converted to bytes".format(value)
)
def from_bytes(value):
"""Converts bytes to a string value, if necessary.
Args:
value (Union[str, bytes]): The value to be converted.
Returns:
str: The original value converted to unicode (if bytes) or as passed in
if it started out as unicode.
Raises:
google.auth.exceptions.InvalidValue: If the value could not be converted to unicode.
"""
result = value.decode("utf-8") if isinstance(value, bytes) else value
if isinstance(result, str):
return result
else:
raise exceptions.InvalidValue(
"{0!r} could not be converted to unicode".format(value)
)
def update_query(url, params, remove=None):
"""Updates a URL's query parameters.
Replaces any current values if they are already present in the URL.
Args:
url (str): The URL to update.
params (Mapping[str, str]): A mapping of query parameter
keys to values.
remove (Sequence[str]): Parameters to remove from the query string.
Returns:
str: The URL with updated query parameters.
Examples:
>>> url = 'http://example.com?a=1'
>>> update_query(url, {'a': '2'})
http://example.com?a=2
>>> update_query(url, {'b': '3'})
http://example.com?a=1&b=3
>>> update_query(url, {'b': '3'}, remove=['a'])
http://example.com?b=3
"""
if remove is None:
remove = []
# Split the URL into parts.
parts = urllib.parse.urlparse(url)
# Parse the query string.
query_params = urllib.parse.parse_qs(parts.query)
# Update the query parameters with the new parameters.
query_params.update(params)
# Remove any values specified in remove.
query_params = {
key: value for key, value in query_params.items() if key not in remove
}
# Re-encode the query string.
new_query = urllib.parse.urlencode(query_params, doseq=True)
# Reassemble the URL.
new_parts = parts._replace(query=new_query)
return urllib.parse.urlunparse(new_parts)
def scopes_to_string(scopes):
"""Converts scope value to a string suitable for sending to OAuth 2.0
authorization servers.
Args:
scopes (Sequence[str]): The sequence of scopes to convert.
Returns:
str: The scopes formatted as a single string.
"""
return " ".join(scopes)
def string_to_scopes(scopes):
"""Converts stringifed scopes value to a list.
Args:
scopes (Union[Sequence, str]): The string of space-separated scopes
to convert.
Returns:
Sequence(str): The separated scopes.
"""
if not scopes:
return []
return scopes.split(" ")
def padded_urlsafe_b64decode(value):
"""Decodes base64 strings lacking padding characters.
Google infrastructure tends to omit the base64 padding characters.
Args:
value (Union[str, bytes]): The encoded value.
Returns:
bytes: The decoded value
"""
b64string = to_bytes(value)
padded = b64string + b"=" * (-len(b64string) % 4)
return base64.urlsafe_b64decode(padded)
def unpadded_urlsafe_b64encode(value):
"""Encodes base64 strings removing any padding characters.
`rfc 7515`_ defines Base64url to NOT include any padding
characters, but the stdlib doesn't do that by default.
.. _rfc 7515: https://tools.ietf.org/html/rfc7515#page-6
Args:
value (Union[str, bytes]): The bytes-like value to encode
Returns:
bytes: The encoded value
"""
return base64.urlsafe_b64encode(value).rstrip(b"=")
def get_bool_from_env(variable_name, default=False):
"""Gets a boolean value from an environment variable.
The environment variable is interpreted as a boolean with the following
(case-insensitive) rules:
- "true", "1" are considered true.
- "false", "0" are considered false.
Any other values will raise an exception.
Args:
variable_name (str): The name of the environment variable.
default (bool): The default value if the environment variable is not
set.
Returns:
bool: The boolean value of the environment variable.
Raises:
google.auth.exceptions.InvalidValue: If the environment variable is
set to a value that can not be interpreted as a boolean.
"""
value = os.environ.get(variable_name)
if value is None:
return default
value = value.lower()
if value in ("true", "1"):
return True
elif value in ("false", "0"):
return False
else:
raise exceptions.InvalidValue(
'Environment variable "{}" must be one of "true", "false", "1", or "0".'.format(
variable_name
)
)
def is_python_3():
"""Check if the Python interpreter is Python 2 or 3.
Returns:
bool: True if the Python interpreter is Python 3 and False otherwise.
"""
return sys.version_info > (3, 0) # pragma: NO COVER
def _hash_sensitive_info(data: Union[dict, list]) -> Union[dict, list, str]:
"""
Hashes sensitive information within a dictionary.
Args:
data: The dictionary containing data to be processed.
Returns:
A new dictionary with sensitive values replaced by their SHA512 hashes.
If the input is a list, returns a list with each element recursively processed.
If the input is neither a dict nor a list, returns the type of the input as a string.
"""
if isinstance(data, dict):
hashed_data: Dict[Any, Union[Optional[str], dict, list]] = {}
for key, value in data.items():
if key in _SENSITIVE_FIELDS and not isinstance(value, (dict, list)):
hashed_data[key] = _hash_value(value, key)
elif isinstance(value, (dict, list)):
hashed_data[key] = _hash_sensitive_info(value)
else:
hashed_data[key] = value
return hashed_data
elif isinstance(data, list):
hashed_list = []
for val in data:
hashed_list.append(_hash_sensitive_info(val))
return hashed_list
else:
# TODO(https://github.com/googleapis/google-auth-library-python/issues/1701):
# Investigate and hash sensitive info before logging when the data type is
# not a dict or a list.
return str(type(data))
def _hash_value(value, field_name: str) -> Optional[str]:
"""Hashes a value and returns a formatted hash string."""
if value is None:
return None
encoded_value = str(value).encode("utf-8")
hash_object = hashlib.sha512()
hash_object.update(encoded_value)
hex_digest = hash_object.hexdigest()
return f"hashed_{field_name}-{hex_digest}"
def _logger_configured(logger: logging.Logger) -> bool:
"""Determines whether `logger` has non-default configuration
Args:
logger: The logger to check.
Returns:
bool: Whether the logger has any non-default configuration.
"""
return (
logger.handlers != [] or logger.level != logging.NOTSET or not logger.propagate
)
def is_logging_enabled(logger: logging.Logger) -> bool:
"""
Checks if debug logging is enabled for the given logger.
Args:
logger: The logging.Logger instance to check.
Returns:
True if debug logging is enabled, False otherwise.
"""
# NOTE: Log propagation to the root logger is disabled unless
# the base logger i.e. logging.getLogger("google") is
# explicitly configured by the end user. Ideally this
# needs to happen in the client layer (already does for GAPICs).
# However, this is implemented here to avoid logging
# (if a root logger is configured) when a version of google-auth
# which supports logging is used with:
# - an older version of a GAPIC which does not support logging.
# - Apiary client which does not support logging.
global _LOGGING_INITIALIZED
if not _LOGGING_INITIALIZED:
base_logger = logging.getLogger(_BASE_LOGGER_NAME)
if not _logger_configured(base_logger):
base_logger.propagate = False
_LOGGING_INITIALIZED = True
return logger.isEnabledFor(logging.DEBUG)
def request_log(
logger: logging.Logger,
method: str,
url: str,
body: Optional[bytes],
headers: Optional[Mapping[str, str]],
) -> None:
"""
Logs an HTTP request at the DEBUG level if logging is enabled.
Args:
logger: The logging.Logger instance to use.
method: The HTTP method (e.g., "GET", "POST").
url: The URL of the request.
body: The request body (can be None).
headers: The request headers (can be None).
"""
if is_logging_enabled(logger):
content_type = (
headers["Content-Type"] if headers and "Content-Type" in headers else ""
)
json_body = _parse_request_body(body, content_type=content_type)
logged_body = _hash_sensitive_info(json_body)
logger.debug(
"Making request...",
extra={
"httpRequest": {
"method": method,
"url": url,
"body": logged_body,
"headers": headers,
}
},
)
def _parse_request_body(body: Optional[bytes], content_type: str = "") -> Any:
"""
Parses a request body, handling bytes and string types, and different content types.
Args:
body (Optional[bytes]): The request body.
content_type (str): The content type of the request body, e.g., "application/json",
"application/x-www-form-urlencoded", or "text/plain". If empty, attempts
to parse as JSON.
Returns:
Parsed body (dict, str, or None).
- JSON: Decodes if content_type is "application/json" or None (fallback).
- URL-encoded: Parses if content_type is "application/x-www-form-urlencoded".
- Plain text: Returns string if content_type is "text/plain".
- None: Returns if body is None, UTF-8 decode fails, or content_type is unknown.
"""
if body is None:
return None
try:
body_str = body.decode("utf-8")
except (UnicodeDecodeError, AttributeError):
return None
content_type = content_type.lower()
if not content_type or "application/json" in content_type:
try:
return json.loads(body_str)
except (TypeError, ValueError):
return body_str
if "application/x-www-form-urlencoded" in content_type:
parsed_query = urllib.parse.parse_qs(body_str)
result = {k: v[0] for k, v in parsed_query.items()}
return result
if "text/plain" in content_type:
return body_str
return None
def _parse_response(response: Any) -> Any:
"""
Parses a response, attempting to decode JSON.
Args:
response: The response object to parse. This can be any type, but
it is expected to have a `json()` method if it contains JSON.
Returns:
The parsed response. If the response contains valid JSON, the
decoded JSON object (e.g., a dictionary or list) is returned.
If the response does not have a `json()` method or if the JSON
decoding fails, None is returned.
"""
try:
json_response = response.json()
return json_response
except Exception:
# TODO(https://github.com/googleapis/google-auth-library-python/issues/1744):
# Parse and return response payload as json based on different content types.
return None
def _response_log_base(logger: logging.Logger, parsed_response: Any) -> None:
"""
Logs a parsed HTTP response at the DEBUG level.
This internal helper function takes a parsed response and logs it
using the provided logger. It also applies a hashing function to
potentially sensitive information before logging.
Args:
logger: The logging.Logger instance to use for logging.
parsed_response: The parsed HTTP response object (e.g., a dictionary,
list, or the original response if parsing failed).
"""
logged_response = _hash_sensitive_info(parsed_response)
logger.debug("Response received...", extra={"httpResponse": logged_response})
def response_log(logger: logging.Logger, response: Any) -> None:
"""
Logs an HTTP response at the DEBUG level if logging is enabled.
Args:
logger: The logging.Logger instance to use.
response: The HTTP response object to log.
"""
if is_logging_enabled(logger):
json_response = _parse_response(response)
_response_log_base(logger, json_response)
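A brief sketch exercising a few of the helpers above; the import path ``google.auth._helpers`` matches the imports used by other files in this commit, but the values are arbitrary.

from google.auth import _helpers

# Query-string manipulation keeps existing parameters unless told to remove them.
url = _helpers.update_query("http://example.com?a=1", {"b": "3"}, remove=["a"])
print(url)  # http://example.com?b=3

# Unpadded base64url round-trip, as used for JWT segments.
encoded = _helpers.unpadded_urlsafe_b64encode(b"payload")
assert _helpers.padded_urlsafe_b64decode(encoded) == b"payload"

# Sensitive fields are replaced by "hashed_<field>-<sha512>" strings before logging.
print(_helpers._hash_sensitive_info({"access_token": "secret", "scope": "email"}))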

View File

@@ -0,0 +1,164 @@
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""JSON Web Tokens
Provides support for creating (encoding) and verifying (decoding) JWTs,
especially JWTs generated and consumed by Google infrastructure.
See `rfc7519`_ for more details on JWTs.
To encode a JWT use :func:`encode`::
from google.auth import crypt
from google.auth import jwt_async
signer = crypt.Signer(private_key)
payload = {'some': 'payload'}
encoded = jwt_async.encode(signer, payload)
To decode a JWT and verify claims use :func:`decode`::
claims = jwt_async.decode(encoded, certs=public_certs)
You can also skip verification::
claims = jwt_async.decode(encoded, verify=False)
.. _rfc7519: https://tools.ietf.org/html/rfc7519
NOTE: This async support is experimental and marked internal. This surface may
change in minor releases.
"""
from google.auth import _credentials_async
from google.auth import jwt
def encode(signer, payload, header=None, key_id=None):
"""Make a signed JWT.
Args:
signer (google.auth.crypt.Signer): The signer used to sign the JWT.
payload (Mapping[str, str]): The JWT payload.
header (Mapping[str, str]): Additional JWT header payload.
key_id (str): The key id to add to the JWT header. If the
signer has a key id it will be used as the default. If this is
specified it will override the signer's key id.
Returns:
bytes: The encoded JWT.
"""
return jwt.encode(signer, payload, header, key_id)
def decode(token, certs=None, verify=True, audience=None):
"""Decode and verify a JWT.
Args:
token (str): The encoded JWT.
certs (Union[str, bytes, Mapping[str, Union[str, bytes]]]): The
certificate used to validate the JWT signature. If bytes or string,
it must be the public key certificate in PEM format. If a mapping,
it must be a mapping of key IDs to public key certificates in PEM
format. The mapping must contain the same key ID that's specified
in the token's header.
verify (bool): Whether to perform signature and claim validation.
Verification is done by default.
audience (str): The audience claim, 'aud', that this JWT should
contain. If None then the JWT's 'aud' parameter is not verified.
Returns:
Mapping[str, str]: The deserialized JSON payload in the JWT.
Raises:
ValueError: if any verification checks failed.
"""
return jwt.decode(token, certs, verify, audience)
class Credentials(
jwt.Credentials, _credentials_async.Signing, _credentials_async.Credentials
):
"""Credentials that use a JWT as the bearer token.
These credentials require an "audience" claim. This claim identifies the
intended recipient of the bearer token.
The constructor arguments determine the claims for the JWT that is
sent with requests. Usually, you'll construct these credentials with
one of the helper constructors as shown in the next section.
To create JWT credentials using a Google service account private key
JSON file::
audience = 'https://pubsub.googleapis.com/google.pubsub.v1.Publisher'
credentials = jwt_async.Credentials.from_service_account_file(
'service-account.json',
audience=audience)
If you already have the service account file loaded and parsed::
service_account_info = json.load(open('service_account.json'))
credentials = jwt_async.Credentials.from_service_account_info(
service_account_info,
audience=audience)
Both helper methods pass on arguments to the constructor, so you can
specify the JWT claims::
credentials = jwt_async.Credentials.from_service_account_file(
'service-account.json',
audience=audience,
additional_claims={'meta': 'data'})
You can also construct the credentials directly if you have a
:class:`~google.auth.crypt.Signer` instance::
credentials = jwt_async.Credentials(
signer,
issuer='your-issuer',
subject='your-subject',
audience=audience)
The claims are considered immutable. If you want to modify the claims,
you can easily create another instance using :meth:`with_claims`::
new_audience = (
'https://pubsub.googleapis.com/google.pubsub.v1.Subscriber')
new_credentials = credentials.with_claims(audience=new_audience)
"""
class OnDemandCredentials(
jwt.OnDemandCredentials, _credentials_async.Signing, _credentials_async.Credentials
):
"""On-demand JWT credentials.
Like :class:`Credentials`, this class uses a JWT as the bearer token for
authentication. However, this class does not require the audience at
construction time. Instead, it will generate a new token on-demand for
each request using the request URI as the audience. It caches tokens
so that multiple requests to the same URI do not incur the overhead
of generating a new token every time.
This behavior is especially useful for `gRPC`_ clients. A gRPC service may
have multiple audiences, and gRPC clients may not know all of the audiences
required for accessing a particular service. With these credentials,
no knowledge of the audiences is required ahead of time.
.. _grpc: http://www.grpc.io/
"""

View File

@@ -0,0 +1,167 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for transitioning from oauth2client to google-auth.
.. warning::
This module is private as it is intended to assist first-party downstream
clients with the transition from oauth2client to google-auth.
"""
from __future__ import absolute_import
from google.auth import _helpers
import google.auth.app_engine
import google.auth.compute_engine
import google.oauth2.credentials
import google.oauth2.service_account
try:
import oauth2client.client # type: ignore
import oauth2client.contrib.gce # type: ignore
import oauth2client.service_account # type: ignore
except ImportError as caught_exc:
raise ImportError("oauth2client is not installed.") from caught_exc
try:
import oauth2client.contrib.appengine # type: ignore
_HAS_APPENGINE = True
except ImportError:
_HAS_APPENGINE = False
_CONVERT_ERROR_TMPL = "Unable to convert {} to a google-auth credentials class."
def _convert_oauth2_credentials(credentials):
"""Converts to :class:`google.oauth2.credentials.Credentials`.
Args:
credentials (Union[oauth2client.client.OAuth2Credentials,
oauth2client.client.GoogleCredentials]): The credentials to
convert.
Returns:
google.oauth2.credentials.Credentials: The converted credentials.
"""
new_credentials = google.oauth2.credentials.Credentials(
token=credentials.access_token,
refresh_token=credentials.refresh_token,
token_uri=credentials.token_uri,
client_id=credentials.client_id,
client_secret=credentials.client_secret,
scopes=credentials.scopes,
)
new_credentials._expires = credentials.token_expiry
return new_credentials
def _convert_service_account_credentials(credentials):
"""Converts to :class:`google.oauth2.service_account.Credentials`.
Args:
credentials (Union[
oauth2client.service_account.ServiceAccountCredentials,
oauth2client.service_account._JWTAccessCredentials]): The
credentials to convert.
Returns:
google.oauth2.service_account.Credentials: The converted credentials.
"""
info = credentials.serialization_data.copy()
info["token_uri"] = credentials.token_uri
return google.oauth2.service_account.Credentials.from_service_account_info(info)
def _convert_gce_app_assertion_credentials(credentials):
"""Converts to :class:`google.auth.compute_engine.Credentials`.
Args:
credentials (oauth2client.contrib.gce.AppAssertionCredentials): The
credentials to convert.
Returns:
google.auth.compute_engine.Credentials: The converted credentials.
"""
return google.auth.compute_engine.Credentials(
service_account_email=credentials.service_account_email
)
def _convert_appengine_app_assertion_credentials(credentials):
"""Converts to :class:`google.auth.app_engine.Credentials`.
Args:
credentials (oauth2client.contrib.app_engine.AppAssertionCredentials):
The credentials to convert.
Returns:
google.auth.app_engine.Credentials: The converted credentials.
"""
# pylint: disable=invalid-name
return google.auth.app_engine.Credentials(
scopes=_helpers.string_to_scopes(credentials.scope),
service_account_id=credentials.service_account_id,
)
_CLASS_CONVERSION_MAP = {
oauth2client.client.OAuth2Credentials: _convert_oauth2_credentials,
oauth2client.client.GoogleCredentials: _convert_oauth2_credentials,
oauth2client.service_account.ServiceAccountCredentials: _convert_service_account_credentials,
oauth2client.service_account._JWTAccessCredentials: _convert_service_account_credentials,
oauth2client.contrib.gce.AppAssertionCredentials: _convert_gce_app_assertion_credentials,
}
if _HAS_APPENGINE: # pragma: no cover
_CLASS_CONVERSION_MAP[
oauth2client.contrib.appengine.AppAssertionCredentials
] = _convert_appengine_app_assertion_credentials
def convert(credentials):
"""Convert oauth2client credentials to google-auth credentials.
This function converts:
- :class:`oauth2client.client.OAuth2Credentials` to
:class:`google.oauth2.credentials.Credentials`.
- :class:`oauth2client.client.GoogleCredentials` to
:class:`google.oauth2.credentials.Credentials`.
- :class:`oauth2client.service_account.ServiceAccountCredentials` to
:class:`google.oauth2.service_account.Credentials`.
- :class:`oauth2client.service_account._JWTAccessCredentials` to
:class:`google.oauth2.service_account.Credentials`.
- :class:`oauth2client.contrib.gce.AppAssertionCredentials` to
:class:`google.auth.compute_engine.Credentials`.
- :class:`oauth2client.contrib.appengine.AppAssertionCredentials` to
:class:`google.auth.app_engine.Credentials`.
Returns:
google.auth.credentials.Credentials: The converted credentials.
Raises:
ValueError: If the credentials could not be converted.
"""
credentials_class = type(credentials)
try:
return _CLASS_CONVERSION_MAP[credentials_class](credentials)
except KeyError as caught_exc:
new_exc = ValueError(_CONVERT_ERROR_TMPL.format(credentials_class))
raise new_exc from caught_exc
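A small sketch of the conversion entry point above; oauth2client must be installed, and the module path ``google.auth._oauth2client`` is an assumption since the diff does not show file names.

import oauth2client.client

from google.auth import _oauth2client

# oauth2client's own application-default flow returns one of the legacy credential
# classes; depending on the environment it maps to an entry in the table above.
old_credentials = oauth2client.client.GoogleCredentials.get_application_default()
new_credentials = _oauth2client.convert(old_credentials)
print(type(new_credentials).__name__)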

View File

@@ -0,0 +1,109 @@
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import logging
import threading
import google.auth.exceptions as e
_LOGGER = logging.getLogger(__name__)
class RefreshThreadManager:
"""
Organizes exactly one background job that refreshes a token.
"""
def __init__(self):
"""Initializes the manager."""
self._worker = None
self._lock = threading.Lock() # protects access to worker threads.
def start_refresh(self, cred, request):
"""Starts a refresh thread for the given credentials.
The credentials are refreshed using the request parameter.
request and cred MUST not be None
Returns True if a background refresh was kicked off. False otherwise.
Args:
cred: A credentials object.
request: A request object.
Returns:
bool
"""
if cred is None or request is None:
raise e.InvalidValue(
"Unable to start refresh. cred and request must be valid and instantiated objects."
)
with self._lock:
if self._worker is not None and self._worker._error_info is not None:
return False
if self._worker is None or not self._worker.is_alive(): # pragma: NO COVER
self._worker = RefreshThread(cred=cred, request=copy.deepcopy(request))
self._worker.start()
return True
def clear_error(self):
"""
Removes any errors that were stored from previous background refreshes.
"""
with self._lock:
if self._worker:
self._worker._error_info = None
def __getstate__(self):
"""Pickle helper that serializes the _lock attribute."""
state = self.__dict__.copy()
state["_lock"] = None
return state
def __setstate__(self, state):
"""Pickle helper that deserializes the _lock attribute."""
state["_lock"] = threading.Lock()
self.__dict__.update(state)
class RefreshThread(threading.Thread):
"""
Thread that refreshes credentials.
"""
def __init__(self, cred, request, **kwargs):
"""Initializes the thread.
Args:
cred: A Credential object to refresh.
request: A Request object used to perform a credential refresh.
**kwargs: Additional keyword arguments.
"""
super().__init__(**kwargs)
self._cred = cred
self._request = request
self._error_info = None
def run(self):
"""
Perform the credential refresh.
"""
try:
self._cred.refresh(self._request)
except Exception as err: # pragma: NO COVER
_LOGGER.error(f"Background refresh failed due to: {err}")
self._error_info = err
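A minimal sketch of using the manager above with real credentials; the module path ``google.auth._refresh_worker`` is assumed, and any credential/transport pair supported by google-auth would do.

import google.auth
from google.auth import _refresh_worker
from google.auth.transport import requests as transport_requests

credentials, _ = google.auth.default()
request = transport_requests.Request()

manager = _refresh_worker.RefreshThreadManager()
# At most one RefreshThread runs at a time; False means a previous background
# refresh recorded an error that has not been cleared yet.
if not manager.start_refresh(credentials, request):
    manager.clear_error()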

View File

@@ -0,0 +1,80 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions for loading data from a Google service account file."""
import io
import json
from google.auth import crypt
from google.auth import exceptions
def from_dict(data, require=None, use_rsa_signer=True):
"""Validates a dictionary containing Google service account data.
Creates and returns a :class:`google.auth.crypt.Signer` instance from the
private key specified in the data.
Args:
data (Mapping[str, str]): The service account data
require (Sequence[str]): List of keys required to be present in the
info.
use_rsa_signer (Optional[bool]): Whether to use RSA signer or EC signer.
We use RSA signer by default.
Returns:
google.auth.crypt.Signer: A signer created from the private key in the
service account file.
Raises:
MalformedError: if the data was in the wrong format, or if one of the
required keys is missing.
"""
keys_needed = set(require if require is not None else [])
missing = keys_needed.difference(data.keys())
if missing:
raise exceptions.MalformedError(
"Service account info was not in the expected format, missing "
"fields {}.".format(", ".join(missing))
)
# Create a signer.
if use_rsa_signer:
signer = crypt.RSASigner.from_service_account_info(data)
else:
signer = crypt.ES256Signer.from_service_account_info(data)
return signer
def from_filename(filename, require=None, use_rsa_signer=True):
"""Reads a Google service account JSON file and returns its parsed info.
Args:
filename (str): The path to the service account .json file.
require (Sequence[str]): List of keys required to be present in the
info.
use_rsa_signer (Optional[bool]): Whether to use RSA signer or EC signer.
We use RSA signer by default.
Returns:
Tuple[ Mapping[str, str], google.auth.crypt.Signer ]: The verified
info and a signer instance.
"""
with io.open(filename, "r", encoding="utf-8") as json_file:
data = json.load(json_file)
return data, from_dict(data, require=require, use_rsa_signer=use_rsa_signer)
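A short sketch of the two loaders above; ``service-account.json`` is a hypothetical path and the module path ``google.auth._service_account_info`` is assumed.

from google.auth import _service_account_info

info, signer = _service_account_info.from_filename(
    "service-account.json", require=["client_email", "token_uri"]
)
# The signer wraps the private key from the file; the dict keeps the remaining fields.
print(info["client_email"], signer.key_id)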

View File

@@ -0,0 +1,25 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Auth AIO Library for Python."""
import logging
from google.auth import version as google_auth_version
__version__ = google_auth_version.__version__
# Set default logging handler to avoid "No handler found" warnings.
logging.getLogger(__name__).addHandler(logging.NullHandler())

View File

@@ -0,0 +1,62 @@
# Copyright 2025 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions for commonly used utilities."""
import logging
from typing import Any
from google.auth import _helpers
async def _parse_response_async(response: Any) -> Any:
"""
Parses an async response, attempting to decode JSON.
Args:
response: The response object to parse. This can be any type, but
it is expected to have a `json()` method if it contains JSON.
Returns:
The parsed response. If the response contains valid JSON, the
decoded JSON object (e.g., a dictionary) is returned.
If the response does not have a `json()` method or if the JSON
decoding fails, None is returned.
"""
try:
json_response = await response.json()
return json_response
except Exception:
# TODO(https://github.com/googleapis/google-auth-library-python/issues/1745):
# Parse and return response payload as json based on different content types.
return None
async def response_log_async(logger: logging.Logger, response: Any) -> None:
"""
Logs an Async HTTP response at the DEBUG level if logging is enabled.
Args:
logger: The logging.Logger instance to use.
response: The HTTP response object to log.
"""
if _helpers.is_logging_enabled(logger):
# TODO(https://github.com/googleapis/google-auth-library-python/issues/1755):
# Parsing the response for async streaming logging results in
# the stream being empty downstream. For now, we will not be logging
# the response for async responses until we investigate further.
# json_response = await _parse_response_async(response)
json_response = None
_helpers._response_log_base(logger, json_response)
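A hedged sketch of calling the async response logger above directly; the logger name and URL are arbitrary, and debug logging must be enabled on the ``google`` logger hierarchy for anything to be emitted.

import asyncio
import logging

import aiohttp

from google.auth.aio import _helpers as _helpers_async


async def main():
    logger = logging.getLogger("google.auth.aio.example")  # hypothetical logger name
    async with aiohttp.ClientSession() as session:
        response = await session.get("https://example.com")
        # Currently logs a placeholder body; see the TODO above about streaming.
        await _helpers_async.response_log_async(logger, response)


asyncio.run(main())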

View File

@@ -0,0 +1,143 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interfaces for asynchronous credentials."""
from google.auth import _helpers
from google.auth import exceptions
from google.auth._credentials_base import _BaseCredentials
class Credentials(_BaseCredentials):
"""Base class for all asynchronous credentials.
All credentials have a :attr:`token` that is used for authentication and
may also optionally set an :attr:`expiry` to indicate when the token will
no longer be valid.
Most credentials will be :attr:`invalid` until :meth:`refresh` is called.
Credentials can do this automatically before the first HTTP request in
:meth:`before_request`.
Although the token and expiration will change as the credentials are
:meth:`refreshed <refresh>` and used, credentials should be considered
immutable. Various credentials will accept configuration such as private
keys, scopes, and other options. These options are not changeable after
construction. Some classes will provide mechanisms to copy the credentials
with modifications such as :meth:`ScopedCredentials.with_scopes`.
"""
def __init__(self):
super(Credentials, self).__init__()
async def apply(self, headers, token=None):
"""Apply the token to the authentication header.
Args:
headers (Mapping): The HTTP request headers.
token (Optional[str]): If specified, overrides the current access
token.
"""
self._apply(headers, token=token)
async def refresh(self, request):
"""Refreshes the access token.
Args:
request (google.auth.aio.transport.Request): The object used to make
HTTP requests.
Raises:
google.auth.exceptions.RefreshError: If the credentials could
not be refreshed.
"""
raise NotImplementedError("Refresh must be implemented")
async def before_request(self, request, method, url, headers):
"""Performs credential-specific before request logic.
Refreshes the credentials if necessary, then calls :meth:`apply` to
apply the token to the authentication header.
Args:
request (google.auth.aio.transport.Request): The object used to make
HTTP requests.
method (str): The request's HTTP method or the RPC method being
invoked.
url (str): The request's URI or the RPC service's URI.
headers (Mapping): The request's headers.
"""
await self.apply(headers)
class StaticCredentials(Credentials):
"""Asynchronous Credentials representing an immutable access token.
The credentials are considered immutable, except for the token, which can be
configured in the constructor ::
credentials = StaticCredentials(token="token123")
StaticCredentials does not support :meth:`refresh` and assumes that the configured
token is valid and not expired. StaticCredentials will never attempt to
refresh the token.
"""
def __init__(self, token):
"""
Args:
token (str): The access token.
"""
super(StaticCredentials, self).__init__()
self.token = token
@_helpers.copy_docstring(Credentials)
async def refresh(self, request):
raise exceptions.InvalidOperation("Static credentials cannot be refreshed.")
# Note: before_request should never try to refresh access tokens.
# StaticCredentials intentionally does not support it.
@_helpers.copy_docstring(Credentials)
async def before_request(self, request, method, url, headers):
await self.apply(headers)
class AnonymousCredentials(Credentials):
"""Asynchronous Credentials that do not provide any authentication information.
These are useful in the case of services that support anonymous access or
local service emulators that do not use credentials.
"""
async def refresh(self, request):
"""Raises :class:``InvalidOperation``, anonymous credentials cannot be
refreshed."""
raise exceptions.InvalidOperation("Anonymous credentials cannot be refreshed.")
async def apply(self, headers, token=None):
"""Anonymous credentials do nothing to the request.
The optional ``token`` argument is not supported.
Raises:
google.auth.exceptions.InvalidValue: If a token was specified.
"""
if token is not None:
raise exceptions.InvalidValue("Anonymous credentials don't support tokens.")
async def before_request(self, request, method, url, headers):
"""Anonymous credentials do nothing to the request."""
pass
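A minimal sketch of the asynchronous credential classes above; the token value is a placeholder, the module path ``google.auth.aio.credentials`` is assumed from the package layout, and the ``request`` argument can be ``None`` here only because these two calls never use it.

import asyncio

from google.auth import exceptions
from google.auth.aio import credentials as aio_credentials


async def main():
    creds = aio_credentials.StaticCredentials(token="token123")
    headers = {}
    await creds.before_request(None, "GET", "https://example.com", headers)
    print(headers)  # now carries an "authorization: Bearer ..." entry

    try:
        await creds.refresh(None)
    except exceptions.InvalidOperation:
        pass  # static tokens are never refreshed


asyncio.run(main())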

View File

@@ -0,0 +1,144 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Transport - Asynchronous HTTP client library support.
:mod:`google.auth.aio` is designed to work with various asynchronous client libraries such
as aiohttp. In order to work across these libraries with different
interfaces some abstraction is needed.
This module provides two interfaces that are implemented by transport adapters
to support HTTP libraries. :class:`Request` defines the interface expected by
:mod:`google.auth` to make asynchronous requests. :class:`Response` defines the interface
for the return value of :class:`Request`.
"""
import abc
from typing import AsyncGenerator, Mapping, Optional
import google.auth.transport
_DEFAULT_TIMEOUT_SECONDS = 180
DEFAULT_RETRYABLE_STATUS_CODES = google.auth.transport.DEFAULT_RETRYABLE_STATUS_CODES
"""Sequence[int]: HTTP status codes indicating a request can be retried.
"""
DEFAULT_MAX_RETRY_ATTEMPTS = 3
"""int: How many times to retry a request."""
class Response(metaclass=abc.ABCMeta):
"""Asynchronous HTTP Response Interface."""
@property
@abc.abstractmethod
def status_code(self) -> int:
"""
The HTTP response status code.
Returns:
int: The HTTP response status code.
"""
raise NotImplementedError("status_code must be implemented.")
@property
@abc.abstractmethod
def headers(self) -> Mapping[str, str]:
"""The HTTP response headers.
Returns:
Mapping[str, str]: The HTTP response headers.
"""
raise NotImplementedError("headers must be implemented.")
@abc.abstractmethod
async def content(self, chunk_size: int) -> AsyncGenerator[bytes, None]:
"""The raw response content.
Args:
chunk_size (int): The size of each chunk.
Yields:
AsyncGenerator[bytes, None]: An asynchronous generator yielding
response chunks as bytes.
"""
raise NotImplementedError("content must be implemented.")
@abc.abstractmethod
async def read(self) -> bytes:
"""Read the entire response content as bytes.
Returns:
bytes: The entire response content.
"""
raise NotImplementedError("read must be implemented.")
@abc.abstractmethod
async def close(self):
"""Close the response after it is fully consumed to resource."""
raise NotImplementedError("close must be implemented.")
class Request(metaclass=abc.ABCMeta):
"""Interface for a callable that makes HTTP requests.
Specific transport implementations should provide an implementation of
this that adapts their specific request / response API.
.. automethod:: __call__
"""
@abc.abstractmethod
async def __call__(
self,
url: str,
method: str,
body: Optional[bytes],
headers: Optional[Mapping[str, str]],
timeout: float,
**kwargs
) -> Response:
"""Make an HTTP request.
Args:
url (str): The URI to be requested.
method (str): The HTTP method to use for the request. Defaults
to 'GET'.
body (Optional[bytes]): The payload / body in HTTP request.
headers (Mapping[str, str]): Request headers.
timeout (float): The number of seconds to wait for a
response from the server. If not specified or if None, the
transport-specific default timeout will be used.
kwargs: Additional arguments passed on to the transport's
request method.
Returns:
google.auth.aio.transport.Response: The HTTP response.
Raises:
google.auth.exceptions.TransportError: If any exception occurred.
"""
# pylint: disable=redundant-returns-doc, missing-raises-doc
# (pylint doesn't play well with abstract docstrings.)
raise NotImplementedError("__call__ must be implemented.")
async def close(self) -> None:
"""
Close the underlying session.
"""
raise NotImplementedError("close must be implemented.")

View File

@@ -0,0 +1,190 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Transport adapter for Asynchronous HTTP Requests based on aiohttp.
"""
import asyncio
import logging
from typing import AsyncGenerator, Mapping, Optional
try:
import aiohttp # type: ignore
except ImportError as caught_exc: # pragma: NO COVER
raise ImportError(
"The aiohttp library is not installed from please install the aiohttp package to use the aiohttp transport."
) from caught_exc
from google.auth import _helpers
from google.auth import exceptions
from google.auth.aio import _helpers as _helpers_async
from google.auth.aio import transport
_LOGGER = logging.getLogger(__name__)
class Response(transport.Response):
"""
Represents an HTTP response and its data. It is returned by ``google.auth.aio.transport.sessions.AsyncAuthorizedSession``.
Args:
response (aiohttp.ClientResponse): An instance of aiohttp.ClientResponse.
Attributes:
status_code (int): The HTTP status code of the response.
headers (Mapping[str, str]): The HTTP headers of the response.
"""
def __init__(self, response: aiohttp.ClientResponse):
self._response = response
@property
@_helpers.copy_docstring(transport.Response)
def status_code(self) -> int:
return self._response.status
@property
@_helpers.copy_docstring(transport.Response)
def headers(self) -> Mapping[str, str]:
return {key: value for key, value in self._response.headers.items()}
@_helpers.copy_docstring(transport.Response)
async def content(self, chunk_size: int = 1024) -> AsyncGenerator[bytes, None]:
try:
async for chunk in self._response.content.iter_chunked(
chunk_size
): # pragma: no branch
yield chunk
except aiohttp.ClientPayloadError as exc:
raise exceptions.ResponseError(
"Failed to read from the payload stream."
) from exc
@_helpers.copy_docstring(transport.Response)
async def read(self) -> bytes:
try:
return await self._response.read()
except aiohttp.ClientResponseError as exc:
raise exceptions.ResponseError("Failed to read the response body.") from exc
@_helpers.copy_docstring(transport.Response)
async def close(self):
self._response.close()
class Request(transport.Request):
"""Asynchronous Requests request adapter.
This class is used internally for making requests using aiohttp
in a consistent way. If you use :class:`google.auth.aio.transport.sessions.AsyncAuthorizedSession`
you do not need to construct or use this class directly.
This class can be useful if you want to configure a Request callable
with a custom ``aiohttp.ClientSession`` in :class:`~google.auth.aio.transport.sessions.AsyncAuthorizedSession` or if
you want to manually refresh a :class:`~google.auth.aio.credentials.Credentials` instance::
import aiohttp
import google.auth.aio.transport.aiohttp
# Default example:
request = google.auth.aio.transport.aiohttp.Request()
await credentials.refresh(request)
# Custom aiohttp Session Example:
session = aiohttp.ClientSession(auto_decompress=False)
request = google.auth.aio.transport.aiohttp.Request(session=session)
auth_session = google.auth.aio.transport.sessions.AsyncAuthorizedSession(auth_request=request)
Args:
session (aiohttp.ClientSession): An instance of :class:`aiohttp.ClientSession` used
to make HTTP requests. If not specified, a session will be created.
.. automethod:: __call__
"""
def __init__(self, session: aiohttp.ClientSession = None):
self._session = session
self._closed = False
async def __call__(
self,
url: str,
method: str = "GET",
body: Optional[bytes] = None,
headers: Optional[Mapping[str, str]] = None,
timeout: float = transport._DEFAULT_TIMEOUT_SECONDS,
**kwargs,
) -> transport.Response:
"""
Make an HTTP request using aiohttp.
Args:
url (str): The URL to be requested.
method (Optional[str]):
The HTTP method to use for the request. Defaults to 'GET'.
body (Optional[bytes]):
The payload or body in HTTP request.
headers (Optional[Mapping[str, str]]):
Request headers.
timeout (float): The number of seconds to wait for a
response from the server. If not specified or if None, the
transport default timeout will be used.
kwargs: Additional arguments passed through to the underlying
aiohttp :meth:`aiohttp.ClientSession.request` method.
Returns:
google.auth.aio.transport.Response: The HTTP response.
Raises:
- google.auth.exceptions.TransportError: If the request fails or if the session is closed.
- google.auth.exceptions.TimeoutError: If the request times out.
"""
try:
if self._closed:
raise exceptions.TransportError("session is closed.")
if not self._session:
self._session = aiohttp.ClientSession()
client_timeout = aiohttp.ClientTimeout(total=timeout)
_helpers.request_log(_LOGGER, method, url, body, headers)
response = await self._session.request(
method,
url,
data=body,
headers=headers,
timeout=client_timeout,
**kwargs,
)
await _helpers_async.response_log_async(_LOGGER, response)
return Response(response)
except aiohttp.ClientError as caught_exc:
client_exc = exceptions.TransportError(f"Failed to send request to {url}.")
raise client_exc from caught_exc
except asyncio.TimeoutError as caught_exc:
timeout_exc = exceptions.TimeoutError(
f"Request timed out after {timeout} seconds."
)
raise timeout_exc from caught_exc
async def close(self) -> None:
"""
Close the underlying aiohttp session to release the acquired resources.
"""
if not self._closed and self._session:
await self._session.close()
self._closed = True

View File

@@ -0,0 +1,268 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
from contextlib import asynccontextmanager
import functools
import time
from typing import Mapping, Optional
from google.auth import _exponential_backoff, exceptions
from google.auth.aio import transport
from google.auth.aio.credentials import Credentials
from google.auth.exceptions import TimeoutError
try:
from google.auth.aio.transport.aiohttp import Request as AiohttpRequest
AIOHTTP_INSTALLED = True
except ImportError: # pragma: NO COVER
AIOHTTP_INSTALLED = False
@asynccontextmanager
async def timeout_guard(timeout):
"""
timeout_guard is an asynchronous context manager to apply a timeout to an asynchronous block of code.
Args:
timeout (float): The time in seconds before the context manager times out.
Raises:
google.auth.exceptions.TimeoutError: If the code within the context exceeds the provided timeout.
Usage:
async with timeout_guard(10) as with_timeout:
await with_timeout(async_function())
"""
start = time.monotonic()
total_timeout = timeout
def _remaining_time():
elapsed = time.monotonic() - start
remaining = total_timeout - elapsed
if remaining <= 0:
raise TimeoutError(
f"Context manager exceeded the configured timeout of {total_timeout}s."
)
return remaining
async def with_timeout(coro):
try:
remaining = _remaining_time()
response = await asyncio.wait_for(coro, remaining)
return response
except (asyncio.TimeoutError, TimeoutError) as e:
raise TimeoutError(
f"The operation {coro} exceeded the configured timeout of {total_timeout}s."
) from e
try:
yield with_timeout
finally:
_remaining_time()
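# Illustrative sketch, not part of the library: how timeout_guard shares one
# deadline across several awaited calls. `slow_step` is a placeholder
# coroutine used only for this example.
async def _example_timeout_guard_usage():
    async def slow_step():
        await asyncio.sleep(0.1)
        return "done"

    # Both awaits draw from the same 5 second budget; if the combined time
    # exceeds it, google.auth.exceptions.TimeoutError is raised.
    async with timeout_guard(5) as with_timeout:
        first = await with_timeout(slow_step())
        second = await with_timeout(slow_step())
    return first, second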
class AsyncAuthorizedSession:
"""This is an asynchronous implementation of :class:`google.auth.requests.AuthorizedSession` class.
We utilize an instance of a class that implements :class:`google.auth.aio.transport.Request` configured
by the caller or otherwise default to `google.auth.aio.transport.aiohttp.Request` if the external aiohttp
package is installed.
A Requests Session class with credentials.
This class is used to perform asynchronous requests to API endpoints that require
authorization::
import aiohttp
from google.auth.aio.transport import sessions
async with sessions.AsyncAuthorizedSession(credentials) as authed_session:
response = await authed_session.request(
'GET', 'https://www.googleapis.com/storage/v1/b')
The underlying :meth:`request` implementation handles adding the
credentials' headers to the request and refreshing credentials as needed.
Args:
credentials (google.auth.aio.credentials.Credentials):
The credentials to add to the request.
auth_request (Optional[google.auth.aio.transport.Request]):
An instance of a class that implements
:class:`~google.auth.aio.transport.Request` used to make requests
and refresh credentials. If not passed,
an instance of :class:`~google.auth.aio.transport.aiohttp.Request`
is created.
Raises:
- google.auth.exceptions.TransportError: If `auth_request` is `None`
and the external package `aiohttp` is not installed.
- google.auth.exceptions.InvalidType: If the provided credentials are
not of type `google.auth.aio.credentials.Credentials`.
"""
def __init__(
self, credentials: Credentials, auth_request: Optional[transport.Request] = None
):
if not isinstance(credentials, Credentials):
raise exceptions.InvalidType(
f"The configured credentials of type {type(credentials)} are invalid and must be of type `google.auth.aio.credentials.Credentials`"
)
self._credentials = credentials
_auth_request = auth_request
if not _auth_request and AIOHTTP_INSTALLED:
_auth_request = AiohttpRequest()
if _auth_request is None:
raise exceptions.TransportError(
"`auth_request` must either be configured or the external package `aiohttp` must be installed to use the default value."
)
self._auth_request = _auth_request
async def request(
self,
method: str,
url: str,
data: Optional[bytes] = None,
headers: Optional[Mapping[str, str]] = None,
max_allowed_time: float = transport._DEFAULT_TIMEOUT_SECONDS,
timeout: float = transport._DEFAULT_TIMEOUT_SECONDS,
**kwargs,
) -> transport.Response:
"""
Args:
method (str): The http method used to make the request.
url (str): The URI to be requested.
data (Optional[bytes]): The payload or body in HTTP request.
headers (Optional[Mapping[str, str]]): Request headers.
timeout (float):
The amount of time in seconds to wait for the server response
with each individual request.
max_allowed_time (float):
If the method runs longer than this, a ``Timeout`` exception is
automatically raised. Unlike the ``timeout`` parameter, this
value applies to the total method execution time, even if
multiple requests are made under the hood.
Note that it is not guaranteed that the timeout error is raised
at ``max_allowed_time``. It might take longer, for example, if
an underlying request takes a lot of time, but the request
itself does not timeout, e.g. if a large file is being
transmitted. The timeout error will be raised after such
request completes.
Returns:
google.auth.aio.transport.Response: The HTTP response.
Raises:
google.auth.exceptions.TimeoutError: If the method does not complete within
the configured `max_allowed_time` or the request exceeds the configured
`timeout`.
"""
retries = _exponential_backoff.AsyncExponentialBackoff(
total_attempts=transport.DEFAULT_MAX_RETRY_ATTEMPTS
)
async with timeout_guard(max_allowed_time) as with_timeout:
await with_timeout(
# Note: before_request will attempt to refresh credentials if expired.
self._credentials.before_request(
self._auth_request, method, url, headers
)
)
# Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
# See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
async for _ in retries: # pragma: no branch
response = await with_timeout(
self._auth_request(url, method, data, headers, timeout, **kwargs)
)
if response.status_code not in transport.DEFAULT_RETRYABLE_STATUS_CODES:
break
return response
@functools.wraps(request)
async def get(
self,
url: str,
data: Optional[bytes] = None,
headers: Optional[Mapping[str, str]] = None,
max_allowed_time: float = transport._DEFAULT_TIMEOUT_SECONDS,
timeout: float = transport._DEFAULT_TIMEOUT_SECONDS,
**kwargs,
) -> transport.Response:
return await self.request(
"GET", url, data, headers, max_allowed_time, timeout, **kwargs
)
@functools.wraps(request)
async def post(
self,
url: str,
data: Optional[bytes] = None,
headers: Optional[Mapping[str, str]] = None,
max_allowed_time: float = transport._DEFAULT_TIMEOUT_SECONDS,
timeout: float = transport._DEFAULT_TIMEOUT_SECONDS,
**kwargs,
) -> transport.Response:
return await self.request(
"POST", url, data, headers, max_allowed_time, timeout, **kwargs
)
@functools.wraps(request)
async def put(
self,
url: str,
data: Optional[bytes] = None,
headers: Optional[Mapping[str, str]] = None,
max_allowed_time: float = transport._DEFAULT_TIMEOUT_SECONDS,
timeout: float = transport._DEFAULT_TIMEOUT_SECONDS,
**kwargs,
) -> transport.Response:
return await self.request(
"PUT", url, data, headers, max_allowed_time, timeout, **kwargs
)
@functools.wraps(request)
async def patch(
self,
url: str,
data: Optional[bytes] = None,
headers: Optional[Mapping[str, str]] = None,
max_allowed_time: float = transport._DEFAULT_TIMEOUT_SECONDS,
timeout: float = transport._DEFAULT_TIMEOUT_SECONDS,
**kwargs,
) -> transport.Response:
return await self.request(
"PATCH", url, data, headers, max_allowed_time, timeout, **kwargs
)
@functools.wraps(request)
async def delete(
self,
url: str,
data: Optional[bytes] = None,
headers: Optional[Mapping[str, str]] = None,
max_allowed_time: float = transport._DEFAULT_TIMEOUT_SECONDS,
timeout: float = transport._DEFAULT_TIMEOUT_SECONDS,
**kwargs,
) -> transport.Response:
return await self.request(
"DELETE", url, data, headers, max_allowed_time, timeout, **kwargs
)
async def close(self) -> None:
"""
Close the underlying auth request session.
"""
await self._auth_request.close()
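# Illustrative sketch, not part of the library: a minimal end-to-end use of
# AsyncAuthorizedSession. The URL is a placeholder; any implementation of
# google.auth.aio.credentials.Credentials can be passed in.
async def _example_authorized_get(credentials: Credentials) -> bytes:
    session = AsyncAuthorizedSession(credentials)
    try:
        # `timeout` bounds each individual attempt, while `max_allowed_time`
        # bounds the whole call, including credential refresh and retries.
        response = await session.request(
            "GET",
            "https://www.googleapis.com/storage/v1/b",
            timeout=60,
            max_allowed_time=120,
        )
        return await response.read()
    finally:
        await session.close()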

View File

@@ -0,0 +1,76 @@
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google API key support.
This module provides authentication using the `API key`_.
.. _API key:
https://cloud.google.com/docs/authentication/api-keys/
"""
from google.auth import _helpers
from google.auth import credentials
from google.auth import exceptions
class Credentials(credentials.Credentials):
"""API key credentials.
These credentials use an API key to provide authorization to applications.
"""
def __init__(self, token):
"""
Args:
token (str): API key string
Raises:
ValueError: If the provided API key is not a non-empty string.
"""
super(Credentials, self).__init__()
if not token:
raise exceptions.InvalidValue("Token must be a non-empty API key string")
self.token = token
@property
def expired(self):
return False
@property
def valid(self):
return True
@_helpers.copy_docstring(credentials.Credentials)
def refresh(self, request):
return
def apply(self, headers, token=None):
"""Apply the API key token to the x-goog-api-key header.
Args:
headers (Mapping): The HTTP request headers.
token (Optional[str]): If specified, overrides the current access
token.
"""
headers["x-goog-api-key"] = token or self.token
def before_request(self, request, method, url, headers):
"""Performs credential-specific before request logic.
Refreshes the credentials if necessary, then calls :meth:`apply` to
apply the token to the x-goog-api-key header.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
method (str): The request's HTTP method or the RPC method being
invoked.
url (str): The request's URI or the RPC service's URI.
headers (Mapping): The request's headers.
"""
self.apply(headers)
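# Illustrative sketch, not part of the library: constructing API key
# credentials and applying them to a headers mapping. "my-api-key" is a
# placeholder value.
def _example_api_key_headers():
    creds = Credentials("my-api-key")
    headers = {}
    # apply() sets the x-goog-api-key header; API keys never need a refresh.
    creds.apply(headers)
    return headers  # {"x-goog-api-key": "my-api-key"}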

View File

@@ -0,0 +1,179 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google App Engine standard environment support.
This module provides authentication and signing for applications running on App
Engine in the standard environment using the `App Identity API`_.
.. _App Identity API:
https://cloud.google.com/appengine/docs/python/appidentity/
"""
from google.auth import _helpers
from google.auth import credentials
from google.auth import crypt
from google.auth import exceptions
# pytype: disable=import-error
try:
from google.appengine.api import app_identity # type: ignore
except ImportError:
app_identity = None # type: ignore
# pytype: enable=import-error
class Signer(crypt.Signer):
"""Signs messages using the App Engine App Identity service.
This can be used in place of :class:`google.auth.crypt.Signer` when
running in the App Engine standard environment.
"""
@property
def key_id(self):
"""Optional[str]: The key ID used to identify this private key.
.. warning::
This is always ``None``. The key ID used by App Engine can not
be reliably determined ahead of time.
"""
return None
@_helpers.copy_docstring(crypt.Signer)
def sign(self, message):
message = _helpers.to_bytes(message)
_, signature = app_identity.sign_blob(message)
return signature
def get_project_id():
"""Gets the project ID for the current App Engine application.
Returns:
str: The project ID
Raises:
google.auth.exceptions.OSError: If the App Engine APIs are unavailable.
"""
# pylint: disable=missing-raises-doc
# Pylint rightfully thinks google.auth.exceptions.OSError is OSError, but doesn't
# realize it's a valid alias.
if app_identity is None:
raise exceptions.OSError("The App Engine APIs are not available.")
return app_identity.get_application_id()
class Credentials(
credentials.Scoped, credentials.Signing, credentials.CredentialsWithQuotaProject
):
"""App Engine standard environment credentials.
These credentials use the App Engine App Identity API to obtain access
tokens.
"""
def __init__(
self,
scopes=None,
default_scopes=None,
service_account_id=None,
quota_project_id=None,
):
"""
Args:
scopes (Sequence[str]): Scopes to request from the App Identity
API.
default_scopes (Sequence[str]): Default scopes passed by a
Google client library. Use 'scopes' for user-defined scopes.
service_account_id (str): The service account ID passed into
:func:`google.appengine.api.app_identity.get_access_token`.
If not specified, the default application service account
ID will be used.
quota_project_id (Optional[str]): The project ID used for quota
and billing.
Raises:
google.auth.exceptions.OSError: If the App Engine APIs are unavailable.
"""
# pylint: disable=missing-raises-doc
# Pylint rightfully thinks google.auth.exceptions.OSError is OSError, but doesn't
# realize it's a valid alias.
if app_identity is None:
raise exceptions.OSError("The App Engine APIs are not available.")
super(Credentials, self).__init__()
self._scopes = scopes
self._default_scopes = default_scopes
self._service_account_id = service_account_id
self._signer = Signer()
self._quota_project_id = quota_project_id
@_helpers.copy_docstring(credentials.Credentials)
def refresh(self, request):
scopes = self._scopes if self._scopes is not None else self._default_scopes
# pylint: disable=unused-argument
token, ttl = app_identity.get_access_token(scopes, self._service_account_id)
expiry = _helpers.utcfromtimestamp(ttl)
self.token, self.expiry = token, expiry
@property
def service_account_email(self):
"""The service account email."""
if self._service_account_id is None:
self._service_account_id = app_identity.get_service_account_name()
return self._service_account_id
@property
def requires_scopes(self):
"""Checks if the credentials requires scopes.
Returns:
bool: True if there are no scopes set otherwise False.
"""
return not self._scopes and not self._default_scopes
@_helpers.copy_docstring(credentials.Scoped)
def with_scopes(self, scopes, default_scopes=None):
return self.__class__(
scopes=scopes,
default_scopes=default_scopes,
service_account_id=self._service_account_id,
quota_project_id=self.quota_project_id,
)
@_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
def with_quota_project(self, quota_project_id):
return self.__class__(
scopes=self._scopes,
service_account_id=self._service_account_id,
quota_project_id=quota_project_id,
)
@_helpers.copy_docstring(credentials.Signing)
def sign_bytes(self, message):
return self._signer.sign(message)
@property # type: ignore
@_helpers.copy_docstring(credentials.Signing)
def signer_email(self):
return self.service_account_email
@property # type: ignore
@_helpers.copy_docstring(credentials.Signing)
def signer(self):
return self._signer
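# Illustrative sketch, not part of the library: obtaining scoped App Engine
# credentials and refreshing them. This only works inside the App Engine
# standard environment, where the App Identity API is available; the scope
# below is a placeholder.
def _example_app_engine_token(request):
    credentials = Credentials(
        scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )
    # refresh() calls app_identity.get_access_token under the hood and
    # populates credentials.token and credentials.expiry.
    credentials.refresh(request)
    return credentials.token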

View File

@@ -0,0 +1,863 @@
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AWS Credentials and AWS Signature V4 Request Signer.
This module provides credentials to access Google Cloud resources from Amazon
Web Services (AWS) workloads. These credentials are recommended over the
use of service account credentials in AWS as they do not involve the management
of long-lived service account private keys.
AWS Credentials are initialized using external_account arguments which are
typically loaded from the external credentials JSON file.
This module also provides a definition for an abstract AWS security credentials supplier.
This supplier can be implemented to return valid AWS security credentials and an AWS region
and used to create AWS credentials. The credentials will then call the
supplier instead of using pre-defined methods such as calling the EC2 metadata endpoints.
This module also provides a basic implementation of the
`AWS Signature Version 4`_ request signing algorithm.
AWS Credentials use serialized signed requests to the
`AWS STS GetCallerIdentity`_ API that can be exchanged for Google access tokens
via the GCP STS endpoint.
.. _AWS Signature Version 4: https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html
.. _AWS STS GetCallerIdentity: https://docs.aws.amazon.com/STS/latest/APIReference/API_GetCallerIdentity.html
"""
import abc
from dataclasses import dataclass
import hashlib
import hmac
import http.client as http_client
import json
import os
import posixpath
import re
from typing import Optional
import urllib
from urllib.parse import urljoin
from google.auth import _helpers
from google.auth import environment_vars
from google.auth import exceptions
from google.auth import external_account
# AWS Signature Version 4 signing algorithm identifier.
_AWS_ALGORITHM = "AWS4-HMAC-SHA256"
# The termination string for the AWS credential scope value as defined in
# https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html
_AWS_REQUEST_TYPE = "aws4_request"
# The AWS authorization header name for the security session token if available.
_AWS_SECURITY_TOKEN_HEADER = "x-amz-security-token"
# The AWS authorization header name for the auto-generated date.
_AWS_DATE_HEADER = "x-amz-date"
# The default AWS regional credential verification URL.
_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL = (
"https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15"
)
# IMDSV2 session token lifetime. This is set to a low value because the session token is used immediately.
_IMDSV2_SESSION_TOKEN_TTL_SECONDS = "300"
class RequestSigner(object):
"""Implements an AWS request signer based on the AWS Signature Version 4 signing
process.
https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html
"""
def __init__(self, region_name):
"""Instantiates an AWS request signer used to compute authenticated signed
requests to AWS APIs based on the AWS Signature Version 4 signing process.
Args:
region_name (str): The AWS region to use.
"""
self._region_name = region_name
def get_request_options(
self,
aws_security_credentials,
url,
method,
request_payload="",
additional_headers={},
):
"""Generates the signed request for the provided HTTP request for calling
an AWS API. This follows the steps described at:
https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html
Args:
aws_security_credentials (AWSSecurityCredentials): The AWS security credentials.
url (str): The AWS service URL containing the canonical URI and
query string.
method (str): The HTTP method used to call this API.
request_payload (Optional[str]): The optional request payload if
available.
additional_headers (Optional[Mapping[str, str]]): The optional
additional headers needed for the requested AWS API.
Returns:
Mapping[str, str]: The AWS signed request dictionary object.
"""
additional_headers = additional_headers or {}
uri = urllib.parse.urlparse(url)
# Normalize the URL path. This is needed for the canonical_uri.
# os.path.normpath can't be used since it normalizes "/" paths
# to "\\" in Windows OS.
normalized_uri = urllib.parse.urlparse(
urljoin(url, posixpath.normpath(uri.path))
)
# Validate provided URL.
if not uri.hostname or uri.scheme != "https":
raise exceptions.InvalidResource("Invalid AWS service URL")
header_map = _generate_authentication_header_map(
host=uri.hostname,
canonical_uri=normalized_uri.path or "/",
canonical_querystring=_get_canonical_querystring(uri.query),
method=method,
region=self._region_name,
aws_security_credentials=aws_security_credentials,
request_payload=request_payload,
additional_headers=additional_headers,
)
headers = {
"Authorization": header_map.get("authorization_header"),
"host": uri.hostname,
}
# Add x-amz-date if available.
if "amz_date" in header_map:
headers[_AWS_DATE_HEADER] = header_map.get("amz_date")
# Append additional optional headers, e.g. X-Amz-Target, Content-Type, etc.
for key in additional_headers:
headers[key] = additional_headers[key]
# Add session token if available.
if aws_security_credentials.session_token is not None:
headers[_AWS_SECURITY_TOKEN_HEADER] = aws_security_credentials.session_token
signed_request = {"url": url, "method": method, "headers": headers}
if request_payload:
signed_request["data"] = request_payload
return signed_request
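# Illustrative sketch, not part of the library: signing a GetCallerIdentity
# request with RequestSigner. The access key id and secret below are
# placeholders, not real credentials.
def _example_sign_get_caller_identity():
    signer = RequestSigner("us-east-2")
    credentials = AwsSecurityCredentials(
        access_key_id="AKIDEXAMPLE",
        secret_access_key="placeholder-secret",
        session_token=None,
    )
    # Returns a dict with the signed url, method, and headers, including the
    # Authorization and x-amz-date headers.
    return signer.get_request_options(
        credentials,
        "https://sts.us-east-2.amazonaws.com"
        "?Action=GetCallerIdentity&Version=2011-06-15",
        "POST",
    )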
def _get_canonical_querystring(query):
"""Generates the canonical query string given a raw query string.
Logic is based on
https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
Args:
query (str): The raw query string.
Returns:
str: The canonical query string.
"""
# Parse raw query string.
querystring = urllib.parse.parse_qs(query)
querystring_encoded_map = {}
for key in querystring:
quote_key = urllib.parse.quote(key, safe="-_.~")
# URI encode key.
querystring_encoded_map[quote_key] = []
for item in querystring[key]:
# For each key, URI encode all values for that key.
querystring_encoded_map[quote_key].append(
urllib.parse.quote(item, safe="-_.~")
)
# Sort values for each key.
querystring_encoded_map[quote_key].sort()
# Sort keys.
sorted_keys = list(querystring_encoded_map.keys())
sorted_keys.sort()
# Reconstruct the query string. Preserve keys with multiple values.
querystring_encoded_pairs = []
for key in sorted_keys:
for item in querystring_encoded_map[key]:
querystring_encoded_pairs.append("{}={}".format(key, item))
return "&".join(querystring_encoded_pairs)
def _sign(key, msg):
"""Creates the HMAC-SHA256 hash of the provided message using the provided
key.
Args:
key (bytes): The HMAC-SHA256 key to use.
msg (str): The message to hash.
Returns:
bytes: The computed HMAC-SHA256 digest.
"""
return hmac.new(key, msg.encode("utf-8"), hashlib.sha256).digest()
def _get_signing_key(key, date_stamp, region_name, service_name):
"""Calculates the signing key used to calculate the signature for
AWS Signature Version 4 based on:
https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html
Args:
key (str): The AWS secret access key.
date_stamp (str): The '%Y%m%d' date format.
region_name (str): The AWS region.
service_name (str): The AWS service name, e.g. sts.
Returns:
bytes: The derived signing key.
"""
k_date = _sign(("AWS4" + key).encode("utf-8"), date_stamp)
k_region = _sign(k_date, region_name)
k_service = _sign(k_region, service_name)
k_signing = _sign(k_service, "aws4_request")
return k_signing
def _generate_authentication_header_map(
host,
canonical_uri,
canonical_querystring,
method,
region,
aws_security_credentials,
request_payload="",
additional_headers={},
):
"""Generates the authentication header map needed for generating the AWS
Signature Version 4 signed request.
Args:
host (str): The AWS service URL hostname.
canonical_uri (str): The AWS service URL path name.
canonical_querystring (str): The AWS service URL query string.
method (str): The HTTP method used to call this API.
region (str): The AWS region.
aws_security_credentials (AWSSecurityCredentials): The AWS security credentials.
request_payload (Optional[str]): The optional request payload if
available.
additional_headers (Optional[Mapping[str, str]]): The optional
additional headers needed for the requested AWS API.
Returns:
Mapping[str, str]: The AWS authentication header dictionary object.
This contains the x-amz-date and authorization header information.
"""
# iam.amazonaws.com host => iam service.
# sts.us-east-2.amazonaws.com host => sts service.
service_name = host.split(".")[0]
current_time = _helpers.utcnow()
amz_date = current_time.strftime("%Y%m%dT%H%M%SZ")
date_stamp = current_time.strftime("%Y%m%d")
# Change all additional headers to be lower case.
full_headers = {}
for key in additional_headers:
full_headers[key.lower()] = additional_headers[key]
# Add AWS session token if available.
if aws_security_credentials.session_token is not None:
full_headers[
_AWS_SECURITY_TOKEN_HEADER
] = aws_security_credentials.session_token
# Required headers
full_headers["host"] = host
# Do not use generated x-amz-date if the date header is provided.
# Previously the date was not prefixed with x-amz- and could be provided
# manually.
# https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req
if "date" not in full_headers:
full_headers[_AWS_DATE_HEADER] = amz_date
# Header keys need to be sorted alphabetically.
canonical_headers = ""
header_keys = list(full_headers.keys())
header_keys.sort()
for key in header_keys:
canonical_headers = "{}{}:{}\n".format(
canonical_headers, key, full_headers[key]
)
signed_headers = ";".join(header_keys)
payload_hash = hashlib.sha256((request_payload or "").encode("utf-8")).hexdigest()
# https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
canonical_request = "{}\n{}\n{}\n{}\n{}\n{}".format(
method,
canonical_uri,
canonical_querystring,
canonical_headers,
signed_headers,
payload_hash,
)
credential_scope = "{}/{}/{}/{}".format(
date_stamp, region, service_name, _AWS_REQUEST_TYPE
)
# https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html
string_to_sign = "{}\n{}\n{}\n{}".format(
_AWS_ALGORITHM,
amz_date,
credential_scope,
hashlib.sha256(canonical_request.encode("utf-8")).hexdigest(),
)
# https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html
signing_key = _get_signing_key(
aws_security_credentials.secret_access_key, date_stamp, region, service_name
)
signature = hmac.new(
signing_key, string_to_sign.encode("utf-8"), hashlib.sha256
).hexdigest()
# https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html
authorization_header = "{} Credential={}/{}, SignedHeaders={}, Signature={}".format(
_AWS_ALGORITHM,
aws_security_credentials.access_key_id,
credential_scope,
signed_headers,
signature,
)
authentication_header = {"authorization_header": authorization_header}
# Do not use generated x-amz-date if the date header is provided.
if "date" not in full_headers:
authentication_header["amz_date"] = amz_date
return authentication_header
@dataclass
class AwsSecurityCredentials:
"""A class that models AWS security credentials with an optional session token.
Attributes:
access_key_id (str): The AWS security credentials access key id.
secret_access_key (str): The AWS security credentials secret access key.
session_token (Optional[str]): The optional AWS security credentials session token. This should be set when using temporary credentials.
"""
access_key_id: str
secret_access_key: str
session_token: Optional[str] = None
class AwsSecurityCredentialsSupplier(metaclass=abc.ABCMeta):
"""Base class for AWS security credential suppliers. This can be implemented with custom logic to retrieve
AWS security credentials to exchange for a Google Cloud access token. The AWS external account credential does
not cache the AWS security credentials, so caching logic should be added in the implementation.
"""
@abc.abstractmethod
def get_aws_security_credentials(self, context, request):
"""Returns the AWS security credentials for the requested context.
.. warning:: This is not cached by the calling Google credential, so caching logic should be implemented in the supplier.
Args:
context (google.auth.external_account.SupplierContext): The context object
containing information about the requested audience and subject token type.
request (google.auth.transport.Request): The object used to make
HTTP requests.
Raises:
google.auth.exceptions.RefreshError: If an error is encountered during
security credential retrieval logic.
Returns:
AwsSecurityCredentials: The requested AWS security credentials.
"""
raise NotImplementedError("")
@abc.abstractmethod
def get_aws_region(self, context, request):
"""Returns the AWS region for the requested context.
Args:
context (google.auth.external_account.SupplierContext): The context object
containing information about the requested audience and subject token type.
request (google.auth.transport.Request): The object used to make
HTTP requests.
Raises:
google.auth.exceptions.RefreshError: If an error is encountered during
region retrieval logic.
Returns:
str: The AWS region.
"""
raise NotImplementedError("")
class _DefaultAwsSecurityCredentialsSupplier(AwsSecurityCredentialsSupplier):
"""Default implementation of AWS security credentials supplier. Supports retrieving
credentials and region via EC2 metadata endpoints and environment variables.
"""
def __init__(self, credential_source):
self._region_url = credential_source.get("region_url")
self._security_credentials_url = credential_source.get("url")
self._imdsv2_session_token_url = credential_source.get(
"imdsv2_session_token_url"
)
@_helpers.copy_docstring(AwsSecurityCredentialsSupplier)
def get_aws_security_credentials(self, context, request):
# Check environment variables for permanent credentials first.
# https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html
env_aws_access_key_id = os.environ.get(environment_vars.AWS_ACCESS_KEY_ID)
env_aws_secret_access_key = os.environ.get(
environment_vars.AWS_SECRET_ACCESS_KEY
)
# This is normally not available for permanent credentials.
env_aws_session_token = os.environ.get(environment_vars.AWS_SESSION_TOKEN)
if env_aws_access_key_id and env_aws_secret_access_key:
return AwsSecurityCredentials(
env_aws_access_key_id, env_aws_secret_access_key, env_aws_session_token
)
imdsv2_session_token = self._get_imdsv2_session_token(request)
role_name = self._get_metadata_role_name(request, imdsv2_session_token)
# Get security credentials.
credentials = self._get_metadata_security_credentials(
request, role_name, imdsv2_session_token
)
return AwsSecurityCredentials(
credentials.get("AccessKeyId"),
credentials.get("SecretAccessKey"),
credentials.get("Token"),
)
@_helpers.copy_docstring(AwsSecurityCredentialsSupplier)
def get_aws_region(self, context, request):
# The AWS metadata server is not available in some AWS environments
# such as AWS lambda. Instead, it is available via environment
# variable.
env_aws_region = os.environ.get(environment_vars.AWS_REGION)
if env_aws_region is not None:
return env_aws_region
env_aws_region = os.environ.get(environment_vars.AWS_DEFAULT_REGION)
if env_aws_region is not None:
return env_aws_region
if not self._region_url:
raise exceptions.RefreshError("Unable to determine AWS region")
headers = None
imdsv2_session_token = self._get_imdsv2_session_token(request)
if imdsv2_session_token is not None:
headers = {"X-aws-ec2-metadata-token": imdsv2_session_token}
response = request(url=self._region_url, method="GET", headers=headers)
# Support both string and bytes type response.data.
response_body = (
response.data.decode("utf-8")
if hasattr(response.data, "decode")
else response.data
)
if response.status != http_client.OK:
raise exceptions.RefreshError(
"Unable to retrieve AWS region: {}".format(response_body)
)
# This endpoint will return the region in format: us-east-2b.
# Only the us-east-2 part should be used.
return response_body[:-1]
def _get_imdsv2_session_token(self, request):
if request is not None and self._imdsv2_session_token_url is not None:
headers = {
"X-aws-ec2-metadata-token-ttl-seconds": _IMDSV2_SESSION_TOKEN_TTL_SECONDS
}
imdsv2_session_token_response = request(
url=self._imdsv2_session_token_url, method="PUT", headers=headers
)
if imdsv2_session_token_response.status != http_client.OK:
raise exceptions.RefreshError(
"Unable to retrieve AWS Session Token: {}".format(
imdsv2_session_token_response.data
)
)
return imdsv2_session_token_response.data
else:
return None
def _get_metadata_security_credentials(
self, request, role_name, imdsv2_session_token
):
"""Retrieves the AWS security credentials required for signing AWS
requests from the AWS metadata server.
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
role_name (str): The AWS role name required by the AWS metadata
server security_credentials endpoint in order to return the
credentials.
imdsv2_session_token (str): The AWS IMDSv2 session token to be added as a
header in the requests to AWS metadata endpoint.
Returns:
Mapping[str, str]: The AWS metadata server security credentials
response.
Raises:
google.auth.exceptions.RefreshError: If an error occurs while
retrieving the AWS security credentials.
"""
if imdsv2_session_token is not None:
headers = {"X-aws-ec2-metadata-token": imdsv2_session_token}
else:
headers = None
response = request(
url="{}/{}".format(self._security_credentials_url, role_name),
method="GET",
headers=headers,
)
# support both string and bytes type response.data
response_body = (
response.data.decode("utf-8")
if hasattr(response.data, "decode")
else response.data
)
if response.status != http_client.OK:
raise exceptions.RefreshError(
"Unable to retrieve AWS security credentials: {}".format(response_body)
)
credentials_response = json.loads(response_body)
return credentials_response
def _get_metadata_role_name(self, request, imdsv2_session_token):
"""Retrieves the AWS role currently attached to the current AWS
workload by querying the AWS metadata server. This is needed for the
AWS metadata server security credentials endpoint in order to retrieve
the AWS security credentials needed to sign requests to AWS APIs.
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
imdsv2_session_token (str): The AWS IMDSv2 session token to be added as a
header in the requests to AWS metadata endpoint.
Returns:
str: The AWS role name.
Raises:
google.auth.exceptions.RefreshError: If an error occurs while
retrieving the AWS role name.
"""
if self._security_credentials_url is None:
raise exceptions.RefreshError(
"Unable to determine the AWS metadata server security credentials endpoint"
)
headers = None
if imdsv2_session_token is not None:
headers = {"X-aws-ec2-metadata-token": imdsv2_session_token}
response = request(
url=self._security_credentials_url, method="GET", headers=headers
)
# support both string and bytes type response.data
response_body = (
response.data.decode("utf-8")
if hasattr(response.data, "decode")
else response.data
)
if response.status != http_client.OK:
raise exceptions.RefreshError(
"Unable to retrieve AWS role name {}".format(response_body)
)
return response_body
class Credentials(external_account.Credentials):
"""AWS external account credentials.
This is used to exchange serialized AWS signature v4 signed requests to
AWS STS GetCallerIdentity service for Google access tokens.
"""
def __init__(
self,
audience,
subject_token_type,
token_url=external_account._DEFAULT_TOKEN_URL,
credential_source=None,
aws_security_credentials_supplier=None,
*args,
**kwargs
):
"""Instantiates an AWS workload external account credentials object.
Args:
audience (str): The STS audience field.
subject_token_type (str): The subject token type based on the Oauth2.0 token exchange spec.
Expected values include::
"urn:ietf:params:aws:token-type:aws4_request"
token_url (Optional [str]): The STS endpoint URL. If not provided, will default to "https://sts.googleapis.com/v1/token".
credential_source (Optional [Mapping]): The credential source dictionary used
to provide instructions on how to retrieve external credential to be exchanged for Google access tokens.
Either a credential source or an AWS security credentials supplier must be provided.
Example credential_source for AWS credential::
{
"environment_id": "aws1",
"regional_cred_verification_url": "https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15",
"region_url": "http://169.254.169.254/latest/meta-data/placement/availability-zone",
"url": "http://169.254.169.254/latest/meta-data/iam/security-credentials",
imdsv2_session_token_url": "http://169.254.169.254/latest/api/token"
}
aws_security_credentials_supplier (Optional [AwsSecurityCredentialsSupplier]): Optional AWS security credentials supplier.
This will be called to supply valid AWS security credentials which will then
be exchanged for Google access tokens. Either an AWS security credentials supplier
or a credential source must be provided.
args (List): Optional positional arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
kwargs (Mapping): Optional keyword arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
Raises:
google.auth.exceptions.RefreshError: If an error is encountered during
access token retrieval logic.
ValueError: For invalid parameters.
.. note:: Typically one of the helper constructors
:meth:`from_file` or
:meth:`from_info` is used instead of calling the constructor directly.
"""
super(Credentials, self).__init__(
audience=audience,
subject_token_type=subject_token_type,
token_url=token_url,
credential_source=credential_source,
*args,
**kwargs
)
if credential_source is None and aws_security_credentials_supplier is None:
raise exceptions.InvalidValue(
"A valid credential source or AWS security credentials supplier must be provided."
)
if (
credential_source is not None
and aws_security_credentials_supplier is not None
):
raise exceptions.InvalidValue(
"AWS credential cannot have both a credential source and an AWS security credentials supplier."
)
if aws_security_credentials_supplier:
self._aws_security_credentials_supplier = aws_security_credentials_supplier
# The regional cred verification URL would normally be provided through the credential source. So set it to the default one here.
self._cred_verification_url = (
_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL
)
else:
environment_id = credential_source.get("environment_id") or ""
self._aws_security_credentials_supplier = (
_DefaultAwsSecurityCredentialsSupplier(credential_source)
)
self._cred_verification_url = credential_source.get(
"regional_cred_verification_url"
)
# Get the environment ID, i.e. "aws1". Currently, only one version is supported (1).
matches = re.match(r"^(aws)([\d]+)$", environment_id)
if matches:
env_id, env_version = matches.groups()
else:
env_id, env_version = (None, None)
if env_id != "aws" or self._cred_verification_url is None:
raise exceptions.InvalidResource(
"No valid AWS 'credential_source' provided"
)
elif env_version is None or int(env_version) != 1:
raise exceptions.InvalidValue(
"aws version '{}' is not supported in the current build.".format(
env_version
)
)
self._target_resource = audience
self._request_signer = None
def retrieve_subject_token(self, request):
"""Retrieves the subject token using the credential_source object.
The subject token is a serialized `AWS GetCallerIdentity signed request`_.
The logic is summarized as:
Retrieve the AWS region from the AWS_REGION or AWS_DEFAULT_REGION
environment variable or from the AWS metadata server availability-zone
if not found in the environment variable.
Check AWS credentials in environment variables. If not found, retrieve
from the AWS metadata server security-credentials endpoint.
When retrieving AWS credentials from the metadata server
security-credentials endpoint, the AWS role needs to be determined by
calling the security-credentials endpoint without any argument. Then the
credentials can be retrieved via: security-credentials/role_name
Generate the signed request to AWS STS GetCallerIdentity action.
Inject x-goog-cloud-target-resource into header and serialize the
signed request. This will be the subject-token to pass to GCP STS.
.. _AWS GetCallerIdentity signed request:
https://cloud.google.com/iam/docs/access-resources-aws#exchange-token
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
Returns:
str: The retrieved subject token.
"""
# Initialize the request signer if not yet initialized after determining
# the current AWS region.
if self._request_signer is None:
self._region = self._aws_security_credentials_supplier.get_aws_region(
self._supplier_context, request
)
self._request_signer = RequestSigner(self._region)
# Retrieve the AWS security credentials needed to generate the signed
# request.
aws_security_credentials = (
self._aws_security_credentials_supplier.get_aws_security_credentials(
self._supplier_context, request
)
)
# Generate the signed request to AWS STS GetCallerIdentity API.
# Use the required regional endpoint. Otherwise, the request will fail.
request_options = self._request_signer.get_request_options(
aws_security_credentials,
self._cred_verification_url.replace("{region}", self._region),
"POST",
)
# The GCP STS endpoint expects the headers to be formatted as:
# [
# {key: 'x-amz-date', value: '...'},
# {key: 'Authorization', value: '...'},
# ...
# ]
# And then serialized as:
# quote(json.dumps({
# url: '...',
# method: 'POST',
# headers: [{key: 'x-amz-date', value: '...'}, ...]
# }))
request_headers = request_options.get("headers")
# The full, canonical resource name of the workload identity pool
# provider, with or without the HTTPS prefix.
# Including this header as part of the signature is recommended to
# ensure data integrity.
request_headers["x-goog-cloud-target-resource"] = self._target_resource
# Serialize AWS signed request.
aws_signed_req = {}
aws_signed_req["url"] = request_options.get("url")
aws_signed_req["method"] = request_options.get("method")
aws_signed_req["headers"] = []
# Reformat header to GCP STS expected format.
for key in request_headers.keys():
aws_signed_req["headers"].append(
{"key": key, "value": request_headers[key]}
)
return urllib.parse.quote(
json.dumps(aws_signed_req, separators=(",", ":"), sort_keys=True)
)
def _create_default_metrics_options(self):
metrics_options = super(Credentials, self)._create_default_metrics_options()
metrics_options["source"] = "aws"
if self._has_custom_supplier():
metrics_options["source"] = "programmatic"
return metrics_options
def _has_custom_supplier(self):
return self._credential_source is None
def _constructor_args(self):
args = super(Credentials, self)._constructor_args()
# If a custom supplier was used, append it to the args dict.
if self._has_custom_supplier():
args.update(
{
"aws_security_credentials_supplier": self._aws_security_credentials_supplier
}
)
return args
@classmethod
def from_info(cls, info, **kwargs):
"""Creates an AWS Credentials instance from parsed external account info.
Args:
info (Mapping[str, str]): The AWS external account info in Google
format.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.aws.Credentials: The constructed credentials.
Raises:
ValueError: For invalid parameters.
"""
aws_security_credentials_supplier = info.get(
"aws_security_credentials_supplier"
)
kwargs.update(
{"aws_security_credentials_supplier": aws_security_credentials_supplier}
)
return super(Credentials, cls).from_info(info, **kwargs)
@classmethod
def from_file(cls, filename, **kwargs):
"""Creates an AWS Credentials instance from an external account json file.
Args:
filename (str): The path to the AWS external account json file.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.aws.Credentials: The constructed credentials.
"""
return super(Credentials, cls).from_file(filename, **kwargs)
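# Illustrative sketch, not part of the library: constructing AWS external
# account credentials from a custom supplier such as the one sketched above.
# The audience and workload identity pool/provider identifiers are
# placeholders, and the default STS token URL is assumed to be sufficient.
def _example_aws_credentials(supplier):
    return Credentials(
        audience=(
            "//iam.googleapis.com/projects/123456/locations/global/"
            "workloadIdentityPools/example-pool/providers/example-provider"
        ),
        subject_token_type="urn:ietf:params:aws:token-type:aws4_request",
        aws_security_credentials_supplier=supplier,
    )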

View File

@@ -0,0 +1,22 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Compute Engine authentication."""
from google.auth.compute_engine._metadata import detect_gce_residency_linux
from google.auth.compute_engine.credentials import Credentials
from google.auth.compute_engine.credentials import IDTokenCredentials
__all__ = ["Credentials", "IDTokenCredentials", "detect_gce_residency_linux"]

View File

@@ -0,0 +1,505 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides helper methods for talking to the Compute Engine metadata server.
See https://cloud.google.com/compute/docs/metadata for more details.
"""
import datetime
import http.client as http_client
import json
import logging
import os
from urllib.parse import urljoin
import requests
from google.auth import _helpers
from google.auth import environment_vars
from google.auth import exceptions
from google.auth import metrics
from google.auth import transport
from google.auth._exponential_backoff import ExponentialBackoff
from google.auth.compute_engine import _mtls
_LOGGER = logging.getLogger(__name__)
_GCE_DEFAULT_MDS_IP = "169.254.169.254"
_GCE_DEFAULT_HOST = "metadata.google.internal"
_GCE_DEFAULT_MDS_HOSTS = [_GCE_DEFAULT_HOST, _GCE_DEFAULT_MDS_IP]
# The environment variable GCE_METADATA_HOST was originally named
# GCE_METADATA_ROOT. For backwards compatibility, the new variable is
# checked first and, if it is not set, we fall back to the old one.
_GCE_METADATA_HOST = os.getenv(environment_vars.GCE_METADATA_HOST, None)
if not _GCE_METADATA_HOST:
_GCE_METADATA_HOST = os.getenv(
environment_vars.GCE_METADATA_ROOT, _GCE_DEFAULT_HOST
)
def _validate_gce_mds_configured_environment():
"""Validates the GCE metadata server environment configuration for mTLS.
mTLS is only supported when connecting to the default metadata server hosts.
If we are in strict mode (which requires mTLS), ensure that the metadata host
has not been overridden to a custom value (which means mTLS will fail).
Raises:
google.auth.exceptions.MutualTLSChannelError: if the environment
configuration is invalid for mTLS.
"""
mode = _mtls._parse_mds_mode()
if mode == _mtls.MdsMtlsMode.STRICT:
# mTLS is only supported when connecting to the default metadata host.
# Raise an exception if we are in strict mode (which requires mTLS)
# but the metadata host has been overridden to a custom MDS (which means mTLS will fail).
if _GCE_METADATA_HOST not in _GCE_DEFAULT_MDS_HOSTS:
raise exceptions.MutualTLSChannelError(
"Mutual TLS is required, but the metadata host has been overridden. "
"mTLS is only supported when connecting to the default metadata host."
)
def _get_metadata_root(use_mtls: bool):
"""Returns the metadata server root URL."""
scheme = "https" if use_mtls else "http"
return "{}://{}/computeMetadata/v1/".format(scheme, _GCE_METADATA_HOST)
def _get_metadata_ip_root(use_mtls: bool):
"""Returns the metadata server IP root URL."""
scheme = "https" if use_mtls else "http"
return "{}://{}".format(
scheme, os.getenv(environment_vars.GCE_METADATA_IP, _GCE_DEFAULT_MDS_IP)
)
_METADATA_FLAVOR_HEADER = "metadata-flavor"
_METADATA_FLAVOR_VALUE = "Google"
_METADATA_HEADERS = {_METADATA_FLAVOR_HEADER: _METADATA_FLAVOR_VALUE}
# Timeout in seconds to wait for the GCE metadata server when detecting the
# GCE environment.
try:
_METADATA_DEFAULT_TIMEOUT = int(os.getenv(environment_vars.GCE_METADATA_TIMEOUT, 3))
except ValueError: # pragma: NO COVER
_METADATA_DEFAULT_TIMEOUT = 3
# The number of tries to perform when waiting for the GCE metadata server
# when detecting the GCE environment.
try:
_METADATA_DETECT_RETRIES = int(
os.getenv(environment_vars.GCE_METADATA_DETECT_RETRIES, 3)
)
except ValueError: # pragma: NO COVER
_METADATA_DETECT_RETRIES = 3
# This is used to disable checking for the GCE metadata server and directly
# assuming it's not available.
_NO_GCE_CHECK = os.getenv(environment_vars.NO_GCE_CHECK) == "true"
# Detect GCE Residency
_GOOGLE = "Google"
_GCE_PRODUCT_NAME_FILE = "/sys/class/dmi/id/product_name"
def is_on_gce(request):
"""Checks to see if the code runs on Google Compute Engine
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
Returns:
bool: True if the code runs on Google Compute Engine, False otherwise.
"""
if _NO_GCE_CHECK:
return False
if ping(request):
return True
if os.name == "nt":
# TODO: implement GCE residency detection on Windows
return False
# Detect GCE residency on Linux
return detect_gce_residency_linux()
def detect_gce_residency_linux():
"""Detect Google Compute Engine residency by smbios check on Linux
Returns:
bool: True if the GCE product name file is detected, False otherwise.
"""
try:
with open(_GCE_PRODUCT_NAME_FILE, "r") as file_obj:
content = file_obj.read().strip()
except Exception:
return False
return content.startswith(_GOOGLE)
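# Illustrative sketch, not part of the library: checking GCE residency with a
# synchronous transport. The import is deferred so this example adds no hard
# dependency on the requests transport.
def _example_is_on_gce():
    import google.auth.transport.requests

    request = google.auth.transport.requests.Request()
    return is_on_gce(request)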
def _prepare_request_for_mds(request, use_mtls=False) -> None:
"""Prepares a request for the metadata server.
This will check if mTLS should be used and mount the mTLS adapter if needed.
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
use_mtls (bool): Whether to use mTLS for the request.
Returns:
google.auth.transport.Request: A request object to use.
If mTLS is enabled, the request will have the mTLS adapter mounted.
Otherwise, the original request will be returned unchanged.
"""
# Only modify the request if mTLS is enabled.
if use_mtls:
# Ensure the request has a session to mount the adapter to.
if not request.session:
request.session = requests.Session()
adapter = _mtls.MdsMtlsAdapter()
# Mount the adapter for all default GCE metadata hosts.
for host in _GCE_DEFAULT_MDS_HOSTS:
request.session.mount(f"https://{host}/", adapter)
def ping(
request, timeout=_METADATA_DEFAULT_TIMEOUT, retry_count=_METADATA_DETECT_RETRIES
):
"""Checks to see if the metadata server is available.
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
timeout (int): How long to wait for the metadata server to respond.
retry_count (int): How many times to attempt connecting to the metadata
server using the above timeout.
Returns:
bool: True if the metadata server is reachable, False otherwise.
"""
use_mtls = _mtls.should_use_mds_mtls()
_prepare_request_for_mds(request, use_mtls=use_mtls)
# NOTE: The explicit ``timeout`` is a workaround. The underlying
# issue is that resolving an unknown host on some networks will take
# 20-30 seconds; making this timeout short fixes the issue, but
# could lead to false negatives in the event that we are on GCE, but
# the metadata resolution was particularly slow. The latter case is
# "unlikely".
headers = _METADATA_HEADERS.copy()
headers[metrics.API_CLIENT_HEADER] = metrics.mds_ping()
backoff = ExponentialBackoff(total_attempts=retry_count)
for attempt in backoff:
try:
response = request(
url=_get_metadata_ip_root(use_mtls),
method="GET",
headers=headers,
timeout=timeout,
)
metadata_flavor = response.headers.get(_METADATA_FLAVOR_HEADER)
return (
response.status == http_client.OK
and metadata_flavor == _METADATA_FLAVOR_VALUE
)
except exceptions.TransportError as e:
_LOGGER.warning(
"Compute Engine Metadata server unavailable on "
"attempt %s of %s. Reason: %s",
attempt,
retry_count,
e,
)
return False
def get(
request,
path,
root=None,
params=None,
recursive=False,
retry_count=5,
headers=None,
return_none_for_not_found_error=False,
timeout=_METADATA_DEFAULT_TIMEOUT,
):
"""Fetch a resource from the metadata server.
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
path (str): The resource to retrieve. For example,
``'instance/service-accounts/default'``.
root (Optional[str]): The full path to the metadata server root. If not
provided, the default root will be used.
params (Optional[Mapping[str, str]]): A mapping of query parameter
keys to values.
recursive (bool): Whether to do a recursive query of metadata. See
https://cloud.google.com/compute/docs/metadata#aggcontents for more
details.
retry_count (int): How many times to attempt connecting to the metadata
server using the above timeout.
headers (Optional[Mapping[str, str]]): Headers for the request.
return_none_for_not_found_error (Optional[bool]): If True, returns None
for 404 error instead of throwing an exception.
        timeout (int): How long to wait, in seconds, for the metadata server
            to respond.
Returns:
Union[Mapping, str]: If the metadata server returns JSON, a mapping of
the decoded JSON is returned. Otherwise, the response content is
returned as a string.
Raises:
google.auth.exceptions.TransportError: if an error occurred while
retrieving metadata.
        google.auth.exceptions.MutualTLSChannelError: if using mTLS and the
            environment configuration is invalid for mTLS (for example, the
            metadata host has been overridden in strict mTLS mode).
"""
use_mtls = _mtls.should_use_mds_mtls()
    # Prepare the request object for mTLS if needed. When mTLS is enabled,
    # the mTLS adapter is mounted on the request's session in place.
_prepare_request_for_mds(request, use_mtls=use_mtls)
if root is None:
root = _get_metadata_root(use_mtls)
# mTLS is only supported when connecting to the default metadata host.
# If we are in strict mode (which requires mTLS), ensure that the metadata host
# has not been overridden to a non-default host value (which means mTLS will fail).
_validate_gce_mds_configured_environment()
base_url = urljoin(root, path)
query_params = {} if params is None else params
headers_to_use = _METADATA_HEADERS.copy()
if headers:
headers_to_use.update(headers)
if recursive:
query_params["recursive"] = "true"
url = _helpers.update_query(base_url, query_params)
backoff = ExponentialBackoff(total_attempts=retry_count)
last_exception = None
for attempt in backoff:
try:
response = request(
url=url, method="GET", headers=headers_to_use, timeout=timeout
)
if response.status in transport.DEFAULT_RETRYABLE_STATUS_CODES:
_LOGGER.warning(
"Compute Engine Metadata server unavailable on "
"attempt %s of %s. Response status: %s",
attempt,
retry_count,
response.status,
)
last_exception = None
continue
else:
last_exception = None
break
except exceptions.TransportError as e:
_LOGGER.warning(
"Compute Engine Metadata server unavailable on "
"attempt %s of %s. Reason: %s",
attempt,
retry_count,
e,
)
last_exception = e
else:
if last_exception:
raise exceptions.TransportError(
"Failed to retrieve {} from the Google Compute Engine "
"metadata service. Compute Engine Metadata server unavailable. "
"Last exception: {}".format(url, last_exception)
) from last_exception
else:
error_details = (
response.data.decode("utf-8")
if hasattr(response.data, "decode")
else response.data
)
raise exceptions.TransportError(
"Failed to retrieve {} from the Google Compute Engine "
"metadata service. Compute Engine Metadata server unavailable. "
"Response status: {}\nResponse details:\n{}".format(
url, response.status, error_details
)
)
content = _helpers.from_bytes(response.data)
if response.status == http_client.NOT_FOUND and return_none_for_not_found_error:
return None
if response.status == http_client.OK:
if (
_helpers.parse_content_type(response.headers["content-type"])
== "application/json"
):
try:
return json.loads(content)
except ValueError as caught_exc:
new_exc = exceptions.TransportError(
"Received invalid JSON from the Google Compute Engine "
"metadata service: {:.20}".format(content)
)
raise new_exc from caught_exc
else:
return content
raise exceptions.TransportError(
"Failed to retrieve {} from the Google Compute Engine "
"metadata service. Status: {} Response:\n{}".format(
url, response.status, response.data
),
response,
)
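# Illustrative usage sketch (not part of this module): fetching a single
# metadata value with ``get``. The path below is a standard metadata path for
# the default service account's email; the helper name is hypothetical.
def _example_get_default_email(request):
    # Non-JSON responses are returned as plain strings.
    return get(request, "instance/service-accounts/default/email")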
def get_project_id(request):
"""Get the Google Cloud Project ID from the metadata server.
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
Returns:
str: The project ID
Raises:
google.auth.exceptions.TransportError: if an error occurred while
retrieving metadata.
"""
return get(request, "project/project-id")
def get_universe_domain(request):
"""Get the universe domain value from the metadata server.
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
Returns:
        str: The universe domain value. If the universe domain endpoint is
            not found, the default value, googleapis.com, is returned.
Raises:
google.auth.exceptions.TransportError: if an error other than
404 occurs while retrieving metadata.
"""
universe_domain = get(
request, "universe/universe-domain", return_none_for_not_found_error=True
)
if not universe_domain:
return "googleapis.com"
return universe_domain
def get_service_account_info(request, service_account="default"):
"""Get information about a service account from the metadata server.
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
service_account (str): The string 'default' or a service account email
            address. This determines which service account to acquire
            information for.
Returns:
Mapping: The service account's information, for example::
{
'email': '...',
'scopes': ['scope', ...],
'aliases': ['default', '...']
}
Raises:
google.auth.exceptions.TransportError: if an error occurred while
retrieving metadata.
"""
path = "instance/service-accounts/{0}/".format(service_account)
# See https://cloud.google.com/compute/docs/metadata#aggcontents
# for more on the use of 'recursive'.
return get(request, path, params={"recursive": "true"})
def get_service_account_token(request, service_account="default", scopes=None):
"""Get the OAuth 2.0 access token for a service account.
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
service_account (str): The string 'default' or a service account email
            address. This determines which service account to acquire an
            access token for.
scopes (Optional[Union[str, List[str]]]): Optional string or list of
strings with auth scopes.
Returns:
Tuple[str, datetime]: The access token and its expiration.
Raises:
google.auth.exceptions.TransportError: if an error occurred while
retrieving metadata.
"""
from google.auth import _agent_identity_utils
params = {}
if scopes:
if not isinstance(scopes, str):
scopes = ",".join(scopes)
params["scopes"] = scopes
cert = _agent_identity_utils.get_and_parse_agent_identity_certificate()
if cert:
if _agent_identity_utils.should_request_bound_token(cert):
fingerprint = _agent_identity_utils.calculate_certificate_fingerprint(cert)
params["bindCertificateFingerprint"] = fingerprint
metrics_header = {
metrics.API_CLIENT_HEADER: metrics.token_request_access_token_mds()
}
path = "instance/service-accounts/{0}/token".format(service_account)
token_json = get(request, path, params=params, headers=metrics_header)
token_expiry = _helpers.utcnow() + datetime.timedelta(
seconds=token_json["expires_in"]
)
return token_json["access_token"], token_expiry
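# Illustrative usage sketch (not part of this module): requesting a scoped
# access token for the default service account. The cloud-platform scope is
# used purely as an example; the helper name is hypothetical.
def _example_fetch_access_token(request):
    token, expiry = get_service_account_token(
        request, scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )
    return token, expiry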

View File

@@ -0,0 +1,164 @@
# -*- coding: utf-8 -*-
#
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Mutual TLS for Google Compute Engine metadata server."""
from dataclasses import dataclass, field
import enum
import logging
import os
from pathlib import Path
import ssl
from urllib.parse import urlparse, urlunparse
import requests
from requests.adapters import HTTPAdapter
from google.auth import environment_vars, exceptions
_LOGGER = logging.getLogger(__name__)
_WINDOWS_OS_NAME = "nt"
# MDS mTLS certificate paths based on OS.
# Documentation to well known locations can be found at:
# https://cloud.google.com/compute/docs/metadata/overview#https-mds-certificates
_WINDOWS_MTLS_COMPONENTS_BASE_PATH = Path("C:/ProgramData/Google/ComputeEngine")
_MTLS_COMPONENTS_BASE_PATH = Path("/run/google-mds-mtls")
def _get_mds_root_crt_path():
if os.name == _WINDOWS_OS_NAME:
return _WINDOWS_MTLS_COMPONENTS_BASE_PATH / "mds-mtls-root.crt"
else:
return _MTLS_COMPONENTS_BASE_PATH / "root.crt"
def _get_mds_client_combined_cert_path():
if os.name == _WINDOWS_OS_NAME:
return _WINDOWS_MTLS_COMPONENTS_BASE_PATH / "mds-mtls-client.key"
else:
return _MTLS_COMPONENTS_BASE_PATH / "client.key"
@dataclass
class MdsMtlsConfig:
ca_cert_path: Path = field(
default_factory=_get_mds_root_crt_path
) # path to CA certificate
client_combined_cert_path: Path = field(
default_factory=_get_mds_client_combined_cert_path
) # path to file containing client certificate and key
def _certs_exist(mds_mtls_config: MdsMtlsConfig):
"""Checks if the mTLS certificates exist."""
return os.path.exists(mds_mtls_config.ca_cert_path) and os.path.exists(
mds_mtls_config.client_combined_cert_path
)
class MdsMtlsMode(enum.Enum):
"""MDS mTLS mode. Used to configure connection behavior when connecting to MDS.
STRICT: Always use HTTPS/mTLS. If certificates are not found locally, an error will be returned.
NONE: Never use mTLS. Requests will use regular HTTP.
DEFAULT: Use mTLS if certificates are found locally, otherwise use regular HTTP.
"""
STRICT = "strict"
NONE = "none"
DEFAULT = "default"
def _parse_mds_mode():
"""Parses the GCE_METADATA_MTLS_MODE environment variable."""
mode_str = os.environ.get(
environment_vars.GCE_METADATA_MTLS_MODE, "default"
).lower()
try:
return MdsMtlsMode(mode_str)
except ValueError:
raise ValueError(
"Invalid value for GCE_METADATA_MTLS_MODE. Must be one of 'strict', 'none', or 'default'."
)
def should_use_mds_mtls(mds_mtls_config: MdsMtlsConfig = MdsMtlsConfig()):
"""Determines if mTLS should be used for the metadata server."""
mode = _parse_mds_mode()
if mode == MdsMtlsMode.STRICT:
if not _certs_exist(mds_mtls_config):
raise exceptions.MutualTLSChannelError(
"mTLS certificates not found in strict mode."
)
return True
elif mode == MdsMtlsMode.NONE:
return False
else: # Default mode
return _certs_exist(mds_mtls_config)
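# Illustrative sketch (not part of this module): opting into strict mTLS via
# the GCE_METADATA_MTLS_MODE environment variable. The helper name is
# hypothetical; in strict mode missing certificates raise
# MutualTLSChannelError instead of silently falling back to HTTP.
def _example_require_mds_mtls():
    os.environ[environment_vars.GCE_METADATA_MTLS_MODE] = "strict"
    return should_use_mds_mtls()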
class MdsMtlsAdapter(HTTPAdapter):
"""An HTTP adapter that uses mTLS for the metadata server."""
def __init__(
self, mds_mtls_config: MdsMtlsConfig = MdsMtlsConfig(), *args, **kwargs
):
self.ssl_context = ssl.create_default_context()
self.ssl_context.load_verify_locations(cafile=mds_mtls_config.ca_cert_path)
self.ssl_context.load_cert_chain(
certfile=mds_mtls_config.client_combined_cert_path
)
super(MdsMtlsAdapter, self).__init__(*args, **kwargs)
def init_poolmanager(self, *args, **kwargs):
kwargs["ssl_context"] = self.ssl_context
return super(MdsMtlsAdapter, self).init_poolmanager(*args, **kwargs)
def proxy_manager_for(self, *args, **kwargs):
kwargs["ssl_context"] = self.ssl_context
return super(MdsMtlsAdapter, self).proxy_manager_for(*args, **kwargs)
def send(self, request, **kwargs):
# If we are in strict mode, always use mTLS (no HTTP fallback)
if _parse_mds_mode() == MdsMtlsMode.STRICT:
return super(MdsMtlsAdapter, self).send(request, **kwargs)
# In default mode, attempt mTLS first, then fallback to HTTP on failure
try:
response = super(MdsMtlsAdapter, self).send(request, **kwargs)
response.raise_for_status()
return response
except (
ssl.SSLError,
requests.exceptions.SSLError,
requests.exceptions.HTTPError,
) as e:
_LOGGER.warning(
"mTLS connection to Compute Engine Metadata server failed. "
"Falling back to standard HTTP. Reason: %s",
e,
)
# Fallback to standard HTTP
parsed_original_url = urlparse(request.url)
http_fallback_url = urlunparse(parsed_original_url._replace(scheme="http"))
request.url = http_fallback_url
# Use a standard HTTPAdapter for the fallback
http_adapter = HTTPAdapter()
return http_adapter.send(request, **kwargs)
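# Illustrative usage sketch (not part of this module): mounting the adapter on
# a ``requests`` session so metadata calls use mTLS when certificates are
# present. The hostname is the standard metadata host; the helper name is
# hypothetical.
def _example_session_with_mds_mtls():
    session = requests.Session()
    if should_use_mds_mtls():
        session.mount("https://metadata.google.internal/", MdsMtlsAdapter())
    return session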

View File

@@ -0,0 +1,556 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Compute Engine credentials.
This module provides authentication for an application running on Google
Compute Engine using the Compute Engine metadata server.
"""
import datetime
from google.auth import _helpers
from google.auth import credentials
from google.auth import exceptions
from google.auth import iam
from google.auth import jwt
from google.auth import metrics
from google.auth.compute_engine import _metadata
from google.oauth2 import _client
_TRUST_BOUNDARY_LOOKUP_ENDPOINT = (
"https://iamcredentials.{}/v1/projects/-/serviceAccounts/{}/allowedLocations"
)
class Credentials(
credentials.Scoped,
credentials.CredentialsWithQuotaProject,
credentials.CredentialsWithUniverseDomain,
credentials.CredentialsWithTrustBoundary,
):
"""Compute Engine Credentials.
These credentials use the Google Compute Engine metadata server to obtain
OAuth 2.0 access tokens associated with the instance's service account,
and are also used for Cloud Run, Flex and App Engine (except for the Python
2.7 runtime, which is supported only on older versions of this library).
For more information about Compute Engine authentication, including how
to configure scopes, see the `Compute Engine authentication
documentation`_.
.. note:: On Compute Engine the metadata server ignores requested scopes.
On Cloud Run, Flex and App Engine the server honours requested scopes.
.. _Compute Engine authentication documentation:
https://cloud.google.com/compute/docs/authentication#using
"""
def __init__(
self,
service_account_email="default",
quota_project_id=None,
scopes=None,
default_scopes=None,
universe_domain=None,
trust_boundary=None,
):
"""
Args:
service_account_email (str): The service account email to use, or
'default'. A Compute Engine instance may have multiple service
accounts.
quota_project_id (Optional[str]): The project ID used for quota and
billing.
scopes (Optional[Sequence[str]]): The list of scopes for the credentials.
default_scopes (Optional[Sequence[str]]): Default scopes passed by a
Google client library. Use 'scopes' for user-defined scopes.
universe_domain (Optional[str]): The universe domain. If not
provided or None, credential will attempt to fetch the value
from metadata server. If metadata server doesn't have universe
domain endpoint, then the default googleapis.com will be used.
trust_boundary (Mapping[str,str]): A credential trust boundary.
"""
super(Credentials, self).__init__()
self._service_account_email = service_account_email
self._quota_project_id = quota_project_id
self._scopes = scopes
self._default_scopes = default_scopes
self._universe_domain_cached = False
if universe_domain:
self._universe_domain = universe_domain
self._universe_domain_cached = True
self._trust_boundary = trust_boundary
def _retrieve_info(self, request):
"""Retrieve information about the service account.
Updates the scopes and retrieves the full service account email.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
"""
info = _metadata.get_service_account_info(
request, service_account=self._service_account_email
)
if not info or "email" not in info:
raise exceptions.RefreshError(
"Unexpected response from metadata server: "
"service account info is missing 'email' field."
)
self._service_account_email = info["email"]
# Don't override scopes requested by the user.
if self._scopes is None:
self._scopes = info.get("scopes")
def _metric_header_for_usage(self):
return metrics.CRED_TYPE_SA_MDS
def _perform_refresh_token(self, request):
"""Refresh the access token and scopes.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
Raises:
google.auth.exceptions.RefreshError: If the Compute Engine metadata
                service can't be reached or if the instance has no
credentials.
"""
try:
self._retrieve_info(request)
scopes = self._scopes if self._scopes is not None else self._default_scopes
# Always fetch token with default service account email.
self.token, self.expiry = _metadata.get_service_account_token(
request, service_account="default", scopes=scopes
)
except exceptions.TransportError as caught_exc:
new_exc = exceptions.RefreshError(caught_exc)
raise new_exc from caught_exc
def _build_trust_boundary_lookup_url(self):
"""Builds and returns the URL for the trust boundary lookup API for GCE."""
# If the service account email is 'default', we need to get the
# actual email address from the metadata server.
if self._service_account_email == "default":
from google.auth.transport import requests as google_auth_requests
request = google_auth_requests.Request()
try:
info = _metadata.get_service_account_info(request, "default")
if not info or "email" not in info:
raise exceptions.RefreshError(
"Unexpected response from metadata server: "
"service account info is missing 'email' field."
)
self._service_account_email = info["email"]
except exceptions.TransportError as e:
# If fetching the service account email fails due to a transport error,
# it means we cannot build the trust boundary lookup URL.
# Wrap this in a RefreshError so it's caught by _refresh_trust_boundary.
raise exceptions.RefreshError(
"Failed to get service account email for trust boundary lookup: {}".format(
e
)
) from e
return _TRUST_BOUNDARY_LOOKUP_ENDPOINT.format(
self.universe_domain, self.service_account_email
)
@property
def service_account_email(self):
"""The service account email.
.. note:: This is not guaranteed to be set until :meth:`refresh` has been
called.
"""
return self._service_account_email
@property
def requires_scopes(self):
return not self._scopes
@property
def universe_domain(self):
if self._universe_domain_cached:
return self._universe_domain
from google.auth.transport import requests as google_auth_requests
self._universe_domain = _metadata.get_universe_domain(
google_auth_requests.Request()
)
self._universe_domain_cached = True
return self._universe_domain
@_helpers.copy_docstring(credentials.Credentials)
def get_cred_info(self):
return {
"credential_source": "metadata server",
"credential_type": "VM credentials",
"principal": self.service_account_email,
}
@_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
def with_quota_project(self, quota_project_id):
creds = self.__class__(
service_account_email=self._service_account_email,
quota_project_id=quota_project_id,
scopes=self._scopes,
default_scopes=self._default_scopes,
universe_domain=self._universe_domain,
trust_boundary=self._trust_boundary,
)
creds._universe_domain_cached = self._universe_domain_cached
return creds
@_helpers.copy_docstring(credentials.Scoped)
def with_scopes(self, scopes, default_scopes=None):
# Compute Engine credentials can not be scoped (the metadata service
# ignores the scopes parameter). App Engine, Cloud Run and Flex support
# requesting scopes.
creds = self.__class__(
scopes=scopes,
default_scopes=default_scopes,
service_account_email=self._service_account_email,
quota_project_id=self._quota_project_id,
universe_domain=self._universe_domain,
trust_boundary=self._trust_boundary,
)
creds._universe_domain_cached = self._universe_domain_cached
return creds
@_helpers.copy_docstring(credentials.CredentialsWithUniverseDomain)
def with_universe_domain(self, universe_domain):
return self.__class__(
scopes=self._scopes,
default_scopes=self._default_scopes,
service_account_email=self._service_account_email,
quota_project_id=self._quota_project_id,
trust_boundary=self._trust_boundary,
universe_domain=universe_domain,
)
@_helpers.copy_docstring(credentials.CredentialsWithTrustBoundary)
def with_trust_boundary(self, trust_boundary):
creds = self.__class__(
service_account_email=self._service_account_email,
quota_project_id=self._quota_project_id,
scopes=self._scopes,
default_scopes=self._default_scopes,
universe_domain=self._universe_domain,
trust_boundary=trust_boundary,
)
creds._universe_domain_cached = self._universe_domain_cached
return creds
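# Illustrative usage sketch (not part of this module): refreshing Compute
# Engine credentials against the metadata server. Assumes the ``requests``
# transport extra is installed; the helper name is hypothetical.
def _example_refresh_compute_credentials():
    from google.auth.transport import requests as google_auth_requests
    credentials = Credentials()
    credentials.refresh(google_auth_requests.Request())
    return credentials.token, credentials.service_account_email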
_DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
_DEFAULT_TOKEN_URI = "https://www.googleapis.com/oauth2/v4/token"
class IDTokenCredentials(
credentials.CredentialsWithQuotaProject,
credentials.Signing,
credentials.CredentialsWithTokenUri,
):
"""Open ID Connect ID Token-based service account credentials.
    These credentials rely on the default service account of a GCE instance.
    An ID token can be requested from the `GCE metadata server identity endpoint`_,
    the IAM token endpoint, or another token endpoint you specify. If the
    metadata server identity endpoint is not used, the GCE instance must have
    been started with a service account that has access to the IAM Cloud API.
.. _GCE metadata server identity endpoint:
https://cloud.google.com/compute/docs/instances/verifying-instance-identity
"""
def __init__(
self,
request,
target_audience,
token_uri=None,
additional_claims=None,
service_account_email=None,
signer=None,
use_metadata_identity_endpoint=False,
quota_project_id=None,
):
"""
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
target_audience (str): The intended audience for these credentials,
used when requesting the ID Token. The ID Token's ``aud`` claim
will be set to this string.
token_uri (str): The OAuth 2.0 Token URI.
additional_claims (Mapping[str, str]): Any additional claims for
the JWT assertion used in the authorization grant.
service_account_email (str): Optional explicit service account to
use to sign JWT tokens.
By default, this is the default GCE service account.
signer (google.auth.crypt.Signer): The signer used to sign JWTs.
In case the signer is specified, the request argument will be
ignored.
use_metadata_identity_endpoint (bool): Whether to use GCE metadata
identity endpoint. For backward compatibility the default value
is False. If set to True, ``token_uri``, ``additional_claims``,
                ``service_account_email``, and ``signer`` arguments must not
                be set; otherwise a ValueError will be raised.
quota_project_id (Optional[str]): The project ID used for quota and
billing.
Raises:
ValueError:
If ``use_metadata_identity_endpoint`` is set to True, and one of
``token_uri``, ``additional_claims``, ``service_account_email``,
``signer`` arguments is set.
"""
super(IDTokenCredentials, self).__init__()
self._quota_project_id = quota_project_id
self._use_metadata_identity_endpoint = use_metadata_identity_endpoint
self._target_audience = target_audience
if use_metadata_identity_endpoint:
if token_uri or additional_claims or service_account_email or signer:
raise ValueError(
"If use_metadata_identity_endpoint is set, token_uri, "
"additional_claims, service_account_email, signer arguments"
" must not be set"
)
self._token_uri = None
self._additional_claims = None
self._signer = None
if service_account_email is None:
sa_info = _metadata.get_service_account_info(request)
self._service_account_email = sa_info["email"]
else:
self._service_account_email = service_account_email
if not use_metadata_identity_endpoint:
if signer is None:
signer = iam.Signer(
request=request,
credentials=Credentials(),
service_account_email=self._service_account_email,
)
self._signer = signer
self._token_uri = token_uri or _DEFAULT_TOKEN_URI
if additional_claims is not None:
self._additional_claims = additional_claims
else:
self._additional_claims = {}
def with_target_audience(self, target_audience):
"""Create a copy of these credentials with the specified target
audience.
Args:
target_audience (str): The intended audience for these credentials,
used when requesting the ID Token.
Returns:
google.auth.service_account.IDTokenCredentials: A new credentials
instance.
"""
# since the signer is already instantiated,
# the request is not needed
if self._use_metadata_identity_endpoint:
return self.__class__(
None,
target_audience=target_audience,
use_metadata_identity_endpoint=True,
quota_project_id=self._quota_project_id,
)
else:
return self.__class__(
None,
service_account_email=self._service_account_email,
token_uri=self._token_uri,
target_audience=target_audience,
additional_claims=self._additional_claims.copy(),
signer=self.signer,
use_metadata_identity_endpoint=False,
quota_project_id=self._quota_project_id,
)
@_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
def with_quota_project(self, quota_project_id):
# since the signer is already instantiated,
# the request is not needed
if self._use_metadata_identity_endpoint:
return self.__class__(
None,
target_audience=self._target_audience,
use_metadata_identity_endpoint=True,
quota_project_id=quota_project_id,
)
else:
return self.__class__(
None,
service_account_email=self._service_account_email,
token_uri=self._token_uri,
target_audience=self._target_audience,
additional_claims=self._additional_claims.copy(),
signer=self.signer,
use_metadata_identity_endpoint=False,
quota_project_id=quota_project_id,
)
@_helpers.copy_docstring(credentials.CredentialsWithTokenUri)
def with_token_uri(self, token_uri):
# since the signer is already instantiated,
# the request is not needed
if self._use_metadata_identity_endpoint:
raise ValueError(
"If use_metadata_identity_endpoint is set, token_uri" " must not be set"
)
else:
return self.__class__(
None,
service_account_email=self._service_account_email,
token_uri=token_uri,
target_audience=self._target_audience,
additional_claims=self._additional_claims.copy(),
signer=self.signer,
use_metadata_identity_endpoint=False,
quota_project_id=self.quota_project_id,
)
def _make_authorization_grant_assertion(self):
"""Create the OAuth 2.0 assertion.
This assertion is used during the OAuth 2.0 grant to acquire an
ID token.
Returns:
bytes: The authorization grant assertion.
"""
now = _helpers.utcnow()
lifetime = datetime.timedelta(seconds=_DEFAULT_TOKEN_LIFETIME_SECS)
expiry = now + lifetime
payload = {
"iat": _helpers.datetime_to_secs(now),
"exp": _helpers.datetime_to_secs(expiry),
# The issuer must be the service account email.
"iss": self.service_account_email,
# The audience must be the auth token endpoint's URI
"aud": self._token_uri,
# The target audience specifies which service the ID token is
# intended for.
"target_audience": self._target_audience,
}
payload.update(self._additional_claims)
token = jwt.encode(self._signer, payload)
return token
def _call_metadata_identity_endpoint(self, request):
"""Request ID token from metadata identity endpoint.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
Returns:
Tuple[str, datetime.datetime]: The ID token and the expiry of the ID token.
Raises:
google.auth.exceptions.RefreshError: If the Compute Engine metadata
service can't be reached or if the instance has no credentials.
ValueError: If extracting expiry from the obtained ID token fails.
"""
try:
path = "instance/service-accounts/default/identity"
params = {"audience": self._target_audience, "format": "full"}
metrics_header = {
metrics.API_CLIENT_HEADER: metrics.token_request_id_token_mds()
}
id_token = _metadata.get(
request, path, params=params, headers=metrics_header
)
except exceptions.TransportError as caught_exc:
new_exc = exceptions.RefreshError(caught_exc)
raise new_exc from caught_exc
_, payload, _, _ = jwt._unverified_decode(id_token)
return id_token, _helpers.utcfromtimestamp(payload["exp"])
def refresh(self, request):
"""Refreshes the ID token.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
Raises:
google.auth.exceptions.RefreshError: If the credentials could
not be refreshed.
ValueError: If extracting expiry from the obtained ID token fails.
"""
if self._use_metadata_identity_endpoint:
self.token, self.expiry = self._call_metadata_identity_endpoint(request)
else:
assertion = self._make_authorization_grant_assertion()
access_token, expiry, _ = _client.id_token_jwt_grant(
request, self._token_uri, assertion
)
self.token = access_token
self.expiry = expiry
@property # type: ignore
@_helpers.copy_docstring(credentials.Signing)
def signer(self):
return self._signer
def sign_bytes(self, message):
"""Signs the given message.
Args:
message (bytes): The message to sign.
Returns:
bytes: The message's cryptographic signature.
Raises:
ValueError:
Signer is not available if metadata identity endpoint is used.
"""
if self._use_metadata_identity_endpoint:
raise exceptions.InvalidOperation(
"Signer is not available if metadata identity endpoint is used"
)
return self._signer.sign(message)
@property
def service_account_email(self):
"""The service account email."""
return self._service_account_email
@property
def signer_email(self):
return self._service_account_email
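# Illustrative usage sketch (not part of this module): fetching an ID token
# for a target audience through the metadata identity endpoint. The audience
# URL is a placeholder; the helper name is hypothetical.
def _example_fetch_id_token(target_audience="https://example-service.a.run.app"):
    from google.auth.transport import requests as google_auth_requests
    request = google_auth_requests.Request()
    credentials = IDTokenCredentials(
        request, target_audience, use_metadata_identity_endpoint=True
    )
    credentials.refresh(request)
    return credentials.token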

View File

@@ -0,0 +1,667 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interfaces for credentials."""
import abc
from enum import Enum
import logging
import os
from typing import List
from google.auth import _helpers, environment_vars
from google.auth import exceptions
from google.auth import metrics
from google.auth._credentials_base import _BaseCredentials
from google.auth._refresh_worker import RefreshThreadManager
DEFAULT_UNIVERSE_DOMAIN = "googleapis.com"
NO_OP_TRUST_BOUNDARY_LOCATIONS: List[str] = []
NO_OP_TRUST_BOUNDARY_ENCODED_LOCATIONS = "0x0"
_LOGGER = logging.getLogger("google.auth._default")
class Credentials(_BaseCredentials):
"""Base class for all credentials.
All credentials have a :attr:`token` that is used for authentication and
may also optionally set an :attr:`expiry` to indicate when the token will
no longer be valid.
Most credentials will be :attr:`invalid` until :meth:`refresh` is called.
Credentials can do this automatically before the first HTTP request in
:meth:`before_request`.
Although the token and expiration will change as the credentials are
:meth:`refreshed <refresh>` and used, credentials should be considered
immutable. Various credentials will accept configuration such as private
keys, scopes, and other options. These options are not changeable after
construction. Some classes will provide mechanisms to copy the credentials
with modifications such as :meth:`ScopedCredentials.with_scopes`.
"""
def __init__(self):
super(Credentials, self).__init__()
self.expiry = None
"""Optional[datetime]: When the token expires and is no longer valid.
If this is None, the token is assumed to never expire."""
self._quota_project_id = None
"""Optional[str]: Project to use for quota and billing purposes."""
self._trust_boundary = None
"""Optional[dict]: Cache of a trust boundary response which has a list
of allowed regions and an encoded string representation of credentials
trust boundary."""
self._universe_domain = DEFAULT_UNIVERSE_DOMAIN
"""Optional[str]: The universe domain value, default is googleapis.com
"""
self._use_non_blocking_refresh = False
self._refresh_worker = RefreshThreadManager()
@property
def expired(self):
"""Checks if the credentials are expired.
Note that credentials can be invalid but not expired because
        credentials with :attr:`expiry` set to None are considered never to
expire.
.. deprecated:: v2.24.0
Prefer checking :attr:`token_state` instead.
"""
if not self.expiry:
return False
# Remove some threshold from expiry to err on the side of reporting
# expiration early so that we avoid the 401-refresh-retry loop.
skewed_expiry = self.expiry - _helpers.REFRESH_THRESHOLD
return _helpers.utcnow() >= skewed_expiry
@property
def valid(self):
"""Checks the validity of the credentials.
This is True if the credentials have a :attr:`token` and the token
is not :attr:`expired`.
.. deprecated:: v2.24.0
Prefer checking :attr:`token_state` instead.
"""
return self.token is not None and not self.expired
@property
def token_state(self):
"""
See `:obj:`TokenState`
"""
if self.token is None:
return TokenState.INVALID
# Credentials that can't expire are always treated as fresh.
if self.expiry is None:
return TokenState.FRESH
expired = _helpers.utcnow() >= self.expiry
if expired:
return TokenState.INVALID
is_stale = _helpers.utcnow() >= (self.expiry - _helpers.REFRESH_THRESHOLD)
if is_stale:
return TokenState.STALE
return TokenState.FRESH
@property
def quota_project_id(self):
"""Project to use for quota and billing purposes."""
return self._quota_project_id
@property
def universe_domain(self):
"""The universe domain value."""
return self._universe_domain
def get_cred_info(self):
"""The credential information JSON.
The credential information will be added to auth related error messages
by client library.
Returns:
Mapping[str, str]: The credential information JSON.
"""
return None
@abc.abstractmethod
def refresh(self, request):
"""Refreshes the access token.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
Raises:
google.auth.exceptions.RefreshError: If the credentials could
not be refreshed.
"""
# pylint: disable=missing-raises-doc
# (pylint doesn't recognize that this is abstract)
raise NotImplementedError("Refresh must be implemented")
def _metric_header_for_usage(self):
"""The x-goog-api-client header for token usage metric.
This header will be added to the API service requests in before_request
method. For example, "cred-type/sa-jwt" means service account self
signed jwt access token is used in the API service request
authorization header. Children credentials classes need to override
this method to provide the header value, if the token usage metric is
needed.
Returns:
str: The x-goog-api-client header value.
"""
return None
def apply(self, headers, token=None):
"""Apply the token to the authentication header.
Args:
headers (Mapping): The HTTP request headers.
token (Optional[str]): If specified, overrides the current access
token.
"""
self._apply(headers, token)
if self.quota_project_id:
headers["x-goog-user-project"] = self.quota_project_id
def _blocking_refresh(self, request):
if not self.valid:
self.refresh(request)
def _non_blocking_refresh(self, request):
use_blocking_refresh_fallback = False
if self.token_state == TokenState.STALE:
use_blocking_refresh_fallback = not self._refresh_worker.start_refresh(
self, request
)
if self.token_state == TokenState.INVALID or use_blocking_refresh_fallback:
self.refresh(request)
# If the blocking refresh succeeds then we can clear the error info
# on the background refresh worker, and perform refreshes in a
# background thread.
self._refresh_worker.clear_error()
def before_request(self, request, method, url, headers):
"""Performs credential-specific before request logic.
Refreshes the credentials if necessary, then calls :meth:`apply` to
apply the token to the authentication header.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
method (str): The request's HTTP method or the RPC method being
invoked.
url (str): The request's URI or the RPC service's URI.
headers (Mapping): The request's headers.
"""
# pylint: disable=unused-argument
# (Subclasses may use these arguments to ascertain information about
# the http request.)
if self._use_non_blocking_refresh:
self._non_blocking_refresh(request)
else:
self._blocking_refresh(request)
metrics.add_metric_header(headers, self._metric_header_for_usage())
self.apply(headers)
def with_non_blocking_refresh(self):
self._use_non_blocking_refresh = True
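# Illustrative sketch (not part of this module): a minimal concrete subclass
# showing how ``refresh`` feeds ``before_request``. The static token value is
# an assumption for demonstration only; with ``expiry`` left as None the token
# is treated as never expiring (see ``token_state``).
class _ExampleStaticCredentials(Credentials):
    def refresh(self, request):
        self.token = "example-token"
# Hypothetical usage:
#   headers = {}
#   creds = _ExampleStaticCredentials()
#   creds.before_request(None, "GET", "https://example.com", headers)
#   headers now contains {"authorization": "Bearer example-token"}.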
class CredentialsWithQuotaProject(Credentials):
"""Abstract base for credentials supporting ``with_quota_project`` factory"""
def with_quota_project(self, quota_project_id):
"""Returns a copy of these credentials with a modified quota project.
Args:
quota_project_id (str): The project to use for quota and
billing purposes
Returns:
google.auth.credentials.Credentials: A new credentials instance.
"""
raise NotImplementedError("This credential does not support quota project.")
def with_quota_project_from_environment(self):
quota_from_env = os.environ.get(environment_vars.GOOGLE_CLOUD_QUOTA_PROJECT)
if quota_from_env:
return self.with_quota_project(quota_from_env)
return self
class CredentialsWithTokenUri(Credentials):
"""Abstract base for credentials supporting ``with_token_uri`` factory"""
def with_token_uri(self, token_uri):
"""Returns a copy of these credentials with a modified token uri.
Args:
token_uri (str): The uri to use for fetching/exchanging tokens
Returns:
google.auth.credentials.Credentials: A new credentials instance.
"""
raise NotImplementedError("This credential does not use token uri.")
class CredentialsWithUniverseDomain(Credentials):
"""Abstract base for credentials supporting ``with_universe_domain`` factory"""
def with_universe_domain(self, universe_domain):
"""Returns a copy of these credentials with a modified universe domain.
Args:
universe_domain (str): The universe domain to use
Returns:
google.auth.credentials.Credentials: A new credentials instance.
"""
raise NotImplementedError(
"This credential does not support with_universe_domain."
)
class CredentialsWithTrustBoundary(Credentials):
"""Abstract base for credentials supporting ``with_trust_boundary`` factory"""
@abc.abstractmethod
def _perform_refresh_token(self, request):
"""Refreshes the access token.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
Raises:
google.auth.exceptions.RefreshError: If the credentials could
not be refreshed.
"""
raise NotImplementedError("_perform_refresh_token must be implemented")
def with_trust_boundary(self, trust_boundary):
"""Returns a copy of these credentials with a modified trust boundary.
Args:
            trust_boundary (Mapping[str, str]): The trust boundary to use for
                the credential. This should be a map with a "locations" key
                that maps to a list of GCP regions, and an "encodedLocations"
                key that maps to a hex string.
Returns:
google.auth.credentials.Credentials: A new credentials instance.
"""
raise NotImplementedError("This credential does not support trust boundaries.")
def _is_trust_boundary_lookup_required(self):
"""Checks if a trust boundary lookup is required.
A lookup is required if the feature is enabled via an environment
variable, the universe domain is supported, and a no-op boundary
is not already cached.
Returns:
bool: True if a trust boundary lookup is required, False otherwise.
"""
# 1. Check if the feature is enabled via environment variable.
if not _helpers.get_bool_from_env(
environment_vars.GOOGLE_AUTH_TRUST_BOUNDARY_ENABLED, default=False
):
return False
# 2. Skip trust boundary flow for non-default universe domains.
if self.universe_domain != DEFAULT_UNIVERSE_DOMAIN:
return False
# 3. Do not trigger refresh if credential has a cached no-op trust boundary.
return not self._has_no_op_trust_boundary()
def _get_trust_boundary_header(self):
if self._trust_boundary is not None:
if self._has_no_op_trust_boundary():
# STS expects an empty string if the trust boundary value is no-op.
return {"x-allowed-locations": ""}
else:
return {"x-allowed-locations": self._trust_boundary["encodedLocations"]}
return {}
def apply(self, headers, token=None):
"""Apply the token to the authentication header."""
super().apply(headers, token)
headers.update(self._get_trust_boundary_header())
def refresh(self, request):
"""Refreshes the access token and the trust boundary.
This method calls the subclass's token refresh logic and then
refreshes the trust boundary if applicable.
"""
self._perform_refresh_token(request)
self._refresh_trust_boundary(request)
def _refresh_trust_boundary(self, request):
"""Triggers a refresh of the trust boundary and updates the cache if necessary.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
Raises:
google.auth.exceptions.RefreshError: If the trust boundary could
not be refreshed and no cached value is available.
"""
if not self._is_trust_boundary_lookup_required():
return
try:
self._trust_boundary = self._lookup_trust_boundary(request)
except exceptions.RefreshError as error:
# If the call to the lookup API failed, check if there is a trust boundary
# already cached. If there is, do nothing. If not, then throw the error.
if self._trust_boundary is None:
raise error
if _helpers.is_logging_enabled(_LOGGER):
_LOGGER.debug(
"Using cached trust boundary due to refresh error: %s", error
)
return
def _lookup_trust_boundary(self, request):
"""Calls the trust boundary lookup API to refresh the trust boundary cache.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
Returns:
trust_boundary (dict): The trust boundary object returned by the lookup API.
Raises:
google.auth.exceptions.RefreshError: If the trust boundary could not be
retrieved.
"""
from google.oauth2 import _client
url = self._build_trust_boundary_lookup_url()
if not url:
raise exceptions.InvalidValue("Failed to build trust boundary lookup URL.")
headers = {}
self._apply(headers)
headers.update(self._get_trust_boundary_header())
return _client._lookup_trust_boundary(request, url, headers=headers)
@abc.abstractmethod
def _build_trust_boundary_lookup_url(self):
"""
Builds and returns the URL for the trust boundary lookup API.
This method should be implemented by subclasses to provide the
specific URL based on the credential type and its properties.
Returns:
str: The URL for the trust boundary lookup endpoint, or None
if lookup should be skipped (e.g., for non-applicable universe domains).
"""
raise NotImplementedError(
"_build_trust_boundary_lookup_url must be implemented"
)
def _has_no_op_trust_boundary(self):
# A no-op trust boundary is indicated by encodedLocations being "0x0".
# The "locations" list may or may not be present as an empty list.
if self._trust_boundary is None:
return False
return (
self._trust_boundary.get("encodedLocations")
== NO_OP_TRUST_BOUNDARY_ENCODED_LOCATIONS
)
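# Illustrative sketch (not part of this module): the shape of a cached no-op
# trust boundary and the header it produces. The dict literal below is an
# assumption mirroring the "locations"/"encodedLocations" structure described
# above.
_EXAMPLE_NO_OP_TRUST_BOUNDARY = {
    "locations": NO_OP_TRUST_BOUNDARY_LOCATIONS,
    "encodedLocations": NO_OP_TRUST_BOUNDARY_ENCODED_LOCATIONS,
}
# A credential caching this value sends an empty "x-allowed-locations" header,
# while a real boundary sends its "encodedLocations" hex string.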
class AnonymousCredentials(Credentials):
"""Credentials that do not provide any authentication information.
These are useful in the case of services that support anonymous access or
local service emulators that do not use credentials.
"""
@property
def expired(self):
"""Returns `False`, anonymous credentials never expire."""
return False
@property
def valid(self):
"""Returns `True`, anonymous credentials are always valid."""
return True
def refresh(self, request):
"""Raises :class:``InvalidOperation``, anonymous credentials cannot be
refreshed."""
raise exceptions.InvalidOperation("Anonymous credentials cannot be refreshed.")
def apply(self, headers, token=None):
"""Anonymous credentials do nothing to the request.
The optional ``token`` argument is not supported.
Raises:
google.auth.exceptions.InvalidValue: If a token was specified.
"""
if token is not None:
raise exceptions.InvalidValue("Anonymous credentials don't support tokens.")
def before_request(self, request, method, url, headers):
"""Anonymous credentials do nothing to the request."""
class ReadOnlyScoped(metaclass=abc.ABCMeta):
"""Interface for credentials whose scopes can be queried.
OAuth 2.0-based credentials allow limiting access using scopes as described
in `RFC6749 Section 3.3`_.
    If a credential class implements this interface, then the credentials use
    scopes in their implementation.
Some credentials require scopes in order to obtain a token. You can check
if scoping is necessary with :attr:`requires_scopes`::
if credentials.requires_scopes:
# Scoping is required.
credentials = credentials.with_scopes(scopes=['one', 'two'])
Credentials that require scopes must either be constructed with scopes::
credentials = SomeScopedCredentials(scopes=['one', 'two'])
Or must copy an existing instance using :meth:`with_scopes`::
scoped_credentials = credentials.with_scopes(scopes=['one', 'two'])
    Some credentials have scopes but do not allow or require scopes to be set;
    these credentials can be used as-is.
.. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
"""
def __init__(self):
super(ReadOnlyScoped, self).__init__()
self._scopes = None
self._default_scopes = None
@property
def scopes(self):
"""Sequence[str]: the credentials' current set of scopes."""
return self._scopes
@property
def default_scopes(self):
"""Sequence[str]: the credentials' current set of default scopes."""
return self._default_scopes
@abc.abstractproperty
def requires_scopes(self):
"""True if these credentials require scopes to obtain an access token."""
return False
def has_scopes(self, scopes):
"""Checks if the credentials have the given scopes.
        .. warning:: This method is not guaranteed to be accurate if the
credentials are :attr:`~Credentials.invalid`.
Args:
scopes (Sequence[str]): The list of scopes to check.
Returns:
bool: True if the credentials have the given scopes.
"""
credential_scopes = (
self._scopes if self._scopes is not None else self._default_scopes
)
return set(scopes).issubset(set(credential_scopes or []))
class Scoped(ReadOnlyScoped):
"""Interface for credentials whose scopes can be replaced while copying.
OAuth 2.0-based credentials allow limiting access using scopes as described
in `RFC6749 Section 3.3`_.
    If a credential class implements this interface, then the credentials use
    scopes in their implementation.
Some credentials require scopes in order to obtain a token. You can check
if scoping is necessary with :attr:`requires_scopes`::
if credentials.requires_scopes:
# Scoping is required.
            credentials = credentials.with_scopes(scopes=['one', 'two'])
Credentials that require scopes must either be constructed with scopes::
credentials = SomeScopedCredentials(scopes=['one', 'two'])
Or must copy an existing instance using :meth:`with_scopes`::
scoped_credentials = credentials.with_scopes(scopes=['one', 'two'])
    Some credentials have scopes but do not allow or require scopes to be set;
    these credentials can be used as-is.
.. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
"""
@abc.abstractmethod
def with_scopes(self, scopes, default_scopes=None):
"""Create a copy of these credentials with the specified scopes.
Args:
scopes (Sequence[str]): The list of scopes to attach to the
current credentials.
Raises:
NotImplementedError: If the credentials' scopes can not be changed.
This can be avoided by checking :attr:`requires_scopes` before
calling this method.
"""
raise NotImplementedError("This class does not require scoping.")
def with_scopes_if_required(credentials, scopes, default_scopes=None):
"""Creates a copy of the credentials with scopes if scoping is required.
This helper function is useful when you do not know (or care to know) the
specific type of credentials you are using (such as when you use
:func:`google.auth.default`). This function will call
:meth:`Scoped.with_scopes` if the credentials are scoped credentials and if
the credentials require scoping. Otherwise, it will return the credentials
as-is.
Args:
credentials (google.auth.credentials.Credentials): The credentials to
scope if necessary.
scopes (Sequence[str]): The list of scopes to use.
default_scopes (Sequence[str]): Default scopes passed by a
Google client library. Use 'scopes' for user-defined scopes.
Returns:
google.auth.credentials.Credentials: Either a new set of scoped
credentials, or the passed in credentials instance if no scoping
was required.
"""
if isinstance(credentials, Scoped) and credentials.requires_scopes:
return credentials.with_scopes(scopes, default_scopes=default_scopes)
else:
return credentials
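# Illustrative usage sketch (not part of this module): applying scopes only
# when the credential type actually requires them. The cloud-platform scope is
# used purely as an example; the helper name is hypothetical.
def _example_scope_if_required(credentials):
    return with_scopes_if_required(
        credentials, ["https://www.googleapis.com/auth/cloud-platform"]
    )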
class Signing(metaclass=abc.ABCMeta):
"""Interface for credentials that can cryptographically sign messages."""
@abc.abstractmethod
def sign_bytes(self, message):
"""Signs the given message.
Args:
message (bytes): The message to sign.
Returns:
bytes: The message's cryptographic signature.
"""
# pylint: disable=missing-raises-doc,redundant-returns-doc
# (pylint doesn't recognize that this is abstract)
raise NotImplementedError("Sign bytes must be implemented.")
@abc.abstractproperty
def signer_email(self):
"""Optional[str]: An email address that identifies the signer."""
# pylint: disable=missing-raises-doc
# (pylint doesn't recognize that this is abstract)
raise NotImplementedError("Signer email must be implemented.")
@abc.abstractproperty
def signer(self):
"""google.auth.crypt.Signer: The signer used to sign bytes."""
# pylint: disable=missing-raises-doc
# (pylint doesn't recognize that this is abstract)
raise NotImplementedError("Signer must be implemented.")
class TokenState(Enum):
"""
Tracks the state of a token.
FRESH: The token is valid. It is not expired or close to expired, or the token has no expiry.
STALE: The token is close to expired, and should be refreshed. The token can be used normally.
INVALID: The token is expired or invalid. The token cannot be used for a normal operation.
"""
FRESH = 1
STALE = 2
INVALID = 3

View File

@@ -0,0 +1,96 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cryptography helpers for verifying and signing messages.
The simplest way to verify signatures is using :func:`verify_signature`::
cert = open('certs.pem').read()
valid = crypt.verify_signature(message, signature, cert)
If you're going to verify many messages with the same certificate, you can use
:class:`RSAVerifier`::
cert = open('certs.pem').read()
verifier = crypt.RSAVerifier.from_string(cert)
valid = verifier.verify(message, signature)
To sign messages use :class:`RSASigner` with a private key::
private_key = open('private_key.pem').read()
signer = crypt.RSASigner.from_string(private_key)
signature = signer.sign(message)
The code above also works for :class:`ES256Signer` and :class:`ES256Verifier`.
Note that these two classes are only available if your `cryptography` dependency
version is at least 1.4.0.
"""
from google.auth.crypt import base
from google.auth.crypt import es
from google.auth.crypt import es256
from google.auth.crypt import rsa
EsSigner = es.EsSigner
EsVerifier = es.EsVerifier
ES256Signer = es256.ES256Signer
ES256Verifier = es256.ES256Verifier
# Aliases to maintain the v1.0.0 interface, as the crypt module was split
# into submodules.
Signer = base.Signer
Verifier = base.Verifier
RSASigner = rsa.RSASigner
RSAVerifier = rsa.RSAVerifier
def verify_signature(message, signature, certs, verifier_cls=rsa.RSAVerifier):
"""Verify an RSA or ECDSA cryptographic signature.
    Checks that the provided ``signature`` was generated over ``message`` using
    the private key associated with one of the provided ``certs``.
Args:
message (Union[str, bytes]): The plaintext message.
signature (Union[str, bytes]): The cryptographic signature to check.
certs (Union[Sequence, str, bytes]): The certificate or certificates
to use to check the signature.
        verifier_cls (Optional[~google.auth.crypt.base.Verifier]): Which verifier
class to use for verification. This can be used to select different
algorithms, such as RSA or ECDSA. Default value is :class:`RSAVerifier`.
Returns:
bool: True if the signature is valid, otherwise False.
"""
if isinstance(certs, (str, bytes)):
certs = [certs]
for cert in certs:
verifier = verifier_cls.from_string(cert)
if verifier.verify(message, signature):
return True
return False
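# Illustrative usage sketch (not part of this module): signing a message with
# an RSA private key and verifying it against the matching certificate. The
# PEM contents are caller-supplied; the helper name is hypothetical.
def _example_sign_and_verify(private_key_pem, public_cert_pem, message=b"hello"):
    signer = RSASigner.from_string(private_key_pem)
    signature = signer.sign(message)
    return verify_signature(message, signature, public_cert_pem)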
__all__ = [
"EsSigner",
"EsVerifier",
"ES256Signer",
"ES256Verifier",
"RSASigner",
"RSAVerifier",
"Signer",
"Verifier",
]

View File

@@ -0,0 +1,151 @@
# Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""RSA verifier and signer that use the ``cryptography`` library.
This is a much faster implementation than the default (in
``google.auth.crypt._python_rsa``), which depends on the pure-Python
``rsa`` library.
"""
import cryptography.exceptions
from cryptography.hazmat import backends
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import padding
import cryptography.x509
from google.auth import _helpers
from google.auth.crypt import base
_CERTIFICATE_MARKER = b"-----BEGIN CERTIFICATE-----"
_BACKEND = backends.default_backend()
_PADDING = padding.PKCS1v15()
_SHA256 = hashes.SHA256()
class RSAVerifier(base.Verifier):
"""Verifies RSA cryptographic signatures using public keys.
Args:
public_key (
cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicKey):
The public key used to verify signatures.
"""
def __init__(self, public_key):
self._pubkey = public_key
@_helpers.copy_docstring(base.Verifier)
def verify(self, message, signature):
message = _helpers.to_bytes(message)
try:
self._pubkey.verify(signature, message, _PADDING, _SHA256)
return True
except (ValueError, cryptography.exceptions.InvalidSignature):
return False
@classmethod
def from_string(cls, public_key):
"""Construct an Verifier instance from a public key or public
certificate string.
Args:
public_key (Union[str, bytes]): The public key in PEM format or the
x509 public key certificate.
Returns:
Verifier: The constructed verifier.
Raises:
ValueError: If the public key can't be parsed.
"""
public_key_data = _helpers.to_bytes(public_key)
if _CERTIFICATE_MARKER in public_key_data:
cert = cryptography.x509.load_pem_x509_certificate(
public_key_data, _BACKEND
)
pubkey = cert.public_key()
else:
pubkey = serialization.load_pem_public_key(public_key_data, _BACKEND)
return cls(pubkey)
class RSASigner(base.Signer, base.FromServiceAccountMixin):
"""Signs messages with an RSA private key.
Args:
private_key (
cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey):
The private key to sign with.
key_id (str): Optional key ID used to identify this private key. This
can be useful to associate the private key with its associated
public key or certificate.
"""
def __init__(self, private_key, key_id=None):
self._key = private_key
self._key_id = key_id
@property # type: ignore
@_helpers.copy_docstring(base.Signer)
def key_id(self):
return self._key_id
@_helpers.copy_docstring(base.Signer)
def sign(self, message):
message = _helpers.to_bytes(message)
return self._key.sign(message, _PADDING, _SHA256)
@classmethod
def from_string(cls, key, key_id=None):
"""Construct a RSASigner from a private key in PEM format.
Args:
key (Union[bytes, str]): Private key in PEM format.
key_id (str): An optional key id used to identify the private key.
Returns:
google.auth.crypt._cryptography_rsa.RSASigner: The
constructed signer.
Raises:
ValueError: If ``key`` is not ``bytes`` or ``str`` (unicode).
UnicodeDecodeError: If ``key`` is ``bytes`` but cannot be decoded
into a UTF-8 ``str``.
            ValueError: If ``cryptography`` fails with "Could not deserialize key data."
"""
key = _helpers.to_bytes(key)
private_key = serialization.load_pem_private_key(
key, password=None, backend=_BACKEND
)
return cls(private_key, key_id=key_id)
def __getstate__(self):
"""Pickle helper that serializes the _key attribute."""
state = self.__dict__.copy()
state["_key"] = self._key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.PKCS8,
encryption_algorithm=serialization.NoEncryption(),
)
return state
def __setstate__(self, state):
"""Pickle helper that deserializes the _key attribute."""
state["_key"] = serialization.load_pem_private_key(state["_key"], None)
self.__dict__.update(state)
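# Illustrative sketch (not part of this module): a sign/verify round trip with
# a freshly generated RSA key. The key-generation parameters are common
# defaults chosen for the example, not values mandated by this module.
def _example_round_trip(message=b"example message"):
    from cryptography.hazmat.primitives.asymmetric import rsa as crypto_rsa
    private_key = crypto_rsa.generate_private_key(public_exponent=65537, key_size=2048)
    signer = RSASigner(private_key)
    signature = signer.sign(message)
    public_pem = private_key.public_key().public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,
    )
    return RSAVerifier.from_string(public_pem).verify(message, signature)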

View File

@@ -0,0 +1,199 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pure-Python RSA cryptography implementation.
Uses the ``rsa``, ``pyasn1`` and ``pyasn1_modules`` packages
to parse PEM files storing PKCS#1 or PKCS#8 keys as well as
certificates. There is no support for p12 files.
"""
from __future__ import absolute_import
import io
import warnings
from pyasn1.codec.der import decoder # type: ignore
from pyasn1_modules import pem # type: ignore
from pyasn1_modules.rfc2459 import Certificate # type: ignore
from pyasn1_modules.rfc5208 import PrivateKeyInfo # type: ignore
import rsa # type: ignore
from google.auth import _helpers
from google.auth import exceptions
from google.auth.crypt import base
_POW2 = (128, 64, 32, 16, 8, 4, 2, 1)
_CERTIFICATE_MARKER = b"-----BEGIN CERTIFICATE-----"
_PKCS1_MARKER = ("-----BEGIN RSA PRIVATE KEY-----", "-----END RSA PRIVATE KEY-----")
_PKCS8_MARKER = ("-----BEGIN PRIVATE KEY-----", "-----END PRIVATE KEY-----")
_PKCS8_SPEC = PrivateKeyInfo()
_warning_msg = (
"The 'rsa' library is deprecated and will be removed in a future release. "
"Please migrate to 'cryptography'."
)
def _bit_list_to_bytes(bit_list):
"""Converts an iterable of 1s and 0s to bytes.
Combines the list 8 at a time, treating each group of 8 bits
as a single byte.
Args:
bit_list (Sequence): Sequence of 1s and 0s.
Returns:
bytes: The decoded bytes.
"""
num_bits = len(bit_list)
byte_vals = bytearray()
for start in range(0, num_bits, 8):
curr_bits = bit_list[start : start + 8]
char_val = sum(val * digit for val, digit in zip(_POW2, curr_bits))
byte_vals.append(char_val)
return bytes(byte_vals)
class RSAVerifier(base.Verifier):
"""Verifies RSA cryptographic signatures using public keys.
.. deprecated::
The `rsa` library has been archived. Please migrate to
`cryptography`.
Args:
public_key (rsa.key.PublicKey): The public key used to verify
signatures.
"""
def __init__(self, public_key):
warnings.warn(
_warning_msg,
category=DeprecationWarning,
stacklevel=2,
)
self._pubkey = public_key
@_helpers.copy_docstring(base.Verifier)
def verify(self, message, signature):
message = _helpers.to_bytes(message)
try:
return rsa.pkcs1.verify(message, signature, self._pubkey)
except (ValueError, rsa.pkcs1.VerificationError):
return False
@classmethod
def from_string(cls, public_key):
"""Construct an Verifier instance from a public key or public
certificate string.
Args:
public_key (Union[str, bytes]): The public key in PEM format or the
x509 public key certificate.
Returns:
google.auth.crypt._python_rsa.RSAVerifier: The constructed verifier.
Raises:
ValueError: If the public_key can't be parsed.
"""
public_key = _helpers.to_bytes(public_key)
is_x509_cert = _CERTIFICATE_MARKER in public_key
# If this is a certificate, extract the public key info.
if is_x509_cert:
der = rsa.pem.load_pem(public_key, "CERTIFICATE")
asn1_cert, remaining = decoder.decode(der, asn1Spec=Certificate())
if remaining != b"":
raise exceptions.InvalidValue("Unused bytes", remaining)
cert_info = asn1_cert["tbsCertificate"]["subjectPublicKeyInfo"]
key_bytes = _bit_list_to_bytes(cert_info["subjectPublicKey"])
pubkey = rsa.PublicKey.load_pkcs1(key_bytes, "DER")
else:
pubkey = rsa.PublicKey.load_pkcs1(public_key, "PEM")
return cls(pubkey)
class RSASigner(base.Signer, base.FromServiceAccountMixin):
"""Signs messages with an RSA private key.
.. deprecated::
The `rsa` library has been archived. Please migrate to
`cryptography`.
Args:
private_key (rsa.key.PrivateKey): The private key to sign with.
key_id (str): Optional key ID used to identify this private key. This
can be useful to associate the private key with its associated
public key or certificate.
"""
def __init__(self, private_key, key_id=None):
warnings.warn(
_warning_msg,
category=DeprecationWarning,
stacklevel=2,
)
self._key = private_key
self._key_id = key_id
@property # type: ignore
@_helpers.copy_docstring(base.Signer)
def key_id(self):
return self._key_id
@_helpers.copy_docstring(base.Signer)
def sign(self, message):
message = _helpers.to_bytes(message)
return rsa.pkcs1.sign(message, self._key, "SHA-256")
@classmethod
def from_string(cls, key, key_id=None):
"""Construct an Signer instance from a private key in PEM format.
Args:
key (str): Private key in PEM format.
key_id (str): An optional key id used to identify the private key.
Returns:
google.auth.crypt.Signer: The constructed signer.
Raises:
ValueError: If the key cannot be parsed as PKCS#1 or PKCS#8 in
PEM format.
"""
key = _helpers.from_bytes(key) # PEM expects str in Python 3
marker_id, key_bytes = pem.readPemBlocksFromFile(
io.StringIO(key), _PKCS1_MARKER, _PKCS8_MARKER
)
# Key is in pkcs1 format.
if marker_id == 0:
private_key = rsa.key.PrivateKey.load_pkcs1(key_bytes, format="DER")
# Key is in pkcs8.
elif marker_id == 1:
key_info, remaining = decoder.decode(key_bytes, asn1Spec=_PKCS8_SPEC)
if remaining != b"":
raise exceptions.InvalidValue("Unused bytes", remaining)
private_key_info = key_info.getComponentByName("privateKey")
private_key = rsa.key.PrivateKey.load_pkcs1(
private_key_info.asOctets(), format="DER"
)
else:
raise exceptions.MalformedError("No key could be detected.")
return cls(private_key, key_id=key_id)
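For completeness, a deprecation-aware sketch of the pure-Python path (editor's illustration, assuming the archived ``rsa`` package is still installed). It only exercises the interface; new code should prefer the ``cryptography``-backed classes, and constructing these classes emits a DeprecationWarning.

import rsa as rsa_lib

from google.auth.crypt import _python_rsa

# Pure-Python key generation; a 2048-bit key can take a few seconds.
pub, priv = rsa_lib.newkeys(2048)

signer = _python_rsa.RSASigner.from_string(priv.save_pkcs1())     # PKCS#1 PEM
verifier = _python_rsa.RSAVerifier.from_string(pub.save_pkcs1())  # PKCS#1 PEM

sig = signer.sign("hello")
assert verifier.verify("hello", sig)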

View File

@@ -0,0 +1,127 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base classes for cryptographic signers and verifiers."""
import abc
import io
import json
from google.auth import exceptions
_JSON_FILE_PRIVATE_KEY = "private_key"
_JSON_FILE_PRIVATE_KEY_ID = "private_key_id"
class Verifier(metaclass=abc.ABCMeta):
"""Abstract base class for crytographic signature verifiers."""
@abc.abstractmethod
def verify(self, message, signature):
"""Verifies a message against a cryptographic signature.
Args:
message (Union[str, bytes]): The message to verify.
signature (Union[str, bytes]): The cryptography signature to check.
Returns:
bool: True if message was signed by the private key associated
with the public key that this object was constructed with.
"""
# pylint: disable=missing-raises-doc,redundant-returns-doc
# (pylint doesn't recognize that this is abstract)
raise NotImplementedError("Verify must be implemented")
class Signer(metaclass=abc.ABCMeta):
"""Abstract base class for cryptographic signers."""
@abc.abstractproperty
def key_id(self):
"""Optional[str]: The key ID used to identify this private key."""
raise NotImplementedError("Key id must be implemented")
@abc.abstractmethod
def sign(self, message):
"""Signs a message.
Args:
message (Union[str, bytes]): The message to be signed.
Returns:
bytes: The signature of the message.
"""
# pylint: disable=missing-raises-doc,redundant-returns-doc
# (pylint doesn't recognize that this is abstract)
raise NotImplementedError("Sign must be implemented")
class FromServiceAccountMixin(metaclass=abc.ABCMeta):
"""Mix-in to enable factory constructors for a Signer."""
@abc.abstractmethod
def from_string(cls, key, key_id=None):
"""Construct an Signer instance from a private key string.
Args:
key (str): Private key as a string.
key_id (str): An optional key id used to identify the private key.
Returns:
google.auth.crypt.Signer: The constructed signer.
Raises:
ValueError: If the key cannot be parsed.
"""
raise NotImplementedError("from_string must be implemented")
@classmethod
def from_service_account_info(cls, info):
"""Creates a Signer instance instance from a dictionary containing
service account info in Google format.
Args:
info (Mapping[str, str]): The service account info in Google
format.
Returns:
google.auth.crypt.Signer: The constructed signer.
Raises:
ValueError: If the info is not in the expected format.
"""
if _JSON_FILE_PRIVATE_KEY not in info:
raise exceptions.MalformedError(
"The private_key field was not found in the service account " "info."
)
return cls.from_string(
info[_JSON_FILE_PRIVATE_KEY], info.get(_JSON_FILE_PRIVATE_KEY_ID)
)
@classmethod
def from_service_account_file(cls, filename):
"""Creates a Signer instance from a service account .json file
in Google format.
Args:
filename (str): The path to the service account .json file.
Returns:
google.auth.crypt.Signer: The constructed signer.
"""
with io.open(filename, "r", encoding="utf-8") as json_file:
data = json.load(json_file)
return cls.from_service_account_info(data)
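A short sketch of the mix-in's factory methods (editor's illustration). It assumes a service account JSON file at the hypothetical path below with the usual ``private_key`` and ``private_key_id`` fields, and uses the concrete ``google.auth.crypt.RSASigner`` exported by the package.

from google.auth import crypt

# from_service_account_file reads the JSON and delegates to from_string.
signer = crypt.RSASigner.from_service_account_file(
    "/path/to/service_account.json"  # hypothetical path
)
print(signer.key_id)  # populated from "private_key_id", if present
signature = signer.sign(b"payload")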

View File

@@ -0,0 +1,221 @@
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ECDSA verifier and signer that use the ``cryptography`` library.
"""
from dataclasses import dataclass
from typing import Any, Dict, Optional, Union
import cryptography.exceptions
from cryptography.hazmat import backends
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives.asymmetric.utils import decode_dss_signature
from cryptography.hazmat.primitives.asymmetric.utils import encode_dss_signature
import cryptography.x509
from google.auth import _helpers
from google.auth.crypt import base
_CERTIFICATE_MARKER = b"-----BEGIN CERTIFICATE-----"
_BACKEND = backends.default_backend()
_PADDING = padding.PKCS1v15()
@dataclass
class _ESAttributes:
"""A class that models ECDSA attributes.
Attributes:
rs_size (int): Size for ASN.1 r and s size.
sha_algo (hashes.HashAlgorithm): Hash algorithm.
algorithm (str): Algorithm name.
"""
rs_size: int
sha_algo: hashes.HashAlgorithm
algorithm: str
@classmethod
def from_key(
cls, key: Union[ec.EllipticCurvePublicKey, ec.EllipticCurvePrivateKey]
):
return cls.from_curve(key.curve)
@classmethod
def from_curve(cls, curve: ec.EllipticCurve):
# ECDSA raw signature has (r||s) format where r,s are two
# integers of size 32 bytes for P-256 curve and 48 bytes
# for P-384 curve. For P-256 curve, we use SHA256 hash algo,
# and for P-384 curve we use SHA384 algo.
if isinstance(curve, ec.SECP384R1):
return cls(48, hashes.SHA384(), "ES384")
else:
# default to ES256
return cls(32, hashes.SHA256(), "ES256")
class EsVerifier(base.Verifier):
"""Verifies ECDSA cryptographic signatures using public keys.
Args:
public_key (
cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicKey):
The public key used to verify signatures.
"""
def __init__(self, public_key: ec.EllipticCurvePublicKey) -> None:
self._pubkey = public_key
self._attributes = _ESAttributes.from_key(public_key)
@_helpers.copy_docstring(base.Verifier)
def verify(self, message: bytes, signature: bytes) -> bool:
# First convert (r||s) raw signature to ASN1 encoded signature.
sig_bytes = _helpers.to_bytes(signature)
if len(sig_bytes) != self._attributes.rs_size * 2:
return False
r = int.from_bytes(sig_bytes[: self._attributes.rs_size], byteorder="big")
s = int.from_bytes(sig_bytes[self._attributes.rs_size :], byteorder="big")
asn1_sig = encode_dss_signature(r, s)
message = _helpers.to_bytes(message)
try:
self._pubkey.verify(asn1_sig, message, ec.ECDSA(self._attributes.sha_algo))
return True
except (ValueError, cryptography.exceptions.InvalidSignature):
return False
@classmethod
def from_string(cls, public_key: Union[str, bytes]) -> "EsVerifier":
"""Construct a Verifier instance from a public key or public
certificate string.
Args:
public_key (Union[str, bytes]): The public key in PEM format or the
x509 public key certificate.
Returns:
google.auth.crypt.Verifier: The constructed verifier.
Raises:
ValueError: If the public key can't be parsed.
"""
public_key_data = _helpers.to_bytes(public_key)
if _CERTIFICATE_MARKER in public_key_data:
cert = cryptography.x509.load_pem_x509_certificate(
public_key_data, _BACKEND
)
pubkey = cert.public_key() # type: Any
else:
pubkey = serialization.load_pem_public_key(public_key_data, _BACKEND)
if not isinstance(pubkey, ec.EllipticCurvePublicKey):
raise TypeError("Expected public key of type EllipticCurvePublicKey")
return cls(pubkey)
class EsSigner(base.Signer, base.FromServiceAccountMixin):
"""Signs messages with an ECDSA private key.
Args:
private_key (
cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey):
The private key to sign with.
key_id (str): Optional key ID used to identify this private key. This
can be useful to associate the private key with its associated
public key or certificate.
"""
def __init__(
self, private_key: ec.EllipticCurvePrivateKey, key_id: Optional[str] = None
) -> None:
self._key = private_key
self._key_id = key_id
self._attributes = _ESAttributes.from_key(private_key)
@property
def algorithm(self) -> str:
"""Name of the algorithm used to sign messages.
Returns:
str: The algorithm name.
"""
return self._attributes.algorithm
@property # type: ignore
@_helpers.copy_docstring(base.Signer)
def key_id(self) -> Optional[str]:
return self._key_id
@_helpers.copy_docstring(base.Signer)
def sign(self, message: bytes) -> bytes:
message = _helpers.to_bytes(message)
asn1_signature = self._key.sign(message, ec.ECDSA(self._attributes.sha_algo))
# Convert ASN1 encoded signature to (r||s) raw signature.
(r, s) = decode_dss_signature(asn1_signature)
return r.to_bytes(self._attributes.rs_size, byteorder="big") + s.to_bytes(
self._attributes.rs_size, byteorder="big"
)
@classmethod
def from_string(
cls, key: Union[bytes, str], key_id: Optional[str] = None
) -> "EsSigner":
"""Construct a RSASigner from a private key in PEM format.
Args:
key (Union[bytes, str]): Private key in PEM format.
key_id (str): An optional key id used to identify the private key.
Returns:
google.auth.crypt.es.EsSigner: The
constructed signer.
Raises:
ValueError: If ``key`` is not ``bytes`` or ``str`` (unicode).
UnicodeDecodeError: If ``key`` is ``bytes`` but cannot be decoded
into a UTF-8 ``str``.
ValueError: If ``cryptography`` "Could not deserialize key data."
"""
key_bytes = _helpers.to_bytes(key)
private_key = serialization.load_pem_private_key(
key_bytes, password=None, backend=_BACKEND
)
if not isinstance(private_key, ec.EllipticCurvePrivateKey):
raise TypeError("Expected private key of type EllipticCurvePrivateKey")
return cls(private_key, key_id=key_id)
def __getstate__(self) -> Dict[str, Any]:
"""Pickle helper that serializes the _key attribute."""
state = self.__dict__.copy()
state["_key"] = self._key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.PKCS8,
encryption_algorithm=serialization.NoEncryption(),
)
return state
def __setstate__(self, state: Dict[str, Any]) -> None:
"""Pickle helper that deserializes the _key attribute."""
state["_key"] = serialization.load_pem_private_key(state["_key"], None)
self.__dict__.update(state)
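A round-trip sketch for the elliptic-curve signer and verifier (editor's illustration). It assumes this module is importable as ``google.auth.crypt.es``, which is how the ``es256`` wrapper below imports it, and that ``cryptography`` is installed.

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec

from google.auth.crypt import es

key = ec.generate_private_key(ec.SECP256R1())
private_pem = key.private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.NoEncryption(),
)
public_pem = key.public_key().public_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PublicFormat.SubjectPublicKeyInfo,
)

signer = es.EsSigner.from_string(private_pem)
print(signer.algorithm)            # "ES256" for a P-256 key, "ES384" for P-384
raw_sig = signer.sign(b"message")  # 64 bytes for P-256: r || s, 32 bytes each

verifier = es.EsVerifier.from_string(public_pem)
assert verifier.verify(b"message", raw_sig)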

View File

@@ -0,0 +1,45 @@
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ECDSA (ES256) verifier and signer that use the ``cryptography`` library.
"""
from google.auth.crypt.es import EsSigner
from google.auth.crypt.es import EsVerifier
class ES256Verifier(EsVerifier):
"""Verifies ECDSA cryptographic signatures using public keys.
Args:
public_key (
cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicKey):
The public key used to verify signatures.
"""
pass
class ES256Signer(EsSigner):
"""Signs messages with an ECDSA private key.
Args:
private_key (
cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey):
The private key to sign with.
key_id (str): Optional key ID used to identify this private key. This
can be useful to associate the private key with its associated
public key or certificate.
"""
pass
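Because ``ES256Signer`` keeps the class name the rest of the library dispatches on, it can be plugged straight into JWT signing. A sketch (editor's illustration): it assumes ``google.auth.crypt`` re-exports ``ES256Signer`` as in released versions of the library, and the PEM path is hypothetical.

from google.auth import crypt, jwt

with open("/path/to/ec-p256-private-key.pem", "rb") as fh:  # hypothetical path
    signer = crypt.ES256Signer.from_string(fh.read(), key_id="example-kid")

# google.auth.jwt sets the "alg" header to "ES256" for this signer type.
token = jwt.encode(signer, {"iss": "me@example.com", "aud": "https://example.com"})
print(token.decode("utf-8"))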

View File

@@ -0,0 +1,127 @@
# Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
RSA cryptography signer and verifier.
This module provides a shared wrapper that defers to _python_rsa or _cryptography_rsa
for implementations using different third-party libraries.
"""
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
from google.auth import _helpers
from google.auth.crypt import _cryptography_rsa
from google.auth.crypt import _python_rsa
from google.auth.crypt import base
RSA_KEY_MODULE_PREFIX = "rsa.key"
class RSAVerifier(base.Verifier):
"""Verifies RSA cryptographic signatures using public keys.
Args:
public_key (Union["rsa.key.PublicKey", cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicKey]):
The public key used to verify signatures.
Raises:
ValueError: if an unrecognized public key is provided
"""
def __init__(self, public_key):
module_str = public_key.__class__.__module__
if isinstance(public_key, RSAPublicKey):
impl_lib = _cryptography_rsa
elif module_str.startswith(RSA_KEY_MODULE_PREFIX):
impl_lib = _python_rsa
else:
raise ValueError(f"unrecognized public key type: {type(public_key)}")
self._impl = impl_lib.RSAVerifier(public_key)
@_helpers.copy_docstring(base.Verifier)
def verify(self, message, signature):
return self._impl.verify(message, signature)
@classmethod
def from_string(cls, public_key):
"""Construct a Verifier instance from a public key or public
certificate string.
Args:
public_key (Union[str, bytes]): The public key in PEM format or the
x509 public key certificate.
Returns:
google.auth.crypt.Verifier: The constructed verifier.
Raises:
ValueError: If the public_key can't be parsed.
"""
instance = cls.__new__(cls)
instance._impl = _cryptography_rsa.RSAVerifier.from_string(public_key)
return instance
class RSASigner(base.Signer, base.FromServiceAccountMixin):
"""Signs messages with an RSA private key.
Args:
private_key (Union["rsa.key.PrivateKey", cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey]):
The private key to sign with.
key_id (str): Optional key ID used to identify this private key. This
can be useful to associate the private key with its associated
public key or certificate.
Raises:
ValueError: if an unrecognized private key is provided
"""
def __init__(self, private_key, key_id=None):
module_str = private_key.__class__.__module__
if isinstance(private_key, RSAPrivateKey):
impl_lib = _cryptography_rsa
elif module_str.startswith(RSA_KEY_MODULE_PREFIX):
impl_lib = _python_rsa
else:
raise ValueError(f"unrecognized private key type: {type(private_key)}")
self._impl = impl_lib.RSASigner(private_key, key_id=key_id)
@property # type: ignore
@_helpers.copy_docstring(base.Signer)
def key_id(self):
return self._impl.key_id
@_helpers.copy_docstring(base.Signer)
def sign(self, message):
return self._impl.sign(message)
@classmethod
def from_string(cls, key, key_id=None):
"""Construct a Signer instance from a private key in PEM format.
Args:
key (str): Private key in PEM format.
key_id (str): An optional key id used to identify the private key.
Returns:
google.auth.crypt.Signer: The constructed signer.
Raises:
ValueError: If the key cannot be parsed as PKCS#1 or PKCS#8 in
PEM format.
"""
instance = cls.__new__(cls)
instance._impl = _cryptography_rsa.RSASigner.from_string(key, key_id=key_id)
return instance
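A dispatch sketch for the wrapper above (editor's illustration, assuming the module lives at ``google.auth.crypt.rsa`` and both backend packages are installed). The wrapper inspects the key type and forwards to the matching implementation.

from cryptography.hazmat.primitives.asymmetric import rsa as crypto_rsa
import rsa as rsa_lib

from google.auth.crypt import rsa as crypt_rsa

# A cryptography key routes to _cryptography_rsa.RSASigner.
crypto_key = crypto_rsa.generate_private_key(public_exponent=65537, key_size=2048)
signer_a = crypt_rsa.RSASigner(crypto_key, key_id="modern")

# A legacy rsa.key.PrivateKey routes to the deprecated _python_rsa.RSASigner.
_, legacy_key = rsa_lib.newkeys(2048)
signer_b = crypt_rsa.RSASigner(legacy_key, key_id="legacy")

assert signer_a.sign(b"msg") and signer_b.sign(b"msg")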

View File

@@ -0,0 +1,512 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Downscoping with Credential Access Boundaries
This module provides the ability to downscope credentials using
`Downscoping with Credential Access Boundaries`_. This is useful to restrict the
Identity and Access Management (IAM) permissions that a short-lived credential
can use.
To downscope permissions of a source credential, a Credential Access Boundary
that specifies which resources the new credential can access, as well as
an upper bound on the permissions that are available on each resource, has to
be defined. A downscoped credential can then be instantiated using the source
credential and the Credential Access Boundary.
The common pattern of usage is to have a token broker with elevated access
generate these downscoped credentials from higher access source credentials and
pass the downscoped short-lived access tokens to a token consumer via some
secure authenticated channel for limited access to Google Cloud Storage
resources.
For example, a token broker can be set up on a server in a private network.
Various workloads (token consumers) in the same network will send authenticated
requests to that broker for downscoped tokens to access or modify specific Google
Cloud Storage buckets.
The broker will instantiate downscoped credentials instances that can be used to
generate short lived downscoped access tokens that can be passed to the token
consumer. These downscoped access tokens can be injected by the consumer into
google.oauth2.Credentials and used to initialize a storage client instance to
access Google Cloud Storage resources with restricted access.
Note: Only Cloud Storage supports Credential Access Boundaries. Other Google
Cloud services do not support this feature.
.. _Downscoping with Credential Access Boundaries: https://cloud.google.com/iam/docs/downscoping-short-lived-credentials
"""
import datetime
from google.auth import _helpers
from google.auth import credentials
from google.auth import exceptions
from google.oauth2 import sts
# The maximum number of access boundary rules a Credential Access Boundary can
# contain.
_MAX_ACCESS_BOUNDARY_RULES_COUNT = 10
# The token exchange grant_type used for exchanging credentials.
_STS_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:token-exchange"
# The token exchange requested_token_type. This is always an access_token.
_STS_REQUESTED_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:access_token"
# The STS token URL used to exchange a short-lived access token for a downscoped one.
_STS_TOKEN_URL_PATTERN = "https://sts.{}/v1/token"
# The subject token type to use when exchanging a short lived access token for a
# downscoped token.
_STS_SUBJECT_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:access_token"
class CredentialAccessBoundary(object):
"""Defines a Credential Access Boundary which contains a list of access boundary
rules. Each rule contains information on the resource that the rule applies to,
the upper bound of the permissions that are available on that resource and an
optional condition to further restrict permissions.
"""
def __init__(self, rules=[]):
"""Instantiates a Credential Access Boundary. A Credential Access Boundary
can contain up to 10 access boundary rules.
Args:
rules (Sequence[google.auth.downscoped.AccessBoundaryRule]): The list of
access boundary rules limiting the access that a downscoped credential
will have.
Raises:
InvalidType: If any of the rules are not a valid type.
InvalidValue: If the provided rules exceed the maximum allowed.
"""
self.rules = rules
@property
def rules(self):
"""Returns the list of access boundary rules defined on the Credential
Access Boundary.
Returns:
Tuple[google.auth.downscoped.AccessBoundaryRule, ...]: The list of access
boundary rules defined on the Credential Access Boundary. These are returned
as an immutable tuple to prevent modification.
"""
return tuple(self._rules)
@rules.setter
def rules(self, value):
"""Updates the current rules on the Credential Access Boundary. This will overwrite
the existing set of rules.
Args:
value (Sequence[google.auth.downscoped.AccessBoundaryRule]): The list of
access boundary rules limiting the access that a downscoped credential
will have.
Raises:
InvalidType: If any of the rules are not a valid type.
InvalidValue: If the provided rules exceed the maximum allowed.
"""
if len(value) > _MAX_ACCESS_BOUNDARY_RULES_COUNT:
raise exceptions.InvalidValue(
"Credential access boundary rules can have a maximum of {} rules.".format(
_MAX_ACCESS_BOUNDARY_RULES_COUNT
)
)
for access_boundary_rule in value:
if not isinstance(access_boundary_rule, AccessBoundaryRule):
raise exceptions.InvalidType(
"List of rules provided do not contain a valid 'google.auth.downscoped.AccessBoundaryRule'."
)
# Make a copy of the original list.
self._rules = list(value)
def add_rule(self, rule):
"""Adds a single access boundary rule to the existing rules.
Args:
rule (google.auth.downscoped.AccessBoundaryRule): The access boundary rule,
limiting the access that a downscoped credential will have, to be added to
the existing rules.
Raises:
InvalidType: If any of the rules are not a valid type.
InvalidValue: If the provided rules exceed the maximum allowed.
"""
if len(self.rules) == _MAX_ACCESS_BOUNDARY_RULES_COUNT:
raise exceptions.InvalidValue(
"Credential access boundary rules can have a maximum of {} rules.".format(
_MAX_ACCESS_BOUNDARY_RULES_COUNT
)
)
if not isinstance(rule, AccessBoundaryRule):
raise exceptions.InvalidType(
"The provided rule does not contain a valid 'google.auth.downscoped.AccessBoundaryRule'."
)
self._rules.append(rule)
def to_json(self):
"""Generates the dictionary representation of the Credential Access Boundary.
This uses the format expected by the Security Token Service API as documented in
`Defining a Credential Access Boundary`_.
.. _Defining a Credential Access Boundary:
https://cloud.google.com/iam/docs/downscoping-short-lived-credentials#define-boundary
Returns:
Mapping: Credential Access Boundary Rule represented in a dictionary object.
"""
rules = []
for access_boundary_rule in self.rules:
rules.append(access_boundary_rule.to_json())
return {"accessBoundary": {"accessBoundaryRules": rules}}
class AccessBoundaryRule(object):
"""Defines an access boundary rule which contains information on the resource that
the rule applies to, the upper bound of the permissions that are available on that
resource and an optional condition to further restrict permissions.
"""
def __init__(
self, available_resource, available_permissions, availability_condition=None
):
"""Instantiates a single access boundary rule.
Args:
available_resource (str): The full resource name of the Cloud Storage bucket
that the rule applies to. Use the format
"//storage.googleapis.com/projects/_/buckets/bucket-name".
available_permissions (Sequence[str]): A list defining the upper bound that
the downscoped token will have on the available permissions for the
resource. Each value is the identifier for an IAM predefined role or
custom role, with the prefix "inRole:". For example:
"inRole:roles/storage.objectViewer".
Only the permissions in these roles will be available.
availability_condition (Optional[google.auth.downscoped.AvailabilityCondition]):
Optional condition that restricts the availability of permissions to
specific Cloud Storage objects.
Raises:
InvalidType: If any of the parameters are not of the expected types.
InvalidValue: If any of the parameters are not of the expected values.
"""
self.available_resource = available_resource
self.available_permissions = available_permissions
self.availability_condition = availability_condition
@property
def available_resource(self):
"""Returns the current available resource.
Returns:
str: The current available resource.
"""
return self._available_resource
@available_resource.setter
def available_resource(self, value):
"""Updates the current available resource.
Args:
value (str): The updated value of the available resource.
Raises:
google.auth.exceptions.InvalidType: If the value is not a string.
"""
if not isinstance(value, str):
raise exceptions.InvalidType(
"The provided available_resource is not a string."
)
self._available_resource = value
@property
def available_permissions(self):
"""Returns the current available permissions.
Returns:
Tuple[str, ...]: The current available permissions. These are returned
as an immutable tuple to prevent modification.
"""
return tuple(self._available_permissions)
@available_permissions.setter
def available_permissions(self, value):
"""Updates the current available permissions.
Args:
value (Sequence[str]): The updated value of the available permissions.
Raises:
InvalidType: If the value is not a list of strings.
InvalidValue: If the value is not valid.
"""
for available_permission in value:
if not isinstance(available_permission, str):
raise exceptions.InvalidType(
"Provided available_permissions are not a list of strings."
)
if available_permission.find("inRole:") != 0:
raise exceptions.InvalidValue(
"available_permissions must be prefixed with 'inRole:'."
)
# Make a copy of the original list.
self._available_permissions = list(value)
@property
def availability_condition(self):
"""Returns the current availability condition.
Returns:
Optional[google.auth.downscoped.AvailabilityCondition]: The current
availability condition.
"""
return self._availability_condition
@availability_condition.setter
def availability_condition(self, value):
"""Updates the current availability condition.
Args:
value (Optional[google.auth.downscoped.AvailabilityCondition]): The updated
value of the availability condition.
Raises:
google.auth.exceptions.InvalidType: If the value is not of type google.auth.downscoped.AvailabilityCondition
or None.
"""
if not isinstance(value, AvailabilityCondition) and value is not None:
raise exceptions.InvalidType(
"The provided availability_condition is not a 'google.auth.downscoped.AvailabilityCondition' or None."
)
self._availability_condition = value
def to_json(self):
"""Generates the dictionary representation of the access boundary rule.
This uses the format expected by the Security Token Service API as documented in
`Defining a Credential Access Boundary`_.
.. _Defining a Credential Access Boundary:
https://cloud.google.com/iam/docs/downscoping-short-lived-credentials#define-boundary
Returns:
Mapping: The access boundary rule represented in a dictionary object.
"""
json = {
"availablePermissions": list(self.available_permissions),
"availableResource": self.available_resource,
}
if self.availability_condition:
json["availabilityCondition"] = self.availability_condition.to_json()
return json
class AvailabilityCondition(object):
"""An optional condition that can be used as part of a Credential Access Boundary
to further restrict permissions."""
def __init__(self, expression, title=None, description=None):
"""Instantiates an availability condition using the provided expression and
optional title or description.
Args:
expression (str): A condition expression that specifies the Cloud Storage
objects where permissions are available. For example, this expression
makes permissions available for objects whose name starts with "customer-a":
"resource.name.startsWith('projects/_/buckets/example-bucket/objects/customer-a')"
title (Optional[str]): An optional short string that identifies the purpose of
the condition.
description (Optional[str]): Optional details about the purpose of the condition.
Raises:
InvalidType: If any of the parameters are not of the expected types.
InvalidValue: If any of the parameters are not of the expected values.
"""
self.expression = expression
self.title = title
self.description = description
@property
def expression(self):
"""Returns the current condition expression.
Returns:
str: The current condition expression.
"""
return self._expression
@expression.setter
def expression(self, value):
"""Updates the current condition expression.
Args:
value (str): The updated value of the condition expression.
Raises:
google.auth.exceptions.InvalidType: If the value is not of type string.
"""
if not isinstance(value, str):
raise exceptions.InvalidType("The provided expression is not a string.")
self._expression = value
@property
def title(self):
"""Returns the current title.
Returns:
Optional[str]: The current title.
"""
return self._title
@title.setter
def title(self, value):
"""Updates the current title.
Args:
value (Optional[str]): The updated value of the title.
Raises:
google.auth.exceptions.InvalidType: If the value is not of type string or None.
"""
if not isinstance(value, str) and value is not None:
raise exceptions.InvalidType("The provided title is not a string or None.")
self._title = value
@property
def description(self):
"""Returns the current description.
Returns:
Optional[str]: The current description.
"""
return self._description
@description.setter
def description(self, value):
"""Updates the current description.
Args:
value (Optional[str]): The updated value of the description.
Raises:
google.auth.exceptions.InvalidType: If the value is not of type string or None.
"""
if not isinstance(value, str) and value is not None:
raise exceptions.InvalidType(
"The provided description is not a string or None."
)
self._description = value
def to_json(self):
"""Generates the dictionary representation of the availability condition.
This uses the format expected by the Security Token Service API as documented in
`Defining a Credential Access Boundary`_.
.. _Defining a Credential Access Boundary:
https://cloud.google.com/iam/docs/downscoping-short-lived-credentials#define-boundary
Returns:
Mapping[str, str]: The availability condition represented in a dictionary
object.
"""
json = {"expression": self.expression}
if self.title:
json["title"] = self.title
if self.description:
json["description"] = self.description
return json
class Credentials(credentials.CredentialsWithQuotaProject):
"""Defines a set of Google credentials that are downscoped from an existing set
of Google OAuth2 credentials. This is useful to restrict the Identity and Access
Management (IAM) permissions that a short-lived credential can use.
The common pattern of usage is to have a token broker with elevated access
generate these downscoped credentials from higher access source credentials and
pass the downscoped short-lived access tokens to a token consumer via some
secure authenticated channel for limited access to Google Cloud Storage
resources.
"""
def __init__(
self,
source_credentials,
credential_access_boundary,
quota_project_id=None,
universe_domain=credentials.DEFAULT_UNIVERSE_DOMAIN,
):
"""Instantiates a downscoped credentials object using the provided source
credentials and credential access boundary rules.
To downscope permissions of a source credential, a Credential Access Boundary
that specifies which resources the new credential can access, as well as an
upper bound on the permissions that are available on each resource, has to be
defined. A downscoped credential can then be instantiated using the source
credential and the Credential Access Boundary.
Args:
source_credentials (google.auth.credentials.Credentials): The source credentials
to be downscoped based on the provided Credential Access Boundary rules.
credential_access_boundary (google.auth.downscoped.CredentialAccessBoundary):
The Credential Access Boundary which contains a list of access boundary
rules. Each rule contains information on the resource that the rule applies to,
the upper bound of the permissions that are available on that resource and an
optional condition to further restrict permissions.
quota_project_id (Optional[str]): The optional quota project ID.
universe_domain (Optional[str]): The universe domain value, default is googleapis.com
Raises:
google.auth.exceptions.RefreshError: If the source credentials
return an error on token refresh.
google.auth.exceptions.OAuthError: If the STS token exchange
endpoint returned an error during downscoped token generation.
"""
super(Credentials, self).__init__()
self._source_credentials = source_credentials
self._credential_access_boundary = credential_access_boundary
self._quota_project_id = quota_project_id
self._universe_domain = universe_domain or credentials.DEFAULT_UNIVERSE_DOMAIN
self._sts_client = sts.Client(
_STS_TOKEN_URL_PATTERN.format(self.universe_domain)
)
@_helpers.copy_docstring(credentials.Credentials)
def refresh(self, request):
# Generate an access token from the source credentials.
self._source_credentials.refresh(request)
now = _helpers.utcnow()
# Exchange the access token for a downscoped access token.
response_data = self._sts_client.exchange_token(
request=request,
grant_type=_STS_GRANT_TYPE,
subject_token=self._source_credentials.token,
subject_token_type=_STS_SUBJECT_TOKEN_TYPE,
requested_token_type=_STS_REQUESTED_TOKEN_TYPE,
additional_options=self._credential_access_boundary.to_json(),
)
self.token = response_data.get("access_token")
# For downscoping CAB flow, the STS endpoint may not return the expiration
# field for some flows. The generated downscoped token should always have
# the same expiration time as the source credentials. When no expires_in
# field is returned in the response, we can just get the expiration time
# from the source credentials.
if response_data.get("expires_in"):
lifetime = datetime.timedelta(seconds=response_data.get("expires_in"))
self.expiry = now + lifetime
else:
self.expiry = self._source_credentials.expiry
@_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
def with_quota_project(self, quota_project_id):
return self.__class__(
self._source_credentials,
self._credential_access_boundary,
quota_project_id=quota_project_id,
)
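An end-to-end sketch of the broker pattern described in the module docstring (editor's illustration). Bucket and object prefix names are made up; the broker side assumes Application Default Credentials with the cloud-platform scope and the ``requests`` transport, and the consumer side only shows how the short-lived token would be re-wrapped.

import google.auth
from google.auth import downscoped
from google.auth.transport import requests as auth_requests
import google.oauth2.credentials

# Broker side: restrict a source credential to read-only access on one prefix.
source_credentials, _ = google.auth.default(
    scopes=["https://www.googleapis.com/auth/cloud-platform"]
)
rule = downscoped.AccessBoundaryRule(
    available_resource="//storage.googleapis.com/projects/_/buckets/example-bucket",
    available_permissions=["inRole:roles/storage.objectViewer"],
    availability_condition=downscoped.AvailabilityCondition(
        expression=(
            "resource.name.startsWith("
            "'projects/_/buckets/example-bucket/objects/customer-a')"
        )
    ),
)
cab = downscoped.CredentialAccessBoundary(rules=[rule])
downscoped_credentials = downscoped.Credentials(
    source_credentials=source_credentials, credential_access_boundary=cab
)
downscoped_credentials.refresh(auth_requests.Request())

# Consumer side: wrap the received token in plain OAuth2 credentials.
consumer_credentials = google.oauth2.credentials.Credentials(
    downscoped_credentials.token
)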

View File

@@ -0,0 +1,119 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Environment variables used by :mod:`google.auth`."""
PROJECT = "GOOGLE_CLOUD_PROJECT"
"""Environment variable defining default project.
This used by :func:`google.auth.default` to explicitly set a project ID. This
environment variable is also used by the Google Cloud Python Library.
"""
LEGACY_PROJECT = "GCLOUD_PROJECT"
"""Previously used environment variable defining the default project.
This environment variable is used instead of the current one in some
situations (such as Google App Engine).
"""
GOOGLE_CLOUD_QUOTA_PROJECT = "GOOGLE_CLOUD_QUOTA_PROJECT"
"""Environment variable defining the project to be used for
quota and billing."""
CREDENTIALS = "GOOGLE_APPLICATION_CREDENTIALS"
"""Environment variable defining the location of Google application default
credentials."""
# The environment variable name which can replace ~/.config if set.
CLOUD_SDK_CONFIG_DIR = "CLOUDSDK_CONFIG"
"""Environment variable defines the location of Google Cloud SDK's config
files."""
# These two variables allow for customization of the addresses used when
# contacting the GCE metadata service.
GCE_METADATA_HOST = "GCE_METADATA_HOST"
"""Environment variable providing an alternate hostname or host:port to be
used for GCE metadata requests.
This environment variable was originally named GCE_METADATA_ROOT. The system
checks this environment variable first; if no value is present, it falls back
to the old variable.
"""
GCE_METADATA_ROOT = "GCE_METADATA_ROOT"
"""Old environment variable for GCE_METADATA_HOST."""
GCE_METADATA_IP = "GCE_METADATA_IP"
"""Environment variable providing an alternate ip:port to be used for ip-only
GCE metadata requests."""
GCE_METADATA_TIMEOUT = "GCE_METADATA_TIMEOUT"
"""Environment variable defining the timeout in seconds to wait for the
GCE metadata server when detecting the GCE environment.
"""
GCE_METADATA_DETECT_RETRIES = "GCE_METADATA_DETECT_RETRIES"
"""Environment variable representing the number of retries that should be
attempted on metadata lookup.
"""
NO_GCE_CHECK = "NO_GCE_CHECK"
"""Environment variable controlling whether to check if running on GCE or not.
The default value is false. Users have to explicitly set this value to true
in order to disable the GCE check."""
GCE_METADATA_MTLS_MODE = "GCE_METADATA_MTLS_MODE"
"""Environment variable controlling the mTLS behavior for GCE metadata requests.
Can be one of "strict", "none", or "default".
"""
GOOGLE_API_USE_CLIENT_CERTIFICATE = "GOOGLE_API_USE_CLIENT_CERTIFICATE"
"""Environment variable controlling whether to use client certificate or not.
The default value is false. Users have to explicitly set this value to true
in order to use client certificate to establish a mutual TLS channel."""
LEGACY_APPENGINE_RUNTIME = "APPENGINE_RUNTIME"
"""Gen1 environment variable defining the App Engine Runtime.
Used to distinguish between GAE gen1 and GAE gen2+.
"""
# AWS environment variables used with AWS workload identity pools to retrieve
# AWS security credentials and the AWS region needed to create serialized,
# signed requests to the AWS STS GetCallerIdentity API that can be exchanged
# for Google access tokens via the GCP STS endpoint.
# When not available the AWS metadata server is used to retrieve these values.
AWS_ACCESS_KEY_ID = "AWS_ACCESS_KEY_ID"
AWS_SECRET_ACCESS_KEY = "AWS_SECRET_ACCESS_KEY"
AWS_SESSION_TOKEN = "AWS_SESSION_TOKEN"
AWS_REGION = "AWS_REGION"
AWS_DEFAULT_REGION = "AWS_DEFAULT_REGION"
GOOGLE_AUTH_TRUST_BOUNDARY_ENABLED = "GOOGLE_AUTH_TRUST_BOUNDARY_ENABLED"
"""Environment variable controlling whether to enable trust boundary feature.
The default value is false. Users have to explicitly set this value to true."""
GOOGLE_API_CERTIFICATE_CONFIG = "GOOGLE_API_CERTIFICATE_CONFIG"
"""Environment variable defining the location of Google API certificate config
file."""
GOOGLE_API_PREVENT_AGENT_TOKEN_SHARING_FOR_GCP_SERVICES = (
"GOOGLE_API_PREVENT_AGENT_TOKEN_SHARING_FOR_GCP_SERVICES"
)
"""Environment variable to prevent agent token sharing for GCP services."""

View File

@@ -0,0 +1,108 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Exceptions used in the google.auth package."""
class GoogleAuthError(Exception):
"""Base class for all google.auth errors."""
def __init__(self, *args, **kwargs):
super(GoogleAuthError, self).__init__(*args)
retryable = kwargs.get("retryable", False)
self._retryable = retryable
@property
def retryable(self):
return self._retryable
class TransportError(GoogleAuthError):
"""Used to indicate an error occurred during an HTTP request."""
class RefreshError(GoogleAuthError):
"""Used to indicate that an refreshing the credentials' access token
failed."""
class UserAccessTokenError(GoogleAuthError):
"""Used to indicate ``gcloud auth print-access-token`` command failed."""
class DefaultCredentialsError(GoogleAuthError):
"""Used to indicate that acquiring default credentials failed."""
class MutualTLSChannelError(GoogleAuthError):
"""Used to indicate that mutual TLS channel creation is failed, or mutual
TLS channel credentials is missing or invalid."""
class ClientCertError(GoogleAuthError):
"""Used to indicate that client certificate is missing or invalid."""
@property
def retryable(self):
return False
class OAuthError(GoogleAuthError):
"""Used to indicate an error occurred during an OAuth related HTTP
request."""
class ReauthFailError(RefreshError):
"""An exception for when reauth failed."""
def __init__(self, message=None, **kwargs):
super(ReauthFailError, self).__init__(
"Reauthentication failed. {0}".format(message), **kwargs
)
class ReauthSamlChallengeFailError(ReauthFailError):
"""An exception for SAML reauth challenge failures."""
class MalformedError(DefaultCredentialsError, ValueError):
"""An exception for malformed data."""
class InvalidResource(DefaultCredentialsError, ValueError):
"""An exception for URL error."""
class InvalidOperation(DefaultCredentialsError, ValueError):
"""An exception for invalid operation."""
class InvalidValue(DefaultCredentialsError, ValueError):
"""Used to wrap general ValueError of python."""
class InvalidType(DefaultCredentialsError, TypeError):
"""Used to wrap general TypeError of python."""
class OSError(DefaultCredentialsError, EnvironmentError):
"""Used to wrap EnvironmentError(OSError after python3.3)."""
class TimeoutError(GoogleAuthError):
"""Used to indicate a timeout error occurred during an HTTP request."""
class ResponseError(GoogleAuthError):
"""Used to indicate an error occurred when reading an HTTP response."""

View File

@@ -0,0 +1,716 @@
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""External Account Credentials.
This module provides credentials that exchange workload identity pool external
credentials for Google access tokens. This facilitates accessing Google Cloud
Platform resources from on-prem and non-Google Cloud platforms (e.g. AWS,
Microsoft Azure, OIDC identity providers), using native credentials retrieved
from the current environment without the need to copy, save and manage
long-lived service account credentials.
Specifically, this is intended to use access tokens acquired using the GCP STS
token exchange endpoint following the `OAuth 2.0 Token Exchange`_ spec.
.. _OAuth 2.0 Token Exchange: https://tools.ietf.org/html/rfc8693
"""
import abc
import copy
from dataclasses import dataclass
import datetime
import functools
import io
import json
import re
from google.auth import _constants
from google.auth import _helpers
from google.auth import credentials
from google.auth import exceptions
from google.auth import impersonated_credentials
from google.auth import metrics
from google.oauth2 import sts
from google.oauth2 import utils
# External account JSON type identifier.
_EXTERNAL_ACCOUNT_JSON_TYPE = "external_account"
# The token exchange grant_type used for exchanging credentials.
_STS_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:token-exchange"
# The token exchange requested_token_type. This is always an access_token.
_STS_REQUESTED_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:access_token"
# Cloud resource manager URL used to retrieve project information.
_CLOUD_RESOURCE_MANAGER = "https://cloudresourcemanager.googleapis.com/v1/projects/"
# Default Google sts token url.
_DEFAULT_TOKEN_URL = "https://sts.{universe_domain}/v1/token"
@dataclass
class SupplierContext:
"""A context class that contains information about the requested third party credential that is passed
to AWS security credential and subject token suppliers.
Attributes:
subject_token_type (str): The requested subject token type based on the Oauth2.0 token exchange spec.
Expected values include::
“urn:ietf:params:oauth:token-type:jwt”
“urn:ietf:params:oauth:token-type:id-token”
“urn:ietf:params:oauth:token-type:saml2”
“urn:ietf:params:aws:token-type:aws4_request”
audience (str): The requested audience for the subject token.
"""
subject_token_type: str
audience: str
class Credentials(
credentials.Scoped,
credentials.CredentialsWithQuotaProject,
credentials.CredentialsWithTokenUri,
credentials.CredentialsWithTrustBoundary,
metaclass=abc.ABCMeta,
):
"""Base class for all external account credentials.
This is used to instantiate Credentials for exchanging external account
credentials for Google access token and authorizing requests to Google APIs.
The base class implements the common logic for exchanging external account
credentials for Google access tokens.
**IMPORTANT**:
This class does not validate the credential configuration. A security
risk occurs when a credential configuration configured with malicious urls
is used.
When the credential configuration is accepted from an
untrusted source, you should validate it before using.
Refer https://cloud.google.com/docs/authentication/external/externally-sourced-credentials for more details.
"""
def __init__(
self,
audience,
subject_token_type,
token_url,
credential_source,
service_account_impersonation_url=None,
service_account_impersonation_options=None,
client_id=None,
client_secret=None,
token_info_url=None,
quota_project_id=None,
scopes=None,
default_scopes=None,
workforce_pool_user_project=None,
universe_domain=credentials.DEFAULT_UNIVERSE_DOMAIN,
trust_boundary=None,
):
"""Instantiates an external account credentials object.
Args:
audience (str): The STS audience field.
subject_token_type (str): The subject token type based on the Oauth2.0 token exchange spec.
Expected values include::
“urn:ietf:params:oauth:token-type:jwt”
“urn:ietf:params:oauth:token-type:id-token”
“urn:ietf:params:oauth:token-type:saml2”
“urn:ietf:params:aws:token-type:aws4_request”
token_url (str): The STS endpoint URL.
credential_source (Mapping): The credential source dictionary.
service_account_impersonation_url (Optional[str]): The optional service account
impersonation generateAccessToken URL.
client_id (Optional[str]): The optional client ID.
client_secret (Optional[str]): The optional client secret.
token_info_url (str): The optional STS endpoint URL for token introspection.
quota_project_id (Optional[str]): The optional quota project ID.
scopes (Optional[Sequence[str]]): Optional scopes to request during the
authorization grant.
default_scopes (Optional[Sequence[str]]): Default scopes passed by a
Google client library. Use 'scopes' for user-defined scopes.
workforce_pool_user_project (Optional[str]): The optional workforce pool user
project number when the credential corresponds to a workforce pool and not
a workload identity pool. The underlying principal must still have
serviceusage.services.use IAM permission to use the project for
billing/quota.
universe_domain (str): The universe domain. The default universe
domain is googleapis.com.
trust_boundary (str): String representation of trust boundary meta.
Raises:
google.auth.exceptions.RefreshError: If the generateAccessToken
endpoint returned an error.
"""
super(Credentials, self).__init__()
self._audience = audience
self._subject_token_type = subject_token_type
self._universe_domain = universe_domain
self._token_url = token_url
if self._token_url == _DEFAULT_TOKEN_URL:
self._token_url = self._token_url.replace(
"{universe_domain}", self._universe_domain
)
self._token_info_url = token_info_url
self._credential_source = credential_source
self._service_account_impersonation_url = service_account_impersonation_url
self._service_account_impersonation_options = (
service_account_impersonation_options or {}
)
self._client_id = client_id
self._client_secret = client_secret
self._quota_project_id = quota_project_id
self._scopes = scopes
self._default_scopes = default_scopes
self._workforce_pool_user_project = workforce_pool_user_project
self._trust_boundary = trust_boundary
if self._client_id:
self._client_auth = utils.ClientAuthentication(
utils.ClientAuthType.basic, self._client_id, self._client_secret
)
else:
self._client_auth = None
self._sts_client = sts.Client(self._token_url, self._client_auth)
self._metrics_options = self._create_default_metrics_options()
self._impersonated_credentials = None
self._project_id = None
self._supplier_context = SupplierContext(
self._subject_token_type, self._audience
)
self._cred_file_path = None
if not self.is_workforce_pool and self._workforce_pool_user_project:
# Workload identity pools do not support workforce pool user projects.
raise exceptions.InvalidValue(
"workforce_pool_user_project should not be set for non-workforce pool "
"credentials"
)
@property
def info(self):
"""Generates the dictionary representation of the current credentials.
Returns:
Mapping: The dictionary representation of the credentials. This is the
reverse of "from_info" defined on the subclasses of this class. It is
useful for serializing the current credentials so they can be deserialized
later.
"""
config_info = self._constructor_args()
config_info.update(
type=_EXTERNAL_ACCOUNT_JSON_TYPE,
service_account_impersonation=config_info.pop(
"service_account_impersonation_options", None
),
)
config_info.pop("scopes", None)
config_info.pop("default_scopes", None)
return {key: value for key, value in config_info.items() if value is not None}
def _constructor_args(self):
args = {
"audience": self._audience,
"subject_token_type": self._subject_token_type,
"token_url": self._token_url,
"token_info_url": self._token_info_url,
"service_account_impersonation_url": self._service_account_impersonation_url,
"service_account_impersonation_options": copy.deepcopy(
self._service_account_impersonation_options
)
or None,
"credential_source": copy.deepcopy(self._credential_source),
"quota_project_id": self._quota_project_id,
"client_id": self._client_id,
"client_secret": self._client_secret,
"workforce_pool_user_project": self._workforce_pool_user_project,
"scopes": self._scopes,
"default_scopes": self._default_scopes,
"universe_domain": self._universe_domain,
"trust_boundary": self._trust_boundary,
}
if not self.is_workforce_pool:
args.pop("workforce_pool_user_project")
return args
@property
def service_account_email(self):
"""Returns the service account email if service account impersonation is used.
Returns:
Optional[str]: The service account email if impersonation is used. Otherwise
None is returned.
"""
if self._service_account_impersonation_url:
# Parse email from URL. The format looks as follows:
# https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken
url = self._service_account_impersonation_url
start_index = url.rfind("/")
end_index = url.find(":generateAccessToken")
if start_index != -1 and end_index != -1 and start_index < end_index:
start_index = start_index + 1
return url[start_index:end_index]
return None
@property
def is_user(self):
"""Returns whether the credentials represent a user (True) or workload (False).
Workloads behave similarly to service accounts. Currently workloads will use
service account impersonation but will eventually not require impersonation.
As a result, this property is more reliable than the service account email
property in determining if the credentials represent a user or workload.
Returns:
bool: True if the credentials represent a user. False if they represent a
workload.
"""
# If service account impersonation is used, the credentials will always represent a
# service account.
if self._service_account_impersonation_url:
return False
return self.is_workforce_pool
@property
def is_workforce_pool(self):
"""Returns whether the credentials represent a workforce pool (True) or
workload (False) based on the credentials' audience.
This will also return True for impersonated workforce pool credentials.
Returns:
bool: True if the credentials represent a workforce pool. False if they
represent a workload.
"""
# Workforce pools representing users have the following audience format:
# //iam.googleapis.com/locations/$location/workforcePools/$poolId/providers/$providerId
p = re.compile(r"//iam\.googleapis\.com/locations/[^/]+/workforcePools/")
return p.match(self._audience or "") is not None
@property
def requires_scopes(self):
"""Checks if the credentials requires scopes.
Returns:
bool: True if there are no scopes set otherwise False.
"""
return not self._scopes and not self._default_scopes
@property
def project_number(self):
"""Optional[str]: The project number corresponding to the workload identity pool."""
# STS audience pattern:
# //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/...
components = self._audience.split("/")
try:
project_index = components.index("projects")
if project_index + 1 < len(components):
return components[project_index + 1] or None
except ValueError:
return None
@property
def token_info_url(self):
"""Optional[str]: The STS token introspection endpoint."""
return self._token_info_url
@_helpers.copy_docstring(credentials.Credentials)
def get_cred_info(self):
if self._cred_file_path:
cred_info_json = {
"credential_source": self._cred_file_path,
"credential_type": "external account credentials",
}
if self.service_account_email:
cred_info_json["principal"] = self.service_account_email
return cred_info_json
return None
@_helpers.copy_docstring(credentials.Scoped)
def with_scopes(self, scopes, default_scopes=None):
kwargs = self._constructor_args()
kwargs.update(scopes=scopes, default_scopes=default_scopes)
scoped = self.__class__(**kwargs)
scoped._cred_file_path = self._cred_file_path
scoped._metrics_options = self._metrics_options
return scoped
@abc.abstractmethod
def retrieve_subject_token(self, request):
"""Retrieves the subject token using the credential_source object.
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
Returns:
str: The retrieved subject token.
"""
# pylint: disable=missing-raises-doc
# (pylint doesn't recognize that this is abstract)
raise NotImplementedError("retrieve_subject_token must be implemented")
def get_project_id(self, request):
"""Retrieves the project ID corresponding to the workload identity or workforce pool.
For workforce pool credentials, it returns the project ID corresponding to
the workforce_pool_user_project.
When not determinable, None is returned.
This is introduced to support the current pattern of using the Auth library:
credentials, project_id = google.auth.default()
The resource may not have permission (resourcemanager.projects.get) to
call this API or the required scopes may not be selected:
https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
Returns:
Optional[str]: The project ID corresponding to the workload identity pool
or workforce pool if determinable.
"""
if self._project_id:
# If already retrieved, return the cached project ID value.
return self._project_id
scopes = self._scopes if self._scopes is not None else self._default_scopes
# Scopes are required in order to retrieve a valid access token.
project_number = self.project_number or self._workforce_pool_user_project
if project_number and scopes:
headers = {}
url = _CLOUD_RESOURCE_MANAGER + project_number
self.before_request(request, "GET", url, headers)
response = request(url=url, method="GET", headers=headers)
response_body = (
response.data.decode("utf-8")
if hasattr(response.data, "decode")
else response.data
)
response_data = json.loads(response_body)
if response.status == 200:
# Cache result as this field is immutable.
self._project_id = response_data.get("projectId")
return self._project_id
return None
def refresh(self, request):
"""Refreshes the access token.
For impersonated credentials, this method will refresh the underlying
source credentials and the impersonated credentials. For non-impersonated
credentials, it will refresh the access token and the trust boundary.
"""
self._perform_refresh_token(request)
self._handle_trust_boundary(request)
def _handle_trust_boundary(self, request):
# If we are impersonating, the trust boundary is handled by the
# impersonated credentials object. We need to get it from there.
if self._service_account_impersonation_url:
self._trust_boundary = self._impersonated_credentials._trust_boundary
else:
# Otherwise, refresh the trust boundary for the external account.
self._refresh_trust_boundary(request)
def _perform_refresh_token(self, request, cert_fingerprint=None):
scopes = self._scopes if self._scopes is not None else self._default_scopes
# Inject client certificate into request.
if self._mtls_required():
request = functools.partial(
request, cert=self._get_mtls_cert_and_key_paths()
)
if self._should_initialize_impersonated_credentials():
self._impersonated_credentials = self._initialize_impersonated_credentials()
if self._impersonated_credentials:
self._impersonated_credentials.refresh(request)
self.token = self._impersonated_credentials.token
self.expiry = self._impersonated_credentials.expiry
else:
now = _helpers.utcnow()
additional_options = {}
# Do not pass workforce_pool_user_project when client authentication
# is used. The client ID is sufficient for determining the user project.
if self._workforce_pool_user_project and not self._client_id:
additional_options["userProject"] = self._workforce_pool_user_project
if cert_fingerprint:
additional_options["bindCertFingerprint"] = cert_fingerprint
additional_headers = {
metrics.API_CLIENT_HEADER: metrics.byoid_metrics_header(
self._metrics_options
)
}
response_data = self._sts_client.exchange_token(
request=request,
grant_type=_STS_GRANT_TYPE,
subject_token=self.retrieve_subject_token(request),
subject_token_type=self._subject_token_type,
audience=self._audience,
scopes=scopes,
requested_token_type=_STS_REQUESTED_TOKEN_TYPE,
additional_options=additional_options if additional_options else None,
additional_headers=additional_headers,
)
self.token = response_data.get("access_token")
expires_in = response_data.get("expires_in")
# Some services do not respect the OAUTH2.0 RFC and send expires_in as a
# JSON String.
if isinstance(expires_in, str):
expires_in = int(expires_in)
lifetime = datetime.timedelta(seconds=expires_in)
self.expiry = now + lifetime
def _build_trust_boundary_lookup_url(self):
"""Builds and returns the URL for the trust boundary lookup API."""
url = None
# Try to parse as a workload identity pool.
# Audience format: //iam.googleapis.com/projects/PROJECT_NUMBER/locations/global/workloadIdentityPools/POOL_ID/providers/PROVIDER_ID
workload_match = re.search(
r"projects/([^/]+)/locations/global/workloadIdentityPools/([^/]+)",
self._audience,
)
if workload_match:
project_number, pool_id = workload_match.groups()
url = _constants._WORKLOAD_IDENTITY_POOL_TRUST_BOUNDARY_LOOKUP_ENDPOINT.format(
universe_domain=self._universe_domain,
project_number=project_number,
pool_id=pool_id,
)
else:
# If that fails, try to parse as a workforce pool.
# Audience format: //iam.googleapis.com/locations/global/workforcePools/POOL_ID/providers/PROVIDER_ID
workforce_match = re.search(
r"locations/[^/]+/workforcePools/([^/]+)", self._audience
)
if workforce_match:
pool_id = workforce_match.groups()[0]
url = _constants._WORKFORCE_POOL_TRUST_BOUNDARY_LOOKUP_ENDPOINT.format(
universe_domain=self._universe_domain, pool_id=pool_id
)
if url:
return url
else:
# If both fail, the audience format is invalid.
raise exceptions.InvalidValue("Invalid audience format.")
def _make_copy(self):
kwargs = self._constructor_args()
new_cred = self.__class__(**kwargs)
new_cred._cred_file_path = self._cred_file_path
new_cred._metrics_options = self._metrics_options
return new_cred
@_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
def with_quota_project(self, quota_project_id):
# Return copy of instance with the provided quota project ID.
cred = self._make_copy()
cred._quota_project_id = quota_project_id
return cred
@_helpers.copy_docstring(credentials.CredentialsWithTokenUri)
def with_token_uri(self, token_uri):
cred = self._make_copy()
cred._token_url = token_uri
return cred
@_helpers.copy_docstring(credentials.CredentialsWithUniverseDomain)
def with_universe_domain(self, universe_domain):
cred = self._make_copy()
cred._universe_domain = universe_domain
return cred
@_helpers.copy_docstring(credentials.CredentialsWithTrustBoundary)
def with_trust_boundary(self, trust_boundary):
cred = self._make_copy()
cred._trust_boundary = trust_boundary
return cred
def _should_initialize_impersonated_credentials(self):
return (
self._service_account_impersonation_url is not None
and self._impersonated_credentials is None
)
def _initialize_impersonated_credentials(self):
"""Generates an impersonated credentials.
For more details, see `projects.serviceAccounts.generateAccessToken`_.
.. _projects.serviceAccounts.generateAccessToken: https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateAccessToken
Returns:
impersonated_credentials.Credential: The impersonated credentials
object.
Raises:
google.auth.exceptions.RefreshError: If the generateAccessToken
endpoint returned an error.
"""
# Return copy of instance with no service account impersonation.
kwargs = self._constructor_args()
kwargs.update(
service_account_impersonation_url=None,
service_account_impersonation_options={},
)
source_credentials = self.__class__(**kwargs)
source_credentials._metrics_options = self._metrics_options
# Determine target_principal.
target_principal = self.service_account_email
if not target_principal:
raise exceptions.RefreshError(
"Unable to determine target principal from service account impersonation URL."
)
scopes = self._scopes if self._scopes is not None else self._default_scopes
# Initialize and return impersonated credentials.
return impersonated_credentials.Credentials(
source_credentials=source_credentials,
target_principal=target_principal,
target_scopes=scopes,
quota_project_id=self._quota_project_id,
iam_endpoint_override=self._service_account_impersonation_url,
lifetime=self._service_account_impersonation_options.get(
"token_lifetime_seconds"
),
trust_boundary=self._trust_boundary,
)
def _create_default_metrics_options(self):
metrics_options = {}
if self._service_account_impersonation_url:
metrics_options["sa-impersonation"] = "true"
else:
metrics_options["sa-impersonation"] = "false"
if self._service_account_impersonation_options.get("token_lifetime_seconds"):
metrics_options["config-lifetime"] = "true"
else:
metrics_options["config-lifetime"] = "false"
return metrics_options
def _mtls_required(self):
"""Returns a boolean representing whether the current credential is configured
for mTLS and should add a certificate to the outgoing calls to the sts and service
account impersonation endpoint.
Returns:
bool: True if the credential is configured for mTLS, False if it is not.
"""
return False
def _get_mtls_cert_and_key_paths(self):
"""Gets the file locations for a certificate and private key file
to be used for configuring mTLS for the sts and service account
impersonation calls. Currently only expected to return a value when using
X509 workload identity federation.
Returns:
Tuple[str, str]: The cert and key file locations as strings in a tuple.
Raises:
NotImplementedError: When the current credential is not configured for
mTLS.
"""
raise NotImplementedError(
"_get_mtls_cert_and_key_location must be implemented."
)
@classmethod
def from_info(cls, info, **kwargs):
"""Creates a Credentials instance from parsed external account info.
**IMPORTANT**:
This method does not validate the credential configuration. A security
risk occurs when a credential configuration configured with malicious urls
is used.
When the credential configuration is accepted from an
untrusted source, you should validate it before using with this method.
Refer https://cloud.google.com/docs/authentication/external/externally-sourced-credentials for more details.
Args:
info (Mapping[str, str]): The external account info in Google
format.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.identity_pool.Credentials: The constructed
credentials.
Raises:
InvalidValue: For invalid parameters.
"""
return cls(
audience=info.get("audience"),
subject_token_type=info.get("subject_token_type"),
token_url=info.get("token_url"),
token_info_url=info.get("token_info_url"),
service_account_impersonation_url=info.get(
"service_account_impersonation_url"
),
service_account_impersonation_options=info.get(
"service_account_impersonation"
)
or {},
client_id=info.get("client_id"),
client_secret=info.get("client_secret"),
credential_source=info.get("credential_source"),
quota_project_id=info.get("quota_project_id"),
workforce_pool_user_project=info.get("workforce_pool_user_project"),
universe_domain=info.get(
"universe_domain", credentials.DEFAULT_UNIVERSE_DOMAIN
),
trust_boundary=info.get("trust_boundary"),
**kwargs
)
@classmethod
def from_file(cls, filename, **kwargs):
"""Creates a Credentials instance from an external account json file.
**IMPORTANT**:
This method does not validate the credential configuration. A security
risk occurs when a credential configuration configured with malicious urls
is used.
When the credential configuration is accepted from an
untrusted source, you should validate it before using with this method.
Refer https://cloud.google.com/docs/authentication/external/externally-sourced-credentials for more details.
Args:
filename (str): The path to the external account json file.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.identity_pool.Credentials: The constructed
credentials.
"""
with io.open(filename, "r", encoding="utf-8") as json_file:
data = json.load(json_file)
return cls.from_info(data, **kwargs)
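# --- Hypothetical usage sketch (illustration only; not part of this module). ---
# This base class is abstract: concrete subclasses such as identity_pool.Credentials
# implement retrieve_subject_token(). The minimal subclass below shows that extension
# point. The audience, pool/provider IDs, token type, token value, credential_source
# and scopes are placeholder assumptions, not values defined by this module.
class _ExampleStaticTokenCredentials(Credentials):  # pragma: NO COVER
    """Example subclass that exchanges a fixed, externally obtained subject token."""
    def retrieve_subject_token(self, request):
        # A real supplier would read a local file, call a metadata endpoint, etc.
        return "example-oidc-id-token"
def _example_workload_exchange(request):  # pragma: NO COVER
    """Builds the example credentials and performs one live STS token exchange."""
    creds = _ExampleStaticTokenCredentials(
        audience=(
            "//iam.googleapis.com/projects/123456/locations/global/"
            "workloadIdentityPools/example-pool/providers/example-provider"
        ),
        subject_token_type="urn:ietf:params:oauth:token-type:jwt",
        token_url="https://sts.googleapis.com/v1/token",
        credential_source={"note": "unused by this example subclass"},
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    creds.refresh(request)
    return creds.token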

View File

@@ -0,0 +1,458 @@
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""External Account Authorized User Credentials.
This module provides credentials based on OAuth 2.0 access and refresh tokens.
These credentials usually access resources on behalf of a user (resource
owner).
Specifically, these are sourced using external identities via Workforce Identity Federation.
Obtaining the initial access and refresh token can be done through the Google Cloud CLI.
Example credential:
{
"type": "external_account_authorized_user",
"audience": "//iam.googleapis.com/locations/global/workforcePools/$WORKFORCE_POOL_ID/providers/$PROVIDER_ID",
"refresh_token": "refreshToken",
"token_url": "https://sts.googleapis.com/v1/oauth/token",
"token_info_url": "https://sts.googleapis.com/v1/instrospect",
"client_id": "clientId",
"client_secret": "clientSecret"
}
"""
import datetime
import io
import json
import re
from google.auth import _constants
from google.auth import _helpers
from google.auth import credentials
from google.auth import exceptions
from google.oauth2 import sts
from google.oauth2 import utils
_EXTERNAL_ACCOUNT_AUTHORIZED_USER_JSON_TYPE = "external_account_authorized_user"
class Credentials(
credentials.CredentialsWithQuotaProject,
credentials.ReadOnlyScoped,
credentials.CredentialsWithTokenUri,
credentials.CredentialsWithTrustBoundary,
):
"""Credentials for External Account Authorized Users.
This is used to instantiate Credentials for exchanging refresh tokens from
authorized users for Google access token and authorizing requests to Google
APIs.
The credentials are considered immutable. If you want to modify the
quota project, use `with_quota_project` and if you want to modify the token
uri, use `with_token_uri`.
**IMPORTANT**:
This class does not validate the credential configuration. A security
risk occurs when a credential configuration configured with malicious urls
is used.
When the credential configuration is accepted from an
untrusted source, you should validate it before using.
Refer https://cloud.google.com/docs/authentication/external/externally-sourced-credentials for more details.
"""
def __init__(
self,
token=None,
expiry=None,
refresh_token=None,
audience=None,
client_id=None,
client_secret=None,
token_url=None,
token_info_url=None,
revoke_url=None,
scopes=None,
quota_project_id=None,
universe_domain=credentials.DEFAULT_UNIVERSE_DOMAIN,
trust_boundary=None,
):
"""Instantiates a external account authorized user credentials object.
Args:
token (str): The OAuth 2.0 access token. Can be None if refresh information
is provided.
expiry (datetime.datetime): The optional expiration datetime of the OAuth 2.0 access
token.
refresh_token (str): The optional OAuth 2.0 refresh token. If specified,
credentials can be refreshed.
audience (str): The optional STS audience which contains the resource name for the workforce
pool and the provider identifier in that pool.
client_id (str): The OAuth 2.0 client ID. Must be specified for refresh, can be left as
None if the token can not be refreshed.
client_secret (str): The OAuth 2.0 client secret. Must be specified for refresh, can be
left as None if the token can not be refreshed.
token_url (str): The optional STS token exchange endpoint for refresh. Must be specified for
refresh, can be left as None if the token can not be refreshed.
token_info_url (str): The optional STS endpoint URL for token introspection.
revoke_url (str): The optional STS endpoint URL for revoking tokens.
quota_project_id (str): The optional project ID used for quota and billing.
This project may be different from the project used to
create the credentials.
universe_domain (Optional[str]): The universe domain. The default value
is googleapis.com.
trust_boundary (Mapping[str,str]): A credential trust boundary.
Returns:
google.auth.external_account_authorized_user.Credentials: The
constructed credentials.
"""
super(Credentials, self).__init__()
self.token = token
self.expiry = expiry
self._audience = audience
self._refresh_token = refresh_token
self._token_url = token_url
self._token_info_url = token_info_url
self._client_id = client_id
self._client_secret = client_secret
self._revoke_url = revoke_url
self._quota_project_id = quota_project_id
self._scopes = scopes
self._universe_domain = universe_domain or credentials.DEFAULT_UNIVERSE_DOMAIN
self._cred_file_path = None
self._trust_boundary = trust_boundary
if not self.valid and not self.can_refresh:
raise exceptions.InvalidOperation(
"Token should be created with fields to make it valid (`token` and "
"`expiry`), or fields to allow it to refresh (`refresh_token`, "
"`token_url`, `client_id`, `client_secret`)."
)
self._client_auth = None
if self._client_id:
self._client_auth = utils.ClientAuthentication(
utils.ClientAuthType.basic, self._client_id, self._client_secret
)
self._sts_client = sts.Client(self._token_url, self._client_auth)
@property
def info(self):
"""Generates the serializable dictionary representation of the current
credentials.
Returns:
Mapping: The dictionary representation of the credentials. This is the
reverse of the "from_info" method defined in this class. It is
                useful for serializing the current credentials so it can be deserialized
later.
"""
config_info = self.constructor_args()
config_info.update(type=_EXTERNAL_ACCOUNT_AUTHORIZED_USER_JSON_TYPE)
if config_info["expiry"]:
config_info["expiry"] = config_info["expiry"].isoformat() + "Z"
return {key: value for key, value in config_info.items() if value is not None}
def constructor_args(self):
return {
"audience": self._audience,
"refresh_token": self._refresh_token,
"token_url": self._token_url,
"token_info_url": self._token_info_url,
"client_id": self._client_id,
"client_secret": self._client_secret,
"token": self.token,
"expiry": self.expiry,
"revoke_url": self._revoke_url,
"scopes": self._scopes,
"quota_project_id": self._quota_project_id,
"universe_domain": self._universe_domain,
"trust_boundary": self._trust_boundary,
}
@property
def scopes(self):
"""Optional[str]: The OAuth 2.0 permission scopes."""
return self._scopes
@property
def requires_scopes(self):
"""False: OAuth 2.0 credentials have their scopes set when
the initial token is requested and can not be changed."""
return False
@property
def client_id(self):
"""Optional[str]: The OAuth 2.0 client ID."""
return self._client_id
@property
def client_secret(self):
"""Optional[str]: The OAuth 2.0 client secret."""
return self._client_secret
@property
def audience(self):
"""Optional[str]: The STS audience which contains the resource name for the
workforce pool and the provider identifier in that pool."""
return self._audience
@property
def refresh_token(self):
"""Optional[str]: The OAuth 2.0 refresh token."""
return self._refresh_token
@property
def token_url(self):
"""Optional[str]: The STS token exchange endpoint for refresh."""
return self._token_url
@property
def token_info_url(self):
"""Optional[str]: The STS endpoint for token info."""
return self._token_info_url
@property
def revoke_url(self):
"""Optional[str]: The STS endpoint for token revocation."""
return self._revoke_url
@property
def is_user(self):
"""True: This credential always represents a user."""
return True
@property
def can_refresh(self):
return all(
(
self._refresh_token,
self._token_url,
self._client_id,
self._client_secret,
)
)
def get_project_id(self, request=None):
"""Retrieves the project ID corresponding to the workload identity or workforce pool.
For workforce pool credentials, it returns the project ID corresponding to
the workforce_pool_user_project.
When not determinable, None is returned.
Args:
request (google.auth.transport.requests.Request): Request object.
Unused here, but passed from _default.default().
        Returns:
            None: The project ID is not determinable for this credential type,
                so None is always returned.
"""
return None
def to_json(self, strip=None):
"""Utility function that creates a JSON representation of this
credential.
Args:
strip (Sequence[str]): Optional list of members to exclude from the
generated JSON.
Returns:
str: A JSON representation of this instance. When converted into
a dictionary, it can be passed to from_info()
to create a new instance.
"""
strip = strip if strip else []
return json.dumps({k: v for (k, v) in self.info.items() if k not in strip})
def _perform_refresh_token(self, request):
"""Refreshes the access token.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
Raises:
google.auth.exceptions.RefreshError: If the credentials could
not be refreshed.
"""
if not self.can_refresh:
raise exceptions.RefreshError(
"The credentials do not contain the necessary fields need to "
"refresh the access token. You must specify refresh_token, "
"token_url, client_id, and client_secret."
)
now = _helpers.utcnow()
response_data = self._sts_client.refresh_token(request, self._refresh_token)
self.token = response_data.get("access_token")
lifetime = datetime.timedelta(seconds=response_data.get("expires_in"))
self.expiry = now + lifetime
if "refresh_token" in response_data:
self._refresh_token = response_data["refresh_token"]
def _build_trust_boundary_lookup_url(self):
"""Builds and returns the URL for the trust boundary lookup API."""
# Audience format: //iam.googleapis.com/locations/global/workforcePools/POOL_ID/providers/PROVIDER_ID
match = re.search(r"locations/[^/]+/workforcePools/([^/]+)", self._audience)
if not match:
raise exceptions.InvalidValue("Invalid workforce pool audience format.")
pool_id = match.groups()[0]
return _constants._WORKFORCE_POOL_TRUST_BOUNDARY_LOOKUP_ENDPOINT.format(
universe_domain=self._universe_domain, pool_id=pool_id
)
def revoke(self, request):
"""Revokes the refresh token.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
Raises:
google.auth.exceptions.OAuthError: If the token could not be
revoked.
"""
if not self._revoke_url or not self._refresh_token:
raise exceptions.OAuthError(
"The credentials do not contain the necessary fields to "
"revoke the refresh token. You must specify revoke_url and "
"refresh_token."
)
self._sts_client.revoke_token(
request, self._refresh_token, "refresh_token", self._revoke_url
)
self.token = None
self._refresh_token = None
@_helpers.copy_docstring(credentials.Credentials)
def get_cred_info(self):
if self._cred_file_path:
return {
"credential_source": self._cred_file_path,
"credential_type": "external account authorized user credentials",
}
return None
def _make_copy(self):
kwargs = self.constructor_args()
cred = self.__class__(**kwargs)
cred._cred_file_path = self._cred_file_path
return cred
@_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
def with_quota_project(self, quota_project_id):
cred = self._make_copy()
cred._quota_project_id = quota_project_id
return cred
@_helpers.copy_docstring(credentials.CredentialsWithTokenUri)
def with_token_uri(self, token_uri):
cred = self._make_copy()
cred._token_url = token_uri
return cred
@_helpers.copy_docstring(credentials.CredentialsWithUniverseDomain)
def with_universe_domain(self, universe_domain):
cred = self._make_copy()
cred._universe_domain = universe_domain
return cred
@_helpers.copy_docstring(credentials.CredentialsWithTrustBoundary)
def with_trust_boundary(self, trust_boundary):
cred = self._make_copy()
cred._trust_boundary = trust_boundary
return cred
@classmethod
def from_info(cls, info, **kwargs):
"""Creates a Credentials instance from parsed external account info.
**IMPORTANT**:
This method does not validate the credential configuration. A security
risk occurs when a credential configuration configured with malicious urls
is used.
When the credential configuration is accepted from an
untrusted source, you should validate it before using with this method.
Refer https://cloud.google.com/docs/authentication/external/externally-sourced-credentials for more details.
Args:
info (Mapping[str, str]): The external account info in Google
format.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.external_account_authorized_user.Credentials: The
constructed credentials.
Raises:
ValueError: For invalid parameters.
"""
expiry = info.get("expiry")
if expiry:
expiry = datetime.datetime.strptime(
expiry.rstrip("Z").split(".")[0], "%Y-%m-%dT%H:%M:%S"
)
return cls(
audience=info.get("audience"),
refresh_token=info.get("refresh_token"),
token_url=info.get("token_url"),
token_info_url=info.get("token_info_url"),
client_id=info.get("client_id"),
client_secret=info.get("client_secret"),
token=info.get("token"),
expiry=expiry,
revoke_url=info.get("revoke_url"),
quota_project_id=info.get("quota_project_id"),
scopes=info.get("scopes"),
universe_domain=info.get(
"universe_domain", credentials.DEFAULT_UNIVERSE_DOMAIN
),
trust_boundary=info.get("trust_boundary"),
**kwargs
)
@classmethod
def from_file(cls, filename, **kwargs):
"""Creates a Credentials instance from an external account json file.
**IMPORTANT**:
This method does not validate the credential configuration. A security
risk occurs when a credential configuration configured with malicious urls
is used.
When the credential configuration is accepted from an
untrusted source, you should validate it before using with this method.
Refer https://cloud.google.com/docs/authentication/external/externally-sourced-credentials for more details.
Args:
filename (str): The path to the external account json file.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.external_account_authorized_user.Credentials: The
constructed credentials.
"""
with io.open(filename, "r", encoding="utf-8") as json_file:
data = json.load(json_file)
return cls.from_info(data, **kwargs)
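# --- Hypothetical usage sketch (illustration only; not part of this module). ---
# Builds workforce authorized-user credentials from a parsed configuration and
# refreshes them once. The audience, pool/provider IDs, client ID/secret and
# refresh token below are placeholder assumptions; refreshing performs a live
# STS call with the given transport request.
def _example_authorized_user_refresh(request):  # pragma: NO COVER
    info = {
        "type": "external_account_authorized_user",
        "audience": (
            "//iam.googleapis.com/locations/global/"
            "workforcePools/example-pool/providers/example-provider"
        ),
        "refresh_token": "example-refresh-token",
        "token_url": "https://sts.googleapis.com/v1/oauth/token",
        "client_id": "example-client-id",
        "client_secret": "example-client-secret",
    }
    creds = Credentials.from_info(info)
    creds.refresh(request)
    return creds.token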

View File

@@ -0,0 +1,146 @@
# Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for using the Google `Cloud Identity and Access Management (IAM)
API`_'s auth-related functionality.
.. _Cloud Identity and Access Management (IAM) API:
https://cloud.google.com/iam/docs/
"""
import base64
import http.client as http_client
import json
import os
from google.auth import _exponential_backoff
from google.auth import _helpers
from google.auth import credentials
from google.auth import crypt
from google.auth import exceptions
from google.auth.transport import mtls
IAM_RETRY_CODES = {
http_client.INTERNAL_SERVER_ERROR,
http_client.BAD_GATEWAY,
http_client.SERVICE_UNAVAILABLE,
http_client.GATEWAY_TIMEOUT,
}
_IAM_SCOPE = ["https://www.googleapis.com/auth/iam"]
# 1. Determine if we should use mTLS.
# Note: We only support automatic mTLS on the default googleapis.com universe.
if hasattr(mtls, "should_use_client_cert"):
use_client_cert = mtls.should_use_client_cert()
else: # pragma: NO COVER
# if unsupported, fallback to reading from env var
use_client_cert = (
os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() == "true"
)
# 2. Construct the template domain using the library's DEFAULT_UNIVERSE_DOMAIN constant.
# This ensures that the .replace() calls in the classes will work correctly.
if use_client_cert:
# We use the .mtls. prefix only for the default universe template
_IAM_DOMAIN = f"iamcredentials.mtls.{credentials.DEFAULT_UNIVERSE_DOMAIN}"
else:
_IAM_DOMAIN = f"iamcredentials.{credentials.DEFAULT_UNIVERSE_DOMAIN}"
# 3. Create the common base URL template
# We use double brackets {{}} so .format() can be called later for the email.
_IAM_BASE_URL = f"https://{_IAM_DOMAIN}/v1/projects/-/serviceAccounts/{{}}"
# 4. Define the endpoints as templates
_IAM_ENDPOINT = _IAM_BASE_URL + ":generateAccessToken"
_IAM_SIGN_ENDPOINT = _IAM_BASE_URL + ":signBlob"
_IAM_SIGNJWT_ENDPOINT = _IAM_BASE_URL + ":signJwt"
_IAM_IDTOKEN_ENDPOINT = _IAM_BASE_URL + ":generateIdToken"
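# --- Hypothetical sketch (illustration only; not part of this module). ---
# Shows how the endpoint templates above are expanded: the default universe domain
# is swapped for the credential's universe and the service account email fills the
# final placeholder. Both values below are placeholder assumptions.
def _example_sign_blob_url():  # pragma: NO COVER
    universe_domain = "example-universe.example.com"
    email = "example-sa@example-project.iam.gserviceaccount.com"
    return _IAM_SIGN_ENDPOINT.replace(
        credentials.DEFAULT_UNIVERSE_DOMAIN, universe_domain
    ).format(email)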
class Signer(crypt.Signer):
"""Signs messages using the IAM `signBlob API`_.
This is useful when you need to sign bytes but do not have access to the
credential's private key file.
.. _signBlob API:
https://cloud.google.com/iam/reference/rest/v1/projects.serviceAccounts
/signBlob
"""
def __init__(self, request, credentials, service_account_email):
"""
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
credentials (google.auth.credentials.Credentials): The credentials
that will be used to authenticate the request to the IAM API.
                The credentials must have one of the following scopes:
- https://www.googleapis.com/auth/iam
- https://www.googleapis.com/auth/cloud-platform
service_account_email (str): The service account email identifying
which service account to use to sign bytes. Often, this can
be the same as the service account email in the given
credentials.
"""
self._request = request
self._credentials = credentials
self._service_account_email = service_account_email
def _make_signing_request(self, message):
"""Makes a request to the API signBlob API."""
message = _helpers.to_bytes(message)
method = "POST"
url = _IAM_SIGN_ENDPOINT.replace(
credentials.DEFAULT_UNIVERSE_DOMAIN, self._credentials.universe_domain
).format(self._service_account_email)
headers = {"Content-Type": "application/json"}
body = json.dumps(
{"payload": base64.b64encode(message).decode("utf-8")}
).encode("utf-8")
retries = _exponential_backoff.ExponentialBackoff()
for _ in retries:
self._credentials.before_request(self._request, method, url, headers)
response = self._request(url=url, method=method, body=body, headers=headers)
if response.status in IAM_RETRY_CODES:
continue
if response.status != http_client.OK:
raise exceptions.TransportError(
"Error calling the IAM signBlob API: {}".format(response.data)
)
return json.loads(response.data.decode("utf-8"))
raise exceptions.TransportError("exhausted signBlob endpoint retries")
@property
def key_id(self):
"""Optional[str]: The key ID used to identify this private key.
.. warning::
This is always ``None``. The key ID used by IAM can not
be reliably determined ahead of time.
"""
return None
@_helpers.copy_docstring(crypt.Signer)
def sign(self, message):
response = self._make_signing_request(message)
return base64.b64decode(response["signedBlob"])
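# --- Hypothetical usage sketch (illustration only; not part of this module). ---
# Signs a payload through the IAM signBlob API with already-obtained credentials.
# The service account email is a placeholder assumption; the credentials must carry
# the IAM (or cloud-platform) scope and the caller needs the Service Account Token
# Creator role on that account.
def _example_sign_payload():  # pragma: NO COVER
    import google.auth
    import google.auth.transport.requests
    source_credentials, _ = google.auth.default(scopes=_IAM_SCOPE)
    request = google.auth.transport.requests.Request()
    signer = Signer(
        request,
        source_credentials,
        "example-sa@example-project.iam.gserviceaccount.com",
    )
    return signer.sign(b"payload-to-sign")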

View File

@@ -0,0 +1,575 @@
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Identity Pool Credentials.
This module provides credentials to access Google Cloud resources from on-prem
or non-Google Cloud platforms which support external credentials (e.g. OIDC ID
tokens) retrieved from local file locations or local servers. This includes
Microsoft Azure and OIDC identity providers (e.g. K8s workloads registered with
Hub with Hub workload identity enabled).
These credentials are recommended over the use of service account credentials
in on-prem/non-Google Cloud platforms as they do not involve the management of
long-lived service account private keys.
Identity Pool Credentials are initialized using external_account
arguments which are typically loaded from an external credentials file or
an external credentials URL.
This module also provides a definition for an abstract subject token supplier.
This supplier can be implemented to return a valid OIDC or SAML2.0 subject token
and used to create Identity Pool credentials. The credentials will then call the
supplier instead of using pre-defined methods such as reading a local file or
calling a URL.
"""
try:
from collections.abc import Mapping
# Python 2.7 compatibility
except ImportError: # pragma: NO COVER
from collections import Mapping # type: ignore
import abc
import base64
import json
import os
from typing import NamedTuple
from google.auth import _helpers
from google.auth import exceptions
from google.auth import external_account
from google.auth.transport import _mtls_helper
class SubjectTokenSupplier(metaclass=abc.ABCMeta):
"""Base class for subject token suppliers. This can be implemented with custom logic to retrieve
a subject token to exchange for a Google Cloud access token when using Workload or
Workforce Identity Federation. The identity pool credential does not cache the subject token,
so caching logic should be added in the implementation.
"""
@abc.abstractmethod
def get_subject_token(self, context, request):
"""Returns the requested subject token. The subject token must be valid.
        .. warning:: This is not cached by the calling Google credential, so caching logic should be implemented in the supplier.
Args:
context (google.auth.externalaccount.SupplierContext): The context object
containing information about the requested audience and subject token type.
request (google.auth.transport.Request): The object used to make
HTTP requests.
Raises:
google.auth.exceptions.RefreshError: If an error is encountered during
subject token retrieval logic.
Returns:
str: The requested subject token string.
"""
raise NotImplementedError("")
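# --- Hypothetical sketch (illustration only; not part of this module). ---
# A custom supplier can hand back a subject token from any source, for example an
# in-memory value or a secrets manager. The pre-fetched token below is a placeholder;
# a real implementation should also handle caching and re-fetching expired tokens.
class _ExampleStaticSupplier(SubjectTokenSupplier):  # pragma: NO COVER
    """Example supplier that returns a pre-fetched OIDC subject token."""
    def __init__(self, token):
        self._token = token
    @_helpers.copy_docstring(SubjectTokenSupplier)
    def get_subject_token(self, context, request):
        return self._token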
class _TokenContent(NamedTuple):
"""Models the token content response from file and url internal suppliers.
Attributes:
content (str): The string content of the file or URL response.
location (str): The location the content was retrieved from. This will either be a file location or a URL.
"""
content: str
location: str
class _FileSupplier(SubjectTokenSupplier):
"""Internal implementation of subject token supplier which supports reading a subject token from a file."""
def __init__(self, path, format_type, subject_token_field_name):
self._path = path
self._format_type = format_type
self._subject_token_field_name = subject_token_field_name
@_helpers.copy_docstring(SubjectTokenSupplier)
def get_subject_token(self, context, request):
if not os.path.exists(self._path):
raise exceptions.RefreshError("File '{}' was not found.".format(self._path))
with open(self._path, "r", encoding="utf-8") as file_obj:
token_content = _TokenContent(file_obj.read(), self._path)
return _parse_token_data(
token_content, self._format_type, self._subject_token_field_name
)
class _UrlSupplier(SubjectTokenSupplier):
"""Internal implementation of subject token supplier which supports retrieving a subject token by calling a URL endpoint."""
def __init__(self, url, format_type, subject_token_field_name, headers):
self._url = url
self._format_type = format_type
self._subject_token_field_name = subject_token_field_name
self._headers = headers
@_helpers.copy_docstring(SubjectTokenSupplier)
def get_subject_token(self, context, request):
response = request(url=self._url, method="GET", headers=self._headers)
# support both string and bytes type response.data
response_body = (
response.data.decode("utf-8")
if hasattr(response.data, "decode")
else response.data
)
if response.status != 200:
raise exceptions.RefreshError(
"Unable to retrieve Identity Pool subject token", response_body
)
token_content = _TokenContent(response_body, self._url)
return _parse_token_data(
token_content, self._format_type, self._subject_token_field_name
)
class _X509Supplier(SubjectTokenSupplier):
"""Internal supplier for X509 workload credentials. This class is used internally and always returns an empty string as the subject token."""
def __init__(self, trust_chain_path, leaf_cert_callback):
self._trust_chain_path = trust_chain_path
self._leaf_cert_callback = leaf_cert_callback
@_helpers.copy_docstring(SubjectTokenSupplier)
def get_subject_token(self, context, request):
        # Import OpenSSL inline because it is an extra import only required by customers
# using mTLS.
from OpenSSL import crypto
leaf_cert = crypto.load_certificate(
crypto.FILETYPE_PEM, self._leaf_cert_callback()
)
trust_chain = self._read_trust_chain()
cert_chain = []
cert_chain.append(_X509Supplier._encode_cert(leaf_cert))
if trust_chain is None or len(trust_chain) == 0:
return json.dumps(cert_chain)
# Append the first cert if it is not the leaf cert.
first_cert = _X509Supplier._encode_cert(trust_chain[0])
if first_cert != cert_chain[0]:
cert_chain.append(first_cert)
for i in range(1, len(trust_chain)):
encoded = _X509Supplier._encode_cert(trust_chain[i])
# Check if the current cert is the leaf cert and raise an exception if it is.
if encoded == cert_chain[0]:
raise exceptions.RefreshError(
"The leaf certificate must be at the top of the trust chain file"
)
else:
cert_chain.append(encoded)
return json.dumps(cert_chain)
def _read_trust_chain(self):
        # Import OpenSSL inline because it is an extra import only required by customers
# using mTLS.
from OpenSSL import crypto
certificate_trust_chain = []
# If no trust chain path was provided, return an empty list.
if self._trust_chain_path is None or self._trust_chain_path == "":
return certificate_trust_chain
try:
# Open the trust chain file.
with open(self._trust_chain_path, "rb") as f:
trust_chain_data = f.read()
# Split PEM data into individual certificates.
cert_blocks = trust_chain_data.split(b"-----BEGIN CERTIFICATE-----")
for cert_block in cert_blocks:
# Skip empty blocks.
if cert_block.strip():
cert_data = b"-----BEGIN CERTIFICATE-----" + cert_block
try:
# Load each certificate and add it to the trust chain.
cert = crypto.load_certificate(
crypto.FILETYPE_PEM, cert_data
)
certificate_trust_chain.append(cert)
except Exception as e:
raise exceptions.RefreshError(
"Error loading PEM certificates from the trust chain file '{}'".format(
self._trust_chain_path
)
) from e
return certificate_trust_chain
except FileNotFoundError:
raise exceptions.RefreshError(
"Trust chain file '{}' was not found.".format(self._trust_chain_path)
)
def _encode_cert(cert):
        # Import OpenSSL inline because it is an extra import only required by customers
# using mTLS.
from OpenSSL import crypto
return base64.b64encode(
crypto.dump_certificate(crypto.FILETYPE_ASN1, cert)
).decode("utf-8")
def _parse_token_data(token_content, format_type="text", subject_token_field_name=None):
if format_type == "text":
token = token_content.content
else:
try:
# Parse file content as JSON.
response_data = json.loads(token_content.content)
# Get the subject_token.
token = response_data[subject_token_field_name]
except (KeyError, ValueError):
raise exceptions.RefreshError(
"Unable to parse subject_token from JSON file '{}' using key '{}'".format(
token_content.location, subject_token_field_name
)
)
if not token:
raise exceptions.RefreshError(
"Missing subject_token in the credential_source file"
)
return token
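# --- Hypothetical sketch (illustration only; not part of this module). ---
# Demonstrates how a JSON-formatted credential source is parsed: the field named by
# subject_token_field_name is extracted from the file or URL content. The content,
# location and field name below are placeholder assumptions.
def _example_parse_json_token():  # pragma: NO COVER
    content = _TokenContent(
        '{"access_token": "example-subject-token"}', "/path/to/token.json"
    )
    return _parse_token_data(
        content, format_type="json", subject_token_field_name="access_token"
    )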
class Credentials(external_account.Credentials):
"""External account credentials sourced from files and URLs.
**IMPORTANT**:
This class does not validate the credential configuration. A security
risk occurs when a credential configuration configured with malicious urls
is used.
When the credential configuration is accepted from an
untrusted source, you should validate it before using.
Refer https://cloud.google.com/docs/authentication/external/externally-sourced-credentials for more details.
"""
def __init__(
self,
audience,
subject_token_type,
token_url=external_account._DEFAULT_TOKEN_URL,
credential_source=None,
subject_token_supplier=None,
*args,
**kwargs
):
"""Instantiates an external account credentials object from a file/URL.
Args:
audience (str): The STS audience field.
subject_token_type (str): The subject token type based on the Oauth2.0 token exchange spec.
Expected values include::
“urn:ietf:params:oauth:token-type:jwt”
“urn:ietf:params:oauth:token-type:id-token”
“urn:ietf:params:oauth:token-type:saml2”
token_url (Optional [str]): The STS endpoint URL. If not provided, will default to "https://sts.googleapis.com/v1/token".
credential_source (Optional [Mapping]): The credential source dictionary used to
provide instructions on how to retrieve external credential to be
exchanged for Google access tokens. Either a credential source or
a subject token supplier must be provided.
Example credential_source for url-sourced credential::
{
"url": "http://www.example.com",
"format": {
"type": "json",
"subject_token_field_name": "access_token",
},
"headers": {"foo": "bar"},
}
Example credential_source for file-sourced credential::
{
"file": "/path/to/token/file.txt"
}
subject_token_supplier (Optional [SubjectTokenSupplier]): Optional subject token supplier.
This will be called to supply a valid subject token which will then
be exchanged for Google access tokens. Either a subject token supplier
or a credential source must be provided.
args (List): Optional positional arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
kwargs (Mapping): Optional keyword arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
Raises:
google.auth.exceptions.RefreshError: If an error is encountered during
access token retrieval logic.
ValueError: For invalid parameters.
.. note:: Typically one of the helper constructors
:meth:`from_file` or
            :meth:`from_info` is used instead of calling the constructor directly.
"""
super(Credentials, self).__init__(
audience=audience,
subject_token_type=subject_token_type,
token_url=token_url,
credential_source=credential_source,
*args,
**kwargs
)
if credential_source is None and subject_token_supplier is None:
raise exceptions.InvalidValue(
"A valid credential source or a subject token supplier must be provided."
)
if credential_source is not None and subject_token_supplier is not None:
raise exceptions.InvalidValue(
"Identity pool credential cannot have both a credential source and a subject token supplier."
)
if subject_token_supplier is not None:
self._subject_token_supplier = subject_token_supplier
self._credential_source_file = None
self._credential_source_url = None
self._credential_source_certificate = None
else:
if not isinstance(credential_source, Mapping):
self._credential_source_executable = None
raise exceptions.MalformedError(
"Invalid credential_source. The credential_source is not a dict."
)
self._credential_source_file = credential_source.get("file")
self._credential_source_url = credential_source.get("url")
self._credential_source_certificate = credential_source.get("certificate")
# environment_id is only supported in AWS or dedicated future external
# account credentials.
if "environment_id" in credential_source:
raise exceptions.MalformedError(
"Invalid Identity Pool credential_source field 'environment_id'"
)
# check that only one of file, url, or certificate are provided.
self._validate_single_source()
if self._credential_source_certificate:
self._validate_certificate_config()
else:
self._validate_file_or_url_config(credential_source)
if self._credential_source_file:
self._subject_token_supplier = _FileSupplier(
self._credential_source_file,
self._credential_source_format_type,
self._credential_source_field_name,
)
elif self._credential_source_url:
self._subject_token_supplier = _UrlSupplier(
self._credential_source_url,
self._credential_source_format_type,
self._credential_source_field_name,
self._credential_source_headers,
)
else: # self._credential_source_certificate
self._subject_token_supplier = _X509Supplier(
self._trust_chain_path, self._get_cert_bytes
)
@_helpers.copy_docstring(external_account.Credentials)
def retrieve_subject_token(self, request):
return self._subject_token_supplier.get_subject_token(
self._supplier_context, request
)
def _get_mtls_cert_and_key_paths(self):
if self._credential_source_certificate is None:
raise exceptions.RefreshError(
'The credential is not configured to use mtls requests. The credential should include a "certificate" section in the credential source.'
)
else:
return _mtls_helper._get_workload_cert_and_key_paths(
self._certificate_config_location
)
def _get_cert_bytes(self):
cert_path, _ = self._get_mtls_cert_and_key_paths()
return _mtls_helper._read_cert_file(cert_path)
def _mtls_required(self):
return self._credential_source_certificate is not None
def _create_default_metrics_options(self):
metrics_options = super(Credentials, self)._create_default_metrics_options()
# Check that credential source is a dict before checking for credential type. This check needs to be done
# here because the external_account credential constructor needs to pass the metrics options to the
# impersonated credential object before the identity_pool credentials are validated.
if isinstance(self._credential_source, Mapping):
if self._credential_source.get("file"):
metrics_options["source"] = "file"
elif self._credential_source.get("url"):
metrics_options["source"] = "url"
else:
metrics_options["source"] = "x509"
else:
metrics_options["source"] = "programmatic"
return metrics_options
def _has_custom_supplier(self):
return self._credential_source is None
def _constructor_args(self):
args = super(Credentials, self)._constructor_args()
# If a custom supplier was used, append it to the args dict.
if self._has_custom_supplier():
args.update({"subject_token_supplier": self._subject_token_supplier})
return args
def _validate_certificate_config(self):
self._certificate_config_location = self._credential_source_certificate.get(
"certificate_config_location"
)
use_default = self._credential_source_certificate.get(
"use_default_certificate_config"
)
self._trust_chain_path = self._credential_source_certificate.get(
"trust_chain_path"
)
if self._certificate_config_location and use_default:
raise exceptions.MalformedError(
"Invalid certificate configuration, certificate_config_location cannot be specified when use_default_certificate_config = true."
)
if not self._certificate_config_location and not use_default:
raise exceptions.MalformedError(
"Invalid certificate configuration, use_default_certificate_config should be true if no certificate_config_location is provided."
)
def _validate_file_or_url_config(self, credential_source):
self._credential_source_headers = credential_source.get("headers")
credential_source_format = credential_source.get("format", {})
# Get credential_source format type. When not provided, this
# defaults to text.
self._credential_source_format_type = (
credential_source_format.get("type") or "text"
)
if self._credential_source_format_type not in ["text", "json"]:
raise exceptions.MalformedError(
"Invalid credential_source format '{}'".format(
self._credential_source_format_type
)
)
# For JSON types, get the required subject_token field name.
if self._credential_source_format_type == "json":
self._credential_source_field_name = credential_source_format.get(
"subject_token_field_name"
)
if self._credential_source_field_name is None:
raise exceptions.MalformedError(
"Missing subject_token_field_name for JSON credential_source format"
)
else:
self._credential_source_field_name = None
def _validate_single_source(self):
credential_sources = [
self._credential_source_file,
self._credential_source_url,
self._credential_source_certificate,
]
valid_credential_sources = list(
filter(lambda source: source is not None, credential_sources)
)
if len(valid_credential_sources) > 1:
raise exceptions.MalformedError(
"Ambiguous credential_source. 'file', 'url', and 'certificate' are mutually exclusive.."
)
if len(valid_credential_sources) != 1:
raise exceptions.MalformedError(
"Missing credential_source. A 'file', 'url', or 'certificate' must be provided."
)
@classmethod
def from_info(cls, info, **kwargs):
"""Creates an Identity Pool Credentials instance from parsed external account info.
**IMPORTANT**:
This method does not validate the credential configuration. A security
risk occurs when a credential configuration configured with malicious urls
is used.
When the credential configuration is accepted from an
untrusted source, you should validate it before using with this method.
Refer https://cloud.google.com/docs/authentication/external/externally-sourced-credentials for more details.
Args:
info (Mapping[str, str]): The Identity Pool external account info in Google
format.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.identity_pool.Credentials: The constructed
credentials.
Raises:
ValueError: For invalid parameters.
"""
subject_token_supplier = info.get("subject_token_supplier")
kwargs.update({"subject_token_supplier": subject_token_supplier})
return super(Credentials, cls).from_info(info, **kwargs)
@classmethod
def from_file(cls, filename, **kwargs):
"""Creates an IdentityPool Credentials instance from an external account json file.
**IMPORTANT**:
This method does not validate the credential configuration. A security
risk occurs when a credential configuration configured with malicious urls
is used.
When the credential configuration is accepted from an
untrusted source, you should validate it before using with this method.
Refer https://cloud.google.com/docs/authentication/external/externally-sourced-credentials for more details.
Args:
filename (str): The path to the IdentityPool external account json file.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.identity_pool.Credentials: The constructed
credentials.
"""
return super(Credentials, cls).from_file(filename, **kwargs)
def refresh(self, request):
"""Refreshes the access token.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
"""
from google.auth import _agent_identity_utils
cert_fingerprint = None
# Check if the credential is X.509 based.
if self._credential_source_certificate is not None:
cert_bytes = self._get_cert_bytes()
cert = _agent_identity_utils.parse_certificate(cert_bytes)
if _agent_identity_utils.should_request_bound_token(cert):
cert_fingerprint = (
_agent_identity_utils.calculate_certificate_fingerprint(cert)
)
self._perform_refresh_token(request, cert_fingerprint=cert_fingerprint)
self._handle_trust_boundary(request)
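# --- Hypothetical usage sketch (illustration only; not part of this module). ---
# Builds file-sourced identity pool credentials from a parsed configuration dict.
# The audience, pool/provider IDs, scopes and token file path are placeholder
# assumptions; the token file is expected to contain a plain-text OIDC token.
def _example_file_sourced_credentials():  # pragma: NO COVER
    info = {
        "type": "external_account",
        "audience": (
            "//iam.googleapis.com/projects/123456/locations/global/"
            "workloadIdentityPools/example-pool/providers/example-provider"
        ),
        "subject_token_type": "urn:ietf:params:oauth:token-type:jwt",
        "token_url": "https://sts.googleapis.com/v1/token",
        "credential_source": {"file": "/path/to/oidc/token.txt"},
    }
    return Credentials.from_info(
        info, scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )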

View File

@@ -0,0 +1,712 @@
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Impersonated credentials.
This module provides authentication for applications where local credentials
impersonates a remote service account using `IAM Credentials API`_.
This class can be used to impersonate a service account as long as the original
Credential object has the "Service Account Token Creator" role on the target
service account.
.. _IAM Credentials API:
https://cloud.google.com/iam/credentials/reference/rest/
"""
import base64
import copy
from datetime import datetime
import http.client as http_client
import json
from google.auth import _exponential_backoff
from google.auth import _helpers
from google.auth import credentials
from google.auth import exceptions
from google.auth import iam
from google.auth import jwt
from google.auth import metrics
from google.oauth2 import _client
_REFRESH_ERROR = "Unable to acquire impersonated credentials"
_DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
_GOOGLE_OAUTH2_TOKEN_ENDPOINT = "https://oauth2.googleapis.com/token"
_TRUST_BOUNDARY_LOOKUP_ENDPOINT = (
"https://iamcredentials.{}/v1/projects/-/serviceAccounts/{}/allowedLocations"
)
_SOURCE_CREDENTIAL_AUTHORIZED_USER_TYPE = "authorized_user"
_SOURCE_CREDENTIAL_SERVICE_ACCOUNT_TYPE = "service_account"
_SOURCE_CREDENTIAL_EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = (
"external_account_authorized_user"
)
def _make_iam_token_request(
request,
principal,
headers,
body,
universe_domain=credentials.DEFAULT_UNIVERSE_DOMAIN,
iam_endpoint_override=None,
):
"""Makes a request to the Google Cloud IAM service for an access token.
Args:
request (Request): The Request object to use.
principal (str): The principal to request an access token for.
headers (Mapping[str, str]): Map of headers to transmit.
body (Mapping[str, str]): JSON Payload body for the iamcredentials
API call.
        iam_endpoint_override (Optional[str]): The full IAM endpoint override
with the target_principal embedded. This is useful when supporting
impersonation with regional endpoints.
Raises:
google.auth.exceptions.TransportError: Raised if there is an underlying
HTTP connection error
google.auth.exceptions.RefreshError: Raised if the impersonated
credentials are not available. Common reasons are
`iamcredentials.googleapis.com` is not enabled or the
`Service Account Token Creator` is not assigned
"""
iam_endpoint = iam_endpoint_override or iam._IAM_ENDPOINT.replace(
credentials.DEFAULT_UNIVERSE_DOMAIN, universe_domain
).format(principal)
body = json.dumps(body).encode("utf-8")
response = request(url=iam_endpoint, method="POST", headers=headers, body=body)
# support both string and bytes type response.data
response_body = (
response.data.decode("utf-8")
if hasattr(response.data, "decode")
else response.data
)
if response.status != http_client.OK:
raise exceptions.RefreshError(_REFRESH_ERROR, response_body)
try:
token_response = json.loads(response_body)
token = token_response["accessToken"]
expiry = datetime.strptime(token_response["expireTime"], "%Y-%m-%dT%H:%M:%SZ")
return token, expiry
except (KeyError, ValueError) as caught_exc:
new_exc = exceptions.RefreshError(
"{}: No access token or invalid expiration in response.".format(
_REFRESH_ERROR
),
response_body,
)
raise new_exc from caught_exc
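# --- Hypothetical sketch (illustration only; not part of this module). ---
# An example of the kind of JSON payload posted to generateAccessToken by the helper
# above. The delegates, scope and lifetime values are placeholder assumptions; consult
# the IAM Credentials API reference for the authoritative request schema.
_EXAMPLE_GENERATE_ACCESS_TOKEN_BODY = {
    "delegates": [],
    "scope": ["https://www.googleapis.com/auth/cloud-platform"],
    "lifetime": "3600s",
}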
class Credentials(
credentials.Scoped,
credentials.CredentialsWithQuotaProject,
credentials.Signing,
credentials.CredentialsWithTrustBoundary,
):
"""This module defines impersonated credentials which are essentially
impersonated identities.
Impersonated credentials allow credentials issued to a user or
service account to impersonate another. The target service account must
grant the originating credential principal the
`Service Account Token Creator`_ IAM role.
For more information about Token Creator IAM role and
IAMCredentials API, see
`Creating Short-Lived Service Account Credentials`_.
.. _Service Account Token Creator:
https://cloud.google.com/iam/docs/service-accounts#the_service_account_token_creator_role
.. _Creating Short-Lived Service Account Credentials:
https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials
Usage:
First grant source_credentials the `Service Account Token Creator`
role on the target account to impersonate. In this example, the
service account represented by svc_account.json has the
token creator role on
`impersonated-account@_project_.iam.gserviceaccount.com`.
Enable the IAMCredentials API on the source project:
`gcloud services enable iamcredentials.googleapis.com`.
Initialize a source credential which does not have access to
list buckets::
from google.oauth2 import service_account
target_scopes = [
'https://www.googleapis.com/auth/devstorage.read_only']
source_credentials = (
service_account.Credentials.from_service_account_file(
'/path/to/svc_account.json',
scopes=target_scopes))
Now use the source credentials to acquire credentials to impersonate
another service account::
from google.auth import impersonated_credentials
target_credentials = impersonated_credentials.Credentials(
source_credentials=source_credentials,
target_principal='impersonated-account@_project_.iam.gserviceaccount.com',
target_scopes = target_scopes,
lifetime=500)
Resource access is granted::
client = storage.Client(credentials=target_credentials)
buckets = client.list_buckets(project='your_project')
for bucket in buckets:
print(bucket.name)
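If you need to impersonate a user rather than a service account, pass the
`subject` argument to trigger a domain-wide delegation flow (supported only
in the googleapis.com universe). A minimal sketch, where the user email is
a placeholder::
    delegated_credentials = impersonated_credentials.Credentials(
        source_credentials=source_credentials,
        target_principal='impersonated-account@_project_.iam.gserviceaccount.com',
        target_scopes=target_scopes,
        subject='end-user@example.com')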
**IMPORTANT**:
This class does not validate the credential configuration. A security
risk occurs when a credential configuration that contains malicious URLs
is used.
When the credential configuration is accepted from an
untrusted source, you should validate it before use.
Refer to https://cloud.google.com/docs/authentication/external/externally-sourced-credentials for more details.
"""
def __init__(
self,
source_credentials,
target_principal,
target_scopes,
delegates=None,
subject=None,
lifetime=_DEFAULT_TOKEN_LIFETIME_SECS,
quota_project_id=None,
iam_endpoint_override=None,
trust_boundary=None,
):
"""
Args:
source_credentials (google.auth.Credentials): The source credential
used to acquire the impersonated credentials.
target_principal (str): The service account to impersonate.
target_scopes (Sequence[str]): Scopes to request during the
authorization grant.
delegates (Sequence[str]): The chained list of delegates required
to grant the final access_token. If set, the sequence of
identities must have "Service Account Token Creator" capability
granted to the preceding identity. For example, if set to
[serviceAccountB, serviceAccountC], the source_credential
must have the Token Creator role on serviceAccountB.
serviceAccountB must have the Token Creator role on
serviceAccountC.
Finally, serviceAccountC must have the Token Creator role on target_principal.
If left unset, source_credential must have that role on
target_principal.
lifetime (int): Number of seconds the delegated credential should
be valid for (up to 3600).
quota_project_id (Optional[str]): The project ID used for quota and billing.
This project may be different from the project used to
create the credentials.
iam_endpoint_override (Optional[str]): The full IAM endpoint override
with the target_principal embedded. This is useful when supporting
impersonation with regional endpoints.
subject (Optional[str]): sub field of a JWT. This field should only be set
if you wish to impersonate as a user. This feature is useful when
using domain wide delegation.
trust_boundary (Mapping[str,str]): A credential trust boundary.
"""
super(Credentials, self).__init__()
self._source_credentials = copy.copy(source_credentials)
# Service account source credentials must have the _IAM_SCOPE
# added to refresh correctly. User credentials cannot have
# their original scopes modified.
if isinstance(self._source_credentials, credentials.Scoped):
self._source_credentials = self._source_credentials.with_scopes(
iam._IAM_SCOPE
)
# If the source credential is service account and self signed jwt
# is needed, we need to create a jwt credential inside it
if (
hasattr(self._source_credentials, "_create_self_signed_jwt")
and self._source_credentials._always_use_jwt_access
):
self._source_credentials._create_self_signed_jwt(None)
self._universe_domain = source_credentials.universe_domain
self._target_principal = target_principal
self._target_scopes = target_scopes
self._delegates = delegates
self._subject = subject
self._lifetime = lifetime or _DEFAULT_TOKEN_LIFETIME_SECS
self.token = None
self.expiry = _helpers.utcnow()
self._quota_project_id = quota_project_id
self._iam_endpoint_override = iam_endpoint_override
self._cred_file_path = None
self._trust_boundary = trust_boundary
def _metric_header_for_usage(self):
return metrics.CRED_TYPE_SA_IMPERSONATE
def _perform_refresh_token(self, request):
"""Updates credentials with a new access_token representing
the impersonated account.
Args:
request (google.auth.transport.requests.Request): Request object
to use for refreshing credentials.
"""
# Refresh the source credentials if they are not valid.
if (
self._source_credentials.token_state == credentials.TokenState.STALE
or self._source_credentials.token_state == credentials.TokenState.INVALID
):
self._source_credentials.refresh(request)
body = {
"delegates": self._delegates,
"scope": self._target_scopes,
"lifetime": str(self._lifetime) + "s",
}
headers = {
"Content-Type": "application/json",
metrics.API_CLIENT_HEADER: metrics.token_request_access_token_impersonate(),
}
# Apply the source credentials authentication info.
self._source_credentials.apply(headers)
# If a subject is specified, a domain-wide delegation auth flow is initiated
# to impersonate the provided subject (user).
if self._subject:
if self.universe_domain != credentials.DEFAULT_UNIVERSE_DOMAIN:
raise exceptions.GoogleAuthError(
"Domain-wide delegation is not supported in universes other "
+ "than googleapis.com"
)
now = _helpers.utcnow()
payload = {
"iss": self._target_principal,
"scope": _helpers.scopes_to_string(self._target_scopes or ()),
"sub": self._subject,
"aud": _GOOGLE_OAUTH2_TOKEN_ENDPOINT,
"iat": _helpers.datetime_to_secs(now),
"exp": _helpers.datetime_to_secs(now) + _DEFAULT_TOKEN_LIFETIME_SECS,
}
assertion = _sign_jwt_request(
request=request,
principal=self._target_principal,
headers=headers,
payload=payload,
delegates=self._delegates,
)
self.token, self.expiry, _ = _client.jwt_grant(
request, _GOOGLE_OAUTH2_TOKEN_ENDPOINT, assertion
)
return
self.token, self.expiry = _make_iam_token_request(
request=request,
principal=self._target_principal,
headers=headers,
body=body,
universe_domain=self.universe_domain,
iam_endpoint_override=self._iam_endpoint_override,
)
def _build_trust_boundary_lookup_url(self):
"""Builds and returns the URL for the trust boundary lookup API.
This method constructs the specific URL for the IAM Credentials API's
`allowedLocations` endpoint, using the credential's universe domain
and service account email.
Raises:
ValueError: If `self.service_account_email` is None or an empty
string, as it's required to form the URL.
Returns:
str: The URL for the trust boundary lookup endpoint.
"""
if not self.service_account_email:
raise ValueError(
"Service account email is required to build the trust boundary lookup URL."
)
return _TRUST_BOUNDARY_LOOKUP_ENDPOINT.format(
self.universe_domain, self.service_account_email
)
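# Illustrative note: with the default universe domain, the URL built above has
# the form
# https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/{service_account_email}/allowedLocations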
def sign_bytes(self, message):
from google.auth.transport.requests import AuthorizedSession
iam_sign_endpoint = iam._IAM_SIGN_ENDPOINT.replace(
credentials.DEFAULT_UNIVERSE_DOMAIN, self.universe_domain
).format(self._target_principal)
body = {
"payload": base64.b64encode(message).decode("utf-8"),
"delegates": self._delegates,
}
headers = {"Content-Type": "application/json"}
authed_session = AuthorizedSession(self._source_credentials)
try:
retries = _exponential_backoff.ExponentialBackoff()
for _ in retries:
response = authed_session.post(
url=iam_sign_endpoint, headers=headers, json=body
)
if response.status_code in iam.IAM_RETRY_CODES:
continue
if response.status_code != http_client.OK:
raise exceptions.TransportError(
"Error calling sign_bytes: {}".format(response.json())
)
return base64.b64decode(response.json()["signedBlob"])
finally:
authed_session.close()
raise exceptions.TransportError("exhausted signBlob endpoint retries")
@property
def signer_email(self):
return self._target_principal
@property
def service_account_email(self):
return self._target_principal
@property
def signer(self):
return self
@property
def requires_scopes(self):
return not self._target_scopes
@_helpers.copy_docstring(credentials.Credentials)
def get_cred_info(self):
if self._cred_file_path:
return {
"credential_source": self._cred_file_path,
"credential_type": "impersonated credentials",
"principal": self._target_principal,
}
return None
def _make_copy(self):
cred = self.__class__(
self._source_credentials,
target_principal=self._target_principal,
target_scopes=self._target_scopes,
delegates=self._delegates,
lifetime=self._lifetime,
quota_project_id=self._quota_project_id,
iam_endpoint_override=self._iam_endpoint_override,
trust_boundary=self._trust_boundary,
)
cred._cred_file_path = self._cred_file_path
return cred
@_helpers.copy_docstring(credentials.CredentialsWithTrustBoundary)
def with_trust_boundary(self, trust_boundary):
cred = self._make_copy()
cred._trust_boundary = trust_boundary
return cred
@_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
def with_quota_project(self, quota_project_id):
cred = self._make_copy()
cred._quota_project_id = quota_project_id
return cred
@_helpers.copy_docstring(credentials.Scoped)
def with_scopes(self, scopes, default_scopes=None):
cred = self._make_copy()
cred._target_scopes = scopes or default_scopes
return cred
@classmethod
def from_impersonated_service_account_info(cls, info, scopes=None):
"""Creates a Credentials instance from parsed impersonated service account credentials info.
**IMPORTANT**:
This method does not validate the credential configuration. A security
risk occurs when a credential configuration that contains malicious URLs
is used.
When the credential configuration is accepted from an
untrusted source, you should validate it before using it with this method.
Refer to https://cloud.google.com/docs/authentication/external/externally-sourced-credentials for more details.
Args:
info (Mapping[str, str]): The impersonated service account credentials info in Google
format.
scopes (Sequence[str]): Optional list of scopes to include in the
credentials.
Returns:
google.auth.impersonated_credentials.Credentials: The constructed
credentials.
Raises:
InvalidType: If the info["source_credentials"] is not a supported credential type.
InvalidValue: If the info["service_account_impersonation_url"] is not in the expected format.
ValueError: If the info is not in the expected format.
"""
source_credentials_info = info.get("source_credentials")
source_credentials_type = source_credentials_info.get("type")
if source_credentials_type == _SOURCE_CREDENTIAL_AUTHORIZED_USER_TYPE:
from google.oauth2 import credentials
source_credentials = credentials.Credentials.from_authorized_user_info(
source_credentials_info
)
elif source_credentials_type == _SOURCE_CREDENTIAL_SERVICE_ACCOUNT_TYPE:
from google.oauth2 import service_account
source_credentials = service_account.Credentials.from_service_account_info(
source_credentials_info
)
elif (
source_credentials_type
== _SOURCE_CREDENTIAL_EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE
):
from google.auth import external_account_authorized_user
source_credentials = external_account_authorized_user.Credentials.from_info(
source_credentials_info
)
else:
raise exceptions.InvalidType(
"source credential of type {} is not supported.".format(
source_credentials_type
)
)
impersonation_url = info.get("service_account_impersonation_url")
start_index = impersonation_url.rfind("/")
end_index = impersonation_url.find(":generateAccessToken")
if start_index == -1 or end_index == -1 or start_index > end_index:
raise exceptions.InvalidValue(
"Cannot extract target principal from {}".format(impersonation_url)
)
target_principal = impersonation_url[start_index + 1 : end_index]
delegates = info.get("delegates")
quota_project_id = info.get("quota_project_id")
scopes = scopes or info.get("scopes")
trust_boundary = info.get("trust_boundary")
return cls(
source_credentials,
target_principal,
scopes,
delegates,
quota_project_id=quota_project_id,
trust_boundary=trust_boundary,
)
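# Illustrative sketch of the `info` mapping this classmethod consumes. Values
# are placeholders; only the keys read above are shown.
#
#   info = {
#       "source_credentials": {"type": "service_account", ...},
#       "service_account_impersonation_url": (
#           "https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/"
#           "impersonated-account@_project_.iam.gserviceaccount.com:generateAccessToken"),
#       "delegates": [],
#       "quota_project_id": "my-project",
#       "scopes": ["https://www.googleapis.com/auth/cloud-platform"],
#   }
#   credentials = Credentials.from_impersonated_service_account_info(info)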
class IDTokenCredentials(credentials.CredentialsWithQuotaProject):
"""Open ID Connect ID Token-based service account credentials."""
def __init__(
self,
target_credentials,
target_audience=None,
include_email=False,
quota_project_id=None,
):
"""
Args:
target_credentials (google.auth.Credentials): The target
credentials for which to acquire ID tokens.
target_audience (string): Audience to issue the token for.
include_email (bool): Include email in the ID token.
quota_project_id (Optional[str]): The project ID used for
quota and billing.
"""
super(IDTokenCredentials, self).__init__()
if not isinstance(target_credentials, Credentials):
raise exceptions.GoogleAuthError(
"Provided Credential must be " "impersonated_credentials"
)
self._target_credentials = target_credentials
self._target_audience = target_audience
self._include_email = include_email
self._quota_project_id = quota_project_id
def from_credentials(self, target_credentials, target_audience=None):
return self.__class__(
target_credentials=target_credentials,
target_audience=target_audience,
include_email=self._include_email,
quota_project_id=self._quota_project_id,
)
def with_target_audience(self, target_audience):
return self.__class__(
target_credentials=self._target_credentials,
target_audience=target_audience,
include_email=self._include_email,
quota_project_id=self._quota_project_id,
)
def with_include_email(self, include_email):
return self.__class__(
target_credentials=self._target_credentials,
target_audience=self._target_audience,
include_email=include_email,
quota_project_id=self._quota_project_id,
)
@_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
def with_quota_project(self, quota_project_id):
return self.__class__(
target_credentials=self._target_credentials,
target_audience=self._target_audience,
include_email=self._include_email,
quota_project_id=quota_project_id,
)
@_helpers.copy_docstring(credentials.Credentials)
def refresh(self, request):
from google.auth.transport.requests import AuthorizedSession
iam_sign_endpoint = iam._IAM_IDTOKEN_ENDPOINT.replace(
credentials.DEFAULT_UNIVERSE_DOMAIN,
self._target_credentials.universe_domain,
).format(self._target_credentials.signer_email)
body = {
"audience": self._target_audience,
"delegates": self._target_credentials._delegates,
"includeEmail": self._include_email,
}
headers = {
"Content-Type": "application/json",
metrics.API_CLIENT_HEADER: metrics.token_request_id_token_impersonate(),
}
authed_session = AuthorizedSession(
self._target_credentials._source_credentials, auth_request=request
)
try:
response = authed_session.post(
url=iam_sign_endpoint,
headers=headers,
data=json.dumps(body).encode("utf-8"),
)
finally:
authed_session.close()
if response.status_code != http_client.OK:
raise exceptions.RefreshError(
"Error getting ID token: {}".format(response.json())
)
try:
id_token = response.json()["token"]
except (KeyError, ValueError) as caught_exc:
new_exc = exceptions.RefreshError(
"No ID token in response.", response.json()
)
raise new_exc from caught_exc
self.token = id_token
self.expiry = _helpers.utcfromtimestamp(
jwt.decode(id_token, verify=False)["exp"]
)
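# A minimal usage sketch (illustrative; the audience is a placeholder): wrap an
# impersonated Credentials object to obtain an OpenID Connect ID token.
#
#   id_creds = IDTokenCredentials(
#       target_credentials=target_credentials,
#       target_audience='https://example.com',
#       include_email=True)
#   # `request` is a google.auth.transport.requests.Request instance.
#   id_creds.refresh(request)
#   print(id_creds.token)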
def _sign_jwt_request(request, principal, headers, payload, delegates=[]):
"""Makes a request to the Google Cloud IAM service to sign a JWT using a
service account's system-managed private key.
Args:
request (Request): The Request object to use.
principal (str): The service account to sign the JWT for.
headers (Mapping[str, str]): Map of headers to transmit.
payload (Mapping[str, str]): The JWT payload to sign. Must be a
serialized JSON object that contains a JWT Claims Set.
delegates (Sequence[str]): The chained list of delegates required
to grant the final access_token. If set, the sequence of
identities must have "Service Account Token Creator" capability
granted to the preceding identity. For example, if set to
[serviceAccountB, serviceAccountC], the source_credential
must have the Token Creator role on serviceAccountB.
serviceAccountB must have the Token Creator role on
serviceAccountC.
Finally, serviceAccountC must have the Token Creator role on target_principal.
If left unset, source_credential must have that role on
target_principal.
Raises:
google.auth.exceptions.TransportError: Raised if there is an underlying
HTTP connection error
google.auth.exceptions.RefreshError: Raised if the impersonated
credentials are not available. Common reasons are
`iamcredentials.googleapis.com` is not enabled or the
`Service Account Token Creator` role is not assigned.
"""
iam_endpoint = iam._IAM_SIGNJWT_ENDPOINT.format(principal)
body = {"delegates": delegates, "payload": json.dumps(payload)}
body = json.dumps(body).encode("utf-8")
response = request(url=iam_endpoint, method="POST", headers=headers, body=body)
# support both string and bytes type response.data
response_body = (
response.data.decode("utf-8")
if hasattr(response.data, "decode")
else response.data
)
if response.status != http_client.OK:
raise exceptions.RefreshError(_REFRESH_ERROR, response_body)
try:
jwt_response = json.loads(response_body)
signed_jwt = jwt_response["signedJwt"]
return signed_jwt
except (KeyError, ValueError) as caught_exc:
new_exc = exceptions.RefreshError(
"{}: No signed JWT in response.".format(_REFRESH_ERROR), response_body
)
raise new_exc from caught_exc


@@ -0,0 +1,877 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""JSON Web Tokens
Provides support for creating (encoding) and verifying (decoding) JWTs,
especially JWTs generated and consumed by Google infrastructure.
See `rfc7519`_ for more details on JWTs.
To encode a JWT use :func:`encode`::
from google.auth import crypt
from google.auth import jwt
signer = crypt.Signer(private_key)
payload = {'some': 'payload'}
encoded = jwt.encode(signer, payload)
To decode a JWT and verify claims use :func:`decode`::
claims = jwt.decode(encoded, certs=public_certs)
You can also skip verification::
claims = jwt.decode(encoded, verify=False)
.. _rfc7519: https://tools.ietf.org/html/rfc7519
"""
try:
from collections.abc import Mapping
# Python 2.7 compatibility
except ImportError: # pragma: NO COVER
from collections import Mapping # type: ignore
import copy
import datetime
import json
import urllib
from google.auth import _cache
from google.auth import _helpers
from google.auth import _service_account_info
from google.auth import crypt
from google.auth import exceptions
import google.auth.credentials
try:
from google.auth.crypt import es
except ImportError: # pragma: NO COVER
es = None # type: ignore
_DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
_DEFAULT_MAX_CACHE_SIZE = 10
_ALGORITHM_TO_VERIFIER_CLASS = {"RS256": crypt.RSAVerifier}
_CRYPTOGRAPHY_BASED_ALGORITHMS = frozenset(["ES256", "ES384"])
if es is not None: # pragma: NO COVER
_ALGORITHM_TO_VERIFIER_CLASS["ES256"] = es.EsVerifier # type: ignore
_ALGORITHM_TO_VERIFIER_CLASS["ES384"] = es.EsVerifier # type: ignore
def encode(signer, payload, header=None, key_id=None):
"""Make a signed JWT.
Args:
signer (google.auth.crypt.Signer): The signer used to sign the JWT.
payload (Mapping[str, str]): The JWT payload.
header (Mapping[str, str]): Additional JWT header payload.
key_id (str): The key id to add to the JWT header. If the
signer has a key id it will be used as the default. If this is
specified it will override the signer's key id.
Returns:
bytes: The encoded JWT.
"""
if header is None:
header = {}
if key_id is None:
key_id = signer.key_id
header.update({"typ": "JWT"})
if "alg" not in header:
if es is not None and isinstance(signer, es.EsSigner):
header.update({"alg": signer.algorithm})
else:
header.update({"alg": "RS256"})
if key_id is not None:
header["kid"] = key_id
segments = [
_helpers.unpadded_urlsafe_b64encode(json.dumps(header).encode("utf-8")),
_helpers.unpadded_urlsafe_b64encode(json.dumps(payload).encode("utf-8")),
]
signing_input = b".".join(segments)
signature = signer.sign(signing_input)
segments.append(_helpers.unpadded_urlsafe_b64encode(signature))
return b".".join(segments)
def _decode_jwt_segment(encoded_section):
"""Decodes a single JWT segment."""
section_bytes = _helpers.padded_urlsafe_b64decode(encoded_section)
try:
return json.loads(section_bytes.decode("utf-8"))
except ValueError as caught_exc:
new_exc = exceptions.MalformedError(
"Can't parse segment: {0}".format(section_bytes)
)
raise new_exc from caught_exc
def _unverified_decode(token):
"""Decodes a token and does no verification.
Args:
token (Union[str, bytes]): The encoded JWT.
Returns:
Tuple[Mapping, Mapping, str, str]: header, payload, signed_section, and
signature.
Raises:
google.auth.exceptions.MalformedError: if there is an incorrect number of segments in the token or a segment is of the wrong type.
"""
token = _helpers.to_bytes(token)
if token.count(b".") != 2:
raise exceptions.MalformedError(
"Wrong number of segments in token: {0}".format(token)
)
encoded_header, encoded_payload, signature = token.split(b".")
signed_section = encoded_header + b"." + encoded_payload
signature = _helpers.padded_urlsafe_b64decode(signature)
# Parse segments
header = _decode_jwt_segment(encoded_header)
payload = _decode_jwt_segment(encoded_payload)
if not isinstance(header, Mapping):
raise exceptions.MalformedError(
"Header segment should be a JSON object: {0}".format(encoded_header)
)
if not isinstance(payload, Mapping):
raise exceptions.MalformedError(
"Payload segment should be a JSON object: {0}".format(encoded_payload)
)
return header, payload, signed_section, signature
def decode_header(token):
"""Return the decoded header of a token.
No verification is done. This is useful to extract the key id from
the header in order to acquire the appropriate certificate to verify
the token.
Args:
token (Union[str, bytes]): the encoded JWT.
Returns:
Mapping: The decoded JWT header.
"""
header, _, _, _ = _unverified_decode(token)
return header
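# A small illustrative sketch: read the unverified header to find the key id,
# then fetch the matching certificate before calling decode() with it.
#
#   header = decode_header(token)
#   key_id = header.get("kid")
#   # ...look up the PEM certificate for key_id...
#   claims = decode(token, certs={key_id: pem_cert})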
def _verify_iat_and_exp(payload, clock_skew_in_seconds=0):
"""Verifies the ``iat`` (Issued At) and ``exp`` (Expires) claims in a token
payload.
Args:
payload (Mapping[str, str]): The JWT payload.
clock_skew_in_seconds (int): The clock skew used for `iat` and `exp`
validation.
Raises:
google.auth.exceptions.InvalidValue: if value validation failed.
google.auth.exceptions.MalformedError: if schema validation failed.
"""
now = _helpers.datetime_to_secs(_helpers.utcnow())
# Make sure the iat and exp claims are present.
for key in ("iat", "exp"):
if key not in payload:
raise exceptions.MalformedError(
"Token does not contain required claim {}".format(key)
)
# Make sure the token wasn't issued in the future.
iat = payload["iat"]
# Err on the side of accepting a token that is slightly early to account
# for clock skew.
earliest = iat - clock_skew_in_seconds
if now < earliest:
raise exceptions.InvalidValue(
"Token used too early, {} < {}. Check that your computer's clock is set correctly.".format(
now, iat
)
)
# Make sure the token hasn't expired.
exp = payload["exp"]
# Err on the side of accepting a token that is slightly out of date
# to account for clock skew.
latest = exp + clock_skew_in_seconds
if latest < now:
raise exceptions.InvalidValue("Token expired, {} < {}".format(latest, now))
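# Worked example of the checks above (illustrative numbers): with
# clock_skew_in_seconds=10, a token whose "iat" is 5 seconds in the future is
# accepted (now >= iat - 10), and a token whose "exp" passed 5 seconds ago is
# also accepted (exp + 10 >= now).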
def decode(token, certs=None, verify=True, audience=None, clock_skew_in_seconds=0):
"""Decode and verify a JWT.
Args:
token (str): The encoded JWT.
certs (Union[str, bytes, Mapping[str, Union[str, bytes]]]): The
certificate used to validate the JWT signature. If bytes or string,
it must be the public key certificate in PEM format. If a mapping,
it must be a mapping of key IDs to public key certificates in PEM
format. The mapping must contain the same key ID that's specified
in the token's header.
verify (bool): Whether to perform signature and claim validation.
Verification is done by default.
audience (str or list): The audience claim, 'aud', that this JWT should
contain, or a list of acceptable audience claims. If None, the JWT's
'aud' claim is not verified.
clock_skew_in_seconds (int): The clock skew used for `iat` and `exp`
validation.
Returns:
Mapping[str, str]: The deserialized JSON payload in the JWT.
Raises:
google.auth.exceptions.InvalidValue: if value validation failed.
google.auth.exceptions.MalformedError: if schema validation failed.
"""
header, payload, signed_section, signature = _unverified_decode(token)
if not verify:
return payload
# Pluck the key id and algorithm from the header and make sure we have
# a verifier that can support it.
key_alg = header.get("alg")
key_id = header.get("kid")
try:
verifier_cls = _ALGORITHM_TO_VERIFIER_CLASS[key_alg]
except KeyError as exc:
if key_alg in _CRYPTOGRAPHY_BASED_ALGORITHMS:
raise exceptions.InvalidValue(
"The key algorithm {} requires the cryptography package to be installed.".format(
key_alg
)
) from exc
else:
raise exceptions.InvalidValue(
"Unsupported signature algorithm {}".format(key_alg)
) from exc
# If certs is specified as a dictionary of key IDs to certificates, then
# use the certificate identified by the key ID in the token header.
if isinstance(certs, Mapping):
if key_id:
if key_id not in certs:
raise exceptions.MalformedError(
"Certificate for key id {} not found.".format(key_id)
)
certs_to_check = [certs[key_id]]
# If there's no key id in the header, check against all of the certs.
else:
certs_to_check = certs.values()
else:
certs_to_check = certs
# Verify that the signature matches the message.
if not crypt.verify_signature(
signed_section, signature, certs_to_check, verifier_cls
):
raise exceptions.MalformedError("Could not verify token signature.")
# Verify the issued-at and expiration times in the payload.
_verify_iat_and_exp(payload, clock_skew_in_seconds)
# Check audience.
if audience is not None:
claim_audience = payload.get("aud")
if isinstance(audience, str):
audience = [audience]
if claim_audience not in audience:
raise exceptions.InvalidValue(
"Token has wrong audience {}, expected one of {}".format(
claim_audience, audience
)
)
return payload
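# Illustrative sketch (certificates and audiences are placeholders): verify a
# token against a mapping of key ids to PEM certificates and accept either of
# two audiences, allowing 10 seconds of clock skew.
#
#   claims = decode(
#       token,
#       certs={"kid-1": pem_cert_1, "kid-2": pem_cert_2},
#       audience=["https://service-a.example.com", "https://service-b.example.com"],
#       clock_skew_in_seconds=10)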
class Credentials(
google.auth.credentials.Signing, google.auth.credentials.CredentialsWithQuotaProject
):
"""Credentials that use a JWT as the bearer token.
These credentials require an "audience" claim. This claim identifies the
intended recipient of the bearer token.
The constructor arguments determine the claims for the JWT that is
sent with requests. Usually, you'll construct these credentials with
one of the helper constructors as shown in the next section.
To create JWT credentials using a Google service account private key
JSON file::
audience = 'https://pubsub.googleapis.com/google.pubsub.v1.Publisher'
credentials = jwt.Credentials.from_service_account_file(
'service-account.json',
audience=audience)
If you already have the service account file loaded and parsed::
service_account_info = json.load(open('service_account.json'))
credentials = jwt.Credentials.from_service_account_info(
service_account_info,
audience=audience)
Both helper methods pass on arguments to the constructor, so you can
specify the JWT claims::
credentials = jwt.Credentials.from_service_account_file(
'service-account.json',
audience=audience,
additional_claims={'meta': 'data'})
You can also construct the credentials directly if you have a
:class:`~google.auth.crypt.Signer` instance::
credentials = jwt.Credentials(
signer,
issuer='your-issuer',
subject='your-subject',
audience=audience)
The claims are considered immutable. If you want to modify the claims,
you can easily create another instance using :meth:`with_claims`::
new_audience = (
'https://pubsub.googleapis.com/google.pubsub.v1.Subscriber')
new_credentials = credentials.with_claims(audience=new_audience)
"""
def __init__(
self,
signer,
issuer,
subject,
audience,
additional_claims=None,
token_lifetime=_DEFAULT_TOKEN_LIFETIME_SECS,
quota_project_id=None,
):
"""
Args:
signer (google.auth.crypt.Signer): The signer used to sign JWTs.
issuer (str): The `iss` claim.
subject (str): The `sub` claim.
audience (str): the `aud` claim. The intended audience for the
credentials.
additional_claims (Mapping[str, str]): Any additional claims for
the JWT payload.
token_lifetime (int): The amount of time in seconds for
which the token is valid. Defaults to 1 hour.
quota_project_id (Optional[str]): The project ID used for quota
and billing.
"""
super(Credentials, self).__init__()
self._signer = signer
self._issuer = issuer
self._subject = subject
self._audience = audience
self._token_lifetime = token_lifetime
self._quota_project_id = quota_project_id
if additional_claims is None:
additional_claims = {}
self._additional_claims = additional_claims
@classmethod
def _from_signer_and_info(cls, signer, info, **kwargs):
"""Creates a Credentials instance from a signer and service account
info.
Args:
signer (google.auth.crypt.Signer): The signer used to sign JWTs.
info (Mapping[str, str]): The service account info.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.jwt.Credentials: The constructed credentials.
Raises:
google.auth.exceptions.MalformedError: If the info is not in the expected format.
"""
kwargs.setdefault("subject", info["client_email"])
kwargs.setdefault("issuer", info["client_email"])
return cls(signer, **kwargs)
@classmethod
def from_service_account_info(cls, info, **kwargs):
"""Creates a Credentials instance from a dictionary.
Args:
info (Mapping[str, str]): The service account info in Google
format.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.jwt.Credentials: The constructed credentials.
Raises:
google.auth.exceptions.MalformedError: If the info is not in the expected format.
"""
signer = _service_account_info.from_dict(info, require=["client_email"])
return cls._from_signer_and_info(signer, info, **kwargs)
@classmethod
def from_service_account_file(cls, filename, **kwargs):
"""Creates a Credentials instance from a service account .json file
in Google format.
Args:
filename (str): The path to the service account .json file.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.jwt.Credentials: The constructed credentials.
"""
info, signer = _service_account_info.from_filename(
filename, require=["client_email"]
)
return cls._from_signer_and_info(signer, info, **kwargs)
@classmethod
def from_signing_credentials(cls, credentials, audience, **kwargs):
"""Creates a new :class:`google.auth.jwt.Credentials` instance from an
existing :class:`google.auth.credentials.Signing` instance.
The new instance will use the same signer as the existing instance and
will use the existing instance's signer email as the issuer and
subject by default.
Example::
svc_creds = service_account.Credentials.from_service_account_file(
'service_account.json')
audience = (
'https://pubsub.googleapis.com/google.pubsub.v1.Publisher')
jwt_creds = jwt.Credentials.from_signing_credentials(
svc_creds, audience=audience)
Args:
credentials (google.auth.credentials.Signing): The credentials to
use to construct the new credentials.
audience (str): the `aud` claim. The intended audience for the
credentials.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.jwt.Credentials: A new Credentials instance.
"""
kwargs.setdefault("issuer", credentials.signer_email)
kwargs.setdefault("subject", credentials.signer_email)
return cls(credentials.signer, audience=audience, **kwargs)
def with_claims(
self, issuer=None, subject=None, audience=None, additional_claims=None
):
"""Returns a copy of these credentials with modified claims.
Args:
issuer (str): The `iss` claim. If unspecified the current issuer
claim will be used.
subject (str): The `sub` claim. If unspecified the current subject
claim will be used.
audience (str): the `aud` claim. If unspecified the current
audience claim will be used.
additional_claims (Mapping[str, str]): Any additional claims for
the JWT payload. This will be merged with the current
additional claims.
Returns:
google.auth.jwt.Credentials: A new credentials instance.
"""
new_additional_claims = copy.deepcopy(self._additional_claims)
new_additional_claims.update(additional_claims or {})
return self.__class__(
self._signer,
issuer=issuer if issuer is not None else self._issuer,
subject=subject if subject is not None else self._subject,
audience=audience if audience is not None else self._audience,
additional_claims=new_additional_claims,
quota_project_id=self._quota_project_id,
)
@_helpers.copy_docstring(google.auth.credentials.CredentialsWithQuotaProject)
def with_quota_project(self, quota_project_id):
return self.__class__(
self._signer,
issuer=self._issuer,
subject=self._subject,
audience=self._audience,
additional_claims=self._additional_claims,
quota_project_id=quota_project_id,
)
def _make_jwt(self):
"""Make a signed JWT.
Returns:
Tuple[bytes, datetime]: The encoded JWT and the expiration.
"""
now = _helpers.utcnow()
lifetime = datetime.timedelta(seconds=self._token_lifetime)
expiry = now + lifetime
payload = {
"iss": self._issuer,
"sub": self._subject,
"iat": _helpers.datetime_to_secs(now),
"exp": _helpers.datetime_to_secs(expiry),
}
if self._audience:
payload["aud"] = self._audience
payload.update(self._additional_claims)
jwt = encode(self._signer, payload)
return jwt, expiry
def refresh(self, request):
"""Refreshes the access token.
Args:
request (Any): Unused.
"""
# pylint: disable=unused-argument
# (pylint doesn't correctly recognize overridden methods.)
self.token, self.expiry = self._make_jwt()
@_helpers.copy_docstring(google.auth.credentials.Signing)
def sign_bytes(self, message):
return self._signer.sign(message)
@property # type: ignore
@_helpers.copy_docstring(google.auth.credentials.Signing)
def signer_email(self):
return self._issuer
@property # type: ignore
@_helpers.copy_docstring(google.auth.credentials.Signing)
def signer(self):
return self._signer
@property # type: ignore
def additional_claims(self):
"""Additional claims the JWT object was created with."""
return self._additional_claims
class OnDemandCredentials(
google.auth.credentials.Signing, google.auth.credentials.CredentialsWithQuotaProject
):
"""On-demand JWT credentials.
Like :class:`Credentials`, this class uses a JWT as the bearer token for
authentication. However, this class does not require the audience at
construction time. Instead, it will generate a new token on-demand for
each request using the request URI as the audience. It caches tokens
so that multiple requests to the same URI do not incur the overhead
of generating a new token every time.
This behavior is especially useful for `gRPC`_ clients. A gRPC service may
have multiple audiences and gRPC clients may not know all of the audiences
required for accessing a particular service. With these credentials,
no knowledge of the audiences is required ahead of time.
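For example (an illustrative sketch; the file path is a placeholder)::
    credentials = jwt.OnDemandCredentials.from_service_account_file(
        'service-account.json')
No audience is passed at construction time; it is derived from each
request URI in :meth:`before_request`.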
.. _grpc: http://www.grpc.io/
"""
def __init__(
self,
signer,
issuer,
subject,
additional_claims=None,
token_lifetime=_DEFAULT_TOKEN_LIFETIME_SECS,
max_cache_size=_DEFAULT_MAX_CACHE_SIZE,
quota_project_id=None,
):
"""
Args:
signer (google.auth.crypt.Signer): The signer used to sign JWTs.
issuer (str): The `iss` claim.
subject (str): The `sub` claim.
additional_claims (Mapping[str, str]): Any additional claims for
the JWT payload.
token_lifetime (int): The amount of time in seconds for
which the token is valid. Defaults to 1 hour.
max_cache_size (int): The maximum number of JWT tokens to keep in
cache. Tokens are cached using :class:`google.auth._cache.LRUCache`.
quota_project_id (Optional[str]): The project ID used for quota
and billing.
"""
super(OnDemandCredentials, self).__init__()
self._signer = signer
self._issuer = issuer
self._subject = subject
self._token_lifetime = token_lifetime
self._quota_project_id = quota_project_id
if additional_claims is None:
additional_claims = {}
self._additional_claims = additional_claims
self._cache = _cache.LRUCache(maxsize=max_cache_size)
@classmethod
def _from_signer_and_info(cls, signer, info, **kwargs):
"""Creates an OnDemandCredentials instance from a signer and service
account info.
Args:
signer (google.auth.crypt.Signer): The signer used to sign JWTs.
info (Mapping[str, str]): The service account info.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.jwt.OnDemandCredentials: The constructed credentials.
Raises:
google.auth.exceptions.MalformedError: If the info is not in the expected format.
"""
kwargs.setdefault("subject", info["client_email"])
kwargs.setdefault("issuer", info["client_email"])
return cls(signer, **kwargs)
@classmethod
def from_service_account_info(cls, info, **kwargs):
"""Creates an OnDemandCredentials instance from a dictionary.
Args:
info (Mapping[str, str]): The service account info in Google
format.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.jwt.OnDemandCredentials: The constructed credentials.
Raises:
google.auth.exceptions.MalformedError: If the info is not in the expected format.
"""
signer = _service_account_info.from_dict(info, require=["client_email"])
return cls._from_signer_and_info(signer, info, **kwargs)
@classmethod
def from_service_account_file(cls, filename, **kwargs):
"""Creates an OnDemandCredentials instance from a service account .json
file in Google format.
Args:
filename (str): The path to the service account .json file.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.jwt.OnDemandCredentials: The constructed credentials.
"""
info, signer = _service_account_info.from_filename(
filename, require=["client_email"]
)
return cls._from_signer_and_info(signer, info, **kwargs)
@classmethod
def from_signing_credentials(cls, credentials, **kwargs):
"""Creates a new :class:`google.auth.jwt.OnDemandCredentials` instance
from an existing :class:`google.auth.credentials.Signing` instance.
The new instance will use the same signer as the existing instance and
will use the existing instance's signer email as the issuer and
subject by default.
Example::
svc_creds = service_account.Credentials.from_service_account_file(
'service_account.json')
jwt_creds = jwt.OnDemandCredentials.from_signing_credentials(
svc_creds)
Args:
credentials (google.auth.credentials.Signing): The credentials to
use to construct the new credentials.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.jwt.Credentials: A new Credentials instance.
"""
kwargs.setdefault("issuer", credentials.signer_email)
kwargs.setdefault("subject", credentials.signer_email)
return cls(credentials.signer, **kwargs)
def with_claims(self, issuer=None, subject=None, additional_claims=None):
"""Returns a copy of these credentials with modified claims.
Args:
issuer (str): The `iss` claim. If unspecified the current issuer
claim will be used.
subject (str): The `sub` claim. If unspecified the current subject
claim will be used.
additional_claims (Mapping[str, str]): Any additional claims for
the JWT payload. This will be merged with the current
additional claims.
Returns:
google.auth.jwt.OnDemandCredentials: A new credentials instance.
"""
new_additional_claims = copy.deepcopy(self._additional_claims)
new_additional_claims.update(additional_claims or {})
return self.__class__(
self._signer,
issuer=issuer if issuer is not None else self._issuer,
subject=subject if subject is not None else self._subject,
additional_claims=new_additional_claims,
max_cache_size=self._cache.maxsize,
quota_project_id=self._quota_project_id,
)
@_helpers.copy_docstring(google.auth.credentials.CredentialsWithQuotaProject)
def with_quota_project(self, quota_project_id):
return self.__class__(
self._signer,
issuer=self._issuer,
subject=self._subject,
additional_claims=self._additional_claims,
max_cache_size=self._cache.maxsize,
quota_project_id=quota_project_id,
)
@property
def valid(self):
"""Checks the validity of the credentials.
These credentials are always valid because they generate tokens on
demand.
"""
return True
def _make_jwt_for_audience(self, audience):
"""Make a new JWT for the given audience.
Args:
audience (str): The intended audience.
Returns:
Tuple[bytes, datetime]: The encoded JWT and the expiration.
"""
now = _helpers.utcnow()
lifetime = datetime.timedelta(seconds=self._token_lifetime)
expiry = now + lifetime
payload = {
"iss": self._issuer,
"sub": self._subject,
"iat": _helpers.datetime_to_secs(now),
"exp": _helpers.datetime_to_secs(expiry),
"aud": audience,
}
payload.update(self._additional_claims)
jwt = encode(self._signer, payload)
return jwt, expiry
def _get_jwt_for_audience(self, audience):
"""Get a JWT for a given audience.
If there is already an existing, non-expired token in the cache for
the audience, that token is used. Otherwise, a new token will be
created.
Args:
audience (str): The intended audience.
Returns:
bytes: The encoded JWT.
"""
token, expiry = self._cache.get(audience, (None, None))
if token is None or expiry < _helpers.utcnow():
token, expiry = self._make_jwt_for_audience(audience)
self._cache[audience] = token, expiry
return token
def refresh(self, request):
"""Raises an exception, because these credentials cannot be directly
refreshed.
Args:
request (Any): Unused.
Raises:
google.auth.exceptions.RefreshError
"""
# pylint: disable=unused-argument
# (pylint doesn't correctly recognize overridden methods.)
raise exceptions.RefreshError(
"OnDemandCredentials can not be directly refreshed."
)
def before_request(self, request, method, url, headers):
"""Performs credential-specific before request logic.
Args:
request (Any): Unused. JWT credentials do not need to make an
HTTP request to refresh.
method (str): The request's HTTP method.
url (str): The request's URI. This is used as the audience claim
when generating the JWT.
headers (Mapping): The request's headers.
"""
# pylint: disable=unused-argument
# (pylint doesn't correctly recognize overridden methods.)
parts = urllib.parse.urlsplit(url)
# Strip query string and fragment
audience = urllib.parse.urlunsplit(
(parts.scheme, parts.netloc, parts.path, "", "")
)
token = self._get_jwt_for_audience(audience)
self.apply(headers, token=token)
@_helpers.copy_docstring(google.auth.credentials.Signing)
def sign_bytes(self, message):
return self._signer.sign(message)
@property # type: ignore
@_helpers.copy_docstring(google.auth.credentials.Signing)
def signer_email(self):
return self._issuer
@property # type: ignore
@_helpers.copy_docstring(google.auth.credentials.Signing)
def signer(self):
return self._signer


@@ -0,0 +1,156 @@
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""We use the x-goog-api-client header to report metrics. This module provides
the constants and helper methods to construct the x-goog-api-client header.
"""
import platform
from google.auth import version
API_CLIENT_HEADER = "x-goog-api-client"
# BYOID-specific constants
BYOID_HEADER_SECTION = "google-byoid-sdk"
# Auth request type
REQUEST_TYPE_ACCESS_TOKEN = "auth-request-type/at"
REQUEST_TYPE_ID_TOKEN = "auth-request-type/it"
REQUEST_TYPE_MDS_PING = "auth-request-type/mds"
REQUEST_TYPE_REAUTH_START = "auth-request-type/re-start"
REQUEST_TYPE_REAUTH_CONTINUE = "auth-request-type/re-cont"
# Credential type
CRED_TYPE_USER = "cred-type/u"
CRED_TYPE_SA_ASSERTION = "cred-type/sa"
CRED_TYPE_SA_JWT = "cred-type/jwt"
CRED_TYPE_SA_MDS = "cred-type/mds"
CRED_TYPE_SA_IMPERSONATE = "cred-type/imp"
# Versions
def python_and_auth_lib_version():
return "gl-python/{} auth/{}".format(platform.python_version(), version.__version__)
# Token request metric header values
# x-goog-api-client header value for access token request via metadata server.
# Example: "gl-python/3.7 auth/1.1 auth-request-type/at cred-type/mds"
def token_request_access_token_mds():
return "{} {} {}".format(
python_and_auth_lib_version(), REQUEST_TYPE_ACCESS_TOKEN, CRED_TYPE_SA_MDS
)
# x-goog-api-client header value for ID token request via metadata server.
# Example: "gl-python/3.7 auth/1.1 auth-request-type/it cred-type/mds"
def token_request_id_token_mds():
return "{} {} {}".format(
python_and_auth_lib_version(), REQUEST_TYPE_ID_TOKEN, CRED_TYPE_SA_MDS
)
# x-goog-api-client header value for impersonated credentials access token request.
# Example: "gl-python/3.7 auth/1.1 auth-request-type/at cred-type/imp"
def token_request_access_token_impersonate():
return "{} {} {}".format(
python_and_auth_lib_version(),
REQUEST_TYPE_ACCESS_TOKEN,
CRED_TYPE_SA_IMPERSONATE,
)
# x-goog-api-client header value for impersonated credentials ID token request.
# Example: "gl-python/3.7 auth/1.1 auth-request-type/it cred-type/imp"
def token_request_id_token_impersonate():
return "{} {} {}".format(
python_and_auth_lib_version(), REQUEST_TYPE_ID_TOKEN, CRED_TYPE_SA_IMPERSONATE
)
# x-goog-api-client header value for service account credentials access token
# request (assertion flow).
# Example: "gl-python/3.7 auth/1.1 auth-request-type/at cred-type/sa"
def token_request_access_token_sa_assertion():
return "{} {} {}".format(
python_and_auth_lib_version(), REQUEST_TYPE_ACCESS_TOKEN, CRED_TYPE_SA_ASSERTION
)
# x-goog-api-client header value for service account credentials ID token
# request (assertion flow).
# Example: "gl-python/3.7 auth/1.1 auth-request-type/it cred-type/sa"
def token_request_id_token_sa_assertion():
return "{} {} {}".format(
python_and_auth_lib_version(), REQUEST_TYPE_ID_TOKEN, CRED_TYPE_SA_ASSERTION
)
# x-goog-api-client header value for user credentials token request.
# Example: "gl-python/3.7 auth/1.1 cred-type/u"
def token_request_user():
return "{} {}".format(python_and_auth_lib_version(), CRED_TYPE_USER)
# Miscellaneous metrics
# x-goog-api-client header value for metadata server ping.
# Example: "gl-python/3.7 auth/1.1 auth-request-type/mds"
def mds_ping():
return "{} {}".format(python_and_auth_lib_version(), REQUEST_TYPE_MDS_PING)
# x-goog-api-client header value for reauth start endpoint calls.
# Example: "gl-python/3.7 auth/1.1 auth-request-type/re-start"
def reauth_start():
return "{} {}".format(python_and_auth_lib_version(), REQUEST_TYPE_REAUTH_START)
# x-goog-api-client header value for reauth continue endpoint calls.
# Example: "gl-python/3.7 auth/1.1 auth-request-type/re-cont"
def reauth_continue():
return "{} {}".format(python_and_auth_lib_version(), REQUEST_TYPE_REAUTH_CONTINUE)
# x-goog-api-client header value for BYOID calls to the Security Token Service exchange token endpoint.
# Example: "gl-python/3.7 auth/1.1 google-byoid-sdk source/aws sa-impersonation/true"
def byoid_metrics_header(metrics_options):
header = "{} {}".format(python_and_auth_lib_version(), BYOID_HEADER_SECTION)
for key, value in metrics_options.items():
header = "{} {}/{}".format(header, key, value)
return header
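# For example (illustrative input), byoid_metrics_header({"source": "aws",
# "sa-impersonation": "true"}) produces a header value of the form
# "gl-python/{python_version} auth/{auth_version} google-byoid-sdk source/aws sa-impersonation/true".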
def add_metric_header(headers, metric_header_value):
"""Add x-goog-api-client header with the given value.
Args:
headers (Mapping[str, str]): The headers to which we will add the
metric header.
metric_header_value (Optional[str]): If value is None, do nothing;
if headers already has an x-goog-api-client header, append the value
to the existing header; otherwise add a new x-goog-api-client
header with the given value.
"""
if not metric_header_value:
return
if API_CLIENT_HEADER not in headers:
headers[API_CLIENT_HEADER] = metric_header_value
else:
headers[API_CLIENT_HEADER] += " " + metric_header_value
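# A minimal usage sketch of the helper above:
#
#   headers = {}
#   add_metric_header(headers, token_request_user())
#   # headers["x-goog-api-client"] is now
#   # "gl-python/{python_version} auth/{auth_version} cred-type/u".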


@@ -0,0 +1,445 @@
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pluggable Credentials.
Pluggable Credentials are initialized using external_account arguments which
are typically loaded from third-party executables. Unlike other
credentials that can be initialized with a list of explicit arguments, secrets
or credentials, external account clients use the environment and hints/guidelines
provided by the external_account JSON file to retrieve credentials and exchange
them for Google access tokens.
Example credential_source for pluggable credential:
{
"executable": {
"command": "/path/to/get/credentials.sh --arg1=value1 --arg2=value2",
"timeout_millis": 5000,
"output_file": "/path/to/generated/cached/credentials"
}
}
"""
try:
from collections.abc import Mapping
# Python 2.7 compatibility
except ImportError: # pragma: NO COVER
from collections import Mapping # type: ignore
import json
import os
import shlex
import subprocess
import sys
import time
from google.auth import _helpers
from google.auth import exceptions
from google.auth import external_account
# The max supported executable spec version.
EXECUTABLE_SUPPORTED_MAX_VERSION = 1
EXECUTABLE_TIMEOUT_MILLIS_DEFAULT = 30 * 1000 # 30 seconds
EXECUTABLE_TIMEOUT_MILLIS_LOWER_BOUND = 5 * 1000 # 5 seconds
EXECUTABLE_TIMEOUT_MILLIS_UPPER_BOUND = 120 * 1000 # 2 minutes
EXECUTABLE_INTERACTIVE_TIMEOUT_MILLIS_LOWER_BOUND = 30 * 1000 # 30 seconds
EXECUTABLE_INTERACTIVE_TIMEOUT_MILLIS_UPPER_BOUND = 30 * 60 * 1000 # 30 minutes
class Credentials(external_account.Credentials):
"""External account credentials sourced from executables.
**IMPORTANT**:
This class does not validate the credential configuration. A security
risk occurs when a credential configuration that contains malicious URLs
is used.
When the credential configuration is accepted from an
untrusted source, you should validate it before use.
Refer to https://cloud.google.com/docs/authentication/external/externally-sourced-credentials for more details.
"""
def __init__(
self,
audience,
subject_token_type,
token_url,
credential_source,
*args,
**kwargs
):
"""Instantiates an external account credentials object sourced from an executable.
Args:
audience (str): The STS audience field.
subject_token_type (str): The subject token type.
token_url (str): The STS endpoint URL.
credential_source (Mapping): The credential source dictionary used to
provide instructions on how to retrieve external credential to be
exchanged for Google access tokens.
Example credential_source for pluggable credential:
{
"executable": {
"command": "/path/to/get/credentials.sh --arg1=value1 --arg2=value2",
"timeout_millis": 5000,
"output_file": "/path/to/generated/cached/credentials"
}
}
args (List): Optional positional arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
kwargs (Mapping): Optional keyword arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
Raises:
google.auth.exceptions.RefreshError: If an error is encountered during
access token retrieval logic.
google.auth.exceptions.InvalidValue: For invalid parameters.
google.auth.exceptions.MalformedError: For invalid parameters.
.. note:: Typically one of the helper constructors
:meth:`from_file` or
:meth:`from_info` are used instead of calling the constructor directly.
"""
self.interactive = kwargs.pop("interactive", False)
super(Credentials, self).__init__(
audience=audience,
subject_token_type=subject_token_type,
token_url=token_url,
credential_source=credential_source,
*args,
**kwargs
)
if not isinstance(credential_source, Mapping):
self._credential_source_executable = None
raise exceptions.MalformedError(
"Missing credential_source. The credential_source is not a dict."
)
self._credential_source_executable = credential_source.get("executable")
if not self._credential_source_executable:
raise exceptions.MalformedError(
"Missing credential_source. An 'executable' must be provided."
)
self._credential_source_executable_command = (
self._credential_source_executable.get("command")
)
self._credential_source_executable_timeout_millis = (
self._credential_source_executable.get("timeout_millis")
)
self._credential_source_executable_interactive_timeout_millis = (
self._credential_source_executable.get("interactive_timeout_millis")
)
self._credential_source_executable_output_file = (
self._credential_source_executable.get("output_file")
)
# Dummy value. This variable is only set via injection, not exposed to the constructor.
self._tokeninfo_username = ""
if not self._credential_source_executable_command:
raise exceptions.MalformedError(
"Missing command field. Executable command must be provided."
)
if not self._credential_source_executable_timeout_millis:
self._credential_source_executable_timeout_millis = (
EXECUTABLE_TIMEOUT_MILLIS_DEFAULT
)
elif (
self._credential_source_executable_timeout_millis
< EXECUTABLE_TIMEOUT_MILLIS_LOWER_BOUND
or self._credential_source_executable_timeout_millis
> EXECUTABLE_TIMEOUT_MILLIS_UPPER_BOUND
):
raise exceptions.InvalidValue("Timeout must be between 5 and 120 seconds.")
if self._credential_source_executable_interactive_timeout_millis:
if (
self._credential_source_executable_interactive_timeout_millis
< EXECUTABLE_INTERACTIVE_TIMEOUT_MILLIS_LOWER_BOUND
or self._credential_source_executable_interactive_timeout_millis
> EXECUTABLE_INTERACTIVE_TIMEOUT_MILLIS_UPPER_BOUND
):
raise exceptions.InvalidValue(
"Interactive timeout must be between 30 seconds and 30 minutes."
)
@_helpers.copy_docstring(external_account.Credentials)
def retrieve_subject_token(self, request):
self._validate_running_mode()
# Check output file.
if self._credential_source_executable_output_file is not None:
try:
with open(
self._credential_source_executable_output_file, encoding="utf-8"
) as output_file:
response = json.load(output_file)
except Exception:
pass
else:
try:
# If the cached response is expired, _parse_subject_token will raise an error which will be ignored and we will call the executable again.
subject_token = self._parse_subject_token(response)
if (
"expiration_time" not in response
): # Always treat missing expiration_time as expired and proceed to executable run.
raise exceptions.RefreshError
except (exceptions.MalformedError, exceptions.InvalidValue):
raise
except exceptions.RefreshError:
pass
else:
return subject_token
# Inject env vars.
env = os.environ.copy()
self._inject_env_variables(env)
env["GOOGLE_EXTERNAL_ACCOUNT_REVOKE"] = "0"
# Run executable.
exe_timeout = (
self._credential_source_executable_interactive_timeout_millis / 1000
if self.interactive
else self._credential_source_executable_timeout_millis / 1000
)
exe_stdin = sys.stdin if self.interactive else None
exe_stdout = sys.stdout if self.interactive else subprocess.PIPE
exe_stderr = sys.stdout if self.interactive else subprocess.STDOUT
result = subprocess.run(
shlex.split(self._credential_source_executable_command),
timeout=exe_timeout,
stdin=exe_stdin,
stdout=exe_stdout,
stderr=exe_stderr,
env=env,
)
if result.returncode != 0:
raise exceptions.RefreshError(
"Executable exited with non-zero return code {}. Error: {}".format(
result.returncode, result.stdout
)
)
# Handle executable output.
response = json.loads(result.stdout.decode("utf-8")) if result.stdout else None
if not response and self._credential_source_executable_output_file is not None:
response = json.load(
open(self._credential_source_executable_output_file, encoding="utf-8")
)
subject_token = self._parse_subject_token(response)
return subject_token
def revoke(self, request):
"""Revokes the subject token using the credential_source object.
Args:
request (google.auth.transport.Request): A callable used to make
HTTP requests.
Raises:
google.auth.exceptions.RefreshError: If the executable revocation
                was not properly executed.
"""
if not self.interactive:
raise exceptions.InvalidValue(
"Revoke is only enabled under interactive mode."
)
self._validate_running_mode()
# Inject variables
env = os.environ.copy()
self._inject_env_variables(env)
env["GOOGLE_EXTERNAL_ACCOUNT_REVOKE"] = "1"
# Run executable
result = subprocess.run(
shlex.split(self._credential_source_executable_command),
timeout=self._credential_source_executable_interactive_timeout_millis
/ 1000,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
env=env,
)
if result.returncode != 0:
raise exceptions.RefreshError(
"Auth revoke failed on executable. Exit with non-zero return code {}. Error: {}".format(
result.returncode, result.stdout
)
)
response = json.loads(result.stdout.decode("utf-8"))
self._validate_revoke_response(response)
@property
def external_account_id(self):
"""Returns the external account identifier.
When service account impersonation is used the identifier is the service
account email.
Without service account impersonation, this returns None, unless it is
being used by the Google Cloud CLI which populates this field.
"""
return self.service_account_email or self._tokeninfo_username
@classmethod
def from_info(cls, info, **kwargs):
"""Creates a Pluggable Credentials instance from parsed external account info.
**IMPORTANT**:
        This method does not validate the credential configuration. A security
        risk can arise when a credential configuration containing malicious URLs
        is used. If the credential configuration is accepted from an
        untrusted source, you should validate it before passing it to this method.
        Refer to https://cloud.google.com/docs/authentication/external/externally-sourced-credentials for more details.
Args:
info (Mapping[str, str]): The Pluggable external account info in Google
format.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.pluggable.Credentials: The constructed
credentials.
Raises:
google.auth.exceptions.InvalidValue: For invalid parameters.
google.auth.exceptions.MalformedError: For invalid parameters.
"""
return super(Credentials, cls).from_info(info, **kwargs)
@classmethod
def from_file(cls, filename, **kwargs):
"""Creates an Pluggable Credentials instance from an external account json file.
**IMPORTANT**:
This method does not validate the credential configuration. A security
risk occurs when a credential configuration configured with malicious urls
is used.
When the credential configuration is accepted from an
untrusted source, you should validate it before using with this method.
Refer https://cloud.google.com/docs/authentication/external/externally-sourced-credentials for more details.
Args:
filename (str): The path to the Pluggable external account json file.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.pluggable.Credentials: The constructed
credentials.
"""
return super(Credentials, cls).from_file(filename, **kwargs)
def _inject_env_variables(self, env):
env["GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE"] = self._audience
env["GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE"] = self._subject_token_type
env["GOOGLE_EXTERNAL_ACCOUNT_ID"] = self.external_account_id
env["GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE"] = "1" if self.interactive else "0"
if self._service_account_impersonation_url is not None:
env[
"GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL"
] = self.service_account_email
if self._credential_source_executable_output_file is not None:
env[
"GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE"
] = self._credential_source_executable_output_file
def _parse_subject_token(self, response):
self._validate_response_schema(response)
if not response["success"]:
if "code" not in response or "message" not in response:
raise exceptions.MalformedError(
"Error code and message fields are required in the response."
)
raise exceptions.RefreshError(
"Executable returned unsuccessful response: code: {}, message: {}.".format(
response["code"], response["message"]
)
)
if "expiration_time" in response and response["expiration_time"] < time.time():
raise exceptions.RefreshError(
"The token returned by the executable is expired."
)
if "token_type" not in response:
raise exceptions.MalformedError(
"The executable response is missing the token_type field."
)
if (
response["token_type"] == "urn:ietf:params:oauth:token-type:jwt"
or response["token_type"] == "urn:ietf:params:oauth:token-type:id_token"
): # OIDC
return response["id_token"]
elif response["token_type"] == "urn:ietf:params:oauth:token-type:saml2": # SAML
return response["saml_response"]
else:
raise exceptions.RefreshError("Executable returned unsupported token type.")
def _validate_revoke_response(self, response):
self._validate_response_schema(response)
if not response["success"]:
raise exceptions.RefreshError("Revoke failed with unsuccessful response.")
def _validate_response_schema(self, response):
if "version" not in response:
raise exceptions.MalformedError(
"The executable response is missing the version field."
)
if response["version"] > EXECUTABLE_SUPPORTED_MAX_VERSION:
raise exceptions.RefreshError(
"Executable returned unsupported version {}.".format(
response["version"]
)
)
if "success" not in response:
raise exceptions.MalformedError(
"The executable response is missing the success field."
)
def _validate_running_mode(self):
env_allow_executables = os.environ.get(
"GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES"
)
if env_allow_executables != "1":
raise exceptions.MalformedError(
"Executables need to be explicitly allowed (set GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES to '1') to run."
)
if self.interactive and not self._credential_source_executable_output_file:
raise exceptions.MalformedError(
"An output_file must be specified in the credential configuration for interactive mode."
)
if (
self.interactive
and not self._credential_source_executable_interactive_timeout_millis
):
raise exceptions.InvalidOperation(
"Interactive mode cannot run without an interactive timeout."
)
if self.interactive and not self.is_workforce_pool:
raise exceptions.InvalidValue(
"Interactive mode is only enabled for workforce pool."
)
def _create_default_metrics_options(self):
metrics_options = super(Credentials, self)._create_default_metrics_options()
metrics_options["source"] = "executable"
return metrics_options
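# Illustrative sketch (not part of the library): the shape of the JSON object
# that _parse_subject_token above accepts from the executable. Field names are
# taken from the validation logic in this module; the values are invented for
# demonstration only.
_EXAMPLE_EXECUTABLE_RESPONSE = {
    "version": 1,
    "success": True,
    "token_type": "urn:ietf:params:oauth:token-type:jwt",
    "id_token": "header.payload.signature",  # use "saml_response" for SAML2
    # Unix timestamp in seconds; optional for direct executable output, but a
    # cached output file without it is always treated as expired.
    "expiration_time": 1735689600,
}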

View File

@@ -0,0 +1,2 @@
# Marker file for PEP 561.
# The google-auth package uses inline types.

View File

@@ -0,0 +1,104 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Transport - HTTP client library support.
:mod:`google.auth` is designed to work with various HTTP client libraries such
as urllib3 and requests. In order to work across these libraries with different
interfaces some abstraction is needed.
This module provides two interfaces that are implemented by transport adapters
to support HTTP libraries. :class:`Request` defines the interface expected by
:mod:`google.auth` to make requests. :class:`Response` defines the interface
for the return value of :class:`Request`.
"""
import abc
import http.client as http_client
DEFAULT_RETRYABLE_STATUS_CODES = (
http_client.INTERNAL_SERVER_ERROR,
http_client.SERVICE_UNAVAILABLE,
http_client.GATEWAY_TIMEOUT,
http_client.REQUEST_TIMEOUT,
http_client.TOO_MANY_REQUESTS,
)
"""Sequence[int]: HTTP status codes indicating a request can be retried.
"""
DEFAULT_REFRESH_STATUS_CODES = (http_client.UNAUTHORIZED,)
"""Sequence[int]: Which HTTP status code indicate that credentials should be
refreshed.
"""
DEFAULT_MAX_REFRESH_ATTEMPTS = 2
"""int: How many times to refresh the credentials and retry a request."""
class Response(metaclass=abc.ABCMeta):
"""HTTP Response data."""
@abc.abstractproperty
def status(self):
"""int: The HTTP status code."""
raise NotImplementedError("status must be implemented.")
@abc.abstractproperty
def headers(self):
"""Mapping[str, str]: The HTTP response headers."""
raise NotImplementedError("headers must be implemented.")
@abc.abstractproperty
def data(self):
"""bytes: The response body."""
raise NotImplementedError("data must be implemented.")
class Request(metaclass=abc.ABCMeta):
"""Interface for a callable that makes HTTP requests.
Specific transport implementations should provide an implementation of
this that adapts their specific request / response API.
.. automethod:: __call__
"""
@abc.abstractmethod
def __call__(
self, url, method="GET", body=None, headers=None, timeout=None, **kwargs
):
"""Make an HTTP request.
Args:
url (str): The URI to be requested.
method (str): The HTTP method to use for the request. Defaults
to 'GET'.
body (bytes): The payload / body in HTTP request.
headers (Mapping[str, str]): Request headers.
timeout (Optional[int]): The number of seconds to wait for a
response from the server. If not specified or if None, the
transport-specific default timeout will be used.
            kwargs: Additional arguments passed on to the transport's
request method.
Returns:
Response: The HTTP response.
Raises:
google.auth.exceptions.TransportError: If any exception occurred.
"""
# pylint: disable=redundant-returns-doc, missing-raises-doc
# (pylint doesn't play well with abstract docstrings.)
raise NotImplementedError("__call__ must be implemented.")

View File

@@ -0,0 +1,396 @@
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Transport adapter for Async HTTP (aiohttp).
NOTE: This async support is experimental and marked internal. This surface may
change in minor releases.
"""
from __future__ import absolute_import
import asyncio
import functools
import logging
import aiohttp # type: ignore
import urllib3 # type: ignore
from google.auth import _helpers
from google.auth import exceptions
from google.auth import transport
from google.auth.aio import _helpers as _helpers_async
from google.auth.transport import requests
_LOGGER = logging.getLogger(__name__)
# Timeout can be re-defined depending on async requirements. Currently set 60s
# higher than the sync default.
_DEFAULT_TIMEOUT = 180 # in seconds
class _CombinedResponse(transport.Response):
"""
    In order to more closely resemble the `requests` interface, where both the
    raw and the decoded content can be accessed, this class lazily reads the
    stream in `transport.Response` so both forms can be returned.
    Because the ClientSession is created with ``auto_decompress=False``, gzip
    and deflate transfer-encodings are not decoded automatically; this wrapper
    lets a user access both the raw and the decoded response bodies, mirroring
    the sync implementation.
"""
def __init__(self, response):
self._response = response
self._raw_content = None
def _is_compressed(self):
headers = self._response.headers
return "Content-Encoding" in headers and (
headers["Content-Encoding"] == "gzip"
or headers["Content-Encoding"] == "deflate"
)
@property
def status(self):
return self._response.status
@property
def headers(self):
return self._response.headers
@property
def data(self):
return self._response.content
async def raw_content(self):
if self._raw_content is None:
self._raw_content = await self._response.content.read()
return self._raw_content
async def content(self):
# Load raw_content if necessary
await self.raw_content()
if self._is_compressed():
decoder = urllib3.response.MultiDecoder(
self._response.headers["Content-Encoding"]
)
decompressed = decoder.decompress(self._raw_content)
return decompressed
return self._raw_content
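# Illustrative sketch (not part of the library): an async helper showing how a
# caller can read both body forms from the _CombinedResponse returned by the
# Request adapter defined below. The URL is a placeholder and the helper name
# is hypothetical.
async def _example_read_both_bodies(url="https://example.com/resource"):
    request = Request()
    response = await request(url)
    raw = await response.raw_content()  # bytes exactly as received on the wire
    decoded = await response.content()  # gzip/deflate decoded when applicable
    return raw, decoded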
class _Response(transport.Response):
"""
Requests transport response adapter.
Args:
response (requests.Response): The raw Requests response.
"""
def __init__(self, response):
self._response = response
@property
def status(self):
return self._response.status
@property
def headers(self):
return self._response.headers
@property
def data(self):
return self._response.content
class Request(transport.Request):
"""Requests request adapter.
This class is used internally for making requests using asyncio transports
in a consistent way. If you use :class:`AuthorizedSession` you do not need
to construct or use this class directly.
This class can be useful if you want to manually refresh a
:class:`~google.auth.credentials.Credentials` instance::
import google.auth.transport.aiohttp_requests
request = google.auth.transport.aiohttp_requests.Request()
credentials.refresh(request)
Args:
        session (aiohttp.ClientSession): An instance of :class:`aiohttp.ClientSession` used
to make HTTP requests. If not specified, a session will be created.
.. automethod:: __call__
"""
def __init__(self, session=None):
# TODO: Use auto_decompress property for aiohttp 3.7+
if session is not None and session._auto_decompress:
raise exceptions.InvalidOperation(
"Client sessions with auto_decompress=True are not supported."
)
self.session = session
async def __call__(
self,
url,
method="GET",
body=None,
headers=None,
timeout=_DEFAULT_TIMEOUT,
**kwargs,
):
"""
Make an HTTP request using aiohttp.
Args:
url (str): The URL to be requested.
method (Optional[str]):
The HTTP method to use for the request. Defaults to 'GET'.
body (Optional[bytes]):
The payload or body in HTTP request.
headers (Optional[Mapping[str, str]]):
Request headers.
timeout (Optional[int]): The number of seconds to wait for a
response from the server. If not specified or if None, the
requests default timeout will be used.
            kwargs: Additional arguments passed through to the underlying
                :meth:`aiohttp.ClientSession.request` method.
Returns:
google.auth.transport.Response: The HTTP response.
Raises:
google.auth.exceptions.TransportError: If any exception occurred.
"""
try:
if self.session is None: # pragma: NO COVER
self.session = aiohttp.ClientSession(
auto_decompress=False
) # pragma: NO COVER
_helpers.request_log(_LOGGER, method, url, body, headers)
response = await self.session.request(
method, url, data=body, headers=headers, timeout=timeout, **kwargs
)
await _helpers_async.response_log_async(_LOGGER, response)
return _CombinedResponse(response)
except aiohttp.ClientError as caught_exc:
new_exc = exceptions.TransportError(caught_exc)
raise new_exc from caught_exc
except asyncio.TimeoutError as caught_exc:
new_exc = exceptions.TransportError(caught_exc)
raise new_exc from caught_exc
class AuthorizedSession(aiohttp.ClientSession):
"""This is an async implementation of the Authorized Session class. We utilize an
aiohttp transport instance, and the interface mirrors the google.auth.transport.requests
Authorized Session class, except for the change in the transport used in the async use case.
A Requests Session class with credentials.
This class is used to perform requests to API endpoints that require
authorization::
from google.auth.transport import aiohttp_requests
async with aiohttp_requests.AuthorizedSession(credentials) as authed_session:
response = await authed_session.request(
'GET', 'https://www.googleapis.com/storage/v1/b')
The underlying :meth:`request` implementation handles adding the
credentials' headers to the request and refreshing credentials as needed.
Args:
credentials (google.auth._credentials_async.Credentials):
The credentials to add to the request.
refresh_status_codes (Sequence[int]): Which HTTP status codes indicate
that credentials should be refreshed and the request should be
retried.
max_refresh_attempts (int): The maximum number of times to attempt to
refresh the credentials and retry the request.
refresh_timeout (Optional[int]): The timeout value in seconds for
credential refresh HTTP requests.
auth_request (google.auth.transport.aiohttp_requests.Request):
(Optional) An instance of
:class:`~google.auth.transport.aiohttp_requests.Request` used when
refreshing credentials. If not passed,
an instance of :class:`~google.auth.transport.aiohttp_requests.Request`
is created.
        kwargs: Additional arguments passed through to the underlying
            :class:`aiohttp.ClientSession` constructor.
"""
def __init__(
self,
credentials,
refresh_status_codes=transport.DEFAULT_REFRESH_STATUS_CODES,
max_refresh_attempts=transport.DEFAULT_MAX_REFRESH_ATTEMPTS,
refresh_timeout=None,
auth_request=None,
auto_decompress=False,
**kwargs,
):
super(AuthorizedSession, self).__init__(**kwargs)
self.credentials = credentials
self._refresh_status_codes = refresh_status_codes
self._max_refresh_attempts = max_refresh_attempts
self._refresh_timeout = refresh_timeout
self._is_mtls = False
self._auth_request = auth_request
self._auth_request_session = None
self._loop = asyncio.get_event_loop()
self._refresh_lock = asyncio.Lock()
self._auto_decompress = auto_decompress
async def request(
self,
method,
url,
data=None,
headers=None,
max_allowed_time=None,
timeout=_DEFAULT_TIMEOUT,
auto_decompress=False,
**kwargs,
):
"""Implementation of Authorized Session aiohttp request.
Args:
method (str):
The http request method used (e.g. GET, PUT, DELETE)
url (str):
The url at which the http request is sent.
data (Optional[dict]): Dictionary, list of tuples, bytes, or file-like
object to send in the body of the Request.
headers (Optional[dict]): Dictionary of HTTP Headers to send with the
Request.
timeout (Optional[Union[float, aiohttp.ClientTimeout]]):
The amount of time in seconds to wait for the server response
with each individual request. Can also be passed as an
``aiohttp.ClientTimeout`` object.
max_allowed_time (Optional[float]):
If the method runs longer than this, a ``Timeout`` exception is
automatically raised. Unlike the ``timeout`` parameter, this
value applies to the total method execution time, even if
multiple requests are made under the hood.
Mind that it is not guaranteed that the timeout error is raised
at ``max_allowed_time``. It might take longer, for example, if
an underlying request takes a lot of time, but the request
itself does not timeout, e.g. if a large file is being
transmitted. The timeout error will be raised after such
request completes.
"""
        # Headers may come in as bytes, which is not the expected behavior: the
        # resumable media libraries expect str header values, but some
        # operations return them as bytes, so decode them here.
if headers:
for key in headers.keys():
if type(headers[key]) is bytes:
headers[key] = headers[key].decode("utf-8")
async with aiohttp.ClientSession(
auto_decompress=self._auto_decompress,
trust_env=kwargs.get("trust_env", False),
) as self._auth_request_session:
auth_request = Request(self._auth_request_session)
self._auth_request = auth_request
# Use a kwarg for this instead of an attribute to maintain
# thread-safety.
_credential_refresh_attempt = kwargs.pop("_credential_refresh_attempt", 0)
# Make a copy of the headers. They will be modified by the credentials
# and we want to pass the original headers if we recurse.
request_headers = headers.copy() if headers is not None else {}
# Do not apply the timeout unconditionally in order to not override the
# _auth_request's default timeout.
auth_request = (
self._auth_request
if timeout is None
else functools.partial(self._auth_request, timeout=timeout)
)
remaining_time = max_allowed_time
with requests.TimeoutGuard(remaining_time, asyncio.TimeoutError) as guard:
await self.credentials.before_request(
auth_request, method, url, request_headers
)
with requests.TimeoutGuard(remaining_time, asyncio.TimeoutError) as guard:
response = await super(AuthorizedSession, self).request(
method,
url,
data=data,
headers=request_headers,
timeout=timeout,
**kwargs,
)
remaining_time = guard.remaining_timeout
if (
response.status in self._refresh_status_codes
and _credential_refresh_attempt < self._max_refresh_attempts
):
requests._LOGGER.info(
"Refreshing credentials due to a %s response. Attempt %s/%s.",
response.status,
_credential_refresh_attempt + 1,
self._max_refresh_attempts,
)
# Do not apply the timeout unconditionally in order to not override the
# _auth_request's default timeout.
auth_request = (
self._auth_request
if timeout is None
else functools.partial(self._auth_request, timeout=timeout)
)
with requests.TimeoutGuard(
remaining_time, asyncio.TimeoutError
) as guard:
async with self._refresh_lock:
await self._loop.run_in_executor(
None, self.credentials.refresh, auth_request
)
remaining_time = guard.remaining_timeout
return await self.request(
method,
url,
data=data,
headers=headers,
max_allowed_time=remaining_time,
timeout=timeout,
_credential_refresh_attempt=_credential_refresh_attempt + 1,
**kwargs,
)
return response

View File

@@ -0,0 +1,283 @@
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Code for configuring client side TLS to offload the signing operation to
signing libraries.
"""
import ctypes
import json
import logging
import os
import sys
import cffi # type: ignore
from google.auth import exceptions
_LOGGER = logging.getLogger(__name__)
# C++ offload lib requires google-auth lib to provide the following callback:
# using SignFunc = int (*)(unsigned char *sig, size_t *sig_len,
# const unsigned char *tbs, size_t tbs_len)
# The bytes to be signed and the length are provided via `tbs` and `tbs_len`,
# the callback computes the signature, and writes the signature and its length
# into `sig` and `sig_len`.
# If the signing is successful, the callback returns 1, otherwise it returns 0.
SIGN_CALLBACK_CTYPE = ctypes.CFUNCTYPE(
ctypes.c_int, # return type
ctypes.POINTER(ctypes.c_ubyte), # sig
ctypes.POINTER(ctypes.c_size_t), # sig_len
ctypes.POINTER(ctypes.c_ubyte), # tbs
ctypes.c_size_t, # tbs_len
)
# Cast SSL_CTX* to void*
def _cast_ssl_ctx_to_void_p_pyopenssl(ssl_ctx):
return ctypes.cast(int(cffi.FFI().cast("intptr_t", ssl_ctx)), ctypes.c_void_p)
# Cast SSL_CTX* to void*
def _cast_ssl_ctx_to_void_p_stdlib(context):
return ctypes.c_void_p.from_address(
id(context) + ctypes.sizeof(ctypes.c_void_p) * 2
)
# Load offload library and set up the function types.
def load_offload_lib(offload_lib_path):
_LOGGER.debug("loading offload library from %s", offload_lib_path)
# winmode parameter is only available for python 3.8+.
lib = (
ctypes.CDLL(offload_lib_path, winmode=0)
if sys.version_info >= (3, 8) and os.name == "nt"
else ctypes.CDLL(offload_lib_path)
)
# Set up types for:
# int ConfigureSslContext(SignFunc sign_func, const char *cert, SSL_CTX *ctx)
lib.ConfigureSslContext.argtypes = [
SIGN_CALLBACK_CTYPE,
ctypes.c_char_p,
ctypes.c_void_p,
]
lib.ConfigureSslContext.restype = ctypes.c_int
return lib
# Load signer library and set up the function types.
# See: https://github.com/googleapis/enterprise-certificate-proxy/blob/main/cshared/main.go
def load_signer_lib(signer_lib_path):
_LOGGER.debug("loading signer library from %s", signer_lib_path)
# winmode parameter is only available for python 3.8+.
lib = (
ctypes.CDLL(signer_lib_path, winmode=0)
if sys.version_info >= (3, 8) and os.name == "nt"
else ctypes.CDLL(signer_lib_path)
)
# Set up types for:
# func GetCertPemForPython(configFilePath *C.char, certHolder *byte, certHolderLen int)
lib.GetCertPemForPython.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int]
# Returns: certLen
lib.GetCertPemForPython.restype = ctypes.c_int
# Set up types for:
# func SignForPython(configFilePath *C.char, digest *byte, digestLen int,
# sigHolder *byte, sigHolderLen int)
lib.SignForPython.argtypes = [
ctypes.c_char_p,
ctypes.c_char_p,
ctypes.c_int,
ctypes.c_char_p,
ctypes.c_int,
]
# Returns: the signature length
lib.SignForPython.restype = ctypes.c_int
return lib
def load_provider_lib(provider_lib_path):
_LOGGER.debug("loading provider library from %s", provider_lib_path)
# winmode parameter is only available for python 3.8+.
lib = (
ctypes.CDLL(provider_lib_path, winmode=0)
if sys.version_info >= (3, 8) and os.name == "nt"
else ctypes.CDLL(provider_lib_path)
)
lib.ECP_attach_to_ctx.argtypes = [ctypes.c_void_p, ctypes.c_char_p]
lib.ECP_attach_to_ctx.restype = ctypes.c_int
return lib
# Computes SHA256 hash.
def _compute_sha256_digest(to_be_signed, to_be_signed_len):
from cryptography.hazmat.primitives import hashes
data = ctypes.string_at(to_be_signed, to_be_signed_len)
hash = hashes.Hash(hashes.SHA256())
hash.update(data)
return hash.finalize()
# Create the signing callback. The actual signing work is done by the
# `SignForPython` method from the signer lib.
def get_sign_callback(signer_lib, config_file_path):
def sign_callback(sig, sig_len, tbs, tbs_len):
_LOGGER.debug("calling sign callback...")
digest = _compute_sha256_digest(tbs, tbs_len)
digestArray = ctypes.c_char * len(digest)
        # Reserve 2000 bytes for the signature; should be more than enough.
# RSA signature is 256 bytes, EC signature is 70~72.
sig_holder_len = 2000
sig_holder = ctypes.create_string_buffer(sig_holder_len)
signature_len = signer_lib.SignForPython(
config_file_path.encode(), # configFilePath
digestArray.from_buffer(bytearray(digest)), # digest
len(digest), # digestLen
sig_holder, # sigHolder
sig_holder_len, # sigHolderLen
)
if signature_len == 0:
# signing failed, return 0
return 0
sig_len[0] = signature_len
bs = bytearray(sig_holder)
for i in range(signature_len):
sig[i] = bs[i]
return 1
return SIGN_CALLBACK_CTYPE(sign_callback)
# Obtain the certificate bytes by calling the `GetCertPemForPython` method from
# the signer lib. The method is called twice: the first call computes the
# cert length, then we create a buffer to hold the cert and call it again to
# fill the buffer.
def get_cert(signer_lib, config_file_path):
# First call to calculate the cert length
cert_len = signer_lib.GetCertPemForPython(
config_file_path.encode(), # configFilePath
None, # certHolder
0, # certHolderLen
)
if cert_len == 0:
raise exceptions.MutualTLSChannelError("failed to get certificate")
# Then we create an array to hold the cert, and call again to fill the cert
cert_holder = ctypes.create_string_buffer(cert_len)
signer_lib.GetCertPemForPython(
config_file_path.encode(), # configFilePath
cert_holder, # certHolder
cert_len, # certHolderLen
)
return bytes(cert_holder)
class CustomTlsSigner(object):
def __init__(self, enterprise_cert_file_path):
"""
This class loads the offload and signer library, and calls APIs from
        these libraries to obtain the cert and a signing callback, and attaches
        them to the SSL context. The cert and the signing callback will be used
for client authentication in TLS handshake.
Args:
            enterprise_cert_file_path (str): the path to an enterprise cert JSON
file. The file should contain the following field:
{
"libs": {
"ecp_client": "...",
"tls_offload": "..."
}
}
"""
self._enterprise_cert_file_path = enterprise_cert_file_path
self._cert = None
self._sign_callback = None
self._provider_lib = None
def load_libraries(self):
with open(self._enterprise_cert_file_path, "r") as f:
enterprise_cert_json = json.load(f)
libs = enterprise_cert_json.get("libs", {})
signer_library = libs.get("ecp_client", None)
offload_library = libs.get("tls_offload", None)
provider_library = libs.get("ecp_provider", None)
        # Using the newer provider implementation. This is mutually exclusive with the
# offload implementation.
if provider_library:
self._provider_lib = load_provider_lib(provider_library)
return
# Using old offload implementation
if offload_library and signer_library:
self._offload_lib = load_offload_lib(offload_library)
self._signer_lib = load_signer_lib(signer_library)
self.set_up_custom_key()
return
raise exceptions.MutualTLSChannelError("enterprise cert file is invalid")
def set_up_custom_key(self):
        # We need to keep a reference to the cert and sign callback so they
        # won't be garbage collected; otherwise the signer lib will crash when
        # using them.
self._cert = get_cert(self._signer_lib, self._enterprise_cert_file_path)
self._sign_callback = get_sign_callback(
self._signer_lib, self._enterprise_cert_file_path
)
def should_use_provider(self):
if self._provider_lib:
return True
return False
def attach_to_ssl_context(self, ctx):
if self.should_use_provider():
if not self._provider_lib.ECP_attach_to_ctx(
_cast_ssl_ctx_to_void_p_stdlib(ctx),
self._enterprise_cert_file_path.encode("ascii"),
):
raise exceptions.MutualTLSChannelError(
"failed to configure ECP Provider SSL context"
)
elif self._offload_lib and self._signer_lib:
if not self._offload_lib.ConfigureSslContext(
self._sign_callback,
ctypes.c_char_p(self._cert),
_cast_ssl_ctx_to_void_p_pyopenssl(ctx._ctx._context),
):
raise exceptions.MutualTLSChannelError(
"failed to configure ECP Offload SSL context"
)
else:
raise exceptions.MutualTLSChannelError("Invalid ECP configuration.")

View File

@@ -0,0 +1,114 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Transport adapter for http.client, for internal use only."""
import http.client as http_client
import logging
import socket
import urllib
from google.auth import _helpers
from google.auth import exceptions
from google.auth import transport
_LOGGER = logging.getLogger(__name__)
class Response(transport.Response):
"""http.client transport response adapter.
Args:
response (http.client.HTTPResponse): The raw http client response.
"""
def __init__(self, response):
self._status = response.status
self._headers = {key.lower(): value for key, value in response.getheaders()}
self._data = response.read()
@property
def status(self):
return self._status
@property
def headers(self):
return self._headers
@property
def data(self):
return self._data
class Request(transport.Request):
"""http.client transport request adapter."""
def __call__(
self, url, method="GET", body=None, headers=None, timeout=None, **kwargs
):
"""Make an HTTP request using http.client.
Args:
url (str): The URI to be requested.
method (str): The HTTP method to use for the request. Defaults
to 'GET'.
body (bytes): The payload / body in HTTP request.
headers (Mapping): Request headers.
timeout (Optional(int)): The number of seconds to wait for a
response from the server. If not specified or if None, the
socket global default timeout will be used.
            kwargs: Additional arguments passed through to the underlying
:meth:`~http.client.HTTPConnection.request` method.
Returns:
Response: The HTTP response.
Raises:
google.auth.exceptions.TransportError: If any exception occurred.
"""
# socket._GLOBAL_DEFAULT_TIMEOUT is the default in http.client.
if timeout is None:
timeout = socket._GLOBAL_DEFAULT_TIMEOUT
# http.client doesn't allow None as the headers argument.
if headers is None:
headers = {}
# http.client needs the host and path parts specified separately.
parts = urllib.parse.urlsplit(url)
path = urllib.parse.urlunsplit(
("", "", parts.path, parts.query, parts.fragment)
)
if parts.scheme != "http":
raise exceptions.TransportError(
"http.client transport only supports the http scheme, {}"
"was specified".format(parts.scheme)
)
connection = http_client.HTTPConnection(parts.netloc, timeout=timeout)
try:
_helpers.request_log(_LOGGER, method, url, body, headers)
connection.request(method, path, body=body, headers=headers, **kwargs)
response = connection.getresponse()
_helpers.response_log(_LOGGER, response)
return Response(response)
except (http_client.HTTPException, socket.error) as caught_exc:
new_exc = exceptions.TransportError(caught_exc)
raise new_exc from caught_exc
finally:
connection.close()
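# Illustrative sketch (not part of the library): issuing a request through the
# adapter above. The default URL is a placeholder and the helper name is
# hypothetical; note that this transport deliberately rejects non-http schemes.
def _example_fetch(url="http://example.com/"):
    request = Request()
    response = request(url, method="GET")
    return response.status, response.data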

View File

@@ -0,0 +1,511 @@
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions for getting mTLS cert and key."""
import json
import logging
from os import environ, getenv, path
import re
import subprocess
from google.auth import _agent_identity_utils
from google.auth import environment_vars
from google.auth import exceptions
CONTEXT_AWARE_METADATA_PATH = "~/.secureConnect/context_aware_metadata.json"
CERTIFICATE_CONFIGURATION_DEFAULT_PATH = "~/.config/gcloud/certificate_config.json"
_CERT_PROVIDER_COMMAND = "cert_provider_command"
_CERT_REGEX = re.compile(
b"-----BEGIN CERTIFICATE-----.+-----END CERTIFICATE-----\r?\n?", re.DOTALL
)
# support various formats of key files, e.g.
# "-----BEGIN PRIVATE KEY-----...",
# "-----BEGIN EC PRIVATE KEY-----...",
# "-----BEGIN RSA PRIVATE KEY-----..."
# "-----BEGIN ENCRYPTED PRIVATE KEY-----"
_KEY_REGEX = re.compile(
b"-----BEGIN [A-Z ]*PRIVATE KEY-----.+-----END [A-Z ]*PRIVATE KEY-----\r?\n?",
re.DOTALL,
)
_LOGGER = logging.getLogger(__name__)
_PASSPHRASE_REGEX = re.compile(
b"-----BEGIN PASSPHRASE-----(.+)-----END PASSPHRASE-----", re.DOTALL
)
# Temporary patch to accommodate incorrect cert config in Cloud Run prod environment.
_WELL_KNOWN_CLOUD_RUN_CERT_PATH = (
"/var/run/secrets/workload-spiffe-credentials/certificates.pem"
)
_WELL_KNOWN_CLOUD_RUN_KEY_PATH = (
"/var/run/secrets/workload-spiffe-credentials/private_key.pem"
)
_INCORRECT_CLOUD_RUN_CERT_PATH = (
"/var/lib/volumes/certificate/workload-certificates/certificates.pem"
)
_INCORRECT_CLOUD_RUN_KEY_PATH = (
"/var/lib/volumes/certificate/workload-certificates/private_key.pem"
)
def _check_config_path(config_path):
"""Checks for config file path. If it exists, returns the absolute path with user expansion;
otherwise returns None.
Args:
        config_path (str): The config file path, e.g. for context_aware_metadata.json or certificate_config.json.
    Returns:
        str: The absolute path if the file exists, otherwise None.
"""
config_path = path.expanduser(config_path)
if not path.exists(config_path):
_LOGGER.debug("%s is not found.", config_path)
return None
return config_path
def _load_json_file(path):
"""Reads and loads JSON from the given path. Used to read both X509 workload certificate and
secure connect configurations.
Args:
path (str): the path to read from.
Returns:
Dict[str, str]: The JSON stored at the file.
Raises:
        google.auth.exceptions.ClientCertError: If the file cannot be parsed as JSON.
"""
try:
with open(path) as f:
json_data = json.load(f)
except ValueError as caught_exc:
new_exc = exceptions.ClientCertError(caught_exc)
raise new_exc from caught_exc
return json_data
def _get_workload_cert_and_key(certificate_config_path=None):
"""Read the workload identity cert and key files specified in the certificate config provided.
If no config path is provided, check the environment variable: "GOOGLE_API_CERTIFICATE_CONFIG"
first, then the well known gcloud location: "~/.config/gcloud/certificate_config.json".
Args:
certificate_config_path (string): The certificate config path. If no path is provided,
the environment variable will be checked first, then the well known gcloud location.
Returns:
Tuple[Optional[bytes], Optional[bytes]]: client certificate bytes in PEM format and key
bytes in PEM format.
Raises:
        google.auth.exceptions.ClientCertError: if a problem occurs when retrieving
the certificate or key information.
"""
cert_path, key_path = _get_workload_cert_and_key_paths(certificate_config_path)
if cert_path is None and key_path is None:
return None, None
return _read_cert_and_key_files(cert_path, key_path)
def _get_cert_config_path(certificate_config_path=None):
"""Get the certificate configuration path based on the following order:
1: Explicit override, if set
2: Environment variable, if set
3: Well-known location
Returns "None" if the selected config file does not exist.
Args:
certificate_config_path (string): The certificate config path. If provided, the well known
location and environment variable will be ignored.
Returns:
The absolute path of the certificate config file, and None if the file does not exist.
"""
if certificate_config_path is None:
env_path = environ.get(environment_vars.GOOGLE_API_CERTIFICATE_CONFIG, None)
if env_path is not None and env_path != "":
certificate_config_path = env_path
else:
certificate_config_path = CERTIFICATE_CONFIGURATION_DEFAULT_PATH
certificate_config_path = path.expanduser(certificate_config_path)
if not path.exists(certificate_config_path):
return None
return certificate_config_path
def _get_workload_cert_and_key_paths(config_path):
absolute_path = _get_cert_config_path(config_path)
if absolute_path is None:
return None, None
data = _load_json_file(absolute_path)
if "cert_configs" not in data:
raise exceptions.ClientCertError(
            'Certificate config file {} is in an invalid format, a "cert_configs" object is expected'.format(
absolute_path
)
)
cert_configs = data["cert_configs"]
if "workload" not in cert_configs:
raise exceptions.ClientCertError(
'Certificate config file {} is in an invalid format, a "workload" cert config is expected'.format(
absolute_path
)
)
workload = cert_configs["workload"]
if "cert_path" not in workload:
raise exceptions.ClientCertError(
'Certificate config file {} is in an invalid format, a "cert_path" is expected in the workload cert config'.format(
absolute_path
)
)
cert_path = workload["cert_path"]
if "key_path" not in workload:
raise exceptions.ClientCertError(
'Certificate config file {} is in an invalid format, a "key_path" is expected in the workload cert config'.format(
absolute_path
)
)
key_path = workload["key_path"]
# == BEGIN Temporary Cloud Run PATCH ==
# See https://github.com/googleapis/google-auth-library-python/issues/1881
if (cert_path == _INCORRECT_CLOUD_RUN_CERT_PATH) and (
key_path == _INCORRECT_CLOUD_RUN_KEY_PATH
):
if not path.exists(cert_path) and not path.exists(key_path):
_LOGGER.debug(
"Applying Cloud Run certificate path patch. "
"Configured paths not found: %s, %s. "
"Using well-known paths: %s, %s",
cert_path,
key_path,
_WELL_KNOWN_CLOUD_RUN_CERT_PATH,
_WELL_KNOWN_CLOUD_RUN_KEY_PATH,
)
cert_path = _WELL_KNOWN_CLOUD_RUN_CERT_PATH
key_path = _WELL_KNOWN_CLOUD_RUN_KEY_PATH
# == END Temporary Cloud Run PATCH ==
return cert_path, key_path
def _read_cert_and_key_files(cert_path, key_path):
cert_data = _read_cert_file(cert_path)
key_data = _read_key_file(key_path)
return cert_data, key_data
def _read_cert_file(cert_path):
with open(cert_path, "rb") as cert_file:
cert_data = cert_file.read()
cert_match = re.findall(_CERT_REGEX, cert_data)
if len(cert_match) != 1:
raise exceptions.ClientCertError(
"Certificate file {} is in an invalid format, a single PEM formatted certificate is expected".format(
cert_path
)
)
return cert_match[0]
def _read_key_file(key_path):
with open(key_path, "rb") as key_file:
key_data = key_file.read()
key_match = re.findall(_KEY_REGEX, key_data)
if len(key_match) != 1:
raise exceptions.ClientCertError(
"Private key file {} is in an invalid format, a single PEM formatted private key is expected".format(
key_path
)
)
return key_match[0]
def _run_cert_provider_command(command, expect_encrypted_key=False):
"""Run the provided command, and return client side mTLS cert, key and
passphrase.
Args:
command (List[str]): cert provider command.
expect_encrypted_key (bool): If encrypted private key is expected.
Returns:
Tuple[bytes, bytes, bytes]: client certificate bytes in PEM format, key
bytes in PEM format and passphrase bytes.
Raises:
        google.auth.exceptions.ClientCertError: if a problem occurs when running
the cert provider command or generating cert, key and passphrase.
"""
try:
process = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
stdout, stderr = process.communicate()
except OSError as caught_exc:
new_exc = exceptions.ClientCertError(caught_exc)
raise new_exc from caught_exc
# Check cert provider command execution error.
if process.returncode != 0:
raise exceptions.ClientCertError(
"Cert provider command returns non-zero status code %s" % process.returncode
)
# Extract certificate (chain), key and passphrase.
cert_match = re.findall(_CERT_REGEX, stdout)
if len(cert_match) != 1:
raise exceptions.ClientCertError("Client SSL certificate is missing or invalid")
key_match = re.findall(_KEY_REGEX, stdout)
if len(key_match) != 1:
raise exceptions.ClientCertError("Client SSL key is missing or invalid")
passphrase_match = re.findall(_PASSPHRASE_REGEX, stdout)
if expect_encrypted_key:
if len(passphrase_match) != 1:
raise exceptions.ClientCertError("Passphrase is missing or invalid")
if b"ENCRYPTED" not in key_match[0]:
raise exceptions.ClientCertError("Encrypted private key is expected")
return cert_match[0], key_match[0], passphrase_match[0].strip()
if b"ENCRYPTED" in key_match[0]:
raise exceptions.ClientCertError("Encrypted private key is not expected")
if len(passphrase_match) > 0:
raise exceptions.ClientCertError("Passphrase is not expected")
return cert_match[0], key_match[0], None
def get_client_ssl_credentials(
generate_encrypted_key=False,
context_aware_metadata_path=CONTEXT_AWARE_METADATA_PATH,
certificate_config_path=None,
):
"""Returns the client side certificate, private key and passphrase.
We look for certificates and keys with the following order of priority:
1. Certificate and key specified by certificate_config.json.
Currently, only X.509 workload certificates are supported.
2. Certificate and key specified by context aware metadata (i.e. SecureConnect).
Args:
generate_encrypted_key (bool): If set to True, encrypted private key
and passphrase will be generated; otherwise, unencrypted private key
will be generated and passphrase will be None. This option only
affects keys obtained via context_aware_metadata.json.
context_aware_metadata_path (str): The context_aware_metadata.json file path.
certificate_config_path (str): The certificate_config.json file path.
Returns:
Tuple[bool, bytes, bytes, bytes]:
A boolean indicating if cert, key and passphrase are obtained, the
cert bytes and key bytes both in PEM format, and passphrase bytes.
Raises:
        google.auth.exceptions.ClientCertError: if a problem occurs when getting
the cert, key and passphrase.
"""
# 1. Attempt to retrieve X.509 Workload cert and key.
cert, key = _get_workload_cert_and_key(certificate_config_path)
if cert and key:
return True, cert, key, None
# 2. Check for context aware metadata json
metadata_path = _check_config_path(context_aware_metadata_path)
if metadata_path:
metadata_json = _load_json_file(metadata_path)
if _CERT_PROVIDER_COMMAND not in metadata_json:
raise exceptions.ClientCertError("Cert provider command is not found")
command = metadata_json[_CERT_PROVIDER_COMMAND]
if generate_encrypted_key and "--with_passphrase" not in command:
command.append("--with_passphrase")
# Execute the command.
cert, key, passphrase = _run_cert_provider_command(
command, expect_encrypted_key=generate_encrypted_key
)
return True, cert, key, passphrase
return False, None, None, None
def get_client_cert_and_key(client_cert_callback=None):
"""Returns the client side certificate and private key. The function first
tries to get certificate and key from client_cert_callback; if the callback
is None or doesn't provide certificate and key, the function tries application
default SSL credentials.
Args:
client_cert_callback (Optional[Callable[[], (bytes, bytes)]]): An
optional callback which returns client certificate bytes and private
key bytes both in PEM format.
Returns:
Tuple[bool, bytes, bytes]:
A boolean indicating if cert and key are obtained, the cert bytes
and key bytes both in PEM format.
Raises:
        google.auth.exceptions.ClientCertError: if a problem occurs when getting
the cert and key.
"""
if client_cert_callback:
cert, key = client_cert_callback()
return True, cert, key
has_cert, cert, key, _ = get_client_ssl_credentials(generate_encrypted_key=False)
return has_cert, cert, key
def decrypt_private_key(key, passphrase):
"""A helper function to decrypt the private key with the given passphrase.
    The google-auth library doesn't support passphrase protected private keys
    for mutual TLS channels. This helper function can be used to decrypt a
    passphrase protected private key in order to establish a mutual TLS channel.
For example, if you have a function which produces client cert, passphrase
protected private key and passphrase, you can convert it to a client cert
callback function accepted by google-auth::
from google.auth.transport import _mtls_helper
def your_client_cert_function():
return cert, encrypted_key, passphrase
# callback accepted by google-auth for mutual TLS channel.
def client_cert_callback():
cert, encrypted_key, passphrase = your_client_cert_function()
decrypted_key = _mtls_helper.decrypt_private_key(encrypted_key,
passphrase)
return cert, decrypted_key
Args:
key (bytes): The private key bytes in PEM format.
passphrase (bytes): The passphrase bytes.
Returns:
bytes: The decrypted private key in PEM format.
Raises:
ImportError: If pyOpenSSL is not installed.
OpenSSL.crypto.Error: If there is any problem decrypting the private key.
"""
from OpenSSL import crypto
    # First convert the encrypted key bytes to a PKey object
pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key, passphrase=passphrase)
# Then dump the decrypted key bytes
return crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey)
def check_use_client_cert():
"""Returns boolean for whether the client certificate should be used for mTLS.
If GOOGLE_API_USE_CLIENT_CERTIFICATE is set to true or false, a corresponding
bool value will be returned. If the value is set to an unexpected string, it
will default to False.
If GOOGLE_API_USE_CLIENT_CERTIFICATE is unset, the value will be inferred
by reading a file pointed at by GOOGLE_API_CERTIFICATE_CONFIG, and verifying
it contains a "workload" section. If so, the function will return True,
otherwise False.
Returns:
bool: Whether the client certificate should be used for mTLS connection.
"""
use_client_cert = getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE")
# Check if the value of GOOGLE_API_USE_CLIENT_CERTIFICATE is set.
if use_client_cert:
return use_client_cert.lower() == "true"
else:
# Check if the value of GOOGLE_API_CERTIFICATE_CONFIG is set.
cert_path = getenv("GOOGLE_API_CERTIFICATE_CONFIG")
if cert_path:
try:
with open(cert_path, "r") as f:
content = json.load(f)
# verify json has workload key
content["cert_configs"]["workload"]
return True
except (
FileNotFoundError,
OSError,
KeyError,
TypeError,
json.JSONDecodeError,
) as e:
_LOGGER.debug("error decoding certificate: %s", e)
return False
def check_parameters_for_unauthorized_response(cached_cert):
"""Returns the cached and current cert fingerprint for reconfiguring mTLS.
Args:
cached_cert(bytes): The cached client certificate.
Returns:
bytes: The client callback cert bytes.
bytes: The client callback key bytes.
str: The base64-encoded SHA256 cached fingerprint.
str: The base64-encoded SHA256 current cert fingerprint.
"""
call_cert_bytes, call_key_bytes = call_client_cert_callback()
cert_obj = _agent_identity_utils.parse_certificate(call_cert_bytes)
current_cert_fingerprint = _agent_identity_utils.calculate_certificate_fingerprint(
cert_obj
)
if cached_cert:
cached_fingerprint = _agent_identity_utils.get_cached_cert_fingerprint(
cached_cert
)
else:
cached_fingerprint = current_cert_fingerprint
return call_cert_bytes, call_key_bytes, cached_fingerprint, current_cert_fingerprint
def call_client_cert_callback():
"""Calls the client cert callback and returns the certificate and key."""
_, cert_bytes, key_bytes, passphrase = get_client_ssl_credentials(
generate_encrypted_key=True
)
return cert_bytes, key_bytes
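# Illustrative sketch (not part of the library): the minimal certificate_config.json
# shape that _get_workload_cert_and_key_paths above accepts. The paths are
# placeholders.
#
#   {
#       "cert_configs": {
#           "workload": {
#               "cert_path": "/path/to/certificates.pem",
#               "key_path": "/path/to/private_key.pem"
#           }
#       }
#   }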

View File

@@ -0,0 +1,53 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Transport adapter for Base Requests."""
# NOTE: The coverage for this file is temporarily disabled in `.coveragerc`
# since it is currently unused.
import abc
_DEFAULT_TIMEOUT = 120  # in seconds
class _BaseAuthorizedSession(metaclass=abc.ABCMeta):
"""Base class for a Request Session with credentials. This class is intended to capture
the common logic between synchronous and asynchronous request sessions and is not intended to
be instantiated directly.
Args:
credentials (google.auth._credentials_base.BaseCredentials): The credentials to
add to the request.
"""
def __init__(self, credentials):
self.credentials = credentials
@abc.abstractmethod
def request(
self,
method,
url,
data=None,
headers=None,
max_allowed_time=None,
timeout=_DEFAULT_TIMEOUT,
**kwargs
):
raise NotImplementedError("Request must be implemented")
@abc.abstractmethod
def close(self):
raise NotImplementedError("Close must be implemented")

View File

@@ -0,0 +1,337 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Authorization support for gRPC."""
from __future__ import absolute_import
import logging
from google.auth import exceptions
from google.auth.transport import _mtls_helper
from google.oauth2 import service_account
try:
import grpc # type: ignore
except ImportError as caught_exc: # pragma: NO COVER
raise ImportError(
"gRPC is not installed from please install the grpcio package to use the gRPC transport."
) from caught_exc
_LOGGER = logging.getLogger(__name__)
class AuthMetadataPlugin(grpc.AuthMetadataPlugin):
"""A `gRPC AuthMetadataPlugin`_ that inserts the credentials into each
request.
.. _gRPC AuthMetadataPlugin:
http://www.grpc.io/grpc/python/grpc.html#grpc.AuthMetadataPlugin
Args:
credentials (google.auth.credentials.Credentials): The credentials to
add to requests.
request (google.auth.transport.Request): A HTTP transport request
object used to refresh credentials as needed.
default_host (Optional[str]): A host like "pubsub.googleapis.com".
This is used when a self-signed JWT is created from service
account credentials.
"""
def __init__(self, credentials, request, default_host=None):
# pylint: disable=no-value-for-parameter
# pylint doesn't realize that the super method takes no arguments
# because this class is the same name as the superclass.
super(AuthMetadataPlugin, self).__init__()
self._credentials = credentials
self._request = request
self._default_host = default_host
def _get_authorization_headers(self, context):
"""Gets the authorization headers for a request.
Returns:
Sequence[Tuple[str, str]]: A list of request headers (key, value)
to add to the request.
"""
headers = {}
# https://google.aip.dev/auth/4111
# Attempt to use self-signed JWTs when a service account is used.
# A default host must be explicitly provided since it cannot always
# be determined from the context.service_url.
if isinstance(self._credentials, service_account.Credentials):
self._credentials._create_self_signed_jwt(
"https://{}/".format(self._default_host) if self._default_host else None
)
self._credentials.before_request(
self._request, context.method_name, context.service_url, headers
)
return list(headers.items())
def __call__(self, context, callback):
"""Passes authorization metadata into the given callback.
Args:
context (grpc.AuthMetadataContext): The RPC context.
callback (grpc.AuthMetadataPluginCallback): The callback that will
be invoked to pass in the authorization metadata.
"""
callback(self._get_authorization_headers(context), None)
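# Illustrative sketch (not part of the library's public surface): how a plugin
# instance is wired into gRPC call credentials, shown only to make the plugin's
# role concrete; secure_authorized_channel below performs equivalent steps.
# `credentials` and `request` are assumed to exist in the caller's scope.
#
#   plugin = AuthMetadataPlugin(credentials, request)
#   call_creds = grpc.metadata_call_credentials(plugin)
#   channel_creds = grpc.composite_channel_credentials(
#       grpc.ssl_channel_credentials(), call_creds)
#   channel = grpc.secure_channel("pubsub.googleapis.com:443", channel_creds)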
def secure_authorized_channel(
credentials,
request,
target,
ssl_credentials=None,
client_cert_callback=None,
**kwargs
):
"""Creates a secure authorized gRPC channel.
This creates a channel with SSL and :class:`AuthMetadataPlugin`. This
channel can be used to create a stub that can make authorized requests.
Users can configure client certificate or rely on device certificates to
establish a mutual TLS channel, if the `GOOGLE_API_USE_CLIENT_CERTIFICATE`
variable is explicitly set to `true`.
Example::
import google.auth
import google.auth.transport.grpc
import google.auth.transport.requests
from google.cloud.speech.v1 import cloud_speech_pb2
# Get credentials.
credentials, _ = google.auth.default()
# Get an HTTP request function to refresh credentials.
request = google.auth.transport.requests.Request()
# Create a channel.
channel = google.auth.transport.grpc.secure_authorized_channel(
            credentials, request, regular_endpoint,
ssl_credentials=grpc.ssl_channel_credentials())
# Use the channel to create a stub.
cloud_speech.create_Speech_stub(channel)
Usage:
    There are several options for creating a channel, depending on whether
    you want a regular or a mutual TLS channel.
First let's list the endpoints (regular vs mutual TLS) to choose from::
regular_endpoint = 'speech.googleapis.com:443'
mtls_endpoint = 'speech.mtls.googleapis.com:443'
Option 1: create a regular (non-mutual) TLS channel by explicitly setting
the ssl_credentials::
regular_ssl_credentials = grpc.ssl_channel_credentials()
channel = google.auth.transport.grpc.secure_authorized_channel(
credentials, request, regular_endpoint,
ssl_credentials=regular_ssl_credentials)
Option 2: create a mutual TLS channel by calling a callback which returns
the client side certificate and the key (Note that
`GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be explicitly
set to `true`)::
def my_client_cert_callback():
code_to_load_client_cert_and_key()
if loaded:
return (pem_cert_bytes, pem_key_bytes)
raise MyClientCertFailureException()
try:
channel = google.auth.transport.grpc.secure_authorized_channel(
credentials, request, mtls_endpoint,
client_cert_callback=my_client_cert_callback)
except MyClientCertFailureException:
# handle the exception
Option 3: use application default SSL credentials. It searches and uses
the command in a context aware metadata file, which is available on devices
with endpoint verification support (Note that
`GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be explicitly
set to `true`).
See https://cloud.google.com/endpoint-verification/docs/overview::
try:
default_ssl_credentials = SslCredentials()
except:
# Exception can be raised if the context aware metadata is malformed.
# See :class:`SslCredentials` for the possible exceptions.
# Choose the endpoint based on the SSL credentials type.
if default_ssl_credentials.is_mtls:
endpoint_to_use = mtls_endpoint
else:
endpoint_to_use = regular_endpoint
channel = google.auth.transport.grpc.secure_authorized_channel(
credentials, request, endpoint_to_use,
ssl_credentials=default_ssl_credentials)
Option 4: set neither ssl_credentials nor client_cert_callback. For devices
without endpoint verification support, or when the
`GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not `true`, a
regular TLS channel is created; otherwise a mutual TLS channel is created.
In the mutual TLS case the call should be wrapped in a try/except block in
case the context aware metadata is malformed.
The following code uses regular_endpoint; it works the same whether the
created channel is regular or mutual TLS, since the regular endpoint ignores
the client certificate and key::
channel = google.auth.transport.grpc.secure_authorized_channel(
credentials, request, regular_endpoint)
The following code uses mtls_endpoint; if the created channel is regular TLS
and the API's mtls_endpoint is configured to require client SSL credentials,
API calls using this channel will be rejected::
channel = google.auth.transport.grpc.secure_authorized_channel(
credentials, request, mtls_endpoint)
Args:
credentials (google.auth.credentials.Credentials): The credentials to
add to requests.
request (google.auth.transport.Request): An HTTP transport request
object used to refresh credentials as needed. Even though gRPC
is a separate transport, there's no way to refresh the credentials
without using a standard http transport.
target (str): The host and port of the service.
ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
credentials. This can be used to specify different certificates.
This argument is mutually exclusive with client_cert_callback;
providing both will raise an exception.
If ssl_credentials and client_cert_callback are None, application
default SSL credentials are used if `GOOGLE_API_USE_CLIENT_CERTIFICATE`
environment variable is explicitly set to `true`; otherwise one-way TLS
SSL credentials are used.
client_cert_callback (Callable[[], (bytes, bytes)]): Optional
callback function to obtain the client certificate and key for a mutual TLS
connection. This argument is mutually exclusive with
ssl_credentials; providing both will raise an exception.
This argument does nothing unless `GOOGLE_API_USE_CLIENT_CERTIFICATE`
environment variable is explicitly set to `true`.
kwargs: Additional arguments to pass to :func:`grpc.secure_channel`.
Returns:
grpc.Channel: The created gRPC channel.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS channel
creation failed for any reason.
"""
# Create the metadata plugin for inserting the authorization header.
metadata_plugin = AuthMetadataPlugin(credentials, request)
# Create a set of grpc.CallCredentials using the metadata plugin.
google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin)
if ssl_credentials and client_cert_callback:
raise exceptions.MalformedError(
"Received both ssl_credentials and client_cert_callback; "
"these are mutually exclusive."
)
# If SSL credentials are not explicitly set, try client_cert_callback and ADC.
if not ssl_credentials:
use_client_cert = _mtls_helper.check_use_client_cert()
if use_client_cert and client_cert_callback:
# Use the callback if provided.
cert, key = client_cert_callback()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
elif use_client_cert:
# Use application default SSL credentials.
adc_ssl_credentials = SslCredentials()
ssl_credentials = adc_ssl_credentials.ssl_credentials
else:
ssl_credentials = grpc.ssl_channel_credentials()
# Combine the ssl credentials and the authorization credentials.
composite_credentials = grpc.composite_channel_credentials(
ssl_credentials, google_auth_credentials
)
return grpc.secure_channel(target, composite_credentials, **kwargs)
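# --- Illustrative usage sketch (not part of the library API) ---------------
# A minimal, hedged example of the "Option 4" flow described in the docstring
# above: neither ssl_credentials nor client_cert_callback is passed, so a
# regular TLS channel is created unless GOOGLE_API_USE_CLIENT_CERTIFICATE is
# explicitly "true". The Pub/Sub endpoint is only an assumed placeholder and
# application default credentials must be configured.
def _secure_authorized_channel_sketch():
    import google.auth
    import google.auth.transport.requests

    credentials, _ = google.auth.default()
    http_request = google.auth.transport.requests.Request()
    return secure_authorized_channel(
        credentials, http_request, "pubsub.googleapis.com:443"
    )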
class SslCredentials:
"""Class for application default SSL credentials.
The behavior is controlled by `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment
variable whose default value is `false`. Client certificate will not be used
unless the environment variable is explicitly set to `true`. See
https://google.aip.dev/auth/4114
If the environment variable is `true`, then for devices with endpoint verification
support, a device certificate will be automatically loaded and mutual TLS will
be established.
See https://cloud.google.com/endpoint-verification/docs/overview.
"""
def __init__(self):
use_client_cert = _mtls_helper.check_use_client_cert()
if not use_client_cert:
self._is_mtls = False
else:
# Load client SSL credentials.
metadata_path = _mtls_helper._check_config_path(
_mtls_helper.CONTEXT_AWARE_METADATA_PATH
)
self._is_mtls = metadata_path is not None
@property
def ssl_credentials(self):
"""Get the created SSL channel credentials.
For devices with endpoint verification support, if the device certificate
loading has any problems, corresponding exceptions will be raised. For
a device without endpoint verification support, no exceptions will be
raised.
Returns:
grpc.ChannelCredentials: The created grpc channel credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS channel
creation failed for any reason.
"""
if self._is_mtls:
try:
_, cert, key, _ = _mtls_helper.get_client_ssl_credentials()
self._ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
except exceptions.ClientCertError as caught_exc:
new_exc = exceptions.MutualTLSChannelError(caught_exc)
raise new_exc from caught_exc
else:
self._ssl_credentials = grpc.ssl_channel_credentials()
return self._ssl_credentials
@property
def is_mtls(self):
"""Indicates if the created SSL channel credentials is mutual TLS."""
return self._is_mtls
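# --- Illustrative usage sketch (not part of the library API) ---------------
# Shows the endpoint-selection pattern from the secure_authorized_channel
# docstring using SslCredentials: pick the mTLS endpoint only when device
# certificates are available, and fall back to regular TLS if loading the
# device certificate fails. The Speech endpoints are assumed placeholders.
def _ssl_credentials_endpoint_sketch():
    regular_endpoint = "speech.googleapis.com:443"
    mtls_endpoint = "speech.mtls.googleapis.com:443"

    adc_ssl_credentials = SslCredentials()
    endpoint = mtls_endpoint if adc_ssl_credentials.is_mtls else regular_endpoint
    try:
        channel_ssl_credentials = adc_ssl_credentials.ssl_credentials
    except exceptions.MutualTLSChannelError:
        # The context aware metadata was malformed or the device certificate
        # could not be loaded; fall back to a regular TLS channel.
        endpoint = regular_endpoint
        channel_ssl_credentials = grpc.ssl_channel_credentials()
    return endpoint, channel_ssl_credentials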

View File

@@ -0,0 +1,137 @@
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilites for mutual TLS."""
from os import getenv
from google.auth import exceptions
from google.auth.transport import _mtls_helper
def has_default_client_cert_source():
"""Check if default client SSL credentials exists on the device.
Returns:
bool: indicating if the default client cert source exists.
"""
if (
_mtls_helper._check_config_path(_mtls_helper.CONTEXT_AWARE_METADATA_PATH)
is not None
):
return True
if (
_mtls_helper._check_config_path(
_mtls_helper.CERTIFICATE_CONFIGURATION_DEFAULT_PATH
)
is not None
):
return True
cert_config_path = getenv("GOOGLE_API_CERTIFICATE_CONFIG")
if (
cert_config_path
and _mtls_helper._check_config_path(cert_config_path) is not None
):
return True
return False
def default_client_cert_source():
"""Get a callback which returns the default client SSL credentials.
Returns:
Callable[[], [bytes, bytes]]: A callback which returns the default
client certificate bytes and private key bytes, both in PEM format.
Raises:
google.auth.exceptions.MutualTLSChannelError: If the default
client SSL credentials don't exist or are malformed.
"""
if not has_default_client_cert_source():
raise exceptions.MutualTLSChannelError(
"Default client cert source doesn't exist"
)
def callback():
try:
_, cert_bytes, key_bytes = _mtls_helper.get_client_cert_and_key()
except (OSError, RuntimeError, ValueError) as caught_exc:
new_exc = exceptions.MutualTLSChannelError(caught_exc)
raise new_exc from caught_exc
return cert_bytes, key_bytes
return callback
def default_client_encrypted_cert_source(cert_path, key_path):
"""Get a callback which returns the default encrpyted client SSL credentials.
Args:
cert_path (str): The cert file path. The default client certificate will
be written to this file when the returned callback is called.
key_path (str): The key file path. The default encrypted client key will
be written to this file when the returned callback is called.
Returns:
Callable[[], [str, str, bytes]]: A callback which generates the default
client certificate, encrypted private key and passphrase. It writes
the certificate and private key into the cert_path and key_path, and
returns the cert_path, key_path and passphrase bytes.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any problem
occurs when loading or saving the client certificate and key.
"""
if not has_default_client_cert_source():
raise exceptions.MutualTLSChannelError(
"Default client encrypted cert source doesn't exist"
)
def callback():
try:
(
_,
cert_bytes,
key_bytes,
passphrase_bytes,
) = _mtls_helper.get_client_ssl_credentials(generate_encrypted_key=True)
with open(cert_path, "wb") as cert_file:
cert_file.write(cert_bytes)
with open(key_path, "wb") as key_file:
key_file.write(key_bytes)
except (exceptions.ClientCertError, OSError) as caught_exc:
new_exc = exceptions.MutualTLSChannelError(caught_exc)
raise new_exc from caught_exc
return cert_path, key_path, passphrase_bytes
return callback
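# --- Illustrative usage sketch (not part of the library API) ---------------
# Shows how the callback returned above is typically consumed: calling it
# writes the certificate and the encrypted key to the given paths and returns
# them together with the passphrase. The file names are assumed placeholders.
def _encrypted_cert_source_sketch():
    cert_source = default_client_encrypted_cert_source("cert.pem", "key.pem")
    cert_path, key_path, passphrase_bytes = cert_source()
    return cert_path, key_path, passphrase_bytes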
def should_use_client_cert():
"""Returns boolean for whether the client certificate should be used for mTLS.
This is a wrapper around _mtls_helper.check_use_client_cert().
If GOOGLE_API_USE_CLIENT_CERTIFICATE is set to true or false, the
corresponding bool value will be returned.
If GOOGLE_API_USE_CLIENT_CERTIFICATE is unset, the value will be inferred by
reading a file pointed at by GOOGLE_API_CERTIFICATE_CONFIG, and verifying it
contains a "workload" section. If so, the function will return True,
otherwise False.
Returns:
bool: indicating whether the client certificate should be used for mTLS.
"""
return _mtls_helper.check_use_client_cert()
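# --- Illustrative usage sketch (not part of the library API) ---------------
# A hedged example of how a client library might combine the helpers in this
# module: only request the default cert source when the environment says
# client certificates should be used and a source actually exists; otherwise
# return None to signal plain TLS.
def _default_cert_source_sketch():
    if should_use_client_cert() and has_default_client_cert_source():
        cert_source = default_client_cert_source()
        return cert_source()  # (cert_bytes, key_bytes), both in PEM format
    return None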

View File

@@ -0,0 +1,634 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Transport adapter for Requests."""
from __future__ import absolute_import
import functools
import http.client as http_client
import logging
import numbers
import time
from typing import Optional
try:
import requests
except ImportError as caught_exc: # pragma: NO COVER
raise ImportError(
"The requests library is not installed from please install the requests package to use the requests transport."
) from caught_exc
import requests.adapters # pylint: disable=ungrouped-imports
import requests.exceptions # pylint: disable=ungrouped-imports
from requests.packages.urllib3.util.ssl_ import ( # type: ignore
create_urllib3_context,
) # pylint: disable=ungrouped-imports
from google.auth import _helpers
from google.auth import exceptions
from google.auth import transport
from google.auth.transport import _mtls_helper
import google.auth.transport._mtls_helper
from google.oauth2 import service_account
_LOGGER = logging.getLogger(__name__)
_DEFAULT_TIMEOUT = 120 # in seconds
class _Response(transport.Response):
"""Requests transport response adapter.
Args:
response (requests.Response): The raw Requests response.
"""
def __init__(self, response):
self._response = response
@property
def status(self):
return self._response.status_code
@property
def headers(self):
return self._response.headers
@property
def data(self):
return self._response.content
class TimeoutGuard(object):
"""A context manager raising an error if the suite execution took too long.
Args:
timeout (Union[None, Union[float, Tuple[float, float]]]):
The maximum number of seconds a suite can run without the context
manager raising a timeout exception on exit. If passed as a tuple,
the smaller of the values is taken as a timeout. If ``None``, a
timeout error is never raised.
timeout_error_type (Optional[Exception]):
The type of the error to raise on timeout. Defaults to
:class:`requests.exceptions.Timeout`.
"""
def __init__(self, timeout, timeout_error_type=requests.exceptions.Timeout):
self._timeout = timeout
self.remaining_timeout = timeout
self._timeout_error_type = timeout_error_type
def __enter__(self):
self._start = time.time()
return self
def __exit__(self, exc_type, exc_value, traceback):
if exc_value:
return # let the error bubble up automatically
if self._timeout is None:
return # nothing to do, the timeout was not specified
elapsed = time.time() - self._start
deadline_hit = False
if isinstance(self._timeout, numbers.Number):
self.remaining_timeout = self._timeout - elapsed
deadline_hit = self.remaining_timeout <= 0
else:
self.remaining_timeout = tuple(x - elapsed for x in self._timeout)
deadline_hit = min(self.remaining_timeout) <= 0
if deadline_hit:
raise self._timeout_error_type()
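# --- Illustrative usage sketch (not part of the library API) ---------------
# Shows how TimeoutGuard splits one overall time budget across two sequential
# HTTP calls, mirroring how AuthorizedSession.request uses it below. The
# 5-second budget and the example.com URLs are assumed placeholders; the
# guard raises requests.exceptions.Timeout on exit if the budget runs out.
def _timeout_guard_sketch():
    session = requests.Session()
    remaining = 5.0
    with TimeoutGuard(remaining) as guard:
        session.get("https://example.com/first", timeout=remaining)
    remaining = guard.remaining_timeout  # budget left for the second call
    with TimeoutGuard(remaining) as guard:
        session.get("https://example.com/second", timeout=remaining)
    return guard.remaining_timeout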
class Request(transport.Request):
"""Requests request adapter.
This class is used internally for making requests using various transports
in a consistent way. If you use :class:`AuthorizedSession` you do not need
to construct or use this class directly.
This class can be useful if you want to manually refresh a
:class:`~google.auth.credentials.Credentials` instance::
import google.auth.transport.requests
import requests
request = google.auth.transport.requests.Request()
credentials.refresh(request)
Args:
session (requests.Session): An instance of :class:`requests.Session` used
to make HTTP requests. If not specified, a session will be created.
.. automethod:: __call__
"""
def __init__(self, session: Optional[requests.Session] = None) -> None:
if not session:
session = requests.Session()
self.session = session
def __del__(self):
try:
if hasattr(self, "session") and self.session is not None:
self.session.close()
except TypeError:
# NOTE: For certain Python binary builds, the queue.Empty exception
# might not be considered a normal Python exception, causing a
# TypeError.
pass
def __call__(
self,
url,
method="GET",
body=None,
headers=None,
timeout=_DEFAULT_TIMEOUT,
**kwargs
):
"""Make an HTTP request using requests.
Args:
url (str): The URI to be requested.
method (str): The HTTP method to use for the request. Defaults
to 'GET'.
body (bytes): The payload or body in HTTP request.
headers (Mapping[str, str]): Request headers.
timeout (Optional[int]): The number of seconds to wait for a
response from the server. If not specified or if None, the
requests default timeout will be used.
kwargs: Additional arguments passed through to the underlying
requests :meth:`~requests.Session.request` method.
Returns:
google.auth.transport.Response: The HTTP response.
Raises:
google.auth.exceptions.TransportError: If any exception occurred.
"""
try:
_helpers.request_log(_LOGGER, method, url, body, headers)
response = self.session.request(
method, url, data=body, headers=headers, timeout=timeout, **kwargs
)
_helpers.response_log(_LOGGER, response)
return _Response(response)
except requests.exceptions.RequestException as caught_exc:
new_exc = exceptions.TransportError(caught_exc)
raise new_exc from caught_exc
class _MutualTlsAdapter(requests.adapters.HTTPAdapter):
"""
A TransportAdapter that enables mutual TLS.
Args:
cert (bytes): client certificate in PEM format
key (bytes): client private key in PEM format
Raises:
ImportError: if certifi or pyOpenSSL is not installed
OpenSSL.crypto.Error: if client cert or key is invalid
"""
def __init__(self, cert, key):
import certifi
from OpenSSL import crypto
import urllib3.contrib.pyopenssl # type: ignore
urllib3.contrib.pyopenssl.inject_into_urllib3()
pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key)
x509 = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
ctx_poolmanager = create_urllib3_context()
ctx_poolmanager.load_verify_locations(cafile=certifi.where())
ctx_poolmanager._ctx.use_certificate(x509)
ctx_poolmanager._ctx.use_privatekey(pkey)
self._ctx_poolmanager = ctx_poolmanager
ctx_proxymanager = create_urllib3_context()
ctx_proxymanager.load_verify_locations(cafile=certifi.where())
ctx_proxymanager._ctx.use_certificate(x509)
ctx_proxymanager._ctx.use_privatekey(pkey)
self._ctx_proxymanager = ctx_proxymanager
super(_MutualTlsAdapter, self).__init__()
def init_poolmanager(self, *args, **kwargs):
kwargs["ssl_context"] = self._ctx_poolmanager
super(_MutualTlsAdapter, self).init_poolmanager(*args, **kwargs)
def proxy_manager_for(self, *args, **kwargs):
kwargs["ssl_context"] = self._ctx_proxymanager
return super(_MutualTlsAdapter, self).proxy_manager_for(*args, **kwargs)
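# --- Illustrative usage sketch (not part of the library API) ---------------
# Shows how _MutualTlsAdapter is mounted on a requests.Session, which is what
# AuthorizedSession.configure_mtls_channel does below. cert_bytes/key_bytes
# are assumed to be PEM-encoded bytes obtained elsewhere (for example from a
# client cert callback).
def _mutual_tls_adapter_sketch(cert_bytes, key_bytes):
    session = requests.Session()
    session.mount("https://", _MutualTlsAdapter(cert_bytes, key_bytes))
    return session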
class _MutualTlsOffloadAdapter(requests.adapters.HTTPAdapter):
"""
A TransportAdapter that enables mutual TLS and offloads the client side
signing operation to the signing library.
Args:
enterprise_cert_file_path (str): the path to an enterprise cert JSON
file. The file should contain the following field:
{
"libs": {
"signer_library": "...",
"offload_library": "..."
}
}
Raises:
ImportError: if certifi or pyOpenSSL is not installed
google.auth.exceptions.MutualTLSChannelError: If mutual TLS channel
creation failed for any reason.
"""
def __init__(self, enterprise_cert_file_path):
import certifi
from google.auth.transport import _custom_tls_signer
self.signer = _custom_tls_signer.CustomTlsSigner(enterprise_cert_file_path)
self.signer.load_libraries()
import urllib3.contrib.pyopenssl
urllib3.contrib.pyopenssl.inject_into_urllib3()
poolmanager = create_urllib3_context()
poolmanager.load_verify_locations(cafile=certifi.where())
self.signer.attach_to_ssl_context(poolmanager)
self._ctx_poolmanager = poolmanager
proxymanager = create_urllib3_context()
proxymanager.load_verify_locations(cafile=certifi.where())
self.signer.attach_to_ssl_context(proxymanager)
self._ctx_proxymanager = proxymanager
super(_MutualTlsOffloadAdapter, self).__init__()
def init_poolmanager(self, *args, **kwargs):
kwargs["ssl_context"] = self._ctx_poolmanager
super(_MutualTlsOffloadAdapter, self).init_poolmanager(*args, **kwargs)
def proxy_manager_for(self, *args, **kwargs):
kwargs["ssl_context"] = self._ctx_proxymanager
return super(_MutualTlsOffloadAdapter, self).proxy_manager_for(*args, **kwargs)
class AuthorizedSession(requests.Session):
"""A Requests Session class with credentials.
This class is used to perform requests to API endpoints that require
authorization::
from google.auth.transport.requests import AuthorizedSession
authed_session = AuthorizedSession(credentials)
response = authed_session.request(
'GET', 'https://www.googleapis.com/storage/v1/b')
The underlying :meth:`request` implementation handles adding the
credentials' headers to the request and refreshing credentials as needed.
This class also supports mutual TLS via :meth:`configure_mtls_channel`
method. In order to use this method, the `GOOGLE_API_USE_CLIENT_CERTIFICATE`
environment variable must be explicitly set to ``true``, otherwise it does
nothing. Assuming the environment variable is set to ``true``, the method behaves in the
following manner:
If client_cert_callback is provided, client certificate and private
key are loaded using the callback; if client_cert_callback is None,
application default SSL credentials will be used. Exceptions are raised if
there are problems with the certificate, private key, or the loading process,
so it should be called within a try/except block.
First we set the environment variable to ``true``, then create an :class:`AuthorizedSession`
instance and specify the endpoints::
regular_endpoint = 'https://pubsub.googleapis.com/v1/projects/{my_project_id}/topics'
mtls_endpoint = 'https://pubsub.mtls.googleapis.com/v1/projects/{my_project_id}/topics'
authed_session = AuthorizedSession(credentials)
Now we can pass a callback to :meth:`configure_mtls_channel`::
def my_cert_callback():
# some code to load client cert bytes and private key bytes, both in
# PEM format.
some_code_to_load_client_cert_and_key()
if loaded:
return cert, key
raise MyClientCertFailureException()
# Always call configure_mtls_channel within a try/except block.
try:
authed_session.configure_mtls_channel(my_cert_callback)
except:
# handle exceptions.
if authed_session.is_mtls:
response = authed_session.request('GET', mtls_endpoint)
else:
response = authed_session.request('GET', regular_endpoint)
You can alternatively use application default SSL credentials like this::
try:
authed_session.configure_mtls_channel()
except:
# handle exceptions.
Args:
credentials (google.auth.credentials.Credentials): The credentials to
add to the request.
refresh_status_codes (Sequence[int]): Which HTTP status codes indicate
that credentials should be refreshed and the request should be
retried.
max_refresh_attempts (int): The maximum number of times to attempt to
refresh the credentials and retry the request.
refresh_timeout (Optional[int]): The timeout value in seconds for
credential refresh HTTP requests.
auth_request (google.auth.transport.requests.Request):
(Optional) An instance of
:class:`~google.auth.transport.requests.Request` used when
refreshing credentials. If not passed,
an instance of :class:`~google.auth.transport.requests.Request`
is created.
default_host (Optional[str]): A host like "pubsub.googleapis.com".
This is used when a self-signed JWT is created from service
account credentials.
"""
def __init__(
self,
credentials,
refresh_status_codes=transport.DEFAULT_REFRESH_STATUS_CODES,
max_refresh_attempts=transport.DEFAULT_MAX_REFRESH_ATTEMPTS,
refresh_timeout=None,
auth_request=None,
default_host=None,
):
super(AuthorizedSession, self).__init__()
self.credentials = credentials
self._refresh_status_codes = refresh_status_codes
self._max_refresh_attempts = max_refresh_attempts
self._refresh_timeout = refresh_timeout
self._is_mtls = False
self._default_host = default_host
if auth_request is None:
self._auth_request_session = requests.Session()
# Using an adapter to make HTTP requests robust to network errors.
# This adapter retries HTTP requests when network errors occur
# and the requests seem safely retryable.
retry_adapter = requests.adapters.HTTPAdapter(max_retries=3)
self._auth_request_session.mount("https://", retry_adapter)
# Do not pass `self` as the session here, as it can lead to
# infinite recursion.
auth_request = Request(self._auth_request_session)
else:
self._auth_request_session = None
# Request instance used by internal methods (for example,
# credentials.refresh).
self._auth_request = auth_request
# https://google.aip.dev/auth/4111
# Attempt to use self-signed JWTs when a service account is used.
if isinstance(self.credentials, service_account.Credentials):
self.credentials._create_self_signed_jwt(
"https://{}/".format(self._default_host) if self._default_host else None
)
def configure_mtls_channel(self, client_cert_callback=None):
"""Configure the client certificate and key for SSL connection.
The function does nothing unless `GOOGLE_API_USE_CLIENT_CERTIFICATE` is
explicitly set to `true`. In this case if client certificate and key are
successfully obtained (from the given client_cert_callback or from application
default SSL credentials), a :class:`_MutualTlsAdapter` instance will be mounted
to "https://" prefix.
Args:
client_cert_callback (Optional[Callable[[], (bytes, bytes)]]):
The optional callback returns the client certificate and private
key bytes both in PEM format.
If the callback is None, application default SSL credentials
will be used.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS channel
creation failed for any reason.
"""
use_client_cert = google.auth.transport._mtls_helper.check_use_client_cert()
if not use_client_cert:
self._is_mtls = False
return
try:
import OpenSSL
except ImportError as caught_exc:
new_exc = exceptions.MutualTLSChannelError(caught_exc)
raise new_exc from caught_exc
try:
(
self._is_mtls,
cert,
key,
) = google.auth.transport._mtls_helper.get_client_cert_and_key(
client_cert_callback
)
if self._is_mtls:
mtls_adapter = _MutualTlsAdapter(cert, key)
self._cached_cert = cert
self.mount("https://", mtls_adapter)
except (
exceptions.ClientCertError,
ImportError,
OpenSSL.crypto.Error,
) as caught_exc:
new_exc = exceptions.MutualTLSChannelError(caught_exc)
raise new_exc from caught_exc
def request(
self,
method,
url,
data=None,
headers=None,
max_allowed_time=None,
timeout=_DEFAULT_TIMEOUT,
**kwargs
):
"""Implementation of Requests' request.
Args:
timeout (Optional[Union[float, Tuple[float, float]]]):
The amount of time in seconds to wait for the server response
with each individual request. Can also be passed as a tuple
``(connect_timeout, read_timeout)``. See :meth:`requests.Session.request`
documentation for details.
max_allowed_time (Optional[float]):
If the method runs longer than this, a ``Timeout`` exception is
automatically raised. Unlike the ``timeout`` parameter, this
value applies to the total method execution time, even if
multiple requests are made under the hood.
Mind that it is not guaranteed that the timeout error is raised
at ``max_allowed_time``. It might take longer, for example, if
an underlying request takes a lot of time, but the request
itself does not timeout, e.g. if a large file is being
transmitted. The timeout error will be raised after such
request completes.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS
channel creation fails for any reason.
ValueError: If the client certificate is invalid.
"""
# pylint: disable=arguments-differ
# Requests has a ton of arguments to request, but only two
# (method, url) are required. We pass through all of the other
# arguments to super, so no need to exhaustively list them here.
# Use a kwarg for this instead of an attribute to maintain
# thread-safety.
_credential_refresh_attempt = kwargs.pop("_credential_refresh_attempt", 0)
# Make a copy of the headers. They will be modified by the credentials
# and we want to pass the original headers if we recurse.
request_headers = headers.copy() if headers is not None else {}
# Do not apply the timeout unconditionally in order to not override the
# _auth_request's default timeout.
auth_request = (
self._auth_request
if timeout is None
else functools.partial(self._auth_request, timeout=timeout)
)
remaining_time = max_allowed_time
with TimeoutGuard(remaining_time) as guard:
self.credentials.before_request(auth_request, method, url, request_headers)
remaining_time = guard.remaining_timeout
with TimeoutGuard(remaining_time) as guard:
_helpers.request_log(_LOGGER, method, url, data, headers)
response = super(AuthorizedSession, self).request(
method,
url,
data=data,
headers=request_headers,
timeout=timeout,
**kwargs
)
remaining_time = guard.remaining_timeout
# If the response indicated that the credentials needed to be
# refreshed, then refresh the credentials and re-attempt the
# request.
# A stored token may expire between the time it is retrieved and
# the time the request is made, so we may need to try twice.
if (
response.status_code in self._refresh_status_codes
and _credential_refresh_attempt < self._max_refresh_attempts
):
# Handle unauthorized permission error(401 status code)
if response.status_code == http_client.UNAUTHORIZED:
if self.is_mtls:
(
call_cert_bytes,
call_key_bytes,
cached_fingerprint,
current_cert_fingerprint,
) = _mtls_helper.check_parameters_for_unauthorized_response(
self._cached_cert
)
if cached_fingerprint != current_cert_fingerprint:
try:
_LOGGER.info(
"Client certificate has changed, reconfiguring mTLS "
"channel."
)
self.configure_mtls_channel(
lambda: (call_cert_bytes, call_key_bytes)
)
except Exception as e:
_LOGGER.error("Failed to reconfigure mTLS channel: %s", e)
raise exceptions.MutualTLSChannelError(
"Failed to reconfigure mTLS channel"
) from e
else:
_LOGGER.info(
"Skipping reconfiguration of mTLS channel because the client"
" certificate has not changed."
)
_LOGGER.info(
"Refreshing credentials due to a %s response. Attempt %s/%s.",
response.status_code,
_credential_refresh_attempt + 1,
self._max_refresh_attempts,
)
# Do not apply the timeout unconditionally in order to not override the
# _auth_request's default timeout.
auth_request = (
self._auth_request
if timeout is None
else functools.partial(self._auth_request, timeout=timeout)
)
with TimeoutGuard(remaining_time) as guard:
self.credentials.refresh(auth_request)
remaining_time = guard.remaining_timeout
# Recurse. Pass in the original headers, not our modified set, but
# do pass the adjusted max allowed time (i.e. the remaining total time).
return self.request(
method,
url,
data=data,
headers=headers,
max_allowed_time=remaining_time,
timeout=timeout,
_credential_refresh_attempt=_credential_refresh_attempt + 1,
**kwargs
)
return response
@property
def is_mtls(self):
"""Indicates if the created SSL channel is mutual TLS."""
return self._is_mtls
def close(self):
if self._auth_request_session is not None:
self._auth_request_session.close()
super(AuthorizedSession, self).close()
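# --- Illustrative usage sketch (not part of the library API) ---------------
# A hedged example of the timeout semantics documented in
# AuthorizedSession.request: ``timeout`` bounds each underlying HTTP request,
# while ``max_allowed_time`` bounds the whole call including credential
# refreshes. The Storage URL and the numeric values are assumed placeholders;
# application default credentials must be configured for this to work.
def _authorized_session_sketch():
    import google.auth

    credentials, _ = google.auth.default()
    authed_session = AuthorizedSession(credentials)
    response = authed_session.request(
        "GET",
        "https://www.googleapis.com/storage/v1/b?project=my-project",
        timeout=10,
        max_allowed_time=30,
    )
    return response.status_code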

View File

@@ -0,0 +1,493 @@
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Transport adapter for urllib3."""
from __future__ import absolute_import
import http.client as http_client
import logging
import warnings
# Certifi is Mozilla's certificate bundle. Urllib3 needs a certificate bundle
# to verify HTTPS requests, and certifi is the recommended and most reliable
# way to get a root certificate bundle. See
# http://urllib3.readthedocs.io/en/latest/user-guide.html\
# #certificate-verification
# For more details.
try:
import certifi
except ImportError: # pragma: NO COVER
certifi = None # type: ignore
try:
import urllib3 # type: ignore
import urllib3.exceptions # type: ignore
from packaging import version # type: ignore
except ImportError as caught_exc: # pragma: NO COVER
raise ImportError(
""
f"Error: {caught_exc}."
" The 'google-auth' library requires the extras installed "
"for urllib3 network transport."
"\n"
"Please install the necessary dependencies using pip:\n"
" pip install google-auth[urllib3]\n"
"\n"
"(Note: Using '[urllib3]' ensures the specific dependencies needed for this feature are installed. "
"We recommend running this command in your virtual environment.)"
) from caught_exc
from google.auth import _helpers
from google.auth import exceptions
from google.auth import transport
from google.auth.transport import _mtls_helper
from google.oauth2 import service_account
if version.parse(urllib3.__version__) >= version.parse("2.0.0"): # pragma: NO COVER
RequestMethods = urllib3._request_methods.RequestMethods # type: ignore
else: # pragma: NO COVER
RequestMethods = urllib3.request.RequestMethods # type: ignore
_LOGGER = logging.getLogger(__name__)
class _Response(transport.Response):
"""urllib3 transport response adapter.
Args:
response (urllib3.response.HTTPResponse): The raw urllib3 response.
"""
def __init__(self, response):
self._response = response
@property
def status(self):
return self._response.status
@property
def headers(self):
return self._response.headers
@property
def data(self):
return self._response.data
class Request(transport.Request):
"""urllib3 request adapter.
This class is used internally for making requests using various transports
in a consistent way. If you use :class:`AuthorizedHttp` you do not need
to construct or use this class directly.
This class can be useful if you want to manually refresh a
:class:`~google.auth.credentials.Credentials` instance::
import google.auth.transport.urllib3
import urllib3
http = urllib3.PoolManager()
request = google.auth.transport.urllib3.Request(http)
credentials.refresh(request)
Args:
http (urllib3.PoolManager): An instance of a urllib3 class that implements
the request interface (e.g. :class:`urllib3.PoolManager`).
.. automethod:: __call__
"""
def __init__(self, http):
self.http = http
def __call__(
self, url, method="GET", body=None, headers=None, timeout=None, **kwargs
):
"""Make an HTTP request using urllib3.
Args:
url (str): The URI to be requested.
method (str): The HTTP method to use for the request. Defaults
to 'GET'.
body (bytes): The payload / body in HTTP request.
headers (Mapping[str, str]): Request headers.
timeout (Optional[int]): The number of seconds to wait for a
response from the server. If not specified or if None, the
urllib3 default timeout will be used.
kwargs: Additional arguments passed through to the underlying
urllib3 :meth:`urlopen` method.
Returns:
google.auth.transport.Response: The HTTP response.
Raises:
google.auth.exceptions.TransportError: If any exception occurred.
"""
# urllib3 uses a sentinel default value for timeout, so only set it if
# specified.
if timeout is not None:
kwargs["timeout"] = timeout
try:
_helpers.request_log(_LOGGER, method, url, body, headers)
response = self.http.request(
method, url, body=body, headers=headers, **kwargs
)
_helpers.response_log(_LOGGER, response)
return _Response(response)
except urllib3.exceptions.HTTPError as caught_exc:
new_exc = exceptions.TransportError(caught_exc)
raise new_exc from caught_exc
def _make_default_http():
if certifi is not None:
return urllib3.PoolManager(cert_reqs="CERT_REQUIRED", ca_certs=certifi.where())
else:
return urllib3.PoolManager()
def _make_mutual_tls_http(cert, key):
"""Create a mutual TLS HTTP connection with the given client cert and key.
See https://github.com/urllib3/urllib3/issues/474#issuecomment-253168415
Args:
cert (bytes): client certificate in PEM format
key (bytes): client private key in PEM format
Returns:
urllib3.PoolManager: Mutual TLS HTTP connection.
Raises:
ImportError: If certifi or pyOpenSSL is not installed.
OpenSSL.crypto.Error: If the cert or key is invalid.
"""
import certifi
from OpenSSL import crypto
import urllib3.contrib.pyopenssl # type: ignore
urllib3.contrib.pyopenssl.inject_into_urllib3()
ctx = urllib3.util.ssl_.create_urllib3_context()
ctx.load_verify_locations(cafile=certifi.where())
pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key)
x509 = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
ctx._ctx.use_certificate(x509)
ctx._ctx.use_privatekey(pkey)
http = urllib3.PoolManager(ssl_context=ctx)
return http
class AuthorizedHttp(RequestMethods): # type: ignore
"""A urllib3 HTTP class with credentials.
This class is used to perform requests to API endpoints that require
authorization::
from google.auth.transport.urllib3 import AuthorizedHttp
authed_http = AuthorizedHttp(credentials)
response = authed_http.request(
'GET', 'https://www.googleapis.com/storage/v1/b')
This class implements the urllib3 request interface and can be
used just like any other :class:`urllib3.PoolManager`.
The underlying :meth:`urlopen` implementation handles adding the
credentials' headers to the request and refreshing credentials as needed.
This class also supports mutual TLS via :meth:`configure_mtls_channel`
method. In order to use this method, the `GOOGLE_API_USE_CLIENT_CERTIFICATE`
environment variable must be explicitly set to `true`, otherwise it does
nothing. Assuming the environment variable is set to `true`, the method behaves in the
following manner:
If client_cert_callback is provided, client certificate and private
key are loaded using the callback; if client_cert_callback is None,
application default SSL credentials will be used. Exceptions are raised if
there are problems with the certificate, private key, or the loading process,
so it should be called within a try/except block.
First we set the environment variable to `true`, then create an :class:`AuthorizedHttp`
instance and specify the endpoints::
regular_endpoint = 'https://pubsub.googleapis.com/v1/projects/{my_project_id}/topics'
mtls_endpoint = 'https://pubsub.mtls.googleapis.com/v1/projects/{my_project_id}/topics'
authed_http = AuthorizedHttp(credentials)
Now we can pass a callback to :meth:`configure_mtls_channel`::
def my_cert_callback():
# some code to load client cert bytes and private key bytes, both in
# PEM format.
some_code_to_load_client_cert_and_key()
if loaded:
return cert, key
raise MyClientCertFailureException()
# Always call configure_mtls_channel within a try/except block.
try:
is_mtls = authed_http.configure_mtls_channel(my_cert_callback)
except:
# handle exceptions.
if is_mtls:
response = authed_http.request('GET', mtls_endpoint)
else:
response = authed_http.request('GET', regular_endpoint)
You can alternatively use application default SSL credentials like this::
try:
is_mtls = authed_http.configure_mtls_channel()
except:
# handle exceptions.
Args:
credentials (google.auth.credentials.Credentials): The credentials to
add to the request.
http (urllib3.PoolManager): The underlying HTTP object to
use to make requests. If not specified, a
:class:`urllib3.PoolManager` instance will be constructed with
sane defaults.
refresh_status_codes (Sequence[int]): Which HTTP status codes indicate
that credentials should be refreshed and the request should be
retried.
max_refresh_attempts (int): The maximum number of times to attempt to
refresh the credentials and retry the request.
default_host (Optional[str]): A host like "pubsub.googleapis.com".
This is used when a self-signed JWT is created from service
account credentials.
"""
def __init__(
self,
credentials,
http=None,
refresh_status_codes=transport.DEFAULT_REFRESH_STATUS_CODES,
max_refresh_attempts=transport.DEFAULT_MAX_REFRESH_ATTEMPTS,
default_host=None,
):
if http is None:
self.http = _make_default_http()
self._has_user_provided_http = False
else:
self.http = http
self._has_user_provided_http = True
self.credentials = credentials
self._refresh_status_codes = refresh_status_codes
self._max_refresh_attempts = max_refresh_attempts
self._default_host = default_host
# Request instance used by internal methods (for example,
# credentials.refresh).
self._request = Request(self.http)
self._is_mtls = False
# https://google.aip.dev/auth/4111
# Attempt to use self-signed JWTs when a service account is used.
if isinstance(self.credentials, service_account.Credentials):
self.credentials._create_self_signed_jwt(
"https://{}/".format(self._default_host) if self._default_host else None
)
super(AuthorizedHttp, self).__init__()
def configure_mtls_channel(self, client_cert_callback=None):
"""Configures mutual TLS channel using the given client_cert_callback or
application default SSL credentials. The behavior is controlled by
`GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable.
(1) If the environment variable value is `true`, the function returns True
if the channel is mutual TLS and False otherwise. The `http` provided
in the constructor will be overwritten.
(2) If the environment variable is not set or `false`, the function does
nothing and always returns False.
Args:
client_cert_callback (Optional[Callable[[], (bytes, bytes)]]):
The optional callback returns the client certificate and private
key bytes both in PEM format.
If the callback is None, application default SSL credentials
will be used.
Returns:
True if the channel is mutual TLS and False otherwise.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS channel
creation failed for any reason.
"""
use_client_cert = transport._mtls_helper.check_use_client_cert()
if not use_client_cert:
self._is_mtls = False
return False
else:
self._is_mtls = True
try:
import OpenSSL
except ImportError as caught_exc:
new_exc = exceptions.MutualTLSChannelError(caught_exc)
raise new_exc from caught_exc
try:
found_cert_key, cert, key = transport._mtls_helper.get_client_cert_and_key(
client_cert_callback
)
if found_cert_key:
self.http = _make_mutual_tls_http(cert, key)
self._cached_cert = cert
else:
self.http = _make_default_http()
except (
exceptions.ClientCertError,
ImportError,
OpenSSL.crypto.Error,
) as caught_exc:
new_exc = exceptions.MutualTLSChannelError(caught_exc)
raise new_exc from caught_exc
if self._has_user_provided_http:
self._has_user_provided_http = False
warnings.warn(
"`http` provided in the constructor is overwritten", UserWarning
)
return found_cert_key
def urlopen(self, method, url, body=None, headers=None, **kwargs):
"""Implementation of urllib3's urlopen."""
# pylint: disable=arguments-differ
# We use kwargs to collect additional args that we don't need to
# introspect here. However, we do explicitly collect the two
# positional arguments.
# Use a kwarg for this instead of an attribute to maintain
# thread-safety.
_credential_refresh_attempt = kwargs.pop("_credential_refresh_attempt", 0)
if headers is None:
headers = self.headers
use_mtls = False
if self._is_mtls:
MTLS_URL_PREFIXES = ["mtls.googleapis.com", "mtls.sandbox.googleapis.com"]
use_mtls = any([prefix in url for prefix in MTLS_URL_PREFIXES])
# Make a copy of the headers. They will be modified by the credentials
# and we want to pass the original headers if we recurse.
request_headers = headers.copy()
self.credentials.before_request(self._request, method, url, request_headers)
response = self.http.urlopen(
method, url, body=body, headers=request_headers, **kwargs
)
# If the response indicated that the credentials needed to be
# refreshed, then refresh the credentials and re-attempt the
# request.
# A stored token may expire between the time it is retrieved and
# the time the request is made, so we may need to try twice.
# The reason urllib3's retries aren't used is because they
# don't allow you to modify the request headers. :/
if (
response.status in self._refresh_status_codes
and _credential_refresh_attempt < self._max_refresh_attempts
):
if response.status == http_client.UNAUTHORIZED:
if use_mtls:
(
call_cert_bytes,
call_key_bytes,
cached_fingerprint,
current_cert_fingerprint,
) = _mtls_helper.check_parameters_for_unauthorized_response(
self._cached_cert
)
if cached_fingerprint != current_cert_fingerprint:
try:
_LOGGER.info(
"Client certificate has changed, reconfiguring mTLS "
"channel."
)
self.configure_mtls_channel(
client_cert_callback=lambda: (
call_cert_bytes,
call_key_bytes,
)
)
except Exception as e:
_LOGGER.error("Failed to reconfigure mTLS channel: %s", e)
raise exceptions.MutualTLSChannelError(
"Failed to reconfigure mTLS channel"
) from e
else:
_LOGGER.info(
"Skipping reconfiguration of mTLS channel because the "
"client certificate has not changed."
)
_LOGGER.info(
"Refreshing credentials due to a %s response. Attempt %s/%s.",
response.status,
_credential_refresh_attempt + 1,
self._max_refresh_attempts,
)
self.credentials.refresh(self._request)
# Recurse. Pass in the original headers, not our modified set.
return self.urlopen(
method,
url,
body=body,
headers=headers,
_credential_refresh_attempt=_credential_refresh_attempt + 1,
**kwargs,
)
return response
# Proxy methods for compliance with the urllib3.PoolManager interface
def __enter__(self):
"""Proxy to ``self.http``."""
return self.http.__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
"""Proxy to ``self.http``."""
return self.http.__exit__(exc_type, exc_val, exc_tb)
def __del__(self):
if hasattr(self, "http") and self.http is not None:
self.http.clear()
@property
def headers(self):
"""Proxy to ``self.http``."""
return self.http.headers
@headers.setter
def headers(self, value):
"""Proxy to ``self.http``."""
self.http.headers = value
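# --- Illustrative usage sketch (not part of the library API) ---------------
# A hedged example of the configure_mtls_channel flow from the AuthorizedHttp
# docstring: the call is wrapped in try/except and its boolean result selects
# between the regular and mTLS endpoints. The Pub/Sub URLs are assumed
# placeholders; application default credentials must be configured.
def _authorized_http_sketch():
    import google.auth

    credentials, _ = google.auth.default()
    authed_http = AuthorizedHttp(credentials)
    regular_endpoint = "https://pubsub.googleapis.com/v1/projects/my-project/topics"
    mtls_endpoint = "https://pubsub.mtls.googleapis.com/v1/projects/my-project/topics"
    try:
        is_mtls = authed_http.configure_mtls_channel()
    except exceptions.MutualTLSChannelError:
        is_mtls = False
    return authed_http.request("GET", mtls_endpoint if is_mtls else regular_endpoint)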

View File

@@ -0,0 +1,15 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "2.48.0"